diff -pruN 14.2.16-2/admin/build-doc 15.2.7-0ubuntu4/admin/build-doc
--- 14.2.16-2/admin/build-doc	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/admin/build-doc	2020-11-30 19:58:30.000000000 +0000
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/sh -e
 
 cd "$(dirname "$0")"
 cd ..
@@ -20,8 +20,7 @@ if command -v dpkg >/dev/null; then
         exit 1
     fi
 elif command -v yum >/dev/null; then
-    python_package="python$(rpm --eval '%{python3_pkgversion}')"
-    for package in "$python_package"-devel "$python_package"-pip "$python_package"-virtualenv doxygen ditaa ant libxml2-devel libxslt-devel "$python_package"-Cython graphviz; do
+    for package in ant ditaa doxygen libxslt-devel libxml2-devel graphviz python3-devel python3-pip python3-virtualenv python3-Cython; do
 	if ! rpm -q --whatprovides $package >/dev/null ; then
 		missing="${missing:+$missing }$package"
 	fi
@@ -51,14 +50,14 @@ fi
 # for availability of commands
 set -e
 
-cat $TOPDIR/src/osd/PG.h $TOPDIR/src/osd/PG.cc | $TOPDIR/doc/scripts/gen_state_diagram.py > $TOPDIR/doc/dev/peering_graph.generated.dot
+cat $TOPDIR/src/osd/PeeringState.h $TOPDIR/src/osd/PeeringState.cc | $TOPDIR/doc/scripts/gen_state_diagram.py > $TOPDIR/doc/dev/peering_graph.generated.dot
 
 cd build-doc
 
 [ -z "$vdir" ] && vdir="$TOPDIR/build-doc/virtualenv"
 
 if [ ! -e $vdir ]; then
-    virtualenv --python=python3 --system-site-packages $vdir
+    virtualenv --python=python3 $vdir
 fi
 $vdir/bin/pip install --quiet -r $TOPDIR/admin/doc-requirements.txt
 
@@ -119,7 +118,7 @@ for target in $sphinx_targets; do
             ;;
     esac
     # Build with -W so that warnings are treated as errors and this fails
-    $vdir/bin/sphinx-build -W -a -b $builder $extra_opt -d doctrees \
+    $vdir/bin/sphinx-build -W --keep-going -a -b $builder $extra_opt -d doctrees \
                            $TOPDIR/doc $TOPDIR/build-doc/output/$target
 
 
@@ -139,7 +138,7 @@ def json_serialize(obj):
         return obj.isoformat()
 
 with open("$TOPDIR/doc/releases/releases.yml", 'r') as fp:
-    releases = yaml.load(fp)
+    releases = yaml.safe_load(fp)
     print(json.dumps(releases, indent=2, default=json_serialize))
 EOF
 
@@ -159,3 +158,5 @@ mkdir $JAVA_OUTDIR
 
 # Copy JavaDocs to target directory
 cp -a $JAVADIR/doc/* $JAVA_OUTDIR/
+
+echo "SUCCESS"
diff -pruN 14.2.16-2/admin/doc-read-the-docs.txt 15.2.7-0ubuntu4/admin/doc-read-the-docs.txt
--- 14.2.16-2/admin/doc-read-the-docs.txt	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/admin/doc-read-the-docs.txt	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-plantweb
-git+https://github.com/readthedocs/readthedocs-sphinx-search@master
diff -pruN 14.2.16-2/admin/doc-requirements.txt 15.2.7-0ubuntu4/admin/doc-requirements.txt
--- 14.2.16-2/admin/doc-requirements.txt	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/admin/doc-requirements.txt	2020-11-30 19:58:30.000000000 +0000
@@ -1,4 +1,8 @@
-Sphinx == 2.1.2
+Sphinx == 2.4.3
 git+https://github.com/ceph/sphinx-ditaa.git@py3#egg=sphinx-ditaa
-breathe == 4.13.1
+breathe == 4.14.0
 pyyaml >= 5.1.2
+Cython
+prettytable
+sphinx-autodoc-typehints
+typed-ast
diff -pruN 14.2.16-2/admin/serve-doc 15.2.7-0ubuntu4/admin/serve-doc
--- 14.2.16-2/admin/serve-doc	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/admin/serve-doc	2020-11-30 19:58:30.000000000 +0000
@@ -1,8 +1,9 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+
 from __future__ import print_function
 
-import SimpleHTTPServer
-import SocketServer
+import http.server
+import socketserver
 import os
 import sys
 
@@ -11,7 +12,7 @@ os.chdir(path)
 os.chdir('..')
 os.chdir('build-doc/output/html')
 
-class ReusingTCPServer(SimpleHTTPServer.SimpleHTTPRequestHandler):
+class ReusingTCPServer(http.server.SimpleHTTPRequestHandler):
     allow_reuse_address = True
 
     def send_head(self):
@@ -19,9 +20,9 @@ class ReusingTCPServer(SimpleHTTPServer.
         # slash-redirecting of requests with query arguments, and will
         # redirect to /foo?q=bar/ -- wrong slash placement
         self.path = self.path.split('?', 1)[0]
-        return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
+        return http.server.SimpleHTTPRequestHandler.send_head(self)
 
-httpd = SocketServer.TCPServer(
+httpd = socketserver.TCPServer(
     ("", 8080),
     ReusingTCPServer,
     )
diff -pruN 14.2.16-2/alpine/APKBUILD 15.2.7-0ubuntu4/alpine/APKBUILD
--- 14.2.16-2/alpine/APKBUILD	2020-12-16 17:38:56.000000000 +0000
+++ 15.2.7-0ubuntu4/alpine/APKBUILD	2020-11-30 20:01:13.000000000 +0000
@@ -1,14 +1,14 @@
 # Contributor: John Coyle <dx9err@gmail.com>
 # Maintainer: John Coyle <dx9err@gmail.com>
 pkgname=ceph
-pkgver=14.2.16
+pkgver=15.2.7
 pkgrel=0
 pkgdesc="Ceph is a distributed object store and file system"
 pkgusers="ceph"
 pkggroups="ceph"
 url="http://ceph.com"
 arch="x86_64"
-license="LGPL-2.1 and CC-BY-SA-1.0 and GPL-2.0 and BSL-1.0 and \
+license="LGPL-2.1 and LGPL-3.0 and CC-BY-SA-1.0 and GPL-2.0 and BSL-1.0 and \
 GPL-2.0-with-autoconf-exception and BSD-3-Clause and MIT"
 depends="ceph-osd ceph-mds ceph-mgr	ceph-mon"
 # grep --quiet option required
@@ -58,13 +58,12 @@ makedepends="
 	readline-dev
 	rpcgen
 	snappy-dev
-	userspace-rcu-dev
 	util-linux
 	xfsprogs-dev
 	xmlstarlet
 	yasm
 "
-source="ceph-14.2.16.tar.bz2"
+source="ceph-15.2.7.tar.bz2"
 subpackages="
 	$pkgname-base
 	$pkgname-common
@@ -117,7 +116,7 @@ _sysconfdir=/etc
 _udevrulesdir=/etc/udev/rules.d
 _python_sitelib=/usr/lib/python2.7/site-packages
 
-builddir=$srcdir/ceph-14.2.16
+builddir=$srcdir/ceph-15.2.7
 
 build() {
 	export CEPH_BUILD_VIRTUALENV=$builddir
@@ -436,7 +435,7 @@ ceph_test() {
 		ceph-osdomap-tool \
 		ceph-kvstore-tool \
 		ceph-debugpack \
-		cephdeduptool
+		ceph-dedup-tool
 
 	_pkg $_libdir ceph/ceph-monstore-update-crush.sh
 }
diff -pruN 14.2.16-2/alpine/APKBUILD.in 15.2.7-0ubuntu4/alpine/APKBUILD.in
--- 14.2.16-2/alpine/APKBUILD.in	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/alpine/APKBUILD.in	2020-11-30 19:58:30.000000000 +0000
@@ -1,14 +1,14 @@
 # Contributor: John Coyle <dx9err@gmail.com>
 # Maintainer: John Coyle <dx9err@gmail.com>
 pkgname=ceph
-pkgver=@VERSION@
+pkgver=@PROJECT_VERSION@
 pkgrel=@RPM_RELEASE@
 pkgdesc="Ceph is a distributed object store and file system"
 pkgusers="ceph"
 pkggroups="ceph"
 url="http://ceph.com"
 arch="x86_64"
-license="LGPL-2.1 and CC-BY-SA-1.0 and GPL-2.0 and BSL-1.0 and \
+license="LGPL-2.1 and LGPL-3.0 and CC-BY-SA-1.0 and GPL-2.0 and BSL-1.0 and \
 GPL-2.0-with-autoconf-exception and BSD-3-Clause and MIT"
 depends="ceph-osd ceph-mds ceph-mgr	ceph-mon"
 # grep --quiet option required
@@ -58,7 +58,6 @@ makedepends="
 	readline-dev
 	rpcgen
 	snappy-dev
-	userspace-rcu-dev
 	util-linux
 	xfsprogs-dev
 	xmlstarlet
@@ -436,7 +435,7 @@ ceph_test() {
 		ceph-osdomap-tool \
 		ceph-kvstore-tool \
 		ceph-debugpack \
-		cephdeduptool
+		ceph-dedup-tool
 
 	_pkg $_libdir ceph/ceph-monstore-update-crush.sh
 }
diff -pruN 14.2.16-2/ceph.spec 15.2.7-0ubuntu4/ceph.spec
--- 14.2.16-2/ceph.spec	2020-12-16 17:38:56.000000000 +0000
+++ 15.2.7-0ubuntu4/ceph.spec	2020-11-30 20:01:13.000000000 +0000
@@ -1,4 +1,3 @@
-# vim: set noexpandtab ts=8 sw=8 :
 #
 # spec file for package ceph
 #
@@ -22,6 +21,7 @@
 # bcond syntax!
 #################################################################################
 %bcond_with make_check
+%bcond_with cmake_verbose_logging
 %bcond_without ceph_test_package
 %ifarch s390 s390x
 %bcond_with tcmalloc
@@ -36,42 +36,30 @@
 %bcond_without cephfs_java
 %endif
 %bcond_without amqp_endpoint
+%bcond_without kafka_endpoint
 %bcond_without lttng
 %bcond_without libradosstriper
 %bcond_without ocf
-%bcond_without kafka_endpoint
 %global _remote_tarball_prefix https://download.ceph.com/tarballs/
 %endif
 %if 0%{?suse_version}
-%bcond_with selinux
-%bcond_with cephfs_java
 %bcond_with amqp_endpoint
+%bcond_with cephfs_java
 %bcond_with kafka_endpoint
-#Compat macro for new _fillupdir macro introduced in Nov 2017
-%if ! %{defined _fillupdir}
-%global _fillupdir /var/adm/fillup-templates
-%endif
-%if 0%{?is_opensuse}
-%bcond_without libradosstriper
-%bcond_without ocf
-%else
 %bcond_with libradosstriper
-%bcond_with ocf
-%endif
 %ifarch x86_64 aarch64 ppc64le
 %bcond_without lttng
 %else
 %bcond_with lttng
 %endif
+%bcond_with ocf
+%bcond_with selinux
+#Compat macro for _fillupdir macro introduced in Nov 2017
+%if ! %{defined _fillupdir}
+%global _fillupdir /var/adm/fillup-templates
 %endif
-%bcond_with seastar
-%if 0%{?fedora} >= 29 || 0%{?suse_version} >= 1500 || 0%{?rhel} >= 8
-# distros that need a py3 Ceph build
-%bcond_with python2
-%else
-# distros that need a py2 Ceph build
-%bcond_without python2
 %endif
+%bcond_with seastar
 %if 0%{?fedora} || 0%{?suse_version} >= 1500
 # distros that ship cmd2 and/or colorama
 %bcond_without cephfs_shell
@@ -79,26 +67,27 @@
 # distros that do _not_ ship cmd2/colorama
 %bcond_with cephfs_shell
 %endif
-%if 0%{without python2}
-%global _defined_if_python2_absent 1
-%endif
 %if 0%{?fedora} || 0%{?suse_version} || 0%{?rhel} >= 8
 %global weak_deps 1
 %endif
 %if %{with selinux}
 # get selinux policy version
+# Force 0.0.0 policy version for centos builds to avoid repository sync issues between rhel and centos
+%if 0%{?centos}
+%global _selinux_policy_version 0.0.0
+%else
 %{!?_selinux_policy_version: %global _selinux_policy_version 0.0.0}
 %endif
+%endif
 
 %{!?_udevrulesdir: %global _udevrulesdir /lib/udev/rules.d}
 %{!?tmpfiles_create: %global tmpfiles_create systemd-tmpfiles --create}
 %{!?python3_pkgversion: %global python3_pkgversion 3}
 %{!?python3_version_nodots: %global python3_version_nodots 3}
 %{!?python3_version: %global python3_version 3}
-# define _python_buildid macro which will expand to the empty string when
-# building with python2
-%global _python_buildid %{?_defined_if_python2_absent:%{python3_pkgversion}}
-
+%if 0%{?rhel} == 7
+%define __python %{__python3}
+%endif
 # unify libexec for all targets
 %global _libexecdir %{_exec_prefix}/lib
 
@@ -109,7 +98,7 @@
 # main package definition
 #################################################################################
 Name:		ceph
-Version:	14.2.16
+Version:	15.2.7
 Release:	0%{?dist}
 %if 0%{?fedora} || 0%{?rhel}
 Epoch:		2
@@ -120,12 +109,12 @@ Epoch:		2
 %global _epoch_prefix %{?epoch:%{epoch}:}
 
 Summary:	User space components of the Ceph file system
-License:	LGPL-2.1 and CC-BY-SA-3.0 and GPL-2.0 and BSL-1.0 and BSD-3-Clause and MIT
+License:	LGPL-2.1 and LGPL-3.0 and CC-BY-SA-3.0 and GPL-2.0 and BSL-1.0 and BSD-3-Clause and MIT
 %if 0%{?suse_version}
 Group:		System/Filesystems
 %endif
 URL:		http://ceph.com/
-Source0:	%{?_remote_tarball_prefix}ceph-14.2.16.tar.bz2
+Source0:	%{?_remote_tarball_prefix}ceph-15.2.7.tar.bz2
 %if 0%{?suse_version}
 # _insert_obs_source_lines_here
 ExclusiveArch:  x86_64 aarch64 ppc64le s390x
@@ -157,7 +146,7 @@ BuildRequires:	fuse-devel
 %if 0%{?rhel} == 7
 # devtoolset offers newer make and valgrind-devel, but the old ones are good
 # enough.
-BuildRequires:	devtoolset-8-gcc-c++ >= 8.2.1
+BuildRequires:	devtoolset-8-gcc-c++ >= 8.3.1-3.1
 %else
 BuildRequires:	gcc-c++
 %endif
@@ -175,7 +164,7 @@ BuildRequires:	libaio-devel
 BuildRequires:	libblkid-devel >= 2.17
 BuildRequires:	libcurl-devel
 BuildRequires:	libcap-ng-devel
-BuildRequires:	libudev-devel
+BuildRequires:	pkgconfig(libudev)
 BuildRequires:	libnl3-devel
 BuildRequires:	liboath-devel
 BuildRequires:	libtool
@@ -187,11 +176,11 @@ BuildRequires:	patch
 BuildRequires:	perl
 BuildRequires:	pkgconfig
 BuildRequires:  procps
-BuildRequires:	python%{_python_buildid}
-BuildRequires:	python%{_python_buildid}-devel
+BuildRequires:	python%{python3_pkgversion}
+BuildRequires:	python%{python3_pkgversion}-devel
 BuildRequires:	snappy-devel
 BuildRequires:	sudo
-BuildRequires:	udev
+BuildRequires:	pkgconfig(udev)
 BuildRequires:	util-linux
 BuildRequires:	valgrind-devel
 BuildRequires:	which
@@ -208,12 +197,35 @@ BuildRequires:  librdkafka-devel
 %if 0%{with make_check}
 BuildRequires:  jq
 BuildRequires:	libuuid-devel
-BuildRequires:	python%{_python_buildid}-bcrypt
-BuildRequires:	python%{_python_buildid}-nose
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-requests
-BuildRequires:	python%{_python_buildid}-six
-BuildRequires:	python%{_python_buildid}-virtualenv
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-bcrypt
+BuildRequires:	python%{python3_version_nodots}-nose
+BuildRequires:	python%{python3_version_nodots}-requests
+BuildRequires:	python%{python3_version_nodots}-dateutil
+%else
+BuildRequires:	python%{python3_pkgversion}-bcrypt
+BuildRequires:	python%{python3_pkgversion}-nose
+BuildRequires:	python%{python3_pkgversion}-pecan
+BuildRequires:	python%{python3_pkgversion}-requests
+BuildRequires:	python%{python3_pkgversion}-dateutil
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-six
+BuildRequires:	python%{python3_version_nodots}-virtualenv
+%else
+BuildRequires:	python%{python3_pkgversion}-six
+BuildRequires:	python%{python3_pkgversion}-virtualenv
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-coverage
+%else
+BuildRequires:	python%{python3_pkgversion}-coverage
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-pyOpenSSL
+%else
+BuildRequires:	python%{python3_pkgversion}-pyOpenSSL
+%endif
 BuildRequires:	socat
 %endif
 %if 0%{with seastar}
@@ -246,10 +258,10 @@ BuildRequires:  openldap2-devel
 #BuildRequires:  krb5
 #BuildRequires:  krb5-devel
 BuildRequires:  cunit-devel
-BuildRequires:	python%{_python_buildid}-setuptools
-BuildRequires:	python%{_python_buildid}-Cython
-BuildRequires:	python%{_python_buildid}-PrettyTable
-BuildRequires:	python%{_python_buildid}-Sphinx
+BuildRequires:	python%{python3_pkgversion}-setuptools
+BuildRequires:	python%{python3_pkgversion}-Cython
+BuildRequires:	python%{python3_pkgversion}-PrettyTable
+BuildRequires:	python%{python3_pkgversion}-Sphinx
 BuildRequires:  rdma-core-devel
 BuildRequires:	liblz4-devel >= 1.7
 # for prometheus-alerts
@@ -267,9 +279,6 @@ BuildRequires:  openldap-devel
 BuildRequires:  openssl-devel
 BuildRequires:  CUnit-devel
 BuildRequires:  redhat-lsb-core
-%if 0%{with python2}
-BuildRequires:	python2-Cython
-%endif
 BuildRequires:	python%{python3_pkgversion}-devel
 BuildRequires:	python%{python3_pkgversion}-setuptools
 %if 0%{?rhel} == 7
@@ -277,40 +286,48 @@ BuildRequires:	python%{python3_version_n
 %else
 BuildRequires:	python%{python3_pkgversion}-Cython
 %endif
-BuildRequires:	python%{_python_buildid}-prettytable
-BuildRequires:	python%{_python_buildid}-sphinx
+BuildRequires:	python%{python3_pkgversion}-prettytable
+BuildRequires:	python%{python3_pkgversion}-sphinx
 BuildRequires:	lz4-devel >= 1.7
 %endif
 # distro-conditional make check dependencies
 %if 0%{with make_check}
 %if 0%{?fedora} || 0%{?rhel}
-BuildRequires:	python%{_python_buildid}-coverage
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-tox
-BuildRequires:  xmlsec1
+BuildRequires:	libtool-ltdl-devel
+BuildRequires:	xmlsec1
+BuildRequires:	xmlsec1-devel
+%ifarch x86_64
+BuildRequires:	xmlsec1-nss
+%endif
+BuildRequires:	xmlsec1-openssl
+BuildRequires:	xmlsec1-openssl-devel
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-jwt
+BuildRequires:	python%{python3_version_nodots}-scipy
+%else
+BuildRequires:	python%{python3_pkgversion}-cherrypy
+BuildRequires:	python%{python3_pkgversion}-jwt
+BuildRequires:	python%{python3_pkgversion}-routes
+BuildRequires:	python%{python3_pkgversion}-scipy
+BuildRequires:	python%{python3_pkgversion}-werkzeug
+%endif
 %if 0%{?rhel} == 7
-BuildRequires:  pyOpenSSL%{_python_buildid}
+BuildRequires:	python%{python3_version_nodots}-pyOpenSSL
 %else
-BuildRequires:  python%{_python_buildid}-pyOpenSSL
+BuildRequires:	python%{python3_pkgversion}-pyOpenSSL
 %endif
-BuildRequires:	python%{_python_buildid}-cherrypy
-BuildRequires:	python%{_python_buildid}-jwt
-BuildRequires:	python%{_python_buildid}-routes
-BuildRequires:  python%{_python_buildid}-scipy
-BuildRequires:	python%{_python_buildid}-werkzeug
-%endif
-%if 0%{?suse_version}
-BuildRequires:	python%{_python_buildid}-CherryPy
-BuildRequires:	python%{_python_buildid}-PyJWT
-BuildRequires:	python%{_python_buildid}-Routes
-BuildRequires:	python%{_python_buildid}-Werkzeug
-BuildRequires:	python%{_python_buildid}-coverage
-BuildRequires:	python%{_python_buildid}-numpy-devel
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-pyOpenSSL
-BuildRequires:	python%{_python_buildid}-tox
-BuildRequires:	rpm-build
-BuildRequires:  xmlsec1-devel
+%endif
+%if 0%{?suse_version}
+BuildRequires:	libxmlsec1-1
+BuildRequires:	libxmlsec1-nss1
+BuildRequires:	libxmlsec1-openssl1
+BuildRequires:	python%{python3_pkgversion}-CherryPy
+BuildRequires:	python%{python3_pkgversion}-PyJWT
+BuildRequires:	python%{python3_pkgversion}-Routes
+BuildRequires:	python%{python3_pkgversion}-Werkzeug
+BuildRequires:	python%{python3_pkgversion}-numpy-devel
+BuildRequires:	xmlsec1-devel
+BuildRequires:	xmlsec1-openssl-devel
 %endif
 %endif
 # lttng and babeltrace for rbd-replay-prep
@@ -378,7 +395,7 @@ Requires:      grep
 Requires:      logrotate
 Requires:      parted
 Requires:      psmisc
-Requires:      python%{_python_buildid}-setuptools
+Requires:      python%{python3_pkgversion}-setuptools
 Requires:      util-linux
 Requires:      xfsprogs
 Requires:      which
@@ -395,6 +412,20 @@ Recommends:    chrony
 %description base
 Base is the package that includes all the files shared amongst ceph servers
 
+%package -n cephadm
+Summary:        Utility to bootstrap Ceph clusters
+Requires:       lvm2
+%if 0%{?suse_version}
+Requires:       apparmor-abstractions
+%endif
+Requires:       python%{python3_pkgversion}
+%if 0%{?weak_deps}
+Recommends:     podman
+%endif
+%description -n cephadm
+Utility to bootstrap a Ceph cluster and manage Ceph daemons deployed 
+with systemd and podman.
+
 %package -n ceph-common
 Summary:	Ceph Common
 %if 0%{?suse_version}
@@ -403,16 +434,17 @@ Group:		System/Filesystems
 Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
 Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
 Requires:	libcephfs2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rbd = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-cephfs = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rgw = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rbd = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-cephfs = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rgw = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-ceph-common = %{_epoch_prefix}%{version}-%{release}
 %if 0%{?fedora} || 0%{?rhel}
-Requires:	python%{_python_buildid}-prettytable
+Requires:	python%{python3_pkgversion}-prettytable
 %endif
 %if 0%{?suse_version}
-Requires:	python%{_python_buildid}-PrettyTable
+Requires:	python%{python3_pkgversion}-PrettyTable
 %endif
 %if 0%{with libradosstriper}
 Requires:	libradosstriper1 = %{_epoch_prefix}%{version}-%{release}
@@ -455,32 +487,19 @@ Summary:        Ceph Manager Daemon
 Group:          System/Filesystems
 %endif
 Requires:       ceph-base = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-bcrypt
-Requires:       python%{_python_buildid}-pecan
-Requires:       python%{_python_buildid}-requests
-Requires:       python%{_python_buildid}-six
-%if 0%{?fedora} || 0%{?rhel}
-Requires:       python%{_python_buildid}-cherrypy
-Requires:       python%{_python_buildid}-werkzeug
-%endif
-%if 0%{?suse_version}
-Requires:       python%{_python_buildid}-CherryPy
-Requires:       python%{_python_buildid}-Werkzeug
+Requires:       ceph-mgr-modules-core = %{_epoch_prefix}%{version}-%{release}
+%if 0%{?rhel} == 7
+Requires:       python%{python3_version_nodots}-six
+%else
+Requires:       python%{python3_pkgversion}-six
 %endif
 %if 0%{?weak_deps}
 Recommends:	ceph-mgr-dashboard = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-diskprediction-local = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-diskprediction-cloud = %{_epoch_prefix}%{version}-%{release}
-Recommends:	ceph-mgr-rook = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-k8sevents = %{_epoch_prefix}%{version}-%{release}
-Recommends:	ceph-mgr-ssh = %{_epoch_prefix}%{version}-%{release}
-Recommends:	python%{_python_buildid}-influxdb
-%endif
-%if 0%{?rhel} == 7
-Requires:       pyOpenSSL
-Requires:       python-enum34
-%else
-Requires:       python%{_python_buildid}-pyOpenSSL
+Recommends:	ceph-mgr-cephadm = %{_epoch_prefix}%{version}-%{release}
+Recommends:	python%{python3_pkgversion}-influxdb
 %endif
 %description mgr
 ceph-mgr enables python modules that provide services (such as the REST
@@ -496,101 +515,145 @@ Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
 Requires:       ceph-grafana-dashboards = %{_epoch_prefix}%{version}-%{release}
+Requires:       ceph-prometheus-alerts = %{_epoch_prefix}%{version}-%{release}
 %if 0%{?fedora} || 0%{?rhel}
-Requires:       python%{_python_buildid}-cherrypy
-Requires:       python%{_python_buildid}-jwt
-Requires:       python%{_python_buildid}-routes
-Requires:       python%{_python_buildid}-werkzeug
+Requires:       python%{python3_pkgversion}-cherrypy
+Requires:       python%{python3_pkgversion}-jwt
+Requires:       python%{python3_pkgversion}-routes
+Requires:       python%{python3_pkgversion}-werkzeug
 %if 0%{?weak_deps}
-Recommends:     python%{_python_buildid}-saml
+Recommends:     python%{python3_pkgversion}-saml
 %endif
 %endif
 %if 0%{?suse_version}
-Requires:       python%{_python_buildid}-CherryPy
-Requires:       python%{_python_buildid}-PyJWT
-Requires:       python%{_python_buildid}-Routes
-Requires:       python%{_python_buildid}-Werkzeug
-%endif
-%if 0%{?rhel} == 7
-Requires:       pyOpenSSL
-%else
-Requires:       python%{_python_buildid}-pyOpenSSL
+Requires:       python%{python3_pkgversion}-CherryPy
+Requires:       python%{python3_pkgversion}-PyJWT
+Requires:       python%{python3_pkgversion}-Routes
+Requires:       python%{python3_pkgversion}-Werkzeug
+Recommends:     python%{python3_pkgversion}-python3-saml
 %endif
 %description mgr-dashboard
-ceph-mgr-dashboard is a manager plugin, providing a web-based application
+ceph-mgr-dashboard is a manager module, providing a web-based application
 to monitor and manage many aspects of a Ceph cluster and related components.
 See the Dashboard documentation at http://docs.ceph.com/ for details and a
 detailed feature overview.
 
 %package mgr-diskprediction-local
-Summary:        ceph-mgr diskprediction_local plugin
+Summary:        Ceph Manager module for predicting disk failures
 BuildArch:      noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-%if 0%{?fedora} || 0%{?rhel} > 7 || 0%{?suse_version}
-Requires:       python%{_python_buildid}-numpy
-%if 0%{without python2}
+Requires:       python%{python3_pkgversion}-numpy
 Requires:       python3-scipy
-%else
-Requires:       python2-scipy
-%endif
-%endif
 %if 0%{?rhel} == 7
 Requires:       numpy
 Requires:       scipy
 %endif
 %description mgr-diskprediction-local
-ceph-mgr-diskprediction-local is a ceph-mgr plugin that tries to predict
+ceph-mgr-diskprediction-local is a ceph-mgr module that tries to predict
 disk failures using local algorithms and machine-learning databases.
 
 %package mgr-diskprediction-cloud
-Summary:        ceph-mgr diskprediction_cloud plugin
+Summary:        Ceph Manager module for cloud-based disk failure prediction
 BuildArch:      noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
+%if 0%{without python2}
+Requires:       python3-grpcio
+Requires:       python3-protobuf
+%else
+Requires:       python2-grpcio
+Requires:       python2-protobuf
+%endif
 %description mgr-diskprediction-cloud
-ceph-mgr-diskprediction-cloud is a ceph-mgr plugin that tries to predict
+ceph-mgr-diskprediction-cloud is a ceph-mgr module that tries to predict
 disk failures using services in the Google cloud.
 
+%package mgr-modules-core
+Summary:        Ceph Manager modules which are always enabled
+BuildArch:      noarch
+%if 0%{?suse_version}
+Group:          System/Filesystems
+%endif
+%if 0%{?rhel} == 7
+Requires:       python%{python3_version_nodots}-bcrypt
+Requires:       python%{python3_version_nodots}-pyOpenSSL
+Requires:       python%{python3_version_nodots}-requests
+Requires:       python%{python3_version_nodots}-PyYAML
+Requires:       python%{python3_version_nodots}-dateutil
+%else
+Requires:       python%{python3_pkgversion}-bcrypt
+Requires:       python%{python3_pkgversion}-pecan
+Requires:       python%{python3_pkgversion}-pyOpenSSL
+Requires:       python%{python3_pkgversion}-requests
+Requires:       python%{python3_pkgversion}-dateutil
+%endif
+%if 0%{?fedora} || 0%{?rhel} >= 8
+Requires:       python%{python3_pkgversion}-cherrypy
+Requires:       python%{python3_pkgversion}-pyyaml
+Requires:       python%{python3_pkgversion}-werkzeug
+%endif
+%if 0%{?suse_version}
+Requires:       python%{python3_pkgversion}-CherryPy
+Requires:       python%{python3_pkgversion}-PyYAML
+Requires:       python%{python3_pkgversion}-Werkzeug
+%endif
+%if 0%{?weak_deps}
+Recommends:	ceph-mgr-rook = %{_epoch_prefix}%{version}-%{release}
+%endif
+%description mgr-modules-core
+ceph-mgr-modules-core provides a set of modules which are always
+enabled by ceph-mgr.
+
 %package mgr-rook
 BuildArch:      noarch
-Summary:        ceph-mgr rook plugin
+Summary:        Ceph Manager module for Rook-based orchestration
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-kubernetes
+Requires:       python%{python3_pkgversion}-kubernetes
+Requires:       python%{python3_pkgversion}-jsonpatch
 %description mgr-rook
-ceph-mgr-rook is a ceph-mgr plugin for orchestration functions using
+ceph-mgr-rook is a ceph-mgr module for orchestration functions using
 a Rook backend.
 
 %package mgr-k8sevents
 BuildArch:      noarch
-Summary:        Ceph Manager plugin to orchestrate ceph-events to kubernetes' events API
+Summary:        Ceph Manager module to orchestrate ceph-events to kubernetes' events API
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-kubernetes
+Requires:       python%{python3_pkgversion}-kubernetes
 %description mgr-k8sevents
-ceph-mgr-k8sevents is a ceph-mgr plugin that sends every ceph-events
+ceph-mgr-k8sevents is a ceph-mgr module that sends every ceph-events
 to kubernetes' events API
 
-%package mgr-ssh
-Summary:        ceph-mgr ssh module
+%package mgr-cephadm
+Summary:        Ceph Manager module for cephadm-based orchestration
 BuildArch:	noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-remoto
-%description mgr-ssh
-ceph-mgr-ssh is a ceph-mgr module for orchestration functions using
-direct SSH connections for management operations.
+Requires:       python%{python3_pkgversion}-remoto
+Requires:       cephadm = %{_epoch_prefix}%{version}-%{release}
+%if 0%{?suse_version}
+Requires:       openssh
+Requires:       python%{python3_pkgversion}-Jinja2
+%endif
+%if 0%{?rhel} || 0%{?fedora}
+Requires:       openssh-clients
+Requires:       python%{python3_pkgversion}-jinja2
+%endif
+%description mgr-cephadm
+ceph-mgr-cephadm is a ceph-mgr module for orchestration functions using
+the integrated cephadm deployment tool management operations.
 
 %package fuse
 Summary:	Ceph fuse-based client
@@ -624,6 +687,15 @@ Requires:	librbd1 = %{_epoch_prefix}%{ve
 Daemon for mirroring RBD images between Ceph clusters, streaming
 changes asynchronously.
 
+%package immutable-object-cache
+Summary:	Ceph daemon for immutable object cache
+%if 0%{?suse_version}
+Group:		System/Filesystems
+%endif
+Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
+%description immutable-object-cache
+Daemon for immutable object cache.
+
 %package -n rbd-nbd
 Summary:	Ceph RBD client base on NBD
 %if 0%{?suse_version}
@@ -677,16 +749,26 @@ Provides:	ceph-test:/usr/bin/ceph-osdoma
 Requires:	ceph-base = %{_epoch_prefix}%{version}-%{release}
 Requires:	lvm2
 Requires:	sudo
-Requires: libstoragemgmt
-%if 0%{?weak_deps}
-Recommends:	nvme-cli
-Recommends:	smartmontools
-%endif
+Requires:	libstoragemgmt
+Requires:	python%{python3_pkgversion}-ceph-common = %{_epoch_prefix}%{version}-%{release}
 %description osd
 ceph-osd is the object storage daemon for the Ceph distributed file
 system.  It is responsible for storing objects on a local file system
 and providing access to them over the network.
 
+%if 0%{with seastar}
+%package crimson-osd
+Summary:	Ceph Object Storage Daemon (crimson)
+%if 0%{?suse_version}
+Group:		System/Filesystems
+%endif
+Requires:	ceph-osd = %{_epoch_prefix}%{version}-%{release}
+%description crimson-osd
+crimson-osd is the object storage daemon for the Ceph distributed file
+system.  It is responsible for storing objects on a local file system
+and providing access to them over the network.
+%endif
+
 %package -n librados2
 Summary:	RADOS distributed object store client library
 %if 0%{?suse_version}
@@ -748,21 +830,6 @@ Obsoletes:	librgw2-devel < %{_epoch_pref
 This package contains libraries and headers needed to develop programs
 that use RADOS gateway client library.
 
-%if 0%{with python2}
-%package -n python-rgw
-Summary:	Python 2 libraries for the RADOS gateway
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librgw2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rgw}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rgw
-This package contains Python 2 libraries for interacting with Cephs RADOS
-gateway.
-%endif
-
 %package -n python%{python3_pkgversion}-rgw
 Summary:	Python 3 libraries for the RADOS gateway
 %if 0%{?suse_version}
@@ -771,28 +838,12 @@ Group:		Development/Libraries/Python
 Requires:	librgw2 = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rgw}
-%if 0%{without python2}
 Provides:	python-rgw = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-rgw < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-rgw
 This package contains Python 3 libraries for interacting with Cephs RADOS
 gateway.
 
-%if 0%{with python2}
-%package -n python-rados
-Summary:	Python 2 libraries for the RADOS object store
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rados}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rados
-This package contains Python 2 libraries for interacting with Cephs RADOS
-object store.
-%endif
-
 %package -n python%{python3_pkgversion}-rados
 Summary:	Python 3 libraries for the RADOS object store
 %if 0%{?suse_version}
@@ -801,10 +852,8 @@ Group:		Development/Libraries/Python
 Requires:	python%{python3_pkgversion}
 Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rados}
-%if 0%{without python2}
 Provides:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Obsoletes:      python-rados < %{_epoch_prefix}%{version}-%{release}
-%endif
+Obsoletes:	python-rados < %{_epoch_prefix}%{version}-%{release}
 %description -n python%{python3_pkgversion}-rados
 This package contains Python 3 libraries for interacting with Cephs RADOS
 object store.
@@ -870,21 +919,6 @@ Obsoletes:	librbd1-devel < %{_epoch_pref
 This package contains libraries and headers needed to develop programs
 that use RADOS block device.
 
-%if 0%{with python2}
-%package -n python-rbd
-Summary:	Python 2 libraries for the RADOS block device
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rbd}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rbd
-This package contains Python 2 libraries for interacting with Cephs RADOS
-block device.
-%endif
-
 %package -n python%{python3_pkgversion}-rbd
 Summary:	Python 3 libraries for the RADOS block device
 %if 0%{?suse_version}
@@ -893,11 +927,8 @@ Group:		Development/Libraries/Python
 Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rbd}
-Provides:	python3-rbd = %{_epoch_prefix}%{version}-%{release}
-%if 0%{without python2}
 Provides:	python-rbd = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-rbd < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-rbd
 This package contains Python 3 libraries for interacting with Cephs RADOS
 block device.
@@ -932,22 +963,6 @@ Obsoletes:	libcephfs2-devel < %{_epoch_p
 This package contains libraries and headers needed to develop programs
 that use Cephs distributed file system.
 
-%if 0%{with python2}
-%package -n python-cephfs
-Summary:	Python 2 libraries for Ceph distributed file system
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	libcephfs2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-cephfs}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-cephfs
-This package contains Python 2 libraries for interacting with Cephs distributed
-file system.
-%endif
-
 %package -n python%{python3_pkgversion}-cephfs
 Summary:	Python 3 libraries for Ceph distributed file system
 %if 0%{?suse_version}
@@ -957,27 +972,12 @@ Requires:	libcephfs2 = %{_epoch_prefix}%
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-cephfs}
-%if 0%{without python2}
 Provides:	python-cephfs = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-cephfs < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-cephfs
 This package contains Python 3 libraries for interacting with Cephs distributed
 file system.
 
-%if 0%{with python2}
-%package -n python-ceph-argparse
-Summary:	Python 2 utility libraries for Ceph CLI
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-%description -n python-ceph-argparse
-This package contains types and routines for Python 2 used by the Ceph CLI as
-well as the RESTful interface. These have to do with querying the daemons for
-command-description information, validating user command input against those
-descriptions, and submitting the command to the appropriate daemon.
-%endif
-
 %package -n python%{python3_pkgversion}-ceph-argparse
 Summary:	Python 3 utility libraries for Ceph CLI
 %if 0%{?suse_version}
@@ -990,6 +990,22 @@ well as the RESTful interface. These hav
 command-description information, validating user command input against those
 descriptions, and submitting the command to the appropriate daemon.
 
+%package -n python%{python3_pkgversion}-ceph-common
+Summary:	Python 3 utility libraries for Ceph
+%if 0%{?fedora} || 0%{?rhel} >= 8
+Requires:       python%{python3_pkgversion}-pyyaml
+%endif
+%if 0%{?suse_version}
+Requires:       python%{python3_pkgversion}-PyYAML
+%endif
+%if 0%{?suse_version}
+Group:		Development/Libraries/Python
+%endif
+%{?python_provide:%python_provide python%{python3_pkgversion}-ceph-common}
+%description -n python%{python3_pkgversion}-ceph-common
+This package contains data structures, classes and functions used by Ceph.
+It also contains utilities used for the cephadm orchestrator.
+
 %if 0%{with cephfs_shell}
 %package -n cephfs-shell
 Summary:    Interactive shell for Ceph file system
@@ -1086,25 +1102,6 @@ populated file-systems.
 
 %endif
 
-%if 0%{with python2}
-%package -n python-ceph-compat
-Summary:	Compatibility package for Cephs python libraries
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Obsoletes:	python-ceph
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rbd = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-cephfs = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rgw = %{_epoch_prefix}%{version}-%{release}
-Provides:	python-ceph
-%description -n python-ceph-compat
-This is a compatibility package to accommodate python-ceph split into
-python-rados, python-rbd, python-rgw and python-cephfs. Packages still
-depending on python-ceph should be fixed to depend on python-rados,
-python-rbd, python-rgw or python-cephfs instead.
-%endif
-
 %package grafana-dashboards
 Summary:	The set of Grafana dashboards for monitoring purposes
 BuildArch:	noarch
@@ -1118,20 +1115,18 @@ collecting data from Ceph Manager "prome
 project "node_exporter" module. The dashboards are designed to be
 integrated with the Ceph Manager Dashboard web UI.
 
-%if 0%{?suse_version}
 %package prometheus-alerts
 Summary:        Prometheus alerts for a Ceph deplyoment
 BuildArch:      noarch
 Group:          System/Monitoring
 %description prometheus-alerts
 This package provides Ceph’s default alerts for Prometheus.
-%endif
 
 #################################################################################
 # common
 #################################################################################
 %prep
-%autosetup -p1 -n ceph-14.2.16
+%autosetup -p1 -n ceph-15.2.7
 
 %build
 # LTO can be enabled as soon as the following GCC bug is fixed:
@@ -1202,12 +1197,6 @@ ${CMAKE} .. \
     -DWITH_MANPAGE=ON \
     -DWITH_PYTHON3=%{python3_version} \
     -DWITH_MGR_DASHBOARD_FRONTEND=OFF \
-%if %{with python2}
-    -DWITH_PYTHON2=ON \
-%else
-    -DWITH_PYTHON2=OFF \
-    -DMGR_PYTHON_VERSION=3 \
-%endif
 %if 0%{without ceph_test_package}
     -DWITH_TESTS=OFF \
 %endif
@@ -1251,9 +1240,17 @@ ${CMAKE} .. \
 %else
     -DWITH_RADOSGW_KAFKA_ENDPOINT=OFF \
 %endif
+%if 0%{with cmake_verbose_logging}
+    -DCMAKE_VERBOSE_MAKEFILE=ON \
+%endif
     -DBOOST_J=$CEPH_SMP_NCPUS \
     -DWITH_GRAFANA=ON
 
+%if %{with cmake_verbose_logging}
+cat ./CMakeFiles/CMakeOutput.log
+cat ./CMakeFiles/CMakeError.log
+%endif
+
 make "$CEPH_MFLAGS_JOBS"
 
 
@@ -1286,6 +1283,14 @@ chmod 0644 %{buildroot}%{_docdir}/ceph/s
 install -m 0644 -D COPYING %{buildroot}%{_docdir}/ceph/COPYING
 install -m 0644 -D etc/sysctl/90-ceph-osd.conf %{buildroot}%{_sysctldir}/90-ceph-osd.conf
 
+install -m 0755 src/cephadm/cephadm %{buildroot}%{_sbindir}/cephadm
+mkdir -p %{buildroot}%{_sharedstatedir}/cephadm
+chmod 0700 %{buildroot}%{_sharedstatedir}/cephadm
+mkdir -p %{buildroot}%{_sharedstatedir}/cephadm/.ssh
+chmod 0700 %{buildroot}%{_sharedstatedir}/cephadm/.ssh
+touch %{buildroot}%{_sharedstatedir}/cephadm/.ssh/authorized_keys
+chmod 0600 %{buildroot}%{_sharedstatedir}/cephadm/.ssh/authorized_keys
+
 # firewall templates and /sbin/mount.ceph symlink
 %if 0%{?suse_version}
 mkdir -p %{buildroot}/sbin
@@ -1297,6 +1302,7 @@ install -m 0644 -D udev/50-rbd.rules %{b
 
 # sudoers.d
 install -m 0600 -D sudoers.d/ceph-osd-smartctl %{buildroot}%{_sysconfdir}/sudoers.d/ceph-osd-smartctl
+install -m 0600 -D sudoers.d/cephadm %{buildroot}%{_sysconfdir}/sudoers.d/cephadm
 
 %if 0%{?rhel} >= 8
 pathfix.py -pni "%{__python3} %{py3_shbang_opts}" %{buildroot}%{_bindir}/*
@@ -1322,11 +1328,12 @@ mkdir -p %{buildroot}%{_localstatedir}/l
 mkdir -p %{buildroot}%{_localstatedir}/lib/ceph/bootstrap-rbd
 mkdir -p %{buildroot}%{_localstatedir}/lib/ceph/bootstrap-rbd-mirror
 
+# prometheus alerts
+install -m 644 -D monitoring/prometheus/alerts/ceph_default_alerts.yml %{buildroot}/etc/prometheus/ceph/ceph_default_alerts.yml
+
 %if 0%{?suse_version}
 # create __pycache__ directories and their contents
 %py3_compile %{buildroot}%{python3_sitelib}
-# prometheus alerts
-install -m 644 -D monitoring/prometheus/alerts/ceph_default_alerts.yml %{buildroot}/etc/prometheus/SUSE/default_rules/ceph_default_alerts.yml
 # hardlink duplicate files under /usr to save space
 %fdupes %{buildroot}%{_prefix}
 %endif
@@ -1376,15 +1383,9 @@ rm -rf %{buildroot}
 %{_fillupdir}/sysconfig.*
 %endif
 %{_unitdir}/ceph.target
-%if 0%{with python2}
-%dir %{python_sitelib}/ceph_volume
-%{python_sitelib}/ceph_volume/*
-%{python_sitelib}/ceph_volume-*
-%else
 %dir %{python3_sitelib}/ceph_volume
 %{python3_sitelib}/ceph_volume/*
 %{python3_sitelib}/ceph_volume-*
-%endif
 %{_mandir}/man8/ceph-deploy.8*
 %{_mandir}/man8/ceph-create-keys.8*
 %{_mandir}/man8/ceph-run.8*
@@ -1444,6 +1445,25 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%pre -n cephadm
+getent group cephadm >/dev/null || groupadd -r cephadm
+getent passwd cephadm >/dev/null || useradd -r -g cephadm -s /bin/bash -c "cephadm user for mgr/cephadm" -d %{_sharedstatedir}/cephadm cephadm
+exit 0
+
+%if ! 0%{?suse_version}
+%postun -n cephadm
+userdel -r cephadm || true
+exit 0
+%endif
+
+%files -n cephadm
+%{_sbindir}/cephadm
+%{_mandir}/man8/cephadm.8*
+%{_sysconfdir}/sudoers.d/cephadm
+%attr(0700,cephadm,cephadm) %dir %{_sharedstatedir}/cephadm
+%attr(0700,cephadm,cephadm) %dir %{_sharedstatedir}/cephadm/.ssh
+%attr(0600,cephadm,cephadm) %{_sharedstatedir}/cephadm/.ssh/authorized_keys
+
 %files common
 %dir %{_docdir}/ceph
 %doc %{_docdir}/ceph/sample.ceph.conf
@@ -1592,33 +1612,11 @@ fi
 %files mgr
 %{_bindir}/ceph-mgr
 %dir %{_datadir}/ceph/mgr
-%{_datadir}/ceph/mgr/alerts
-%{_datadir}/ceph/mgr/ansible
-%{_datadir}/ceph/mgr/balancer
-%{_datadir}/ceph/mgr/crash
-%{_datadir}/ceph/mgr/deepsea
-%{_datadir}/ceph/mgr/devicehealth
-%{_datadir}/ceph/mgr/influx
-%{_datadir}/ceph/mgr/insights
-%{_datadir}/ceph/mgr/iostat
-%{_datadir}/ceph/mgr/localpool
 %{_datadir}/ceph/mgr/mgr_module.*
 %{_datadir}/ceph/mgr/mgr_util.*
-%{_datadir}/ceph/mgr/orchestrator_cli
-%{_datadir}/ceph/mgr/orchestrator.*
-%{_datadir}/ceph/mgr/osd_perf_query
-%{_datadir}/ceph/mgr/pg_autoscaler
-%{_datadir}/ceph/mgr/progress
-%{_datadir}/ceph/mgr/prometheus
-%{_datadir}/ceph/mgr/rbd_support
-%{_datadir}/ceph/mgr/restful
-%{_datadir}/ceph/mgr/selftest
-%{_datadir}/ceph/mgr/status
-%{_datadir}/ceph/mgr/telegraf
-%{_datadir}/ceph/mgr/telemetry
-%{_datadir}/ceph/mgr/test_orchestrator
-%{_datadir}/ceph/mgr/volumes
-%{_datadir}/ceph/mgr/zabbix
+%if 0%{?rhel} == 7
+%{_datadir}/ceph/mgr/__pycache__
+%endif
 %{_unitdir}/ceph-mgr@.service
 %{_unitdir}/ceph-mgr.target
 %attr(750,ceph,ceph) %dir %{_localstatedir}/lib/ceph/mgr
@@ -1703,6 +1701,32 @@ if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
+%files mgr-modules-core
+%dir %{_datadir}/ceph/mgr
+%{_datadir}/ceph/mgr/alerts
+%{_datadir}/ceph/mgr/balancer
+%{_datadir}/ceph/mgr/crash
+%{_datadir}/ceph/mgr/devicehealth
+%{_datadir}/ceph/mgr/influx
+%{_datadir}/ceph/mgr/insights
+%{_datadir}/ceph/mgr/iostat
+%{_datadir}/ceph/mgr/localpool
+%{_datadir}/ceph/mgr/orchestrator
+%{_datadir}/ceph/mgr/osd_perf_query
+%{_datadir}/ceph/mgr/osd_support
+%{_datadir}/ceph/mgr/pg_autoscaler
+%{_datadir}/ceph/mgr/progress
+%{_datadir}/ceph/mgr/prometheus
+%{_datadir}/ceph/mgr/rbd_support
+%{_datadir}/ceph/mgr/restful
+%{_datadir}/ceph/mgr/selftest
+%{_datadir}/ceph/mgr/status
+%{_datadir}/ceph/mgr/telegraf
+%{_datadir}/ceph/mgr/telemetry
+%{_datadir}/ceph/mgr/test_orchestrator
+%{_datadir}/ceph/mgr/volumes
+%{_datadir}/ceph/mgr/zabbix
+
 %files mgr-rook
 %{_datadir}/ceph/mgr/rook
 
@@ -1729,15 +1753,15 @@ if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
-%files mgr-ssh
-%{_datadir}/ceph/mgr/ssh
+%files mgr-cephadm
+%{_datadir}/ceph/mgr/cephadm
 
-%post mgr-ssh
+%post mgr-cephadm
 if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
-%postun mgr-ssh
+%postun mgr-cephadm
 if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
@@ -1849,6 +1873,54 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%files immutable-object-cache
+%{_bindir}/ceph-immutable-object-cache
+%{_mandir}/man8/ceph-immutable-object-cache.8*
+%{_unitdir}/ceph-immutable-object-cache@.service
+%{_unitdir}/ceph-immutable-object-cache.target
+
+%post immutable-object-cache
+%if 0%{?suse_version}
+if [ $1 -eq 1 ] ; then
+  /usr/bin/systemctl preset ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target >/dev/null 2>&1 || :
+fi
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_post ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+if [ $1 -eq 1 ] ; then
+/usr/bin/systemctl start ceph-immutable-object-cache.target >/dev/null 2>&1 || :
+fi
+
+%preun immutable-object-cache
+%if 0%{?suse_version}
+%service_del_preun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_preun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+
+%postun immutable-object-cache
+test -n "$FIRST_ARG" || FIRST_ARG=$1
+%if 0%{?suse_version}
+DISABLE_RESTART_ON_UPDATE="yes"
+%service_del_postun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_postun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+if [ $FIRST_ARG -ge 1 ] ; then
+  # Restart on upgrade, but only if "CEPH_AUTO_RESTART_ON_UPGRADE" is set to
+  # "yes". In any case: if units are not running, do not touch them.
+  SYSCONF_CEPH=%{_sysconfdir}/sysconfig/ceph
+  if [ -f $SYSCONF_CEPH -a -r $SYSCONF_CEPH ] ; then
+    source $SYSCONF_CEPH
+  fi
+  if [ "X$CEPH_AUTO_RESTART_ON_UPGRADE" = "Xyes" ] ; then
+    /usr/bin/systemctl try-restart ceph-immutable-object-cache@\*.service > /dev/null 2>&1 || :
+  fi
+fi
+
 %files -n rbd-nbd
 %{_bindir}/rbd-nbd
 %{_mandir}/man8/rbd-nbd.8*
@@ -1860,12 +1932,14 @@ fi
 %{_bindir}/radosgw-es
 %{_bindir}/radosgw-object-expirer
 %{_bindir}/rgw-orphan-list
+%{_libdir}/libradosgw.so*
 %{_mandir}/man8/radosgw.8*
 %dir %{_localstatedir}/lib/ceph/radosgw
 %{_unitdir}/ceph-radosgw@.service
 %{_unitdir}/ceph-radosgw.target
 
 %post radosgw
+/sbin/ldconfig
 %if 0%{?suse_version}
 if [ $1 -eq 1 ] ; then
   /usr/bin/systemctl preset ceph-radosgw@\*.service ceph-radosgw.target >/dev/null 2>&1 || :
@@ -1887,6 +1961,7 @@ fi
 %endif
 
 %postun radosgw
+/sbin/ldconfig
 %if 0%{?suse_version}
 DISABLE_RESTART_ON_UPDATE="yes"
 %service_del_postun ceph-radosgw@\*.service ceph-radosgw.target
@@ -1973,6 +2048,11 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%if 0%{with seastar}
+%files crimson-osd
+%{_bindir}/crimson-osd
+%endif
+
 %if %{with ocf}
 
 %files resource-agents
@@ -2018,12 +2098,6 @@ fi
 %{_includedir}/rados/page.h
 %{_includedir}/rados/rados_types.hpp
 
-%if 0%{with python2}
-%files -n python-rados
-%{python_sitearch}/rados.so
-%{python_sitearch}/rados-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rados
 %{python3_sitearch}/rados.cpython*.so
 %{python3_sitearch}/rados-*.egg-info
@@ -2087,22 +2161,10 @@ fi
 %{_libdir}/librgw_rados_tp.so
 %endif
 
-%if 0%{with python2}
-%files -n python-rgw
-%{python_sitearch}/rgw.so
-%{python_sitearch}/rgw-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rgw
 %{python3_sitearch}/rgw.cpython*.so
 %{python3_sitearch}/rgw-*.egg-info
 
-%if 0%{with python2}
-%files -n python-rbd
-%{python_sitearch}/rbd.so
-%{python_sitearch}/rbd-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rbd
 %{python3_sitearch}/rbd.cpython*.so
 %{python3_sitearch}/rbd-*.egg-info
@@ -2121,31 +2183,22 @@ fi
 %{_includedir}/cephfs/ceph_ll_client.h
 %{_libdir}/libcephfs.so
 
-%if 0%{with python2}
-%files -n python-cephfs
-%{python_sitearch}/cephfs.so
-%{python_sitearch}/cephfs-*.egg-info
-%{python_sitelib}/ceph_volume_client.py*
-%endif
-
 %files -n python%{python3_pkgversion}-cephfs
 %{python3_sitearch}/cephfs.cpython*.so
 %{python3_sitearch}/cephfs-*.egg-info
 %{python3_sitelib}/ceph_volume_client.py
 %{python3_sitelib}/__pycache__/ceph_volume_client.cpython*.py*
 
-%if 0%{with python2}
-%files -n python-ceph-argparse
-%{python_sitelib}/ceph_argparse.py*
-%{python_sitelib}/ceph_daemon.py*
-%endif
-
 %files -n python%{python3_pkgversion}-ceph-argparse
 %{python3_sitelib}/ceph_argparse.py
 %{python3_sitelib}/__pycache__/ceph_argparse.cpython*.py*
 %{python3_sitelib}/ceph_daemon.py
 %{python3_sitelib}/__pycache__/ceph_daemon.cpython*.py*
 
+%files -n python%{python3_pkgversion}-ceph-common
+%{python3_sitelib}/ceph
+%{python3_sitelib}/ceph-*.egg-info
+
 %if 0%{with cephfs_shell}
 %files -n cephfs-shell
 %{python3_sitelib}/cephfs_shell-*.egg-info
@@ -2175,7 +2228,7 @@ fi
 %{_bindir}/ceph_test_*
 %{_bindir}/ceph-coverage
 %{_bindir}/ceph-debugpack
-%{_bindir}/cephdeduptool
+%{_bindir}/ceph-dedup-tool
 %{_mandir}/man8/ceph-debugpack.8*
 %dir %{_libdir}/ceph
 %{_libdir}/ceph/ceph-monstore-update-crush.sh
@@ -2287,30 +2340,21 @@ fi
 exit 0
 %endif
 
-%if 0%{with python2}
-%files -n python-ceph-compat
-# We need an empty %%files list for python-ceph-compat, to tell rpmbuild to
-# actually build this meta package.
-%endif
-
 %files grafana-dashboards
 %if 0%{?suse_version}
 %attr(0755,root,root) %dir %{_sysconfdir}/grafana
 %attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards
-%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
-%else
-%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
 %endif
+%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
 %config %{_sysconfdir}/grafana/dashboards/ceph-dashboard/*
 %doc monitoring/grafana/dashboards/README
 %doc monitoring/grafana/README.md
 
-%if 0%{?suse_version}
 %files prometheus-alerts
-%dir /etc/prometheus/SUSE/
-%dir /etc/prometheus/SUSE/default_rules/
-%config /etc/prometheus/SUSE/default_rules/ceph_default_alerts.yml
+%if 0%{?suse_version}
+%attr(0755,root,root) %dir %{_sysconfdir}/prometheus
 %endif
-
+%attr(0755,root,root) %dir %{_sysconfdir}/prometheus/ceph
+%config %{_sysconfdir}/prometheus/ceph/ceph_default_alerts.yml
 
 %changelog
diff -pruN 14.2.16-2/ceph.spec.in 15.2.7-0ubuntu4/ceph.spec.in
--- 14.2.16-2/ceph.spec.in	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/ceph.spec.in	2020-11-30 19:58:30.000000000 +0000
@@ -1,4 +1,3 @@
-# vim: set noexpandtab ts=8 sw=8 :
 #
 # spec file for package ceph
 #
@@ -22,6 +21,7 @@
 # bcond syntax!
 #################################################################################
 %bcond_with make_check
+%bcond_with cmake_verbose_logging
 %bcond_without ceph_test_package
 %ifarch s390 s390x
 %bcond_with tcmalloc
@@ -36,42 +36,30 @@
 %bcond_without cephfs_java
 %endif
 %bcond_without amqp_endpoint
+%bcond_without kafka_endpoint
 %bcond_without lttng
 %bcond_without libradosstriper
 %bcond_without ocf
-%bcond_without kafka_endpoint
 %global _remote_tarball_prefix https://download.ceph.com/tarballs/
 %endif
 %if 0%{?suse_version}
-%bcond_with selinux
-%bcond_with cephfs_java
 %bcond_with amqp_endpoint
+%bcond_with cephfs_java
 %bcond_with kafka_endpoint
-#Compat macro for new _fillupdir macro introduced in Nov 2017
-%if ! %{defined _fillupdir}
-%global _fillupdir /var/adm/fillup-templates
-%endif
-%if 0%{?is_opensuse}
-%bcond_without libradosstriper
-%bcond_without ocf
-%else
 %bcond_with libradosstriper
-%bcond_with ocf
-%endif
 %ifarch x86_64 aarch64 ppc64le
 %bcond_without lttng
 %else
 %bcond_with lttng
 %endif
+%bcond_with ocf
+%bcond_with selinux
+#Compat macro for _fillupdir macro introduced in Nov 2017
+%if ! %{defined _fillupdir}
+%global _fillupdir /var/adm/fillup-templates
 %endif
-%bcond_with seastar
-%if 0%{?fedora} >= 29 || 0%{?suse_version} >= 1500 || 0%{?rhel} >= 8
-# distros that need a py3 Ceph build
-%bcond_with python2
-%else
-# distros that need a py2 Ceph build
-%bcond_without python2
 %endif
+%bcond_with seastar
 %if 0%{?fedora} || 0%{?suse_version} >= 1500
 # distros that ship cmd2 and/or colorama
 %bcond_without cephfs_shell
@@ -79,26 +67,27 @@
 # distros that do _not_ ship cmd2/colorama
 %bcond_with cephfs_shell
 %endif
-%if 0%{without python2}
-%global _defined_if_python2_absent 1
-%endif
 %if 0%{?fedora} || 0%{?suse_version} || 0%{?rhel} >= 8
 %global weak_deps 1
 %endif
 %if %{with selinux}
 # get selinux policy version
+# Force 0.0.0 policy version for centos builds to avoid repository sync issues between rhel and centos
+%if 0%{?centos}
+%global _selinux_policy_version 0.0.0
+%else
 %{!?_selinux_policy_version: %global _selinux_policy_version 0.0.0}
 %endif
+%endif
 
 %{!?_udevrulesdir: %global _udevrulesdir /lib/udev/rules.d}
 %{!?tmpfiles_create: %global tmpfiles_create systemd-tmpfiles --create}
 %{!?python3_pkgversion: %global python3_pkgversion 3}
 %{!?python3_version_nodots: %global python3_version_nodots 3}
 %{!?python3_version: %global python3_version 3}
-# define _python_buildid macro which will expand to the empty string when
-# building with python2
-%global _python_buildid %{?_defined_if_python2_absent:%{python3_pkgversion}}
-
+%if 0%{?rhel} == 7
+%define __python %{__python3}
+%endif
 # unify libexec for all targets
 %global _libexecdir %{_exec_prefix}/lib
 
@@ -109,7 +98,7 @@
 # main package definition
 #################################################################################
 Name:		ceph
-Version:	@VERSION@
+Version:	@PROJECT_VERSION@
 Release:	@RPM_RELEASE@%{?dist}
 %if 0%{?fedora} || 0%{?rhel}
 Epoch:		2
@@ -120,7 +109,7 @@ Epoch:		2
 %global _epoch_prefix %{?epoch:%{epoch}:}
 
 Summary:	User space components of the Ceph file system
-License:	LGPL-2.1 and CC-BY-SA-3.0 and GPL-2.0 and BSL-1.0 and BSD-3-Clause and MIT
+License:	LGPL-2.1 and LGPL-3.0 and CC-BY-SA-3.0 and GPL-2.0 and BSL-1.0 and BSD-3-Clause and MIT
 %if 0%{?suse_version}
 Group:		System/Filesystems
 %endif
@@ -157,7 +146,7 @@ BuildRequires:	fuse-devel
 %if 0%{?rhel} == 7
 # devtoolset offers newer make and valgrind-devel, but the old ones are good
 # enough.
-BuildRequires:	devtoolset-8-gcc-c++ >= 8.2.1
+BuildRequires:	devtoolset-8-gcc-c++ >= 8.3.1-3.1
 %else
 BuildRequires:	gcc-c++
 %endif
@@ -175,7 +164,7 @@ BuildRequires:	libaio-devel
 BuildRequires:	libblkid-devel >= 2.17
 BuildRequires:	libcurl-devel
 BuildRequires:	libcap-ng-devel
-BuildRequires:	libudev-devel
+BuildRequires:	pkgconfig(libudev)
 BuildRequires:	libnl3-devel
 BuildRequires:	liboath-devel
 BuildRequires:	libtool
@@ -187,11 +176,11 @@ BuildRequires:	patch
 BuildRequires:	perl
 BuildRequires:	pkgconfig
 BuildRequires:  procps
-BuildRequires:	python%{_python_buildid}
-BuildRequires:	python%{_python_buildid}-devel
+BuildRequires:	python%{python3_pkgversion}
+BuildRequires:	python%{python3_pkgversion}-devel
 BuildRequires:	snappy-devel
 BuildRequires:	sudo
-BuildRequires:	udev
+BuildRequires:	pkgconfig(udev)
 BuildRequires:	util-linux
 BuildRequires:	valgrind-devel
 BuildRequires:	which
@@ -208,12 +197,35 @@ BuildRequires:  librdkafka-devel
 %if 0%{with make_check}
 BuildRequires:  jq
 BuildRequires:	libuuid-devel
-BuildRequires:	python%{_python_buildid}-bcrypt
-BuildRequires:	python%{_python_buildid}-nose
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-requests
-BuildRequires:	python%{_python_buildid}-six
-BuildRequires:	python%{_python_buildid}-virtualenv
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-bcrypt
+BuildRequires:	python%{python3_version_nodots}-nose
+BuildRequires:	python%{python3_version_nodots}-requests
+BuildRequires:	python%{python3_version_nodots}-dateutil
+%else
+BuildRequires:	python%{python3_pkgversion}-bcrypt
+BuildRequires:	python%{python3_pkgversion}-nose
+BuildRequires:	python%{python3_pkgversion}-pecan
+BuildRequires:	python%{python3_pkgversion}-requests
+BuildRequires:	python%{python3_pkgversion}-dateutil
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-six
+BuildRequires:	python%{python3_version_nodots}-virtualenv
+%else
+BuildRequires:	python%{python3_pkgversion}-six
+BuildRequires:	python%{python3_pkgversion}-virtualenv
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-coverage
+%else
+BuildRequires:	python%{python3_pkgversion}-coverage
+%endif
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-pyOpenSSL
+%else
+BuildRequires:	python%{python3_pkgversion}-pyOpenSSL
+%endif
 BuildRequires:	socat
 %endif
 %if 0%{with seastar}
@@ -246,10 +258,10 @@ BuildRequires:  openldap2-devel
 #BuildRequires:  krb5
 #BuildRequires:  krb5-devel
 BuildRequires:  cunit-devel
-BuildRequires:	python%{_python_buildid}-setuptools
-BuildRequires:	python%{_python_buildid}-Cython
-BuildRequires:	python%{_python_buildid}-PrettyTable
-BuildRequires:	python%{_python_buildid}-Sphinx
+BuildRequires:	python%{python3_pkgversion}-setuptools
+BuildRequires:	python%{python3_pkgversion}-Cython
+BuildRequires:	python%{python3_pkgversion}-PrettyTable
+BuildRequires:	python%{python3_pkgversion}-Sphinx
 BuildRequires:  rdma-core-devel
 BuildRequires:	liblz4-devel >= 1.7
 # for prometheus-alerts
@@ -267,9 +279,6 @@ BuildRequires:  openldap-devel
 BuildRequires:  openssl-devel
 BuildRequires:  CUnit-devel
 BuildRequires:  redhat-lsb-core
-%if 0%{with python2}
-BuildRequires:	python2-Cython
-%endif
 BuildRequires:	python%{python3_pkgversion}-devel
 BuildRequires:	python%{python3_pkgversion}-setuptools
 %if 0%{?rhel} == 7
@@ -277,40 +286,48 @@ BuildRequires:	python%{python3_version_n
 %else
 BuildRequires:	python%{python3_pkgversion}-Cython
 %endif
-BuildRequires:	python%{_python_buildid}-prettytable
-BuildRequires:	python%{_python_buildid}-sphinx
+BuildRequires:	python%{python3_pkgversion}-prettytable
+BuildRequires:	python%{python3_pkgversion}-sphinx
 BuildRequires:	lz4-devel >= 1.7
 %endif
 # distro-conditional make check dependencies
 %if 0%{with make_check}
 %if 0%{?fedora} || 0%{?rhel}
-BuildRequires:	python%{_python_buildid}-coverage
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-tox
-BuildRequires:  xmlsec1
+BuildRequires:	libtool-ltdl-devel
+BuildRequires:	xmlsec1
+BuildRequires:	xmlsec1-devel
+%ifarch x86_64
+BuildRequires:	xmlsec1-nss
+%endif
+BuildRequires:	xmlsec1-openssl
+BuildRequires:	xmlsec1-openssl-devel
+%if 0%{?rhel} == 7
+BuildRequires:	python%{python3_version_nodots}-jwt
+BuildRequires:	python%{python3_version_nodots}-scipy
+%else
+BuildRequires:	python%{python3_pkgversion}-cherrypy
+BuildRequires:	python%{python3_pkgversion}-jwt
+BuildRequires:	python%{python3_pkgversion}-routes
+BuildRequires:	python%{python3_pkgversion}-scipy
+BuildRequires:	python%{python3_pkgversion}-werkzeug
+%endif
 %if 0%{?rhel} == 7
-BuildRequires:  pyOpenSSL%{_python_buildid}
+BuildRequires:	python%{python3_version_nodots}-pyOpenSSL
 %else
-BuildRequires:  python%{_python_buildid}-pyOpenSSL
+BuildRequires:	python%{python3_pkgversion}-pyOpenSSL
 %endif
-BuildRequires:	python%{_python_buildid}-cherrypy
-BuildRequires:	python%{_python_buildid}-jwt
-BuildRequires:	python%{_python_buildid}-routes
-BuildRequires:  python%{_python_buildid}-scipy
-BuildRequires:	python%{_python_buildid}-werkzeug
-%endif
-%if 0%{?suse_version}
-BuildRequires:	python%{_python_buildid}-CherryPy
-BuildRequires:	python%{_python_buildid}-PyJWT
-BuildRequires:	python%{_python_buildid}-Routes
-BuildRequires:	python%{_python_buildid}-Werkzeug
-BuildRequires:	python%{_python_buildid}-coverage
-BuildRequires:	python%{_python_buildid}-numpy-devel
-BuildRequires:	python%{_python_buildid}-pecan
-BuildRequires:	python%{_python_buildid}-pyOpenSSL
-BuildRequires:	python%{_python_buildid}-tox
-BuildRequires:	rpm-build
-BuildRequires:  xmlsec1-devel
+%endif
+%if 0%{?suse_version}
+BuildRequires:	libxmlsec1-1
+BuildRequires:	libxmlsec1-nss1
+BuildRequires:	libxmlsec1-openssl1
+BuildRequires:	python%{python3_pkgversion}-CherryPy
+BuildRequires:	python%{python3_pkgversion}-PyJWT
+BuildRequires:	python%{python3_pkgversion}-Routes
+BuildRequires:	python%{python3_pkgversion}-Werkzeug
+BuildRequires:	python%{python3_pkgversion}-numpy-devel
+BuildRequires:	xmlsec1-devel
+BuildRequires:	xmlsec1-openssl-devel
 %endif
 %endif
 # lttng and babeltrace for rbd-replay-prep
@@ -378,7 +395,7 @@ Requires:      grep
 Requires:      logrotate
 Requires:      parted
 Requires:      psmisc
-Requires:      python%{_python_buildid}-setuptools
+Requires:      python%{python3_pkgversion}-setuptools
 Requires:      util-linux
 Requires:      xfsprogs
 Requires:      which
@@ -395,6 +412,20 @@ Recommends:    chrony
 %description base
 Base is the package that includes all the files shared amongst ceph servers
 
+%package -n cephadm
+Summary:        Utility to bootstrap Ceph clusters
+Requires:       lvm2
+%if 0%{?suse_version}
+Requires:       apparmor-abstractions
+%endif
+Requires:       python%{python3_pkgversion}
+%if 0%{?weak_deps}
+Recommends:     podman
+%endif
+%description -n cephadm
+Utility to bootstrap a Ceph cluster and manage Ceph daemons deployed 
+with systemd and podman.
+
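As a rough usage sketch for this new package (the IP address and host are hypothetical placeholders; the flags shown are the standard cephadm ones, not something defined by this spec):

    # install the bootstrap utility, then create a minimal single-host cluster
    dnf install cephadm                      # or zypper/yum, depending on distro
    cephadm bootstrap --mon-ip 192.0.2.10    # 192.0.2.10 is a placeholder address
    cephadm shell -- ceph orch status        # run the CLI from the managed container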
 %package -n ceph-common
 Summary:	Ceph Common
 %if 0%{?suse_version}
@@ -403,16 +434,17 @@ Group:		System/Filesystems
 Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
 Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
 Requires:	libcephfs2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rbd = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-cephfs = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-rgw = %{_epoch_prefix}%{version}-%{release}
-Requires:	python%{_python_buildid}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rbd = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-cephfs = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-rgw = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
+Requires:	python%{python3_pkgversion}-ceph-common = %{_epoch_prefix}%{version}-%{release}
 %if 0%{?fedora} || 0%{?rhel}
-Requires:	python%{_python_buildid}-prettytable
+Requires:	python%{python3_pkgversion}-prettytable
 %endif
 %if 0%{?suse_version}
-Requires:	python%{_python_buildid}-PrettyTable
+Requires:	python%{python3_pkgversion}-PrettyTable
 %endif
 %if 0%{with libradosstriper}
 Requires:	libradosstriper1 = %{_epoch_prefix}%{version}-%{release}
@@ -455,32 +487,19 @@ Summary:        Ceph Manager Daemon
 Group:          System/Filesystems
 %endif
 Requires:       ceph-base = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-bcrypt
-Requires:       python%{_python_buildid}-pecan
-Requires:       python%{_python_buildid}-requests
-Requires:       python%{_python_buildid}-six
-%if 0%{?fedora} || 0%{?rhel}
-Requires:       python%{_python_buildid}-cherrypy
-Requires:       python%{_python_buildid}-werkzeug
-%endif
-%if 0%{?suse_version}
-Requires:       python%{_python_buildid}-CherryPy
-Requires:       python%{_python_buildid}-Werkzeug
+Requires:       ceph-mgr-modules-core = %{_epoch_prefix}%{version}-%{release}
+%if 0%{?rhel} == 7
+Requires:       python%{python3_version_nodots}-six
+%else
+Requires:       python%{python3_pkgversion}-six
 %endif
 %if 0%{?weak_deps}
 Recommends:	ceph-mgr-dashboard = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-diskprediction-local = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-diskprediction-cloud = %{_epoch_prefix}%{version}-%{release}
-Recommends:	ceph-mgr-rook = %{_epoch_prefix}%{version}-%{release}
 Recommends:	ceph-mgr-k8sevents = %{_epoch_prefix}%{version}-%{release}
-Recommends:	ceph-mgr-ssh = %{_epoch_prefix}%{version}-%{release}
-Recommends:	python%{_python_buildid}-influxdb
-%endif
-%if 0%{?rhel} == 7
-Requires:       pyOpenSSL
-Requires:       python-enum34
-%else
-Requires:       python%{_python_buildid}-pyOpenSSL
+Recommends:	ceph-mgr-cephadm = %{_epoch_prefix}%{version}-%{release}
+Recommends:	python%{python3_pkgversion}-influxdb
 %endif
 %description mgr
 ceph-mgr enables python modules that provide services (such as the REST
@@ -496,101 +515,145 @@ Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
 Requires:       ceph-grafana-dashboards = %{_epoch_prefix}%{version}-%{release}
+Requires:       ceph-prometheus-alerts = %{_epoch_prefix}%{version}-%{release}
 %if 0%{?fedora} || 0%{?rhel}
-Requires:       python%{_python_buildid}-cherrypy
-Requires:       python%{_python_buildid}-jwt
-Requires:       python%{_python_buildid}-routes
-Requires:       python%{_python_buildid}-werkzeug
+Requires:       python%{python3_pkgversion}-cherrypy
+Requires:       python%{python3_pkgversion}-jwt
+Requires:       python%{python3_pkgversion}-routes
+Requires:       python%{python3_pkgversion}-werkzeug
 %if 0%{?weak_deps}
-Recommends:     python%{_python_buildid}-saml
+Recommends:     python%{python3_pkgversion}-saml
 %endif
 %endif
 %if 0%{?suse_version}
-Requires:       python%{_python_buildid}-CherryPy
-Requires:       python%{_python_buildid}-PyJWT
-Requires:       python%{_python_buildid}-Routes
-Requires:       python%{_python_buildid}-Werkzeug
-%endif
-%if 0%{?rhel} == 7
-Requires:       pyOpenSSL
-%else
-Requires:       python%{_python_buildid}-pyOpenSSL
+Requires:       python%{python3_pkgversion}-CherryPy
+Requires:       python%{python3_pkgversion}-PyJWT
+Requires:       python%{python3_pkgversion}-Routes
+Requires:       python%{python3_pkgversion}-Werkzeug
+Recommends:     python%{python3_pkgversion}-python3-saml
 %endif
 %description mgr-dashboard
-ceph-mgr-dashboard is a manager plugin, providing a web-based application
+ceph-mgr-dashboard is a manager module, providing a web-based application
 to monitor and manage many aspects of a Ceph cluster and related components.
 See the Dashboard documentation at http://docs.ceph.com/ for details and a
 detailed feature overview.
 
 %package mgr-diskprediction-local
-Summary:        ceph-mgr diskprediction_local plugin
+Summary:        Ceph Manager module for predicting disk failures
 BuildArch:      noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-%if 0%{?fedora} || 0%{?rhel} > 7 || 0%{?suse_version}
-Requires:       python%{_python_buildid}-numpy
-%if 0%{without python2}
+Requires:       python%{python3_pkgversion}-numpy
 Requires:       python3-scipy
-%else
-Requires:       python2-scipy
-%endif
-%endif
 %if 0%{?rhel} == 7
 Requires:       numpy
 Requires:       scipy
 %endif
 %description mgr-diskprediction-local
-ceph-mgr-diskprediction-local is a ceph-mgr plugin that tries to predict
+ceph-mgr-diskprediction-local is a ceph-mgr module that tries to predict
 disk failures using local algorithms and machine-learning databases.
 
 %package mgr-diskprediction-cloud
-Summary:        ceph-mgr diskprediction_cloud plugin
+Summary:        Ceph Manager module for cloud-based disk failure prediction
 BuildArch:      noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
+%if 0%{without python2}
+Requires:       python3-grpcio
+Requires:       python3-protobuf
+%else
+Requires:       python2-grpcio
+Requires:       python2-protobuf
+%endif
 %description mgr-diskprediction-cloud
-ceph-mgr-diskprediction-cloud is a ceph-mgr plugin that tries to predict
+ceph-mgr-diskprediction-cloud is a ceph-mgr module that tries to predict
 disk failures using services in the Google cloud.
 
+%package mgr-modules-core
+Summary:        Ceph Manager modules which are always enabled
+BuildArch:      noarch
+%if 0%{?suse_version}
+Group:          System/Filesystems
+%endif
+%if 0%{?rhel} == 7
+Requires:       python%{python3_version_nodots}-bcrypt
+Requires:       python%{python3_version_nodots}-pyOpenSSL
+Requires:       python%{python3_version_nodots}-requests
+Requires:       python%{python3_version_nodots}-PyYAML
+Requires:       python%{python3_version_nodots}-dateutil
+%else
+Requires:       python%{python3_pkgversion}-bcrypt
+Requires:       python%{python3_pkgversion}-pecan
+Requires:       python%{python3_pkgversion}-pyOpenSSL
+Requires:       python%{python3_pkgversion}-requests
+Requires:       python%{python3_pkgversion}-dateutil
+%endif
+%if 0%{?fedora} || 0%{?rhel} >= 8
+Requires:       python%{python3_pkgversion}-cherrypy
+Requires:       python%{python3_pkgversion}-pyyaml
+Requires:       python%{python3_pkgversion}-werkzeug
+%endif
+%if 0%{?suse_version}
+Requires:       python%{python3_pkgversion}-CherryPy
+Requires:       python%{python3_pkgversion}-PyYAML
+Requires:       python%{python3_pkgversion}-Werkzeug
+%endif
+%if 0%{?weak_deps}
+Recommends:	ceph-mgr-rook = %{_epoch_prefix}%{version}-%{release}
+%endif
+%description mgr-modules-core
+ceph-mgr-modules-core provides a set of modules which are always
+enabled by ceph-mgr.
+
 %package mgr-rook
 BuildArch:      noarch
-Summary:        ceph-mgr rook plugin
+Summary:        Ceph Manager module for Rook-based orchestration
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-kubernetes
+Requires:       python%{python3_pkgversion}-kubernetes
+Requires:       python%{python3_pkgversion}-jsonpatch
 %description mgr-rook
-ceph-mgr-rook is a ceph-mgr plugin for orchestration functions using
+ceph-mgr-rook is a ceph-mgr module for orchestration functions using
 a Rook backend.
 
 %package mgr-k8sevents
 BuildArch:      noarch
-Summary:        Ceph Manager plugin to orchestrate ceph-events to kubernetes' events API
+Summary:        Ceph Manager module to forward Ceph events to kubernetes' events API
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-kubernetes
+Requires:       python%{python3_pkgversion}-kubernetes
 %description mgr-k8sevents
-ceph-mgr-k8sevents is a ceph-mgr plugin that sends every ceph-events
+ceph-mgr-k8sevents is a ceph-mgr module that sends every Ceph event
 to kubernetes' events API
 
-%package mgr-ssh
-Summary:        ceph-mgr ssh module
+%package mgr-cephadm
+Summary:        Ceph Manager module for cephadm-based orchestration
 BuildArch:	noarch
 %if 0%{?suse_version}
 Group:          System/Filesystems
 %endif
 Requires:       ceph-mgr = %{_epoch_prefix}%{version}-%{release}
-Requires:       python%{_python_buildid}-remoto
-%description mgr-ssh
-ceph-mgr-ssh is a ceph-mgr module for orchestration functions using
-direct SSH connections for management operations.
+Requires:       python%{python3_pkgversion}-remoto
+Requires:       cephadm = %{_epoch_prefix}%{version}-%{release}
+%if 0%{?suse_version}
+Requires:       openssh
+Requires:       python%{python3_pkgversion}-Jinja2
+%endif
+%if 0%{?rhel} || 0%{?fedora}
+Requires:       openssh-clients
+Requires:       python%{python3_pkgversion}-jinja2
+%endif
+%description mgr-cephadm
+ceph-mgr-cephadm is a ceph-mgr module for orchestration functions using
+the integrated cephadm deployment tool for management operations.
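As an illustrative follow-up once ceph-mgr-cephadm is installed (standard Ceph CLI commands; the host name is hypothetical):

    ceph mgr module enable cephadm           # turn the orchestration module on
    ceph orch set backend cephadm            # select cephadm as the orchestrator backend
    ceph orch host add node1                 # "node1" is a placeholder host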
 
 %package fuse
 Summary:	Ceph fuse-based client
@@ -624,6 +687,15 @@ Requires:	librbd1 = %{_epoch_prefix}%{ve
 Daemon for mirroring RBD images between Ceph clusters, streaming
 changes asynchronously.
 
+%package immutable-object-cache
+Summary:	Ceph daemon for immutable object cache
+%if 0%{?suse_version}
+Group:		System/Filesystems
+%endif
+Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
+%description immutable-object-cache
+Daemon for immutable object cache.
+
 %package -n rbd-nbd
 Summary:	Ceph RBD client based on NBD
 %if 0%{?suse_version}
@@ -677,16 +749,26 @@ Provides:	ceph-test:/usr/bin/ceph-osdoma
 Requires:	ceph-base = %{_epoch_prefix}%{version}-%{release}
 Requires:	lvm2
 Requires:	sudo
-Requires: libstoragemgmt
-%if 0%{?weak_deps}
-Recommends:	nvme-cli
-Recommends:	smartmontools
-%endif
+Requires:	libstoragemgmt
+Requires:	python%{python3_pkgversion}-ceph-common = %{_epoch_prefix}%{version}-%{release}
 %description osd
 ceph-osd is the object storage daemon for the Ceph distributed file
 system.  It is responsible for storing objects on a local file system
 and providing access to them over the network.
 
+%if 0%{with seastar}
+%package crimson-osd
+Summary:	Ceph Object Storage Daemon (crimson)
+%if 0%{?suse_version}
+Group:		System/Filesystems
+%endif
+Requires:	ceph-osd = %{_epoch_prefix}%{version}-%{release}
+%description crimson-osd
+crimson-osd is the object storage daemon for the Ceph distributed file
+system.  It is responsible for storing objects on a local file system
+and providing access to them over the network.
+%endif
+
 %package -n librados2
 Summary:	RADOS distributed object store client library
 %if 0%{?suse_version}
@@ -748,21 +830,6 @@ Obsoletes:	librgw2-devel < %{_epoch_pref
 This package contains libraries and headers needed to develop programs
 that use RADOS gateway client library.
 
-%if 0%{with python2}
-%package -n python-rgw
-Summary:	Python 2 libraries for the RADOS gateway
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librgw2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rgw}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rgw
-This package contains Python 2 libraries for interacting with Cephs RADOS
-gateway.
-%endif
-
 %package -n python%{python3_pkgversion}-rgw
 Summary:	Python 3 libraries for the RADOS gateway
 %if 0%{?suse_version}
@@ -771,28 +838,12 @@ Group:		Development/Libraries/Python
 Requires:	librgw2 = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rgw}
-%if 0%{without python2}
 Provides:	python-rgw = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-rgw < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-rgw
 This package contains Python 3 libraries for interacting with Ceph's RADOS
 gateway.
 
-%if 0%{with python2}
-%package -n python-rados
-Summary:	Python 2 libraries for the RADOS object store
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rados}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rados
-This package contains Python 2 libraries for interacting with Cephs RADOS
-object store.
-%endif
-
 %package -n python%{python3_pkgversion}-rados
 Summary:	Python 3 libraries for the RADOS object store
 %if 0%{?suse_version}
@@ -801,10 +852,8 @@ Group:		Development/Libraries/Python
 Requires:	python%{python3_pkgversion}
 Requires:	librados2 = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rados}
-%if 0%{without python2}
 Provides:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Obsoletes:      python-rados < %{_epoch_prefix}%{version}-%{release}
-%endif
+Obsoletes:	python-rados < %{_epoch_prefix}%{version}-%{release}
 %description -n python%{python3_pkgversion}-rados
 This package contains Python 3 libraries for interacting with Ceph's RADOS
 object store.
@@ -870,21 +919,6 @@ Obsoletes:	librbd1-devel < %{_epoch_pref
 This package contains libraries and headers needed to develop programs
 that use RADOS block device.
 
-%if 0%{with python2}
-%package -n python-rbd
-Summary:	Python 2 libraries for the RADOS block device
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-rbd}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-rbd
-This package contains Python 2 libraries for interacting with Cephs RADOS
-block device.
-%endif
-
 %package -n python%{python3_pkgversion}-rbd
 Summary:	Python 3 libraries for the RADOS block device
 %if 0%{?suse_version}
@@ -893,11 +927,8 @@ Group:		Development/Libraries/Python
 Requires:	librbd1 = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-rbd}
-Provides:	python3-rbd = %{_epoch_prefix}%{version}-%{release}
-%if 0%{without python2}
 Provides:	python-rbd = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-rbd < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-rbd
 This package contains Python 3 libraries for interacting with Ceph's RADOS
 block device.
@@ -932,22 +963,6 @@ Obsoletes:	libcephfs2-devel < %{_epoch_p
 This package contains libraries and headers needed to develop programs
 that use Ceph's distributed file system.
 
-%if 0%{with python2}
-%package -n python-cephfs
-Summary:	Python 2 libraries for Ceph distributed file system
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Requires:	libcephfs2 = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
-%{?python_provide:%python_provide python-cephfs}
-Obsoletes:	python-ceph < %{_epoch_prefix}%{version}-%{release}
-%description -n python-cephfs
-This package contains Python 2 libraries for interacting with Cephs distributed
-file system.
-%endif
-
 %package -n python%{python3_pkgversion}-cephfs
 Summary:	Python 3 libraries for Ceph distributed file system
 %if 0%{?suse_version}
@@ -957,27 +972,12 @@ Requires:	libcephfs2 = %{_epoch_prefix}%
 Requires:	python%{python3_pkgversion}-rados = %{_epoch_prefix}%{version}-%{release}
 Requires:	python%{python3_pkgversion}-ceph-argparse = %{_epoch_prefix}%{version}-%{release}
 %{?python_provide:%python_provide python%{python3_pkgversion}-cephfs}
-%if 0%{without python2}
 Provides:	python-cephfs = %{_epoch_prefix}%{version}-%{release}
 Obsoletes:	python-cephfs < %{_epoch_prefix}%{version}-%{release}
-%endif
 %description -n python%{python3_pkgversion}-cephfs
 This package contains Python 3 libraries for interacting with Ceph's distributed
 file system.
 
-%if 0%{with python2}
-%package -n python-ceph-argparse
-Summary:	Python 2 utility libraries for Ceph CLI
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-%description -n python-ceph-argparse
-This package contains types and routines for Python 2 used by the Ceph CLI as
-well as the RESTful interface. These have to do with querying the daemons for
-command-description information, validating user command input against those
-descriptions, and submitting the command to the appropriate daemon.
-%endif
-
 %package -n python%{python3_pkgversion}-ceph-argparse
 Summary:	Python 3 utility libraries for Ceph CLI
 %if 0%{?suse_version}
@@ -990,6 +990,22 @@ well as the RESTful interface. These hav
 command-description information, validating user command input against those
 descriptions, and submitting the command to the appropriate daemon.
 
+%package -n python%{python3_pkgversion}-ceph-common
+Summary:	Python 3 utility libraries for Ceph
+%if 0%{?fedora} || 0%{?rhel} >= 8
+Requires:       python%{python3_pkgversion}-pyyaml
+%endif
+%if 0%{?suse_version}
+Requires:       python%{python3_pkgversion}-PyYAML
+%endif
+%if 0%{?suse_version}
+Group:		Development/Libraries/Python
+%endif
+%{?python_provide:%python_provide python%{python3_pkgversion}-ceph-common}
+%description -n python%{python3_pkgversion}-ceph-common
+This package contains data structures, classes and functions used by Ceph.
+It also contains utilities used for the cephadm orchestrator.
+
 %if 0%{with cephfs_shell}
 %package -n cephfs-shell
 Summary:    Interactive shell for Ceph file system
@@ -1086,25 +1102,6 @@ populated file-systems.
 
 %endif
 
-%if 0%{with python2}
-%package -n python-ceph-compat
-Summary:	Compatibility package for Cephs python libraries
-%if 0%{?suse_version}
-Group:		Development/Libraries/Python
-%endif
-Obsoletes:	python-ceph
-Requires:	python-rados = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rbd = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-cephfs = %{_epoch_prefix}%{version}-%{release}
-Requires:	python-rgw = %{_epoch_prefix}%{version}-%{release}
-Provides:	python-ceph
-%description -n python-ceph-compat
-This is a compatibility package to accommodate python-ceph split into
-python-rados, python-rbd, python-rgw and python-cephfs. Packages still
-depending on python-ceph should be fixed to depend on python-rados,
-python-rbd, python-rgw or python-cephfs instead.
-%endif
-
 %package grafana-dashboards
 Summary:	The set of Grafana dashboards for monitoring purposes
 BuildArch:	noarch
@@ -1118,14 +1115,12 @@ collecting data from Ceph Manager "prome
 project "node_exporter" module. The dashboards are designed to be
 integrated with the Ceph Manager Dashboard web UI.
 
-%if 0%{?suse_version}
 %package prometheus-alerts
 Summary:        Prometheus alerts for a Ceph deployment
 BuildArch:      noarch
 Group:          System/Monitoring
 %description prometheus-alerts
 This package provides Ceph’s default alerts for Prometheus.
-%endif
 
 #################################################################################
 # common
@@ -1202,12 +1197,6 @@ ${CMAKE} .. \
     -DWITH_MANPAGE=ON \
     -DWITH_PYTHON3=%{python3_version} \
     -DWITH_MGR_DASHBOARD_FRONTEND=OFF \
-%if %{with python2}
-    -DWITH_PYTHON2=ON \
-%else
-    -DWITH_PYTHON2=OFF \
-    -DMGR_PYTHON_VERSION=3 \
-%endif
 %if 0%{without ceph_test_package}
     -DWITH_TESTS=OFF \
 %endif
@@ -1251,9 +1240,17 @@ ${CMAKE} .. \
 %else
     -DWITH_RADOSGW_KAFKA_ENDPOINT=OFF \
 %endif
+%if 0%{with cmake_verbose_logging}
+    -DCMAKE_VERBOSE_MAKEFILE=ON \
+%endif
     -DBOOST_J=$CEPH_SMP_NCPUS \
     -DWITH_GRAFANA=ON
 
+%if %{with cmake_verbose_logging}
+cat ./CMakeFiles/CMakeOutput.log
+cat ./CMakeFiles/CMakeError.log
+%endif
+
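For reference, the new verbose-logging knob can be toggled at build time in the usual rpmbuild way (illustrative invocation):

    # dump CMakeOutput.log / CMakeError.log into the build log after the configure step
    rpmbuild --with cmake_verbose_logging -ba ceph.spec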
 make "$CEPH_MFLAGS_JOBS"
 
 
@@ -1286,6 +1283,14 @@ chmod 0644 %{buildroot}%{_docdir}/ceph/s
 install -m 0644 -D COPYING %{buildroot}%{_docdir}/ceph/COPYING
 install -m 0644 -D etc/sysctl/90-ceph-osd.conf %{buildroot}%{_sysctldir}/90-ceph-osd.conf
 
+install -m 0755 src/cephadm/cephadm %{buildroot}%{_sbindir}/cephadm
+mkdir -p %{buildroot}%{_sharedstatedir}/cephadm
+chmod 0700 %{buildroot}%{_sharedstatedir}/cephadm
+mkdir -p %{buildroot}%{_sharedstatedir}/cephadm/.ssh
+chmod 0700 %{buildroot}%{_sharedstatedir}/cephadm/.ssh
+touch %{buildroot}%{_sharedstatedir}/cephadm/.ssh/authorized_keys
+chmod 0600 %{buildroot}%{_sharedstatedir}/cephadm/.ssh/authorized_keys
+
 # firewall templates and /sbin/mount.ceph symlink
 %if 0%{?suse_version}
 mkdir -p %{buildroot}/sbin
@@ -1297,6 +1302,7 @@ install -m 0644 -D udev/50-rbd.rules %{b
 
 # sudoers.d
 install -m 0600 -D sudoers.d/ceph-osd-smartctl %{buildroot}%{_sysconfdir}/sudoers.d/ceph-osd-smartctl
+install -m 0600 -D sudoers.d/cephadm %{buildroot}%{_sysconfdir}/sudoers.d/cephadm
 
 %if 0%{?rhel} >= 8
 pathfix.py -pni "%{__python3} %{py3_shbang_opts}" %{buildroot}%{_bindir}/*
@@ -1322,11 +1328,12 @@ mkdir -p %{buildroot}%{_localstatedir}/l
 mkdir -p %{buildroot}%{_localstatedir}/lib/ceph/bootstrap-rbd
 mkdir -p %{buildroot}%{_localstatedir}/lib/ceph/bootstrap-rbd-mirror
 
+# prometheus alerts
+install -m 644 -D monitoring/prometheus/alerts/ceph_default_alerts.yml %{buildroot}/etc/prometheus/ceph/ceph_default_alerts.yml
+
 %if 0%{?suse_version}
 # create __pycache__ directories and their contents
 %py3_compile %{buildroot}%{python3_sitelib}
-# prometheus alerts
-install -m 644 -D monitoring/prometheus/alerts/ceph_default_alerts.yml %{buildroot}/etc/prometheus/SUSE/default_rules/ceph_default_alerts.yml
 # hardlink duplicate files under /usr to save space
 %fdupes %{buildroot}%{_prefix}
 %endif
@@ -1376,15 +1383,9 @@ rm -rf %{buildroot}
 %{_fillupdir}/sysconfig.*
 %endif
 %{_unitdir}/ceph.target
-%if 0%{with python2}
-%dir %{python_sitelib}/ceph_volume
-%{python_sitelib}/ceph_volume/*
-%{python_sitelib}/ceph_volume-*
-%else
 %dir %{python3_sitelib}/ceph_volume
 %{python3_sitelib}/ceph_volume/*
 %{python3_sitelib}/ceph_volume-*
-%endif
 %{_mandir}/man8/ceph-deploy.8*
 %{_mandir}/man8/ceph-create-keys.8*
 %{_mandir}/man8/ceph-run.8*
@@ -1444,6 +1445,25 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%pre -n cephadm
+getent group cephadm >/dev/null || groupadd -r cephadm
+getent passwd cephadm >/dev/null || useradd -r -g cephadm -s /bin/bash -c "cephadm user for mgr/cephadm" -d %{_sharedstatedir}/cephadm cephadm
+exit 0
+
+%if ! 0%{?suse_version}
+%postun -n cephadm
+userdel -r cephadm || true
+exit 0
+%endif
+
+%files -n cephadm
+%{_sbindir}/cephadm
+%{_mandir}/man8/cephadm.8*
+%{_sysconfdir}/sudoers.d/cephadm
+%attr(0700,cephadm,cephadm) %dir %{_sharedstatedir}/cephadm
+%attr(0700,cephadm,cephadm) %dir %{_sharedstatedir}/cephadm/.ssh
+%attr(0600,cephadm,cephadm) %{_sharedstatedir}/cephadm/.ssh/authorized_keys
+
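A quick way to sanity-check the packaged layout above after installation (the expected output in the comments is illustrative, not captured from a build):

    rpm -ql cephadm                          # /usr/sbin/cephadm, the sudoers drop-in, /var/lib/cephadm/...
    stat -c '%a %U:%G %n' /var/lib/cephadm /var/lib/cephadm/.ssh   # expect 700 cephadm:cephadm for both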
 %files common
 %dir %{_docdir}/ceph
 %doc %{_docdir}/ceph/sample.ceph.conf
@@ -1592,33 +1612,11 @@ fi
 %files mgr
 %{_bindir}/ceph-mgr
 %dir %{_datadir}/ceph/mgr
-%{_datadir}/ceph/mgr/alerts
-%{_datadir}/ceph/mgr/ansible
-%{_datadir}/ceph/mgr/balancer
-%{_datadir}/ceph/mgr/crash
-%{_datadir}/ceph/mgr/deepsea
-%{_datadir}/ceph/mgr/devicehealth
-%{_datadir}/ceph/mgr/influx
-%{_datadir}/ceph/mgr/insights
-%{_datadir}/ceph/mgr/iostat
-%{_datadir}/ceph/mgr/localpool
 %{_datadir}/ceph/mgr/mgr_module.*
 %{_datadir}/ceph/mgr/mgr_util.*
-%{_datadir}/ceph/mgr/orchestrator_cli
-%{_datadir}/ceph/mgr/orchestrator.*
-%{_datadir}/ceph/mgr/osd_perf_query
-%{_datadir}/ceph/mgr/pg_autoscaler
-%{_datadir}/ceph/mgr/progress
-%{_datadir}/ceph/mgr/prometheus
-%{_datadir}/ceph/mgr/rbd_support
-%{_datadir}/ceph/mgr/restful
-%{_datadir}/ceph/mgr/selftest
-%{_datadir}/ceph/mgr/status
-%{_datadir}/ceph/mgr/telegraf
-%{_datadir}/ceph/mgr/telemetry
-%{_datadir}/ceph/mgr/test_orchestrator
-%{_datadir}/ceph/mgr/volumes
-%{_datadir}/ceph/mgr/zabbix
+%if 0%{?rhel} == 7
+%{_datadir}/ceph/mgr/__pycache__
+%endif
 %{_unitdir}/ceph-mgr@.service
 %{_unitdir}/ceph-mgr.target
 %attr(750,ceph,ceph) %dir %{_localstatedir}/lib/ceph/mgr
@@ -1703,6 +1701,32 @@ if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
+%files mgr-modules-core
+%dir %{_datadir}/ceph/mgr
+%{_datadir}/ceph/mgr/alerts
+%{_datadir}/ceph/mgr/balancer
+%{_datadir}/ceph/mgr/crash
+%{_datadir}/ceph/mgr/devicehealth
+%{_datadir}/ceph/mgr/influx
+%{_datadir}/ceph/mgr/insights
+%{_datadir}/ceph/mgr/iostat
+%{_datadir}/ceph/mgr/localpool
+%{_datadir}/ceph/mgr/orchestrator
+%{_datadir}/ceph/mgr/osd_perf_query
+%{_datadir}/ceph/mgr/osd_support
+%{_datadir}/ceph/mgr/pg_autoscaler
+%{_datadir}/ceph/mgr/progress
+%{_datadir}/ceph/mgr/prometheus
+%{_datadir}/ceph/mgr/rbd_support
+%{_datadir}/ceph/mgr/restful
+%{_datadir}/ceph/mgr/selftest
+%{_datadir}/ceph/mgr/status
+%{_datadir}/ceph/mgr/telegraf
+%{_datadir}/ceph/mgr/telemetry
+%{_datadir}/ceph/mgr/test_orchestrator
+%{_datadir}/ceph/mgr/volumes
+%{_datadir}/ceph/mgr/zabbix
+
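Since these always-on modules now live in their own subpackage, a quick (illustrative) check that they are present on a running manager:

    rpm -ql ceph-mgr-modules-core | head     # module sources under /usr/share/ceph/mgr/
    ceph mgr module ls | head                # always-on modules are reported under "always_on_modules"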
 %files mgr-rook
 %{_datadir}/ceph/mgr/rook
 
@@ -1729,15 +1753,15 @@ if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
-%files mgr-ssh
-%{_datadir}/ceph/mgr/ssh
+%files mgr-cephadm
+%{_datadir}/ceph/mgr/cephadm
 
-%post mgr-ssh
+%post mgr-cephadm
 if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
 
-%postun mgr-ssh
+%postun mgr-cephadm
 if [ $1 -eq 1 ] ; then
     /usr/bin/systemctl try-restart ceph-mgr.target >/dev/null 2>&1 || :
 fi
@@ -1849,6 +1873,54 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%files immutable-object-cache
+%{_bindir}/ceph-immutable-object-cache
+%{_mandir}/man8/ceph-immutable-object-cache.8*
+%{_unitdir}/ceph-immutable-object-cache@.service
+%{_unitdir}/ceph-immutable-object-cache.target
+
+%post immutable-object-cache
+%if 0%{?suse_version}
+if [ $1 -eq 1 ] ; then
+  /usr/bin/systemctl preset ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target >/dev/null 2>&1 || :
+fi
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_post ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+if [ $1 -eq 1 ] ; then
+/usr/bin/systemctl start ceph-immutable-object-cache.target >/dev/null 2>&1 || :
+fi
+
+%preun immutable-object-cache
+%if 0%{?suse_version}
+%service_del_preun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_preun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+
+%postun immutable-object-cache
+test -n "$FIRST_ARG" || FIRST_ARG=$1
+%if 0%{?suse_version}
+DISABLE_RESTART_ON_UPDATE="yes"
+%service_del_postun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+%if 0%{?fedora} || 0%{?rhel}
+%systemd_postun ceph-immutable-object-cache@\*.service ceph-immutable-object-cache.target
+%endif
+if [ $FIRST_ARG -ge 1 ] ; then
+  # Restart on upgrade, but only if "CEPH_AUTO_RESTART_ON_UPGRADE" is set to
+  # "yes". In any case: if units are not running, do not touch them.
+  SYSCONF_CEPH=%{_sysconfdir}/sysconfig/ceph
+  if [ -f $SYSCONF_CEPH -a -r $SYSCONF_CEPH ] ; then
+    source $SYSCONF_CEPH
+  fi
+  if [ "X$CEPH_AUTO_RESTART_ON_UPGRADE" = "Xyes" ] ; then
+    /usr/bin/systemctl try-restart ceph-immutable-object-cache@\*.service > /dev/null 2>&1 || :
+  fi
+fi
+
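As a usage sketch for the new daemon packaged here (the client ID in the unit instance name is a placeholder):

    systemctl enable --now ceph-immutable-object-cache@cache-client.service   # "cache-client" is a placeholder client ID
    systemctl status ceph-immutable-object-cache.target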
 %files -n rbd-nbd
 %{_bindir}/rbd-nbd
 %{_mandir}/man8/rbd-nbd.8*
@@ -1860,12 +1932,14 @@ fi
 %{_bindir}/radosgw-es
 %{_bindir}/radosgw-object-expirer
 %{_bindir}/rgw-orphan-list
+%{_libdir}/libradosgw.so*
 %{_mandir}/man8/radosgw.8*
 %dir %{_localstatedir}/lib/ceph/radosgw
 %{_unitdir}/ceph-radosgw@.service
 %{_unitdir}/ceph-radosgw.target
 
 %post radosgw
+/sbin/ldconfig
 %if 0%{?suse_version}
 if [ $1 -eq 1 ] ; then
   /usr/bin/systemctl preset ceph-radosgw@\*.service ceph-radosgw.target >/dev/null 2>&1 || :
@@ -1887,6 +1961,7 @@ fi
 %endif
 
 %postun radosgw
+/sbin/ldconfig
 %if 0%{?suse_version}
 DISABLE_RESTART_ON_UPDATE="yes"
 %service_del_postun ceph-radosgw@\*.service ceph-radosgw.target
@@ -1973,6 +2048,11 @@ if [ $1 -ge 1 ] ; then
   fi
 fi
 
+%if 0%{with seastar}
+%files crimson-osd
+%{_bindir}/crimson-osd
+%endif
+
 %if %{with ocf}
 
 %files resource-agents
@@ -2018,12 +2098,6 @@ fi
 %{_includedir}/rados/page.h
 %{_includedir}/rados/rados_types.hpp
 
-%if 0%{with python2}
-%files -n python-rados
-%{python_sitearch}/rados.so
-%{python_sitearch}/rados-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rados
 %{python3_sitearch}/rados.cpython*.so
 %{python3_sitearch}/rados-*.egg-info
@@ -2087,22 +2161,10 @@ fi
 %{_libdir}/librgw_rados_tp.so
 %endif
 
-%if 0%{with python2}
-%files -n python-rgw
-%{python_sitearch}/rgw.so
-%{python_sitearch}/rgw-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rgw
 %{python3_sitearch}/rgw.cpython*.so
 %{python3_sitearch}/rgw-*.egg-info
 
-%if 0%{with python2}
-%files -n python-rbd
-%{python_sitearch}/rbd.so
-%{python_sitearch}/rbd-*.egg-info
-%endif
-
 %files -n python%{python3_pkgversion}-rbd
 %{python3_sitearch}/rbd.cpython*.so
 %{python3_sitearch}/rbd-*.egg-info
@@ -2121,31 +2183,22 @@ fi
 %{_includedir}/cephfs/ceph_ll_client.h
 %{_libdir}/libcephfs.so
 
-%if 0%{with python2}
-%files -n python-cephfs
-%{python_sitearch}/cephfs.so
-%{python_sitearch}/cephfs-*.egg-info
-%{python_sitelib}/ceph_volume_client.py*
-%endif
-
 %files -n python%{python3_pkgversion}-cephfs
 %{python3_sitearch}/cephfs.cpython*.so
 %{python3_sitearch}/cephfs-*.egg-info
 %{python3_sitelib}/ceph_volume_client.py
 %{python3_sitelib}/__pycache__/ceph_volume_client.cpython*.py*
 
-%if 0%{with python2}
-%files -n python-ceph-argparse
-%{python_sitelib}/ceph_argparse.py*
-%{python_sitelib}/ceph_daemon.py*
-%endif
-
 %files -n python%{python3_pkgversion}-ceph-argparse
 %{python3_sitelib}/ceph_argparse.py
 %{python3_sitelib}/__pycache__/ceph_argparse.cpython*.py*
 %{python3_sitelib}/ceph_daemon.py
 %{python3_sitelib}/__pycache__/ceph_daemon.cpython*.py*
 
+%files -n python%{python3_pkgversion}-ceph-common
+%{python3_sitelib}/ceph
+%{python3_sitelib}/ceph-*.egg-info
+
 %if 0%{with cephfs_shell}
 %files -n cephfs-shell
 %{python3_sitelib}/cephfs_shell-*.egg-info
@@ -2175,7 +2228,7 @@ fi
 %{_bindir}/ceph_test_*
 %{_bindir}/ceph-coverage
 %{_bindir}/ceph-debugpack
-%{_bindir}/cephdeduptool
+%{_bindir}/ceph-dedup-tool
 %{_mandir}/man8/ceph-debugpack.8*
 %dir %{_libdir}/ceph
 %{_libdir}/ceph/ceph-monstore-update-crush.sh
@@ -2287,30 +2340,21 @@ fi
 exit 0
 %endif
 
-%if 0%{with python2}
-%files -n python-ceph-compat
-# We need an empty %%files list for python-ceph-compat, to tell rpmbuild to
-# actually build this meta package.
-%endif
-
 %files grafana-dashboards
 %if 0%{?suse_version}
 %attr(0755,root,root) %dir %{_sysconfdir}/grafana
 %attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards
-%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
-%else
-%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
 %endif
+%attr(0755,root,root) %dir %{_sysconfdir}/grafana/dashboards/ceph-dashboard
 %config %{_sysconfdir}/grafana/dashboards/ceph-dashboard/*
 %doc monitoring/grafana/dashboards/README
 %doc monitoring/grafana/README.md
 
-%if 0%{?suse_version}
 %files prometheus-alerts
-%dir /etc/prometheus/SUSE/
-%dir /etc/prometheus/SUSE/default_rules/
-%config /etc/prometheus/SUSE/default_rules/ceph_default_alerts.yml
+%if 0%{?suse_version}
+%attr(0755,root,root) %dir %{_sysconfdir}/prometheus
 %endif
-
+%attr(0755,root,root) %dir %{_sysconfdir}/prometheus/ceph
+%config %{_sysconfdir}/prometheus/ceph/ceph_default_alerts.yml
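With the alerts file now installed under /etc/prometheus/ceph on every distro, it can be validated (and then referenced from a Prometheus rule_files entry) with the stock tooling; for example:

    promtool check rules /etc/prometheus/ceph/ceph_default_alerts.yml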
 
 %changelog
diff -pruN 14.2.16-2/cmake/modules/AddCephTest.cmake 15.2.7-0ubuntu4/cmake/modules/AddCephTest.cmake
--- 14.2.16-2/cmake/modules/AddCephTest.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/AddCephTest.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -15,7 +15,7 @@ function(add_ceph_test test_name test_pa
     CEPH_BUILD_DIR=${CMAKE_BINARY_DIR}
     LD_LIBRARY_PATH=${CMAKE_BINARY_DIR}/lib
     PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}:${CMAKE_SOURCE_DIR}/src:$ENV{PATH}
-    PYTHONPATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/cython_modules/lib.${PYTHON${PYTHON_VERSION}_VERSION_MAJOR}:${CMAKE_SOURCE_DIR}/src/pybind
+    PYTHONPATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/cython_modules/lib.3:${CMAKE_SOURCE_DIR}/src/pybind
     CEPH_BUILD_VIRTUALENV=${CEPH_BUILD_VIRTUALENV})
   # none of the tests should take more than 1 hour to complete
   set_property(TEST
@@ -25,19 +25,21 @@ endfunction()
 
 option(WITH_GTEST_PARALLEL "Enable running gtest based tests in parallel" OFF)
 if(WITH_GTEST_PARALLEL)
-  set(gtest_parallel_source_dir ${CMAKE_CURRENT_BINARY_DIR}/gtest-parallel)
-  include(ExternalProject)
-  ExternalProject_Add(gtest-parallel_ext
-    SOURCE_DIR "${gtest_parallel_source_dir}"
-    GIT_REPOSITORY "https://github.com/google/gtest-parallel.git"
-    GIT_TAG "master"
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
-    INSTALL_COMMAND "")
-  add_dependencies(tests gtest-parallel_ext)
-  find_package(PythonInterp REQUIRED)
-  set(GTEST_PARALLEL_COMMAND
-    ${PYTHON_EXECUTABLE} ${gtest_parallel_source_dir}/gtest-parallel)
+  if(NOT TARGET gtest-parallel_ext)
+    set(gtest_parallel_source_dir ${CMAKE_CURRENT_BINARY_DIR}/gtest-parallel)
+    include(ExternalProject)
+    ExternalProject_Add(gtest-parallel_ext
+      SOURCE_DIR "${gtest_parallel_source_dir}"
+      GIT_REPOSITORY "https://github.com/google/gtest-parallel.git"
+      GIT_TAG "master"
+      CONFIGURE_COMMAND ""
+      BUILD_COMMAND ""
+      INSTALL_COMMAND "")
+    add_dependencies(tests gtest-parallel_ext)
+    find_package(Python3 QUIET REQUIRED)
+    set(GTEST_PARALLEL_COMMAND
+      ${Python3_EXECUTABLE} ${gtest_parallel_source_dir}/gtest-parallel)
+  endif()
 endif()
 
 #sets uniform compiler flags and link libraries
@@ -50,3 +52,47 @@ function(add_ceph_unittest unittest_name
   add_ceph_test(${unittest_name} "${UNITTEST}")
   target_link_libraries(${unittest_name} ${UNITTEST_LIBS})
 endfunction()
+
+function(add_tox_test name)
+  set(test_name run-tox-${name})
+  set(venv_path ${CEPH_BUILD_VIRTUALENV}/${name}-virtualenv)
+  cmake_parse_arguments(TOXTEST "" "TOX_PATH" "TOX_ENVS" ${ARGN})
+  if(DEFINED TOXTEST_TOX_PATH)
+    set(tox_path ${TOXTEST_TOX_PATH})
+  else()
+    set(tox_path ${CMAKE_CURRENT_SOURCE_DIR})
+  endif()
+  list(APPEND tox_envs py3)
+  if(DEFINED TOXTEST_TOX_ENVS)
+    list(APPEND tox_envs ${TOXTEST_TOX_ENVS})
+  endif()
+  string(REPLACE ";" "," tox_envs "${tox_envs}")
+  find_package(Python3 QUIET REQUIRED)
+  add_custom_command(
+    OUTPUT ${venv_path}/bin/activate
+    COMMAND ${CMAKE_SOURCE_DIR}/src/tools/setup-virtualenv.sh --python="${Python3_EXECUTABLE}" ${venv_path}
+    WORKING_DIRECTORY ${tox_path}
+    COMMENT "preparing venv for ${name}")
+  add_custom_target(${name}-venv
+    DEPENDS ${venv_path}/bin/activate)
+  add_dependencies(tests ${name}-venv)
+  add_test(
+    NAME ${test_name}
+    COMMAND ${CMAKE_SOURCE_DIR}/src/script/run_tox.sh
+              --source-dir ${CMAKE_SOURCE_DIR}
+              --build-dir ${CMAKE_BINARY_DIR}
+              --tox-path ${tox_path}
+              --tox-envs ${tox_envs}
+              --venv-path ${venv_path})
+  set_property(
+    TEST ${test_name}
+    PROPERTY ENVIRONMENT
+    CEPH_ROOT=${CMAKE_SOURCE_DIR}
+    CEPH_BIN=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
+    CEPH_LIB=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
+    CEPH_BUILD_VIRTUALENV=${CEPH_BUILD_VIRTUALENV}
+    LD_LIBRARY_PATH=${CMAKE_BINARY_DIR}/lib
+    PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}:${CMAKE_SOURCE_DIR}/src:$ENV{PATH}
+    PYTHONPATH=${CMAKE_SOURCE_DIR}/src/pybind)
+  list(APPEND tox_test run-tox-${name})
+endfunction()
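The helper above registers each tox suite as a CTest entry named run-tox-<name>, so from a configured build tree the suites can be listed and run like any other test (the suite name below is hypothetical):

    cd build
    ctest -N | grep run-tox                    # list the registered tox suites
    ctest -R run-tox-mgr --output-on-failure   # run one of them; "mgr" is a placeholder name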
diff -pruN 14.2.16-2/cmake/modules/BuildBoost.cmake 15.2.7-0ubuntu4/cmake/modules/BuildBoost.cmake
--- 14.2.16-2/cmake/modules/BuildBoost.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/BuildBoost.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -109,14 +109,15 @@ function(do_build_boost version)
     " : ${CMAKE_CXX_COMPILER}"
     " ;\n")
   if(with_python_version)
-    find_package(PythonLibs ${with_python_version} QUIET REQUIRED)
-    string(REPLACE ";" " " python_includes "${PYTHON_INCLUDE_DIRS}")
+    find_package(Python3 ${with_python_version} QUIET REQUIRED
+      COMPONENTS Development)
+    string(REPLACE ";" " " python3_includes "${Python3_INCLUDE_DIRS}")
     file(APPEND ${user_config}
       "using python"
       " : ${with_python_version}"
-      " : ${PYTHON_EXECUTABLE}"
-      " : ${python_includes}"
-      " : ${PYTHON_LIBRARIES}"
+      " : ${Python3_EXECUTABLE}"
+      " : ${python3_includes}"
+      " : ${Python3_LIBRARIES}"
       " ;\n")
   endif()
   list(APPEND b2 --user-config=${user_config})
@@ -125,7 +126,11 @@ function(do_build_boost version)
   if(with_python_version)
     list(APPEND b2 python=${with_python_version})
   endif()
-
+  if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM")
+    list(APPEND b2 abi=aapcs)
+    list(APPEND b2 architecture=arm)
+    list(APPEND b2 binary-format=elf)
+  endif()
   set(build_command
     ${b2} headers stage
     #"--buildid=ceph" # changes lib names--can omit for static
@@ -205,7 +210,7 @@ macro(build_boost version)
     endif()
     add_dependencies(Boost::${c} Boost)
     if(c MATCHES "^python")
-      set(c "python${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}")
+      set(c "python${Python3_VERSION_MAJOR}${Python3_VERSION_MINOR}")
     endif()
     if(Boost_USE_STATIC_LIBS)
       set(Boost_${upper_c}_LIBRARY
@@ -230,6 +235,7 @@ macro(build_boost version)
         INTERFACE_LINK_LIBRARIES "${dependencies}")
       unset(dependencies)
     endif()
+    set(Boost_${c}_FOUND "TRUE")
   endforeach()
 
   # for header-only libraries
diff -pruN 14.2.16-2/cmake/modules/Buildc-ares.cmake 15.2.7-0ubuntu4/cmake/modules/Buildc-ares.cmake
--- 14.2.16-2/cmake/modules/Buildc-ares.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Buildc-ares.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -14,7 +14,7 @@ function(build_c_ares)
   add_library(c-ares::c-ares STATIC IMPORTED)
   add_dependencies(c-ares::c-ares c-ares_ext)
   set_target_properties(c-ares::c-ares PROPERTIES
-    INTERFACE_INCLUDE_DIRECTORIES "${C-ARES_SOURCE_DIR}"
+    INTERFACE_INCLUDE_DIRECTORIES "${C-ARES_SOURCE_DIR};${C-ARES_BINARY_DIR}"
     IMPORTED_LINK_INTERFACE_LANGUAGES "C"
     IMPORTED_LOCATION "${C-ARES_BINARY_DIR}/lib/libcares.a")
   # to appease find_package()
diff -pruN 14.2.16-2/cmake/modules/BuildDPDK.cmake 15.2.7-0ubuntu4/cmake/modules/BuildDPDK.cmake
--- 14.2.16-2/cmake/modules/BuildDPDK.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/BuildDPDK.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -1,4 +1,8 @@
 function(do_build_dpdk dpdk_dir)
+  find_program(MAKE_EXECUTABLE NAMES gmake make)
+  if(NOT MAKE_EXECUTABLE)
+    message(FATAL_ERROR "Can't find make")
+  endif()
   # mk/machine/native/rte.vars.mk
   # rte_cflags are extracted from mk/machine/${machine}/rte.vars.mk
   # only 3 of them have -march=<arch> defined, so copying them here.
@@ -37,9 +41,9 @@ function(do_build_dpdk dpdk_dir)
   endif()
   set(dpdk_rte_CFLAGS "${rte_cflags}" CACHE INTERNAL "")
   if(CMAKE_SYSTEM_NAME MATCHES "Linux")
-    set(execenv "linuxapp")
+    set(execenv "linux")
   elseif(CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
-    set(execenv "bsdapp")
+    set(execenv "freebsd")
   else()
     message(FATAL_ERROR "not able to build DPDK support: "
       "unsupported OS \"${CMAKE_SYSTEM_NAME}\"")
@@ -59,7 +63,7 @@ function(do_build_dpdk dpdk_dir)
   set(target "${arch}-${machine_tmpl}-${execenv}-${toolchain}")
 
   execute_process(
-    COMMAND ${CMAKE_MAKE_PROGRAM} showconfigs
+    COMMAND ${MAKE_EXECUTABLE} showconfigs
     WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/src/spdk/dpdk
     OUTPUT_VARIABLE supported_targets
     OUTPUT_STRIP_TRAILING_WHITESPACE)
@@ -71,25 +75,43 @@ function(do_build_dpdk dpdk_dir)
       "\"${target}\" not listed in ${supported_targets}")
   endif()
 
-  set(EXTRA_CFLAGS "-Wno-unknown-warning-option -Wno-stringop-truncation -Wno-address-of-packed-member -fPIC")
+  if(CMAKE_MAKE_PROGRAM MATCHES "make")
+    # try to inherit command line arguments passed by parent "make" job
+    set(make_cmd "$(MAKE)")
+  else()
+    set(make_cmd "${MAKE_EXECUTABLE}")
+  endif()
+
+  if(Seastar_DPDK AND WITH_SPDK)
+    message(FATAL_ERROR "not able to build DPDK with "
+      "both Seastar_DPDK and WITH_SPDK enabled")
+  elseif(Seastar_DPDK)
+    set(dpdk_source_dir ${CMAKE_SOURCE_DIR}/src/seastar/dpdk)
+  else() # WITH_SPDK or WITH_DPDK is enabled
+    set(dpdk_source_dir ${CMAKE_SOURCE_DIR}/src/spdk/dpdk)
+  endif()
 
   include(ExternalProject)
   ExternalProject_Add(dpdk-ext
-    SOURCE_DIR ${CMAKE_SOURCE_DIR}/src/spdk/dpdk
-    CONFIGURE_COMMAND $(MAKE) config O=${dpdk_dir} T=${target}
-    BUILD_COMMAND env CC=${CMAKE_C_COMPILER} $(MAKE) O=${dpdk_dir} EXTRA_CFLAGS=${EXTRA_CFLAGS}
+    SOURCE_DIR ${dpdk_source_dir}
+    CONFIGURE_COMMAND ${make_cmd} config O=${dpdk_dir} T=${target}
+    BUILD_COMMAND ${make_cmd} O=${dpdk_dir} CC=${CMAKE_C_COMPILER} EXTRA_CFLAGS=-fPIC
     BUILD_IN_SOURCE 1
     INSTALL_COMMAND "true")
+  if(NUMA_FOUND)
+    set(numa "y")
+  else()
+    set(numa "n")
+  endif()
   ExternalProject_Add_Step(dpdk-ext patch-config
-    COMMAND ${CMAKE_MODULE_PATH}/patch-dpdk-conf.sh ${dpdk_dir} ${machine} ${arch}
+    COMMAND ${CMAKE_MODULE_PATH}/patch-dpdk-conf.sh ${dpdk_dir} ${machine} ${arch} ${numa}
     DEPENDEES configure
     DEPENDERS build)
   # easier to adjust the config
   ExternalProject_Add_StepTargets(dpdk-ext configure patch-config build)
 endfunction()
 
-function(build_dpdk dpdk_dir)
-  do_build_dpdk(${dpdk_dir})
+function(do_export_dpdk dpdk_dir)
   set(DPDK_INCLUDE_DIR ${dpdk_dir}/include)
   # create the directory so cmake won't complain when looking at the imported
   # target
@@ -103,16 +125,38 @@ function(build_dpdk dpdk_dir)
     endif()
   endif()
 
-  foreach(c
-      bus_pci
-      eal
-      ethdev
-      kvargs
-      mbuf
-      mempool
-      mempool_ring
-      pci
-      ring)
+  list(APPEND dpdk_components
+    bus_pci
+    cmdline
+    eal
+    ethdev
+    kvargs
+    mbuf
+    mempool
+    mempool_ring
+    pci
+    ring)
+  if(Seastar_DPDK)
+    list(APPEND dpdk_components
+      bus_vdev
+      cfgfile
+      hash
+      net
+      pmd_bnxt
+      pmd_cxgbe
+      pmd_e1000
+      pmd_ena
+      pmd_enic
+      pmd_i40e
+      pmd_ixgbe
+      pmd_nfp
+      pmd_qede
+      pmd_ring
+      pmd_sfc_efx
+      timer)
+  endif()
+
+  foreach(c ${dpdk_components})
     add_library(dpdk::${c} STATIC IMPORTED)
     add_dependencies(dpdk::${c} dpdk-ext)
     set(dpdk_${c}_LIBRARY
@@ -125,6 +169,9 @@ function(build_dpdk dpdk_dir)
     list(APPEND DPDK_ARCHIVES "${dpdk_${c}_LIBRARY}")
   endforeach()
 
+  if(NUMA_FOUND)
+    set(dpdk_numa " -Wl,-lnuma")
+  endif()
   add_library(dpdk::dpdk INTERFACE IMPORTED)
   add_dependencies(dpdk::dpdk
     ${DPDK_LIBRARIES})
@@ -132,9 +179,19 @@ function(build_dpdk dpdk_dir)
   set_target_properties(dpdk::dpdk PROPERTIES
     INTERFACE_INCLUDE_DIRECTORIES ${DPDK_INCLUDE_DIR}
     INTERFACE_LINK_LIBRARIES
-    "-Wl,--whole-archive $<JOIN:${DPDK_ARCHIVES}, > -Wl,--no-whole-archive")
+    "-Wl,--whole-archive $<JOIN:${DPDK_ARCHIVES}, > -Wl,--no-whole-archive ${dpdk_numa} -Wl,-lpthread,-ldl")
   if(dpdk_rte_CFLAGS)
     set_target_properties(dpdk::dpdk PROPERTIES
       INTERFACE_COMPILE_OPTIONS "${dpdk_rte_CFLAGS}")
   endif()
 endfunction()
+
+function(build_dpdk dpdk_dir)
+  find_package(NUMA QUIET)
+  if(NOT TARGET dpdk-ext)
+    do_build_dpdk(${dpdk_dir})
+  endif()
+  if(NOT TARGET dpdk::dpdk)
+    do_export_dpdk(${dpdk_dir})
+  endif()
+endfunction()
diff -pruN 14.2.16-2/cmake/modules/Buildpmem.cmake 15.2.7-0ubuntu4/cmake/modules/Buildpmem.cmake
--- 14.2.16-2/cmake/modules/Buildpmem.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Buildpmem.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,45 @@
+function(build_pmem)
+  include(ExternalProject)
+  set(PMDK_SRC "${CMAKE_BINARY_DIR}/src/pmdk/src")
+  set(PMDK_INCLUDE "${PMDK_SRC}/include")
+
+  # Use debug PMDK libs in debug lib/rbd builds
+  if(CMAKE_BUILD_TYPE STREQUAL Debug)
+    set(PMDK_LIB_DIR "debug")
+  else()
+    set(PMDK_LIB_DIR "nondebug")
+  endif()
+  set(PMDK_LIB "${PMDK_SRC}/${PMDK_LIB_DIR}")
+
+  ExternalProject_Add(pmdk_ext
+      GIT_REPOSITORY "https://github.com/ceph/pmdk.git"
+      GIT_TAG "1.7"
+      SOURCE_DIR ${CMAKE_BINARY_DIR}/src/pmdk
+      CONFIGURE_COMMAND ""
+      # Explicitly built w/o NDCTL, otherwise if ndctl is present on the
+      # build system tests statically linking to librbd (which uses
+      # libpmemobj) will not link (because we don't build the ndctl
+      # static library here).
+      BUILD_COMMAND $(MAKE) NDCTL_ENABLE=n
+      BUILD_IN_SOURCE 1
+      BUILD_BYPRODUCTS "${PMDK_LIB}/libpmem.a" "${PMDK_LIB}/libpmemobj.a"
+      INSTALL_COMMAND "true")
+
+    # libpmem
+    add_library(pmem::pmem STATIC IMPORTED)
+    add_dependencies(pmem::pmem pmdk_ext)
+    file(MAKE_DIRECTORY ${PMDK_INCLUDE})
+    set_target_properties(pmem::pmem PROPERTIES
+      INTERFACE_INCLUDE_DIRECTORIES ${PMDK_INCLUDE}
+      IMPORTED_LOCATION "${PMDK_LIB}/libpmem.a"
+      INTERFACE_LINK_LIBRARIES ${CMAKE_THREAD_LIBS_INIT})
+
+    # libpmemobj
+    add_library(pmem::pmemobj STATIC IMPORTED)
+    add_dependencies(pmem::pmemobj pmdk_ext)
+    set_target_properties(pmem::pmemobj PROPERTIES
+      INTERFACE_INCLUDE_DIRECTORIES ${PMDK_INCLUDE}
+      IMPORTED_LOCATION "${PMDK_LIB}/libpmemobj.a"
+      INTERFACE_LINK_LIBRARIES ${CMAKE_THREAD_LIBS_INIT})
+
+endfunction()
diff -pruN 14.2.16-2/cmake/modules/BuildRocksDB.cmake 15.2.7-0ubuntu4/cmake/modules/BuildRocksDB.cmake
--- 14.2.16-2/cmake/modules/BuildRocksDB.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/BuildRocksDB.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -2,17 +2,24 @@ function(build_rocksdb)
   set(rocksdb_CMAKE_ARGS -DCMAKE_POSITION_INDEPENDENT_CODE=ON)
   list(APPEND rocksdb_CMAKE_ARGS -DWITH_GFLAGS=OFF)
 
+  # cmake doesn't properly handle arguments containing ";", such as
+  # CMAKE_PREFIX_PATH, for which reason we'll have to use some other separator.
+  string(REPLACE ";" "!" CMAKE_PREFIX_PATH_ALT_SEP "${CMAKE_PREFIX_PATH}")
+  list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH_ALT_SEP})
+  if(CMAKE_TOOLCHAIN_FILE)
+    list(APPEND rocksdb_CMAKE_ARGS
+         -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE})
+  endif()
+
   if(ALLOCATOR STREQUAL "jemalloc")
     list(APPEND rocksdb_CMAKE_ARGS -DWITH_JEMALLOC=ON)
     list(APPEND rocksdb_INTERFACE_LINK_LIBRARIES JeMalloc::JeMalloc)
   endif()
 
   if (WITH_CCACHE AND CCACHE_FOUND)
-    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_COMPILER=ccache)
-    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_COMPILER_ARG1=${CMAKE_CXX_COMPILER})
-  else()
-    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER})
+    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_COMPILER_LAUNCHER=ccache)
   endif()
+  list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER})
 
   list(APPEND rocksdb_CMAKE_ARGS -DWITH_SNAPPY=${SNAPPY_FOUND})
   if(SNAPPY_FOUND)
@@ -27,6 +34,9 @@ function(build_rocksdb)
   list(APPEND rocksdb_CMAKE_ARGS -DWITH_LZ4=${LZ4_FOUND})
   if(LZ4_FOUND)
     list(APPEND rocksdb_INTERFACE_LINK_LIBRARIES LZ4::LZ4)
+    # When cross compiling, cmake may fail to locate lz4.
+    list(APPEND rocksdb_CMAKE_ARGS -DLZ4_INCLUDE_DIR=${LZ4_INCLUDE_DIR})
+    list(APPEND rocksdb_CMAKE_ARGS -DLZ4_LIBRARIES=${LZ4_LIBRARY})
   endif()
 
   list(APPEND rocksdb_CMAKE_ARGS -DWITH_ZLIB=${ZLIB_FOUND})
@@ -39,32 +49,49 @@ function(build_rocksdb)
   list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE})
   list(APPEND rocksdb_CMAKE_ARGS -DFAIL_ON_WARNINGS=OFF)
   list(APPEND rocksdb_CMAKE_ARGS -DUSE_RTTI=1)
+  list(APPEND rocksdb_CMAKE_ARGS -G${CMAKE_GENERATOR})
   CHECK_C_COMPILER_FLAG("-Wno-stringop-truncation" HAS_WARNING_STRINGOP_TRUNCATION)
   if(HAS_WARNING_STRINGOP_TRUNCATION)
-    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_C_FLAGS="-Wno-stringop-truncation")
+    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_C_FLAGS=-Wno-stringop-truncation)
+  endif()
+  include(CheckCXXCompilerFlag)
+  check_cxx_compiler_flag("-Wno-deprecated-copy" HAS_WARNING_DEPRECATED_COPY)
+  if(HAS_WARNING_DEPRECATED_COPY)
+    set(rocksdb_CXX_FLAGS -Wno-deprecated-copy)
+  endif()
+  check_cxx_compiler_flag("-Wno-pessimizing-move" HAS_WARNING_PESSIMIZING_MOVE)
+  if(HAS_WARNING_PESSIMIZING_MOVE)
+    set(rocksdb_CXX_FLAGS "${rocksdb_CXX_FLAGS} -Wno-pessimizing-move")
+  endif()
+  if(rocksdb_CXX_FLAGS)
+    list(APPEND rocksdb_CMAKE_ARGS -DCMAKE_CXX_FLAGS='${rocksdb_CXX_FLAGS}')
   endif()
   # we use an external project and copy the sources to bin directory to ensure
   # that object files are built outside of the source tree.
   include(ExternalProject)
   set(rocksdb_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/rocksdb")
   set(rocksdb_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/rocksdb")
+  set(rocksdb_LIBRARY "${rocksdb_BINARY_DIR}/librocksdb.a")
+  if(CMAKE_MAKE_PROGRAM MATCHES "make")
+    # try to inherit command line arguments passed by parent "make" job
+    set(make_cmd $(MAKE) rocksdb)
+  else()
+    set(make_cmd ${CMAKE_COMMAND} --build <BINARY_DIR> --target rocksdb)
+  endif()
+
   ExternalProject_Add(rocksdb_ext
     SOURCE_DIR "${rocksdb_SOURCE_DIR}"
     CMAKE_ARGS ${rocksdb_CMAKE_ARGS}
     BINARY_DIR "${rocksdb_BINARY_DIR}"
-    BUILD_COMMAND $(MAKE) rocksdb
-    INSTALL_COMMAND "true")
-  # force rocksdb make to be called on each time
-  ExternalProject_Add_Step(rocksdb_ext forcebuild
-    DEPENDEES configure
-    DEPENDERS build
-    COMMAND "true"
-    ALWAYS 1)
+    BUILD_COMMAND "${make_cmd}"
+    BUILD_ALWAYS TRUE
+    BUILD_BYPRODUCTS "${rocksdb_LIBRARY}"
+    INSTALL_COMMAND "true"
+    LIST_SEPARATOR !)
 
   add_library(RocksDB::RocksDB STATIC IMPORTED)
   add_dependencies(RocksDB::RocksDB rocksdb_ext)
   set(rocksdb_INCLUDE_DIR "${rocksdb_SOURCE_DIR}/include")
-  set(rocksdb_LIBRARY "${rocksdb_BINARY_DIR}/librocksdb.a")
   foreach(ver "MAJOR" "MINOR" "PATCH")
     file(STRINGS "${rocksdb_INCLUDE_DIR}/rocksdb/version.h" ROCKSDB_VER_${ver}_LINE
       REGEX "^#define[ \t]+ROCKSDB_${ver}[ \t]+[0-9]+$")
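The ";"-to-"!" substitution near the top of this hunk pairs with the new LIST_SEPARATOR option: ExternalProject_Add converts the chosen separator back into ";" when it assembles the sub-build's command line, so list-valued arguments such as CMAKE_PREFIX_PATH survive the hand-off, and BUILD_BYPRODUCTS declares librocksdb.a as an output of the external step so generators like Ninja accept the imported location as a known file. A minimal sketch of the separator pattern on its own, independent of the Ceph tree (the target and directory names are illustrative):

    # sketch: pass a multi-entry CMAKE_PREFIX_PATH down to a sub-project
    string(REPLACE ";" "!" prefix_path_alt "${CMAKE_PREFIX_PATH}")
    include(ExternalProject)
    ExternalProject_Add(demo_ext
      SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/demo"
      CMAKE_ARGS -DCMAKE_PREFIX_PATH=${prefix_path_alt}
      LIST_SEPARATOR !          # "!" is turned back into ";" for the sub-cmake
      INSTALL_COMMAND "")
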
diff -pruN 14.2.16-2/cmake/modules/BuildSPDK.cmake 15.2.7-0ubuntu4/cmake/modules/BuildSPDK.cmake
--- 14.2.16-2/cmake/modules/BuildSPDK.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/BuildSPDK.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -9,19 +9,38 @@ macro(build_spdk)
     find_package(aio REQUIRED)
     find_package(uuid REQUIRED)
   endif()
+
+  find_program(MAKE_EXECUTABLE NAMES gmake make)
+  if(NOT MAKE_EXECUTABLE)
+    message(FATAL_ERROR "Can't find make")
+  endif()
+  if(CMAKE_MAKE_PROGRAM MATCHES "make")
+    # try to inherit command line arguments passed by parent "make" job
+    set(make_cmd "$(MAKE)")
+  else()
+    set(make_cmd "${MAKE_EXECUTABLE}")
+  endif()
+
+  set(spdk_CFLAGS "-fPIC")
+  include(CheckCCompilerFlag)
+  check_c_compiler_flag("-Wno-address-of-packed-member" HAS_WARNING_ADDRESS_OF_PACKED_MEMBER)
+  if(HAS_WARNING_ADDRESS_OF_PACKED_MEMBER)
+    set(spdk_CFLAGS "${spdk_CFLAGS} -Wno-address-of-packed-member")
+  endif()
   include(ExternalProject)
   ExternalProject_Add(spdk-ext
     DEPENDS dpdk-ext
     SOURCE_DIR ${CMAKE_SOURCE_DIR}/src/spdk
-    CONFIGURE_COMMAND ./configure --with-dpdk=${DPDK_DIR}
+    CONFIGURE_COMMAND ./configure --with-dpdk=${DPDK_DIR} --without-isal --without-vhost
     # unset $CFLAGS, otherwise it will interfere with how SPDK sets
     # its include directory.
     # unset $LDFLAGS, otherwise SPDK will fail to mock some functions.
-    BUILD_COMMAND env -i PATH=$ENV{PATH} CC=${CMAKE_C_COMPILER} $(MAKE) EXTRA_CFLAGS="-fPIC"
+    BUILD_COMMAND env -i PATH=$ENV{PATH} CC=${CMAKE_C_COMPILER} ${make_cmd} EXTRA_CFLAGS="${spdk_CFLAGS}"
     BUILD_IN_SOURCE 1
     INSTALL_COMMAND "true")
+  unset(make_cmd)
   ExternalProject_Get_Property(spdk-ext source_dir)
-  foreach(c nvme log lvol env_dpdk util)
+  foreach(c nvme log lvol env_dpdk sock util)
     add_library(spdk::${c} STATIC IMPORTED)
     add_dependencies(spdk::${c} spdk-ext)
     set_target_properties(spdk::${c} PROPERTIES
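The $(MAKE)-versus-${MAKE_EXECUTABLE} split mirrors the rocksdb change above: when the parent build uses a Makefile generator, passing the unexpanded $(MAKE) macro lets the sub-make share the parent's jobserver and -j/-l flags, whereas under any other generator the macro would never expand, so a plain make invocation is used instead. A generic sketch of the idea, with placeholder target and directory names:

    find_program(MAKE_EXECUTABLE NAMES gmake make)
    if(CMAKE_MAKE_PROGRAM MATCHES "make")
      set(sub_make "$(MAKE)")            # expanded by the parent make, shares its jobserver
    else()
      set(sub_make "${MAKE_EXECUTABLE}") # e.g. under Ninja, call make directly
    endif()
    include(ExternalProject)
    ExternalProject_Add(thirdparty_ext
      SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/thirdparty"
      CONFIGURE_COMMAND ""
      BUILD_COMMAND ${sub_make}
      BUILD_IN_SOURCE 1
      INSTALL_COMMAND "")
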
diff -pruN 14.2.16-2/cmake/modules/CephCheck_link.c 15.2.7-0ubuntu4/cmake/modules/CephCheck_link.c
--- 14.2.16-2/cmake/modules/CephCheck_link.c	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/CephCheck_link.c	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,2 @@
+int main()
+{}
diff -pruN 14.2.16-2/cmake/modules/CephCheck_link.map 15.2.7-0ubuntu4/cmake/modules/CephCheck_link.map
--- 14.2.16-2/cmake/modules/CephCheck_link.map	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/CephCheck_link.map	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1 @@
+{};
\ No newline at end of file
diff -pruN 14.2.16-2/cmake/modules/CephChecks.cmake 15.2.7-0ubuntu4/cmake/modules/CephChecks.cmake
--- 14.2.16-2/cmake/modules/CephChecks.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/CephChecks.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,154 @@
+if(CMAKE_CXX_COMPILER_ID STREQUAL GNU)
+  if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7)
+    message(FATAL_ERROR "GCC 7+ required due to C++17 requirements")
+  endif()
+endif()
+
+#Check Includes
+include(CheckIncludeFiles)
+include(CheckIncludeFileCXX)
+include(CheckFunctionExists)
+
+check_function_exists(fallocate CEPH_HAVE_FALLOCATE)
+check_function_exists(posix_fadvise HAVE_POSIX_FADVISE)
+check_function_exists(posix_fallocate HAVE_POSIX_FALLOCATE)
+check_function_exists(syncfs HAVE_SYS_SYNCFS)
+check_function_exists(sync_file_range HAVE_SYNC_FILE_RANGE)
+check_function_exists(pwritev HAVE_PWRITEV)
+check_function_exists(splice CEPH_HAVE_SPLICE)
+check_function_exists(getgrouplist HAVE_GETGROUPLIST)
+if(NOT APPLE)
+  check_function_exists(fdatasync HAVE_FDATASYNC)
+endif()
+check_function_exists(strerror_r HAVE_Strerror_R)
+check_function_exists(name_to_handle_at HAVE_NAME_TO_HANDLE_AT)
+check_function_exists(pipe2 HAVE_PIPE2)
+check_function_exists(accept4 HAVE_ACCEPT4)
+
+include(CMakePushCheckState)
+cmake_push_check_state(RESET)
+set(CMAKE_REQUIRED_LIBRARIES pthread)
+check_function_exists(pthread_spin_init HAVE_PTHREAD_SPINLOCK)
+check_function_exists(pthread_set_name_np HAVE_PTHREAD_SET_NAME_NP)
+check_function_exists(pthread_get_name_np HAVE_PTHREAD_GET_NAME_NP)
+check_function_exists(pthread_setname_np HAVE_PTHREAD_SETNAME_NP)
+check_function_exists(pthread_getname_np HAVE_PTHREAD_GETNAME_NP)
+check_function_exists(pthread_rwlockattr_setkind_np HAVE_PTHREAD_RWLOCKATTR_SETKIND_NP)
+cmake_pop_check_state()
+
+check_function_exists(eventfd HAVE_EVENTFD)
+check_function_exists(getprogname HAVE_GETPROGNAME)
+check_function_exists(gettid HAVE_GETTID)
+
+CHECK_INCLUDE_FILES("linux/types.h" HAVE_LINUX_TYPES_H)
+CHECK_INCLUDE_FILES("linux/version.h" HAVE_LINUX_VERSION_H)
+CHECK_INCLUDE_FILES("arpa/nameser_compat.h" HAVE_ARPA_NAMESER_COMPAT_H)
+CHECK_INCLUDE_FILES("sys/mount.h" HAVE_SYS_MOUNT_H)
+CHECK_INCLUDE_FILES("sys/param.h" HAVE_SYS_PARAM_H)
+CHECK_INCLUDE_FILES("sys/types.h" HAVE_SYS_TYPES_H)
+CHECK_INCLUDE_FILES("sys/vfs.h" HAVE_SYS_VFS_H)
+CHECK_INCLUDE_FILES("sys/prctl.h" HAVE_SYS_PRCTL_H)
+CHECK_INCLUDE_FILES("execinfo.h" HAVE_EXECINFO_H)
+if(LINUX)
+  CHECK_INCLUDE_FILES("sched.h" HAVE_SCHED)
+endif()
+CHECK_INCLUDE_FILES("valgrind/helgrind.h" HAVE_VALGRIND_HELGRIND_H)
+
+include(CheckTypeSize)
+set(CMAKE_EXTRA_INCLUDE_FILES "linux/types.h")
+CHECK_TYPE_SIZE(__u8 __U8) 
+CHECK_TYPE_SIZE(__u16 __U16) 
+CHECK_TYPE_SIZE(__u32 __U32) 
+CHECK_TYPE_SIZE(__u64 __U64) 
+CHECK_TYPE_SIZE(__s8 __S8) 
+CHECK_TYPE_SIZE(__s16 __S16) 
+CHECK_TYPE_SIZE(__s32 __S32) 
+CHECK_TYPE_SIZE(__s64 __S64) 
+unset(CMAKE_EXTRA_INCLUDE_FILES)
+
+include(CheckSymbolExists)
+cmake_push_check_state(RESET)
+set(CMAKE_REQUIRED_LIBRARIES rt)
+check_symbol_exists(_POSIX_TIMERS "unistd.h;time.h" HAVE_POSIX_TIMERS)
+cmake_pop_check_state()
+if(HAVE_POSIX_TIMERS)
+  find_library(RT_LIBRARY NAMES rt)
+endif()
+check_symbol_exists(res_nquery "resolv.h" HAVE_RES_NQUERY)
+check_symbol_exists(F_SETPIPE_SZ "linux/fcntl.h" CEPH_HAVE_SETPIPE_SZ)
+check_symbol_exists(__func__ "" HAVE_FUNC)
+check_symbol_exists(__PRETTY_FUNCTION__ "" HAVE_PRETTY_FUNC)
+check_symbol_exists(getentropy "unistd.h" HAVE_GETENTROPY)
+
+include(CheckCXXSourceCompiles)
+check_cxx_source_compiles("
+  #include <string.h>
+  int main() { char x = *strerror_r(0, &x, sizeof(x)); return 0; }
+  " STRERROR_R_CHAR_P)
+
+include(CheckStructHasMember)
+CHECK_STRUCT_HAS_MEMBER("struct stat" st_mtim.tv_nsec sys/stat.h
+  HAVE_STAT_ST_MTIM_TV_NSEC LANGUAGE C)
+CHECK_STRUCT_HAS_MEMBER("struct stat" st_mtimespec.tv_nsec sys/stat.h
+  HAVE_STAT_ST_MTIMESPEC_TV_NSEC LANGUAGE C)
+
+if(NOT CMAKE_CROSSCOMPILING)
+  include(CheckCXXSourceRuns)
+  cmake_push_check_state()
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -std=c++17")
+  if(WIN32)
+    set(CMAKE_REQUIRED_LIBRARIES ws2_32)
+  endif()
+
+  check_cxx_source_runs("
+#include <cstdint>
+#include <iterator>
+
+#ifdef _WIN32
+#include <winsock2.h>
+#else
+#include <arpa/inet.h>
+#endif
+
+uint32_t load(char* p, size_t offset)
+{
+  return *reinterpret_cast<uint32_t*>(p + offset);
+}
+
+bool good(uint32_t lhs, uint32_t big_endian)
+{
+  return lhs == ntohl(big_endian);
+}
+
+int main(int argc, char **argv)
+{
+  char a1[] = \"ABCDEFG\";
+  uint32_t a2[] = {0x41424344,
+                   0x42434445,
+                   0x43444546,
+                   0x44454647};
+  for (size_t i = 0; i < std::size(a2); i++) {
+    if (!good(load(a1, i), a2[i])) {
+      return 1;
+    }
+  }
+}"
+    HAVE_UNALIGNED_ACCESS)
+  cmake_pop_check_state()
+  if(NOT HAVE_UNALIGNED_ACCESS)
+    message(FATAL_ERROR "Unaligned access is required")
+  endif()
+else(NOT CMAKE_CROSSCOMPILING)
+  message(STATUS "Assuming unaligned access is supported")
+endif(NOT CMAKE_CROSSCOMPILING)
+
+# should use LINK_OPTIONS instead of LINK_LIBRARIES, if we can use cmake v3.14+
+try_compile(HAVE_LINK_VERSION_SCRIPT
+  ${CMAKE_CURRENT_BINARY_DIR}
+  SOURCES ${CMAKE_CURRENT_LIST_DIR}/CephCheck_link.c
+  LINK_LIBRARIES "-Wl,--version-script=${CMAKE_CURRENT_LIST_DIR}/CephCheck_link.map")
+
+try_compile(HAVE_LINK_EXCLUDE_LIBS
+  ${CMAKE_CURRENT_BINARY_DIR}
+  SOURCES ${CMAKE_CURRENT_LIST_DIR}/CephCheck_link.c
+  LINK_LIBRARIES "-Wl,--exclude-libs,ALL")
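These check_function_exists/check_include_files/check_symbol_exists results only take effect once they are written into a generated configuration header, which in CMake is conventionally done with configure_file() and #cmakedefine placeholders. A minimal, hypothetical illustration of that consumption step (the config.h.in template and header name below are illustrative, not the ones Ceph uses):

    # config.h.in (hypothetical template):
    #   #cmakedefine HAVE_PTHREAD_SPINLOCK
    #   #cmakedefine HAVE_GETENTROPY
    #   #cmakedefine STRERROR_R_CHAR_P
    include(CephChecks)    # runs the checks above and defines the HAVE_* variables
    configure_file(
      ${CMAKE_CURRENT_SOURCE_DIR}/config.h.in
      ${CMAKE_CURRENT_BINARY_DIR}/config.h)
    include_directories(${CMAKE_CURRENT_BINARY_DIR})
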
diff -pruN 14.2.16-2/cmake/modules/Distutils.cmake 15.2.7-0ubuntu4/cmake/modules/Distutils.cmake
--- 14.2.16-2/cmake/modules/Distutils.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Distutils.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -11,9 +11,11 @@ function(distutils_install_module name)
         COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/${src} ${src})
     endif()
   endforeach()
-  add_custom_target(${name}-clone ALL
-    DEPENDS ${py_clone})
-  cmake_parse_arguments(DU "" INSTALL_SCRIPT "" ${ARGN})
+  if(NOT TARGET ${name}-clone)
+    add_custom_target(${name}-clone ALL
+      DEPENDS ${py_clone})
+  endif()
+  cmake_parse_arguments(DU "" "INSTALL_SCRIPT" "" ${ARGN})
   install(CODE "
     set(options --prefix=${CMAKE_INSTALL_PREFIX})
     if(DEFINED ENV{DESTDIR})
@@ -28,12 +30,12 @@ function(distutils_install_module name)
       endif()
     endif()
     execute_process(
-    COMMAND ${PYTHON${PYTHON_VERSION}_EXECUTABLE}
+    COMMAND ${Python3_EXECUTABLE}
         setup.py install \${options}
     WORKING_DIRECTORY \"${CMAKE_CURRENT_BINARY_DIR}\")")
 endfunction(distutils_install_module)
 
-function(distutils_add_cython_module name src)
+function(distutils_add_cython_module target name src)
   get_property(compiler_launcher GLOBAL PROPERTY RULE_LAUNCH_COMPILE)
   get_property(link_launcher GLOBAL PROPERTY RULE_LAUNCH_LINK)
   # When using ccache, CMAKE_C_COMPILER is ccache executable absolute path
@@ -56,7 +58,21 @@ function(distutils_add_cython_module nam
   set(PY_CC ${compiler_launcher} ${CMAKE_C_COMPILER} ${c_compiler_arg1} ${cflags})
   set(PY_CXX ${compiler_launcher} ${CMAKE_CXX_COMPILER} ${cxx_compiler_arg1})
   set(PY_LDSHARED ${link_launcher} ${CMAKE_C_COMPILER} ${c_compiler_arg1} "-shared")
-  add_custom_target(${name} ALL
+
+  set(suffix_var "EXT_SUFFIX")
+  execute_process(COMMAND "${Python3_EXECUTABLE}" -c
+    "from distutils import sysconfig; print(sysconfig.get_config_var('${suffix_var}'))"
+    RESULT_VARIABLE result
+    OUTPUT_VARIABLE ext_suffix
+    ERROR_VARIABLE error
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if(NOT result EQUAL 0)
+    message(FATAL_ERROR "Unable to tell python extension's suffix: ${error}")
+  endif()
+  set(output_dir "${CYTHON_MODULE_DIR}/lib.3")
+  set(setup_py ${CMAKE_CURRENT_SOURCE_DIR}/setup.py)
+  add_custom_command(
+    OUTPUT ${output_dir}/${name}${ext_suffix}
     COMMAND
     env
     CC="${PY_CC}"
@@ -66,11 +82,14 @@ function(distutils_add_cython_module nam
     LDFLAGS=-L${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
     CYTHON_BUILD_DIR=${CMAKE_CURRENT_BINARY_DIR}
     CEPH_LIBDIR=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-    ${PYTHON${PYTHON_VERSION}_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/setup.py
+    ${Python3_EXECUTABLE} ${setup_py}
     build --verbose --build-base ${CYTHON_MODULE_DIR}
-    --build-platlib ${CYTHON_MODULE_DIR}/lib.${PYTHON${PYTHON_VERSION}_VERSION_MAJOR}
-    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-    DEPENDS ${src})
+    --build-platlib ${output_dir}
+    MAIN_DEPENDENCY ${src}
+    DEPENDS ${setup_py}
+    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
+  add_custom_target(${target} ALL
+    DEPENDS ${output_dir}/${name}${ext_suffix})
 endfunction(distutils_add_cython_module)
 
 function(distutils_install_cython_module name)
@@ -99,9 +118,9 @@ function(distutils_install_cython_module
     endif()
     execute_process(
        COMMAND
-           ${PYTHON${PYTHON_VERSION}_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/setup.py
+           ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/setup.py
            build --verbose --build-base ${CYTHON_MODULE_DIR}
-           --build-platlib ${CYTHON_MODULE_DIR}/lib.${PYTHON${PYTHON_VERSION}_VERSION_MAJOR}
+           --build-platlib ${CYTHON_MODULE_DIR}/lib.3
            build_ext --cython-c-in-temp --build-temp ${CMAKE_CURRENT_BINARY_DIR} --cython-include-dirs ${PROJECT_SOURCE_DIR}/src/pybind/rados
            install \${options} --single-version-externally-managed --record /dev/null
            egg_info --egg-base ${CMAKE_CURRENT_BINARY_DIR}
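The EXT_SUFFIX probe added to distutils_add_cython_module is what lets the custom command predict the exact filename setup.py will produce, so the target can declare it as an OUTPUT. A standalone version of that probe is sketched below; the suffix shown in the comment is just a typical CPython value, not a guaranteed one:

    execute_process(
      COMMAND "${Python3_EXECUTABLE}" -c
        "from distutils import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))"
      OUTPUT_VARIABLE ext_suffix
      OUTPUT_STRIP_TRAILING_WHITESPACE)
    # on a typical Linux CPython build this prints something like
    # ".cpython-36m-x86_64-linux-gnu.so", so the "rados" module ends up as
    # ${CYTHON_MODULE_DIR}/lib.3/rados.cpython-36m-x86_64-linux-gnu.so
    message(STATUS "Python extension suffix: ${ext_suffix}")
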
diff -pruN 14.2.16-2/cmake/modules/FindBoost.cmake 15.2.7-0ubuntu4/cmake/modules/FindBoost.cmake
--- 14.2.16-2/cmake/modules/FindBoost.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindBoost.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -2297,7 +2297,7 @@ if(Boost_FOUND)
         endif()
         if(_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES)
           set_target_properties(Boost::${COMPONENT} PROPERTIES
-            CXX_STANDARD 17)
+            INTERFACE_COMPILE_FEATURES "${_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES}")
         endif()
       endif()
     endif()
diff -pruN 14.2.16-2/cmake/modules/FindCppCheck.cmake 15.2.7-0ubuntu4/cmake/modules/FindCppCheck.cmake
--- 14.2.16-2/cmake/modules/FindCppCheck.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindCppCheck.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,28 @@
+unset(CPPCHECK_BIN CACHE)
+find_program(CPPCHECK_BIN cppcheck)
+
+if(CPPCHECK_BIN)
+  find_file(PROJECT_FILE compile_commands.json PATH .)
+  if(NOT PROJECT_FILE)
+    message(STATUS "Found cppcheck, but no \"compile_commands.json\" file. To enable cppcheck, set CMAKE_EXPORT_COMPILE_COMMANDS to \"ON\" and build again.")
+    return()
+  endif()
+
+  include(ProcessorCount)
+  ProcessorCount(N)
+   
+  execute_process(COMMAND cppcheck --version OUTPUT_VARIABLE CPPCHECK_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE)
+  separate_arguments(CPPCHECK_VERSION)
+  list(GET CPPCHECK_VERSION 1 VERSION_NUMBER)
+  if(VERSION_NUMBER VERSION_GREATER_EQUAL 1.88)
+    set(CPP_STD_VERSION "c++17")
+  else()
+    set(CPP_STD_VERSION "c++14")
+  endif()
+
+  add_custom_target(cppcheck
+    COMMAND ${CPPCHECK_BIN} --verbose -j${N} --std=${CPP_STD_VERSION} --inline-suppr --project=${PROJECT_FILE} 2> cppcheck.txt | grep done
+    )
+
+  message(STATUS "Found cppcheck. To perform static analysis using cppcheck, use: \"make cppcheck\". Results will be stored in \"cppcheck.txt\".")
+endif()
diff -pruN 14.2.16-2/cmake/modules/FindCython.cmake 15.2.7-0ubuntu4/cmake/modules/FindCython.cmake
--- 14.2.16-2/cmake/modules/FindCython.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindCython.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -4,7 +4,7 @@
 
 # Try to run Cython, to make sure it works:
 execute_process(
-  COMMAND ${PYTHON${PYTHON_VERSION}_EXECUTABLE} -m cython --version
+  COMMAND ${Python3_EXECUTABLE} -m cython --version
   RESULT_VARIABLE cython_result
   ERROR_VARIABLE cython_output)
 if(cython_result EQUAL 0)
diff -pruN 14.2.16-2/cmake/modules/Finddpdk.cmake 15.2.7-0ubuntu4/cmake/modules/Finddpdk.cmake
--- 14.2.16-2/cmake/modules/Finddpdk.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Finddpdk.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -2,6 +2,7 @@
 #
 # Once done, this will define
 #
+# dpdk::dpdk
 # dpdk_FOUND
 # dpdk_INCLUDE_DIR
 # dpdk_LIBRARIES
@@ -11,7 +12,12 @@ if(PKG_CONFIG_FOUND)
   pkg_check_modules(dpdk QUIET libdpdk)
 endif()
 
-if(NOT dpdk_INCLUDE_DIRS)
+if(dpdk_INCLUDE_DIRS)
+  # good
+elseif(TARGET dpdk::dpdk)
+  get_target_property(dpdk_INCLUDE_DIRS
+     dpdk::dpdk INTERFACE_INCLUDE_DIRECTORIES)
+else()
   find_path(dpdk_config_INCLUDE_DIR rte_config.h
     HINTS
       ENV DPDK_DIR
@@ -32,6 +38,8 @@ endif()
 
 set(components
   bus_pci
+  bus_vdev
+  cfgfile
   cmdline
   eal
   ethdev
@@ -41,26 +49,44 @@ set(components
   mempool
   mempool_ring
   mempool_stack
+  net
   pci
   pmd_af_packet
+  pmd_bnxt
   pmd_bond
+  pmd_cxgbe
+  pmd_e1000
+  pmd_ena
+  pmd_enic
   pmd_i40e
   pmd_ixgbe
   pmd_mlx5
+  pmd_nfp
+  pmd_qede
   pmd_ring
+  pmd_sfc_efx
   pmd_vmxnet3_uio
-  ring)
+  ring
+  timer)
 
+# for collecting dpdk library targets, it will be used when defining dpdk::dpdk
+set(_dpdk_libs)
+# for list of dpdk library archive paths
 set(dpdk_LIBRARIES)
 
 foreach(c ${components})
-  find_library(DPDK_rte_${c}_LIBRARY rte_${c}
-    HINTS
-      ENV DPDK_DIR
-      ${dpdk_LIBRARY_DIRS}
-    PATH_SUFFIXES lib)
+  set(dpdk_lib dpdk::${c})
+  if(TARGET ${dpdk_lib})
+    get_target_property(DPDK_rte_${c}_LIBRARY
+      ${dpdk_lib} IMPORTED_LOCATION)
+  else()
+    find_library(DPDK_rte_${c}_LIBRARY rte_${c}
+      HINTS
+        ENV DPDK_DIR
+        ${dpdk_LIBRARY_DIRS}
+        PATH_SUFFIXES lib)
+  endif()
   if(DPDK_rte_${c}_LIBRARY)
-    set(dpdk_lib dpdk::${c})
     if (NOT TARGET ${dpdk_lib})
       add_library(${dpdk_lib} UNKNOWN IMPORTED)
       set_target_properties(${dpdk_lib} PROPERTIES
@@ -73,7 +99,8 @@ foreach(c ${components})
         endif()
       endif()
     endif()
-    list(APPEND dpdk_LIBRARIES ${dpdk_lib})
+    list(APPEND _dpdk_libs ${dpdk_lib})
+    list(APPEND dpdk_LIBRARIES ${DPDK_rte_${c}_LIBRARY})
   endif()
 endforeach()
 
@@ -103,11 +130,13 @@ if(dpdk_FOUND)
   if(NOT TARGET dpdk::dpdk)
     add_library(dpdk::dpdk INTERFACE IMPORTED)
     find_package(Threads QUIET)
-    list(APPEND dpdk_LIBRARIES
+    list(APPEND _dpdk_libs
       Threads::Threads
       dpdk::cflags)
     set_target_properties(dpdk::dpdk PROPERTIES
-      INTERFACE_LINK_LIBRARIES "${dpdk_LIBRARIES}"
+      INTERFACE_LINK_LIBRARIES "${_dpdk_libs}"
       INTERFACE_INCLUDE_DIRECTORIES "${dpdk_INCLUDE_DIRS}")
   endif()
 endif()
+
+unset(_dpdk_libs)
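With the new dpdk::dpdk umbrella target, consumers no longer need to carry the per-component rte_* list themselves; linking the interface target pulls in the include directories plus Threads::Threads and dpdk::cflags transitively, as wired up above. A hedged consumer-side sketch (the target and source names are illustrative):

    find_package(dpdk REQUIRED)
    add_executable(dpdk_demo demo.cc)
    # dpdk::dpdk carries INTERFACE_INCLUDE_DIRECTORIES and the rte_* components
    target_link_libraries(dpdk_demo PRIVATE dpdk::dpdk)
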
diff -pruN 14.2.16-2/cmake/modules/FindFUSE.cmake 15.2.7-0ubuntu4/cmake/modules/FindFUSE.cmake
--- 14.2.16-2/cmake/modules/FindFUSE.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindFUSE.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -19,23 +19,46 @@ if(APPLE)
   list(APPEND fuse_suffixes osxfuse)
 endif()
 
-find_path(
-  FUSE_INCLUDE_DIR
-  NAMES fuse_common.h fuse_lowlevel.h fuse.h
-  PATH_SUFFIXES ${fuse_suffixes})
-
-find_library(FUSE_LIBRARIES
-  NAMES ${fuse_names}
-  PATHS /usr/local/lib64 /usr/local/lib)
-
-foreach(ver "MAJOR" "MINOR")
-  file(STRINGS "${FUSE_INCLUDE_DIR}/fuse_common.h" fuse_ver_${ver}_line
-    REGEX "^#define[\t ]+FUSE_${ver}_VERSION[\t ]+[0-9]+$")
-  string(REGEX REPLACE ".*#define[\t ]+FUSE_${ver}_VERSION[\t ]+([0-9]+)$"
-    "\\1" FUSE_VERSION_${ver} "${fuse_ver_${ver}_line}")
-endforeach()
+find_package(PkgConfig QUIET)
+if(PKG_CONFIG_FOUND)
+  pkg_search_module(PKG_FUSE QUIET ${fuse_names})
+
+  string(REGEX REPLACE "([0-9]+)\.([0-9]+)\.([0-9]+)"
+    "\\1" FUSE_MAJOR_VERSION "${PKG_FUSE_VERSION}")
+  string(REGEX REPLACE "([0-9]+)\.([0-9]+)\.([0-9]+)"
+    "\\2" FUSE_MINOR_VERSION "${PKG_FUSE_VERSION}")
+
+  find_path(
+    FUSE_INCLUDE_DIR
+    NAMES fuse_common.h fuse_lowlevel.h fuse.h
+    HINTS ${PKG_FUSE_INCLUDE_DIRS}
+    PATH_SUFFIXES ${fuse_suffixes}
+    NO_DEFAULT_PATH)
+
+  find_library(FUSE_LIBRARIES
+    NAMES ${fuse_names}
+    HINTS ${PKG_FUSE_LIBDIR}
+    NO_DEFAULT_PATH)
+else()
+  find_path(
+    FUSE_INCLUDE_DIR
+    NAMES fuse_common.h fuse_lowlevel.h fuse.h
+    PATH_SUFFIXES ${fuse_suffixes})
+
+  find_library(FUSE_LIBRARIES
+    NAMES ${fuse_names}
+    PATHS /usr/local/lib64 /usr/local/lib)
+
+  foreach(ver "MAJOR" "MINOR")
+    file(STRINGS "${FUSE_INCLUDE_DIR}/fuse_common.h" fuse_ver_${ver}_line
+      REGEX "^#define[\t ]+FUSE_${ver}_VERSION[\t ]+[0-9]+$")
+    string(REGEX REPLACE ".*#define[\t ]+FUSE_${ver}_VERSION[\t ]+([0-9]+)$"
+      "\\1" FUSE_${ver}_VERSION "${fuse_ver_${ver}_line}")
+  endforeach()
+endif()
+
 set(FUSE_VERSION
-  "${FUSE_VERSION_MAJOR}.${FUSE_VERSION_MINOR}")
+  "${FUSE_MAJOR_VERSION}.${FUSE_MINOR_VERSION}")
 
 include(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(FUSE
diff -pruN 14.2.16-2/cmake/modules/Findgenl.cmake 15.2.7-0ubuntu4/cmake/modules/Findgenl.cmake
--- 14.2.16-2/cmake/modules/Findgenl.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Findgenl.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-# - Find libnl-genl3
-# Find the genl library and includes
-#
-# GENL_INCLUDE_DIR - where to find netlink.h, etc.
-# GENL_LIBRARIES - List of libraries when using genl.
-# GENL_FOUND - True if genl found.
-
-find_path(GENL_INCLUDE_DIR NAMES netlink/netlink.h PATH_SUFFIXES libnl3)
-
-find_library(LIBNL_LIB nl-3)
-find_library(LIBNL_GENL_LIB nl-genl-3)
-set(GENL_LIBRARIES
-  ${LIBNL_LIB}
-  ${LIBNL_GENL_LIB}
-  )
-
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(nl-genl-3
-  DEFAULT_MSG GENL_LIBRARIES GENL_INCLUDE_DIR)
-
-mark_as_advanced(
-  GENL_LIBRARIES
-  GENL_INCLUDE_DIR)
diff -pruN 14.2.16-2/cmake/modules/FindIWYU.cmake 15.2.7-0ubuntu4/cmake/modules/FindIWYU.cmake
--- 14.2.16-2/cmake/modules/FindIWYU.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindIWYU.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,25 @@
+unset(IWYU_BIN CACHE)
+find_program(IWYU_BIN iwyu_tool.py)
+
+if(IWYU_BIN)
+  find_file(PROJECT_FILE compile_commands.json PATH .)
+  if(NOT PROJECT_FILE)
+    message(STATUS "Found IWYU, but no \"compile_commands.json\" file. To enable IWYU, set CMAKE_EXPORT_COMPILE_COMMANDS to \"ON\" and build again.")
+    return()
+  endif()
+
+  if(NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
+    message(STATUS "Found IWYU, but clang++ is not used. To enable IWYU, set CMAKE_C_COMPILER to \"clang\" and CMAKE_CXX_COMPILER to \"clang++\" and build again.")
+    return()
+  endif()
+
+  include(ProcessorCount)
+  ProcessorCount(N)
+   
+  add_custom_target(iwyu
+    COMMAND echo "IWYU is analyzing includes - this may take a while..."
+    COMMAND ${IWYU_BIN} -j${N} -p . > iwyu.txt
+    )
+
+  message(STATUS "Found IWYU. To perform header analysis using IWYU, use: \"make iwyu\". Results will be stored in \"iwyu.txt\".")
+endif()
diff -pruN 14.2.16-2/cmake/modules/Findnl.cmake 15.2.7-0ubuntu4/cmake/modules/Findnl.cmake
--- 14.2.16-2/cmake/modules/Findnl.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Findnl.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,50 @@
+# - Find libnl
+# Find the libnl-3 library and includes
+#
+# nl_INCLUDE_DIR - where to find netlink.h, etc.
+# nl_<COMPONENT>_LIBRARY - library when using nl::<COMPONENT>.
+# nl_FOUND - True if nl found.
+
+find_path(nl_INCLUDE_DIR
+  NAMES
+    netlink/netlink.h
+  PATH_SUFFIXES
+    libnl3)
+
+foreach(component "core" ${nl_FIND_COMPONENTS})
+  set(nl_COMPONENTS core cli genl idiag nf route xfrm)
+  list(FIND nl_COMPONENTS "${component}" found)
+  if(found EQUAL -1)
+    message(FATAL_ERROR "unknown libnl-3 component: ${component}")
+  endif()
+  if(component STREQUAL "core")
+    find_library(nl_${component}_LIBRARY nl-3)
+  else()
+    find_library(nl_${component}_LIBRARY nl-${component}-3)
+  endif()
+  list(APPEND nl_LIBRARIES "nl_${component}_LIBRARY")
+endforeach()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(nl
+  DEFAULT_MSG ${nl_LIBRARIES} nl_INCLUDE_DIR)
+
+mark_as_advanced(
+  ${nl_LIBRARIES}
+  nl_INCLUDE_DIR)
+
+if(nl_FOUND)
+  foreach(component "core" ${nl_FIND_COMPONENTS})
+    if(NOT TARGET nl::${component})
+      add_library(nl::${component} UNKNOWN IMPORTED)
+      set_target_properties(nl::${component} PROPERTIES
+        INTERFACE_INCLUDE_DIRECTORIES "${nl_INCLUDE_DIR}"
+        IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+        IMPORTED_LOCATION "${nl_${component}_LIBRARY}")
+      if(NOT component STREQUAL "core")
+        set_target_properties(nl::${component} PROPERTIES
+          INTERFACE_LINK_LIBRARIES "${nl_core_LIBRARY}")
+      endif()
+    endif()
+  endforeach()
+endif()
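The rewritten module exposes one imported target per requested libnl-3 component, and every non-core component links the core nl-3 library through its INTERFACE_LINK_LIBRARIES. A hedged usage sketch (the consumer target and component choice are illustrative):

    find_package(nl REQUIRED COMPONENTS genl)
    add_executable(netlink_demo main.c)
    # nl::genl resolves to libnl-genl-3 and transitively adds the core libnl-3
    target_link_libraries(netlink_demo PRIVATE nl::genl)
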
diff -pruN 14.2.16-2/cmake/modules/FindNSPR.cmake 15.2.7-0ubuntu4/cmake/modules/FindNSPR.cmake
--- 14.2.16-2/cmake/modules/FindNSPR.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindNSPR.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,104 +0,0 @@
-# - Try to find NSPR
-# Once done this will define
-#
-#  NSPR_FOUND - system has NSPR
-#  NSPR_INCLUDE_DIRS - the NSPR include directory
-#  NSPR_LIBRARIES - Link these to use NSPR
-#  NSPR_DEFINITIONS - Compiler switches required for using NSPR
-#
-#  Copyright (c) 2010 Andreas Schneider <asn@redhat.com>
-#
-#  Redistribution and use is allowed according to the terms of the New
-#  BSD license.
-#  For details see the accompanying COPYING-CMAKE-SCRIPTS file.
-#
-
-
-if (NSPR_LIBRARIES AND NSPR_INCLUDE_DIRS)
-  # in cache already
-  set(NSPR_FOUND TRUE)
-else (NSPR_LIBRARIES AND NSPR_INCLUDE_DIRS)
-  find_package(PkgConfig)
-  if (PKG_CONFIG_FOUND)
-    pkg_check_modules(_NSPR nspr)
-  endif (PKG_CONFIG_FOUND)
-
-  find_path(NSPR_INCLUDE_DIR
-    NAMES
-      nspr.h
-    PATHS
-      ${_NSPR_INCLUDEDIR}
-      /usr/include
-      /usr/local/include
-      /opt/local/include
-      /sw/include
-    PATH_SUFFIXES
-      nspr4
-      nspr
-  )
-
-  find_library(PLDS4_LIBRARY
-    NAMES
-      plds4
-    PATHS
-      ${_NSPR_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  find_library(PLC4_LIBRARY
-    NAMES
-      plc4
-    PATHS
-      ${_NSPR_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  find_library(NSPR4_LIBRARY
-    NAMES
-      nspr4
-    PATHS
-      ${_NSPR_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  set(NSPR_INCLUDE_DIRS
-    ${NSPR_INCLUDE_DIR}
-  )
-
-  if (PLDS4_LIBRARY)
-    set(NSPR_LIBRARIES
-        ${NSPR_LIBRARIES}
-        ${PLDS4_LIBRARY}
-    )
-  endif (PLDS4_LIBRARY)
-
-  if (PLC4_LIBRARY)
-    set(NSPR_LIBRARIES
-        ${NSPR_LIBRARIES}
-        ${PLC4_LIBRARY}
-    )
-  endif (PLC4_LIBRARY)
-
-  if (NSPR4_LIBRARY)
-    set(NSPR_LIBRARIES
-        ${NSPR_LIBRARIES}
-        ${NSPR4_LIBRARY}
-    )
-  endif (NSPR4_LIBRARY)
-
-  include(FindPackageHandleStandardArgs)
-  find_package_handle_standard_args(NSPR DEFAULT_MSG NSPR_LIBRARIES NSPR_INCLUDE_DIRS)
-
-  # show the NSPR_INCLUDE_DIRS and NSPR_LIBRARIES variables only in the advanced view
-  mark_as_advanced(NSPR_INCLUDE_DIRS NSPR_LIBRARIES)
-
-endif (NSPR_LIBRARIES AND NSPR_INCLUDE_DIRS)
diff -pruN 14.2.16-2/cmake/modules/FindNSS.cmake 15.2.7-0ubuntu4/cmake/modules/FindNSS.cmake
--- 14.2.16-2/cmake/modules/FindNSS.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindNSS.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,126 +0,0 @@
-# - Try to find NSS
-# Once done this will define
-#
-#  NSS_FOUND - system has NSS
-#  NSS_INCLUDE_DIRS - the NSS include directory
-#  NSS_LIBRARIES - Link these to use NSS
-#  NSS_DEFINITIONS - Compiler switches required for using NSS
-#
-#  Copyright (c) 2010 Andreas Schneider <asn@redhat.com>
-#
-#  Redistribution and use is allowed according to the terms of the New
-#  BSD license.
-#  For details see the accompanying COPYING-CMAKE-SCRIPTS file.
-#
-
-
-if (NSS_LIBRARIES AND NSS_INCLUDE_DIRS)
-  # in cache already
-  set(NSS_FOUND TRUE)
-else (NSS_LIBRARIES AND NSS_INCLUDE_DIRS)
-  find_package(PkgConfig)
-  if (PKG_CONFIG_FOUND)
-    pkg_check_modules(_NSS nss)
-  endif (PKG_CONFIG_FOUND)
-
-  find_path(NSS_INCLUDE_DIR
-    NAMES
-      pk11pub.h
-    PATHS
-      ${_NSS_INCLUDEDIR}
-      /usr/include
-      /usr/local/include
-      /opt/local/include
-      /sw/include
-      /usr/local/include/nss
-    PATH_SUFFIXES
-      nss3
-      nss
-  )
-
-  find_library(SSL3_LIBRARY
-    NAMES
-      ssl3
-    PATHS
-      ${_NSS_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  find_library(SMIME3_LIBRARY
-    NAMES
-      smime3
-    PATHS
-      ${_NSS_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  find_library(NSS3_LIBRARY
-    NAMES
-      nss3
-    PATHS
-      ${_NSS_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-      /usr/lib/x86_64-linux-gnu
-  )
-
-  find_library(NSSUTIL3_LIBRARY
-    NAMES
-      nssutil3
-    PATHS
-      ${_NSS_LIBDIR}
-      /usr/lib
-      /usr/local/lib
-      /opt/local/lib
-      /sw/lib
-  )
-
-  set(NSS_INCLUDE_DIRS
-    ${NSS_INCLUDE_DIR}
-  )
-
-  if (SSL3_LIBRARY)
-    set(NSS_LIBRARIES
-        ${NSS_LIBRARIES}
-        ${SSL3_LIBRARY}
-    )
-  endif (SSL3_LIBRARY)
-
-  if (SMIME3_LIBRARY)
-    set(NSS_LIBRARIES
-        ${NSS_LIBRARIES}
-        ${SMIME3_LIBRARY}
-    )
-  endif (SMIME3_LIBRARY)
-
-  if (NSS3_LIBRARY)
-    set(NSS_LIBRARIES
-        ${NSS_LIBRARIES}
-        ${NSS3_LIBRARY}
-    )
-  endif (NSS3_LIBRARY)
-
-  if (NSSUTIL3_LIBRARY)
-    set(NSS_LIBRARIES
-        ${NSS_LIBRARIES}
-        ${NSSUTIL3_LIBRARY}
-    )
-  endif (NSSUTIL3_LIBRARY)
-
-  include(FindPackageHandleStandardArgs)
-  message(STATUS "NSS_LIBRARIES: ${NSS_LIBRARIES}")
-  message(STATUS "NSS_INCLUDE_DIRS: ${NSS_INCLUDE_DIRS}")
-  find_package_handle_standard_args(NSS DEFAULT_MSG NSS_LIBRARIES NSS_INCLUDE_DIRS)
-
-  # show the NSS_INCLUDE_DIRS and NSS_LIBRARIES variables only in the advanced view
-  mark_as_advanced(NSS_INCLUDE_DIRS NSS_LIBRARIES)
-
-endif (NSS_LIBRARIES AND NSS_INCLUDE_DIRS)
diff -pruN 14.2.16-2/cmake/modules/FindNUMA.cmake 15.2.7-0ubuntu4/cmake/modules/FindNUMA.cmake
--- 14.2.16-2/cmake/modules/FindNUMA.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindNUMA.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,16 @@
+# - Find libnuma
+# Find the numa library and includes
+#
+# NUMA_INCLUDE_DIR - where to find numa.h, etc.
+# NUMA_LIBRARIES - List of libraries when using numa.
+# NUMA_FOUND - True if numa found.
+
+find_path(NUMA_INCLUDE_DIR numa.h)
+find_library(NUMA_LIBRARIES numa)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(numa DEFAULT_MSG NUMA_LIBRARIES NUMA_INCLUDE_DIR)
+
+mark_as_advanced(
+  NUMA_LIBRARIES
+  NUMA_INCLUDE_DIR)
diff -pruN 14.2.16-2/cmake/modules/Findpmem.cmake 15.2.7-0ubuntu4/cmake/modules/Findpmem.cmake
--- 14.2.16-2/cmake/modules/Findpmem.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Findpmem.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,45 @@
+# - Find pmem
+#
+# PMEM_INCLUDE_DIR - Where to find libpmem.h
+# PMEM_LIBRARIES - List of libraries when using pmdk.
+# pmem_FOUND - True if pmem found.
+# PMEMOBJ_INCLUDE_DIR - Where to find libpmemobj.h
+# PMEMOBJ_LIBRARIES - List of libraries when using pmdk obj.
+# pmemobj_FOUND - True if pmemobj found.
+
+find_path(PMEM_INCLUDE_DIR libpmem.h)
+find_library(PMEM_LIBRARIES pmem)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(pmem
+  DEFAULT_MSG PMEM_LIBRARIES PMEM_INCLUDE_DIR)
+
+mark_as_advanced(
+  PMEM_INCLUDE_DIR
+  PMEM_LIBRARIES)
+
+if(pmem_FOUND AND NOT TARGET pmem::pmem)
+  add_library(pmem::pmem UNKNOWN IMPORTED)
+  set_target_properties(pmem::pmem PROPERTIES
+    INTERFACE_INCLUDE_DIRECTORIES "${PMEM_INCLUDE_DIR}"
+    IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+    IMPORTED_LOCATION "${PMEM_LIBRARIES}")
+endif()
+
+find_path(PMEMOBJ_INCLUDE_DIR libpmemobj.h)
+find_library(PMEMOBJ_LIBRARIES pmemobj)
+
+find_package_handle_standard_args(pmemobj
+  DEFAULT_MSG PMEMOBJ_LIBRARIES PMEMOBJ_INCLUDE_DIR)
+
+mark_as_advanced(
+  PMEMOBJ_INCLUDE_DIR
+  PMEMOBJ_LIBRARIES)
+
+if(pmemobj_FOUND AND NOT TARGET pmem::pmemobj)
+  add_library(pmem::pmemobj UNKNOWN IMPORTED)
+  set_target_properties(pmem::pmemobj PROPERTIES
+    INTERFACE_INCLUDE_DIRECTORIES "${PMEMOBJ_INCLUDE_DIR}"
+    IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+    IMPORTED_LOCATION "${PMEMOBJ_LIBRARIES}")
+endif()
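Both pmem::pmem and pmem::pmemobj are plain imported libraries, so the usual target-based consumption applies. A hedged example with an illustrative consumer target:

    find_package(pmem)                    # also probes for pmemobj
    if(pmemobj_FOUND)
      add_executable(pmem_demo pmem_demo.c)
      # brings in libpmemobj and its headers; pmem::pmem works the same way
      target_link_libraries(pmem_demo PRIVATE pmem::pmemobj)
    endif()
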
diff -pruN 14.2.16-2/cmake/modules/FindPython/Support.cmake 15.2.7-0ubuntu4/cmake/modules/FindPython/Support.cmake
--- 14.2.16-2/cmake/modules/FindPython/Support.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindPython/Support.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,1328 @@
+# Distributed under the OSI-approved BSD 3-Clause License.  See accompanying
+# file Copyright.txt or https://cmake.org/licensing for details.
+
+#
+# This file is a "template" file used by various FindPython modules.
+#
+
+cmake_policy (VERSION 3.5)
+
+#
+# Initial configuration
+#
+if (NOT DEFINED _PYTHON_PREFIX)
+  message (FATAL_ERROR "FindPython: INTERNAL ERROR")
+endif()
+if (NOT DEFINED _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+  message (FATAL_ERROR "FindPython: INTERNAL ERROR")
+endif()
+if (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL 3)
+  set(_${_PYTHON_PREFIX}_VERSIONS 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
+elseif (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL 2)
+  set(_${_PYTHON_PREFIX}_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0)
+else()
+  message (FATAL_ERROR "FindPython: INTERNAL ERROR")
+endif()
+
+get_property(_${_PYTHON_PREFIX}_CMAKE_ROLE GLOBAL PROPERTY CMAKE_ROLE)
+if (NOT DEFINED _${_PYTHON_PREFIX}_CMAKE_ROLE)
+  # CMake 3.14 introduced CMAKE_ROLE
+  set(_${_PYTHON_PREFIX}_CMAKE_ROLE "PROJECT")
+endif()
+
+
+#
+# helper commands
+#
+macro (_PYTHON_DISPLAY_FAILURE _PYTHON_MSG)
+  if (${_PYTHON_PREFIX}_FIND_REQUIRED)
+    message (FATAL_ERROR "${_PYTHON_MSG}")
+  else()
+    if (NOT ${_PYTHON_PREFIX}_FIND_QUIETLY)
+      message(STATUS "${_PYTHON_MSG}")
+    endif ()
+  endif()
+
+  set (${_PYTHON_PREFIX}_FOUND FALSE)
+  string (TOUPPER "${_PYTHON_PREFIX}" _${_PYTHON_PREFIX}_UPPER_PREFIX)
+  set (${_PYTHON_UPPER_PREFIX}_FOUND FALSE)
+  return()
+endmacro()
+
+
+macro (_PYTHON_FIND_FRAMEWORKS)
+  set (${_PYTHON_PREFIX}_FRAMEWORKS)
+  if (APPLE)
+    set (_pff_frameworks ${CMAKE_FRAMEWORK_PATH}
+                    $ENV{CMAKE_FRAMEWORK_PATH}
+                    ~/Library/Frameworks
+                    /usr/local/Frameworks
+                    ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+    list (REMOVE_DUPLICATES _pff_frameworks)
+    foreach (_pff_framework IN LISTS _pff_frameworks)
+      if (EXISTS ${_pff_framework}/Python.framework)
+        list (APPEND ${_PYTHON_PREFIX}_FRAMEWORKS ${_pff_framework}/Python.framework)
+      endif()
+    endforeach()
+    unset (_pff_frameworks)
+    unset (_pff_framework)
+  endif()
+endmacro()
+
+function (_PYTHON_GET_FRAMEWORKS _PYTHON_PGF_FRAMEWORK_PATHS _PYTHON_VERSION)
+  set (_PYTHON_FRAMEWORK_PATHS)
+  foreach (_PYTHON_FRAMEWORK IN LISTS ${_PYTHON_PREFIX}_FRAMEWORKS)
+    list (APPEND _PYTHON_FRAMEWORK_PATHS
+          "${_PYTHON_FRAMEWORK}/Versions/${_PYTHON_VERSION}")
+  endforeach()
+  set (${_PYTHON_PGF_FRAMEWORK_PATHS} ${_PYTHON_FRAMEWORK_PATHS} PARENT_SCOPE)
+endfunction()
+
+
+function (_PYTHON_VALIDATE_INTERPRETER)
+  if (NOT ${_PYTHON_PREFIX}_EXECUTABLE)
+    return()
+  endif()
+
+  if (ARGC EQUAL 1)
+    set (expected_version ${ARGV0})
+  else()
+    unset (expected_version)
+  endif()
+
+  get_filename_component (python_name "${${_PYTHON_PREFIX}_EXECUTABLE}" NAME)
+
+  if (expected_version AND NOT python_name STREQUAL "python${expected_version}${CMAKE_EXECUTABLE_SUFFIX}")
+    # executable found must have a specific version
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                             "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:2]]))"
+                     RESULT_VARIABLE result
+                     OUTPUT_VARIABLE version
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (result OR NOT version EQUAL expected_version)
+      # interpreter not usable or has wrong major version
+      set (${_PYTHON_PREFIX}_EXECUTABLE ${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND CACHE INTERNAL "" FORCE)
+      return()
+    endif()
+  else()
+    if (NOT python_name STREQUAL "python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}${CMAKE_EXECUTABLE_SUFFIX}")
+      # executable found do not have version in name
+      # ensure major version is OK
+      execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                               "import sys; sys.stdout.write(str(sys.version_info[0]))"
+                       RESULT_VARIABLE result
+                       OUTPUT_VARIABLE version
+                       ERROR_QUIET
+                       OUTPUT_STRIP_TRAILING_WHITESPACE)
+      if (result OR NOT version EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+        # interpreter not usable or has wrong major version
+        set (${_PYTHON_PREFIX}_EXECUTABLE ${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND CACHE INTERNAL "" FORCE)
+        return()
+      endif()
+    endif()
+  endif()
+
+  if (CMAKE_SIZEOF_VOID_P AND "Development" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
+      AND NOT CMAKE_CROSSCOMPILING)
+    # In this case, interpreter must have same architecture as environment
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                             "import sys, struct; sys.stdout.write(str(struct.calcsize(\"P\")))"
+                     RESULT_VARIABLE result
+                     OUTPUT_VARIABLE size
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (result OR NOT size EQUAL CMAKE_SIZEOF_VOID_P)
+      # interpreter not usable or has wrong architecture
+      set (${_PYTHON_PREFIX}_EXECUTABLE ${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND CACHE INTERNAL "" FORCE)
+      return()
+    endif()
+  endif()
+endfunction()
+
+
+function (_PYTHON_VALIDATE_COMPILER expected_version)
+  if (NOT ${_PYTHON_PREFIX}_COMPILER)
+    return()
+  endif()
+
+  # retrieve python environment version from compiler
+  set (working_dir "${CMAKE_CURRENT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PythonCompilerVersion.dir")
+  file (WRITE "${working_dir}/version.py" "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:2]]))\n")
+  execute_process (COMMAND "${${_PYTHON_PREFIX}_COMPILER}" /target:exe /embed "${working_dir}/version.py"
+                   WORKING_DIRECTORY "${working_dir}"
+                   OUTPUT_QUIET
+                   ERROR_QUIET
+                   OUTPUT_STRIP_TRAILING_WHITESPACE)
+  execute_process (COMMAND "${working_dir}/version"
+                   WORKING_DIRECTORY "${working_dir}"
+                   RESULT_VARIABLE result
+                   OUTPUT_VARIABLE version
+                   ERROR_QUIET)
+  file (REMOVE_RECURSE "${_${_PYTHON_PREFIX}_VERSION_DIR}")
+
+  if (result OR NOT version EQUAL expected_version)
+    # Compiler not usable or has wrong major version
+    set (${_PYTHON_PREFIX}_COMPILER ${_PYTHON_PREFIX}_COMPILER-NOTFOUND CACHE INTERNAL "" FORCE)
+  endif()
+endfunction()
+
+
+function (_PYTHON_FIND_RUNTIME_LIBRARY _PYTHON_LIB)
+  string (REPLACE "_RUNTIME" "" _PYTHON_LIB "${_PYTHON_LIB}")
+  # look at runtime part on systems supporting it
+  if (CMAKE_SYSTEM_NAME STREQUAL "Windows" OR
+      (CMAKE_SYSTEM_NAME MATCHES "MSYS|CYGWIN"
+        AND ${_PYTHON_LIB} MATCHES "${CMAKE_IMPORT_LIBRARY_SUFFIX}$"))
+    set (CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_SHARED_LIBRARY_SUFFIX})
+    # MSYS has a special syntax for runtime libraries
+    if (CMAKE_SYSTEM_NAME MATCHES "MSYS")
+      list (APPEND CMAKE_FIND_LIBRARY_PREFIXES "msys-")
+    endif()
+    find_library (${ARGV})
+  endif()
+endfunction()
+
+
+function (_PYTHON_SET_LIBRARY_DIRS _PYTHON_SLD_RESULT)
+  unset (_PYTHON_DIRS)
+  set (_PYTHON_LIBS ${ARGV})
+  list (REMOVE_AT _PYTHON_LIBS 0)
+  foreach (_PYTHON_LIB IN LISTS _PYTHON_LIBS)
+    if (${_PYTHON_LIB})
+      get_filename_component (_PYTHON_DIR "${${_PYTHON_LIB}}" DIRECTORY)
+      list (APPEND _PYTHON_DIRS "${_PYTHON_DIR}")
+    endif()
+  endforeach()
+  if (_PYTHON_DIRS)
+    list (REMOVE_DUPLICATES _PYTHON_DIRS)
+  endif()
+  set (${_PYTHON_SLD_RESULT} ${_PYTHON_DIRS} PARENT_SCOPE)
+endfunction()
+
+
+# If major version is specified, it must be the same as internal major version
+if (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION_MAJOR
+    AND NOT ${_PYTHON_PREFIX}_FIND_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+  _python_display_failure ("Could NOT find ${_PYTHON_PREFIX}: Wrong major version specified is \"${${_PYTHON_PREFIX}_FIND_VERSION_MAJOR}\", but expected major version is \"${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}\"")
+endif()
+
+
+# handle components
+if (NOT ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+  set (${_PYTHON_PREFIX}_FIND_COMPONENTS Interpreter)
+  set (${_PYTHON_PREFIX}_FIND_REQUIRED_Interpreter TRUE)
+endif()
+if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+  list (APPEND ${_PYTHON_PREFIX}_FIND_COMPONENTS "Interpreter" "Development")
+  list (REMOVE_DUPLICATES ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+endif()
+foreach (_${_PYTHON_PREFIX}_COMPONENT IN LISTS ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+  set (${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_COMPONENT}_FOUND FALSE)
+endforeach()
+unset (_${_PYTHON_PREFIX}_FIND_VERSIONS)
+
+# Set versions to search
+## default: search any version
+set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${_${_PYTHON_PREFIX}_VERSIONS})
+
+if (${_PYTHON_PREFIX}_FIND_VERSION_COUNT GREATER 1)
+  if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
+    set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_FIND_VERSION_MAJOR}.${${_PYTHON_PREFIX}_FIND_VERSION_MINOR})
+  else()
+    unset (_${_PYTHON_PREFIX}_FIND_VERSIONS)
+    # add all compatible versions
+    foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_VERSIONS)
+      if (NOT _${_PYTHON_PREFIX}_VERSION VERSION_LESS ${_PYTHON_PREFIX}_FIND_VERSION)
+        list (APPEND _${_PYTHON_PREFIX}_FIND_VERSIONS ${_${_PYTHON_PREFIX}_VERSION})
+      endif()
+    endforeach()
+  endif()
+endif()
+
+# Python and Anaconda distributions: define which architectures can be used
+if (CMAKE_SIZEOF_VOID_P)
+  # In this case, search only for 64bit or 32bit
+  math (EXPR _${_PYTHON_PREFIX}_ARCH "${CMAKE_SIZEOF_VOID_P} * 8")
+  set (_${_PYTHON_PREFIX}_ARCH2 ${_${_PYTHON_PREFIX}_ARCH})
+else()
+  # architecture unknown, search for both 64bit and 32bit
+  set (_${_PYTHON_PREFIX}_ARCH 64)
+  set (_${_PYTHON_PREFIX}_ARCH2 32)
+endif()
+
+# IronPython support
+if (CMAKE_SIZEOF_VOID_P)
+  # In this case, search only for 64bit or 32bit
+  math (EXPR _${_PYTHON_PREFIX}_ARCH "${CMAKE_SIZEOF_VOID_P} * 8")
+  set (_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES ipy${_${_PYTHON_PREFIX}_ARCH} ipy)
+else()
+  # architecture unknown, search for natural interpreter
+  set (_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES ipy)
+endif()
+set (_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES net45 net40)
+
+# Apple frameworks handling
+_python_find_frameworks ()
+
+# Save CMAKE_FIND_APPBUNDLE
+if (DEFINED CMAKE_FIND_APPBUNDLE)
+  set (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE ${CMAKE_FIND_APPBUNDLE})
+else()
+  unset (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
+endif()
+# To avoid app bundle lookup
+set (CMAKE_FIND_APPBUNDLE "NEVER")
+
+# Save CMAKE_FIND_FRAMEWORK
+if (DEFINED CMAKE_FIND_FRAMEWORK)
+  set (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
+  if (CMAKE_FIND_FRAMEWORK STREQUAL "ONLY")
+    message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: CMAKE_FIND_FRAMEWORK: 'ONLY' value is not supported. 'FIRST' will be used instead.")
+    set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK "FIRST")
+  else()
+    set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
+  endif()
+else()
+  unset (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
+  set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK "FIRST")
+endif()
+# To avoid framework lookup
+set (CMAKE_FIND_FRAMEWORK "NEVER")
+
+# Windows Registry handling
+if (DEFINED ${_PYTHON_PREFIX}_FIND_REGISTRY)
+  if (NOT ${_PYTHON_PREFIX}_FIND_REGISTRY MATCHES "^(FIRST|LAST|NEVER)$")
+    message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_REGISTRY}: invalid value for '${_PYTHON_PREFIX}_FIND_REGISTRY'. 'FIRST', 'LAST' or 'NEVER' expected.")
+    set (_${_PYTHON_PREFIX}_FIND_REGISTRY "FIRST")
+  else()
+    set (_${_PYTHON_PREFIX}_FIND_REGISTRY ${${_PYTHON_PREFIX}_FIND_REGISTRY})
+  endif()
+else()
+  set (_${_PYTHON_PREFIX}_FIND_REGISTRY "FIRST")
+endif()
+
+
+unset (_${_PYTHON_PREFIX}_REQUIRED_VARS)
+unset (_${_PYTHON_PREFIX}_CACHED_VARS)
+
+
+# first step, search for the interpreter
+if ("Interpreter" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+  list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS ${_PYTHON_PREFIX}_EXECUTABLE)
+  if (${_PYTHON_PREFIX}_FIND_REQUIRED_Interpreter)
+    list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_EXECUTABLE)
+  endif()
+
+  set (_${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
+
+  # look-up for various versions and locations
+  foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
+    string (REPLACE "." "" _${_PYTHON_PREFIX}_VERSION_NO_DOTS ${_${_PYTHON_PREFIX}_VERSION})
+
+    _python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_VERSION})
+
+    # Apple frameworks handling
+    if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                    NAMES_PER_DIR
+                    PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                    PATH_SUFFIXES bin
+                    NO_CMAKE_PATH
+                    NO_CMAKE_ENVIRONMENT_PATH
+                    NO_SYSTEM_ENVIRONMENT_PATH
+                    NO_CMAKE_SYSTEM_PATH)
+    endif()
+
+    # Windows registry
+    if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                          python
+                          ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES}
+                    NAMES_PER_DIR
+                    HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                    PATHS [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                    PATH_SUFFIXES bin ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                    NO_SYSTEM_ENVIRONMENT_PATH
+                    NO_CMAKE_SYSTEM_PATH)
+    endif()
+
+    # try using HINTS
+    find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                  NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                        python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                        python
+                        ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES}
+                  NAMES_PER_DIR
+                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                  PATH_SUFFIXES bin ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                  NO_SYSTEM_ENVIRONMENT_PATH
+                  NO_CMAKE_SYSTEM_PATH)
+    # try using standard paths.
+    if (WIN32)
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                          python
+                          ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES}
+                    NAMES_PER_DIR)
+    else()
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                    NAMES_PER_DIR)
+    endif()
+
+    # Apple frameworks handling
+    if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                    NAMES_PER_DIR
+                    PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                    PATH_SUFFIXES bin
+                    NO_DEFAULT_PATH)
+    endif()
+
+    # Windows registry
+    if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
+      find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION}
+                          python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                          python
+                          ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES}
+                    NAMES_PER_DIR
+                    PATHS [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+                          [HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                    PATH_SUFFIXES bin ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                    NO_DEFAULT_PATH)
+    endif()
+
+    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VERSION})
+    if (${_PYTHON_PREFIX}_EXECUTABLE)
+      break()
+    endif()
+  endforeach()
+
+  if (NOT ${_PYTHON_PREFIX}_EXECUTABLE)
+    # No specific version found. Retry with generic names
+    # try using HINTS
+    find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                  NAMES python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                        python
+                        ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES}
+                  NAMES_PER_DIR
+                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                  PATH_SUFFIXES bin ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                  NO_SYSTEM_ENVIRONMENT_PATH
+                  NO_CMAKE_SYSTEM_PATH)
+    # try using standard paths.
+    # NAMES_PER_DIR is not defined on purpose to have a chance to find
+    # expected version.
+    # For example, typical systems have 'python' for version 2.* and 'python3'
+    # for version 3.*. So looking for names per dir will find, potentially,
+    # systematically 'python' (i.e. version 2) even if version 3 is searched.
+    find_program (${_PYTHON_PREFIX}_EXECUTABLE
+                  NAMES python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}
+                        python
+                        ${_${_PYTHON_PREFIX}_IRON_PYTHON_NAMES})
+
+    _python_validate_interpreter ()
+  endif()
+
+  # retrieve exact version of executable found
+  if (${_PYTHON_PREFIX}_EXECUTABLE)
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                             "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))"
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE ${_PYTHON_PREFIX}_VERSION
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+      string (REGEX MATCHALL "[0-9]+" _${_PYTHON_PREFIX}_VERSIONS "${${_PYTHON_PREFIX}_VERSION}")
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 0 ${_PYTHON_PREFIX}_VERSION_MAJOR)
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 1 ${_PYTHON_PREFIX}_VERSION_MINOR)
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 2 ${_PYTHON_PREFIX}_VERSION_PATCH)
+    else()
+      # Interpreter is not usable
+      set (${_PYTHON_PREFIX}_EXECUTABLE ${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND CACHE INTERNAL "" FORCE)
+      unset (${_PYTHON_PREFIX}_VERSION)
+    endif()
+  endif()
+
+  if (${_PYTHON_PREFIX}_EXECUTABLE
+      AND ${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+    set (${_PYTHON_PREFIX}_Interpreter_FOUND TRUE)
+    # Use interpreter version for future searches to ensure consistency
+    set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
+  endif()
+
+  if (${_PYTHON_PREFIX}_Interpreter_FOUND)
+    if (NOT CMAKE_SIZEOF_VOID_P)
+      # determine interpreter architecture
+      execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c "import sys; print(sys.maxsize > 2**32)"
+                       RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                       OUTPUT_VARIABLE ${_PYTHON_PREFIX}_IS64BIT
+                       ERROR_VARIABLE ${_PYTHON_PREFIX}_IS64BIT)
+      if (NOT _${_PYTHON_PREFIX}_RESULT)
+        if (${_PYTHON_PREFIX}_IS64BIT)
+          set (_${_PYTHON_PREFIX}_ARCH 64)
+          set (_${_PYTHON_PREFIX}_ARCH2 64)
+        else()
+          set (_${_PYTHON_PREFIX}_ARCH 32)
+          set (_${_PYTHON_PREFIX}_ARCH2 32)
+        endif()
+      endif()
+    endif()
+
+    # retrieve interpreter identity
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -V
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE ${_PYTHON_PREFIX}_INTERPRETER_ID
+                     ERROR_VARIABLE ${_PYTHON_PREFIX}_INTERPRETER_ID)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+      if (${_PYTHON_PREFIX}_INTERPRETER_ID MATCHES "Anaconda")
+        set (${_PYTHON_PREFIX}_INTERPRETER_ID "Anaconda")
+      elseif (${_PYTHON_PREFIX}_INTERPRETER_ID MATCHES "Enthought")
+        set (${_PYTHON_PREFIX}_INTERPRETER_ID "Canopy")
+      else()
+        string (REGEX REPLACE "^([^ ]+).*" "\\1" ${_PYTHON_PREFIX}_INTERPRETER_ID "${${_PYTHON_PREFIX}_INTERPRETER_ID}")
+        if (${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "Python")
+          # try to get a more precise ID
+          execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c "import sys; print(sys.copyright)"
+                           RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                           OUTPUT_VARIABLE ${_PYTHON_PREFIX}_COPYRIGHT
+                           ERROR_QUIET)
+          if (${_PYTHON_PREFIX}_COPYRIGHT MATCHES "ActiveState")
+            set (${_PYTHON_PREFIX}_INTERPRETER_ID "ActivePython")
+          endif()
+        endif()
+      endif()
+    else()
+      set (${_PYTHON_PREFIX}_INTERPRETER_ID Python)
+    endif()
+  else()
+    unset (${_PYTHON_PREFIX}_INTERPRETER_ID)
+  endif()
+
+  # retrieve various package installation directories
+  execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c "import sys; from distutils import sysconfig;sys.stdout.write(';'.join([sysconfig.get_python_lib(plat_specific=False,standard_lib=True),sysconfig.get_python_lib(plat_specific=True,standard_lib=True),sysconfig.get_python_lib(plat_specific=False,standard_lib=False),sysconfig.get_python_lib(plat_specific=True,standard_lib=False)]))"
+                   RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                   OUTPUT_VARIABLE _${_PYTHON_PREFIX}_LIBPATHS
+                   ERROR_QUIET)
+  if (NOT _${_PYTHON_PREFIX}_RESULT)
+    list (GET _${_PYTHON_PREFIX}_LIBPATHS 0 ${_PYTHON_PREFIX}_STDLIB)
+    list (GET _${_PYTHON_PREFIX}_LIBPATHS 1 ${_PYTHON_PREFIX}_STDARCH)
+    list (GET _${_PYTHON_PREFIX}_LIBPATHS 2 ${_PYTHON_PREFIX}_SITELIB)
+    list (GET _${_PYTHON_PREFIX}_LIBPATHS 3 ${_PYTHON_PREFIX}_SITEARCH)
+  else()
+    unset (${_PYTHON_PREFIX}_STDLIB)
+    unset (${_PYTHON_PREFIX}_STDARCH)
+    unset (${_PYTHON_PREFIX}_SITELIB)
+    unset (${_PYTHON_PREFIX}_SITEARCH)
+  endif()
+
+  mark_as_advanced (${_PYTHON_PREFIX}_EXECUTABLE)
+endif()
+
+
+# second step, search for compiler (IronPython)
+if ("Compiler" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
+  list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS ${_PYTHON_PREFIX}_COMPILER)
+  if (${_PYTHON_PREFIX}_FIND_REQUIRED_Compiler)
+    list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_COMPILER)
+  endif()
+
+  # IronPython specific artifacts
+  # If IronPython interpreter is found, use its path
+  unset (_${_PYTHON_PREFIX}_IRON_ROOT)
+  if (${_PYTHON_PREFIX}_Interpreter_FOUND AND ${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "IronPython")
+    get_filename_component (_${_PYTHON_PREFIX}_IRON_ROOT "${${_PYTHON_PREFIX}_EXECUTABLE}" DIRECTORY)
+  endif()
+
+  # try using root dir and registry
+  foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
+    if (_${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
+      find_program (${_PYTHON_PREFIX}_COMPILER
+                    NAMES ipyc
+                    HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
+                    PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                    PATH_SUFFIXES ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                    NO_SYSTEM_ENVIRONMENT_PATH
+                    NO_CMAKE_SYSTEM_PATH)
+    endif()
+
+    find_program (${_PYTHON_PREFIX}_COMPILER
+                  NAMES ipyc
+                  HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
+                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                  NO_SYSTEM_ENVIRONMENT_PATH
+                  NO_CMAKE_SYSTEM_PATH)
+
+    if (_${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
+      find_program (${_PYTHON_PREFIX}_COMPILER
+                    NAMES ipyc
+                    PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+                    PATH_SUFFIXES ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES}
+                    NO_DEFAULT_PATH)
+    endif()
+
+    _python_validate_compiler (${_${_PYTHON_PREFIX}_VERSION})
+    if (${_PYTHON_PREFIX}_COMPILER)
+      break()
+    endif()
+  endforeach()
+
+  # no specific version found, re-try in standard paths
+  find_program (${_PYTHON_PREFIX}_COMPILER
+                NAMES ipyc
+                HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
+                PATH_SUFFIXES ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES})
+
+  if (${_PYTHON_PREFIX}_COMPILER)
+    # retrieve python environment version from compiler
+    set (_${_PYTHON_PREFIX}_VERSION_DIR "${CMAKE_CURRENT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PythonCompilerVersion.dir")
+    file (WRITE "${_${_PYTHON_PREFIX}_VERSION_DIR}/version.py" "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))\n")
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_COMPILER}" /target:exe /embed "${_${_PYTHON_PREFIX}_VERSION_DIR}/version.py"
+                     WORKING_DIRECTORY "${_${_PYTHON_PREFIX}_VERSION_DIR}"
+                     OUTPUT_QUIET
+                     ERROR_QUIET)
+    execute_process (COMMAND "${_${_PYTHON_PREFIX}_VERSION_DIR}/version"
+                     WORKING_DIRECTORY "${_${_PYTHON_PREFIX}_VERSION_DIR}"
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE _${_PYTHON_PREFIX}_VERSION
+                     ERROR_QUIET)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+      string (REGEX MATCHALL "[0-9]+" _${_PYTHON_PREFIX}_VERSIONS "${_${_PYTHON_PREFIX}_VERSION}")
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 0 _${_PYTHON_PREFIX}_VERSION_MAJOR)
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 1 _${_PYTHON_PREFIX}_VERSION_MINOR)
+      list (GET _${_PYTHON_PREFIX}_VERSIONS 2 _${_PYTHON_PREFIX}_VERSION_PATCH)
+
+      if (NOT ${_PYTHON_PREFIX}_Interpreter_FOUND)
+        # set public version information
+        set (${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_VERSION})
+        set (${_PYTHON_PREFIX}_VERSION_MAJOR ${_${_PYTHON_PREFIX}_VERSION_MAJOR})
+        set (${_PYTHON_PREFIX}_VERSION_MINOR ${_${_PYTHON_PREFIX}_VERSION_MINOR})
+        set (${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_VERSION_PATCH})
+      endif()
+    else()
+      # compiler not usable
+      set (${_PYTHON_PREFIX}_COMPILER ${_PYTHON_PREFIX}_COMPILER-NOTFOUND CACHE INTERNAL "" FORCE)
+    endif()
+    file (REMOVE_RECURSE "${_${_PYTHON_PREFIX}_VERSION_DIR}")
+  endif()
+
+  if (${_PYTHON_PREFIX}_COMPILER)
+    if (${_PYTHON_PREFIX}_Interpreter_FOUND)
+      # Compiler must be compatible with interpreter
+      if (${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR} VERSION_EQUAL ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
+        set (${_PYTHON_PREFIX}_Compiler_FOUND TRUE)
+      endif()
+    elseif (${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+      set (${_PYTHON_PREFIX}_Compiler_FOUND TRUE)
+      # Use compiler version for future searches to ensure consistency
+      set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
+    endif()
+  endif()
+
+  if (${_PYTHON_PREFIX}_Compiler_FOUND)
+    set (${_PYTHON_PREFIX}_COMPILER_ID IronPython)
+  else()
+    unset (${_PYTHON_PREFIX}_COMPILER_ID)
+  endif()
+
+  mark_as_advanced (${_PYTHON_PREFIX}_COMPILER)
+endif()
+
+
+# third step, search for the development artifacts
+## Development environment is not compatible with IronPython interpreter
+if ("Development" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
+    AND NOT ${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "IronPython")
+  list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS ${_PYTHON_PREFIX}_LIBRARY
+                                              ${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                                              ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
+                                              ${_PYTHON_PREFIX}_LIBRARY_DEBUG
+                                              ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG
+                                              ${_PYTHON_PREFIX}_INCLUDE_DIR)
+  if (${_PYTHON_PREFIX}_FIND_REQUIRED_Development)
+    list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_LIBRARY
+                                                  ${_PYTHON_PREFIX}_INCLUDE_DIR)
+  endif()
+
+  # Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
+  unset (_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES)
+  if (DEFINED ${_PYTHON_PREFIX}_USE_STATIC_LIBS AND NOT WIN32)
+    set(_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+    if(${_PYTHON_PREFIX}_USE_STATIC_LIBS)
+      set (CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
+    else()
+      list (REMOVE_ITEM CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
+    endif()
+  endif()
+
+  # if python interpreter is found, use its location and version to ensure consistency
+  # between interpreter and development environment
+  unset (_${_PYTHON_PREFIX}_PREFIX)
+  if (${_PYTHON_PREFIX}_Interpreter_FOUND)
+    execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                             "import sys; from distutils import sysconfig; sys.stdout.write(sysconfig.PREFIX)"
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE _${_PYTHON_PREFIX}_PREFIX
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (_${_PYTHON_PREFIX}_RESULT)
+      unset (_${_PYTHON_PREFIX}_PREFIX)
+    endif()
+  endif()
+  set (_${_PYTHON_PREFIX}_HINTS "${_${_PYTHON_PREFIX}_PREFIX}" "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
+
+  foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
+    string (REPLACE "." "" _${_PYTHON_PREFIX}_VERSION_NO_DOTS ${_${_PYTHON_PREFIX}_VERSION})
+
+    # try to use pythonX.Y-config tool
+    set (_${_PYTHON_PREFIX}_CONFIG_NAMES)
+    if (DEFINED CMAKE_LIBRARY_ARCHITECTURE)
+      set (_${_PYTHON_PREFIX}_CONFIG_NAMES "${CMAKE_LIBRARY_ARCHITECTURE}-python${_${_PYTHON_PREFIX}_VERSION}-config")
+    endif()
+    list (APPEND _${_PYTHON_PREFIX}_CONFIG_NAMES "python${_${_PYTHON_PREFIX}_VERSION}-config")
+    find_program (_${_PYTHON_PREFIX}_CONFIG
+                  NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
+                  NAMES_PER_DIR
+                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                  PATH_SUFFIXES bin)
+    unset (_${_PYTHON_PREFIX}_CONFIG_NAMES)
+
+    if (NOT _${_PYTHON_PREFIX}_CONFIG)
+      continue()
+    endif()
+    if (DEFINED CMAKE_LIBRARY_ARCHITECTURE)
+      # check that the config tool matches the library architecture
+      execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --configdir
+                       RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                       OUTPUT_VARIABLE _${_PYTHON_PREFIX}_CONFIGDIR
+                       ERROR_QUIET
+                       OUTPUT_STRIP_TRAILING_WHITESPACE)
+      if (_${_PYTHON_PREFIX}_RESULT)
+        unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
+        continue()
+      endif()
+      string(FIND "${_${_PYTHON_PREFIX}_CONFIGDIR}" "${CMAKE_LIBRARY_ARCHITECTURE}" _${_PYTHON_PREFIX}_RESULT)
+      if (_${_PYTHON_PREFIX}_RESULT EQUAL -1)
+        unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
+        continue()
+      endif()
+    endif()
+
+    # retrieve root install directory
+    execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --prefix
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE _${_PYTHON_PREFIX}_PREFIX
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (_${_PYTHON_PREFIX}_RESULT)
+      # python-config is not usable
+      unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
+      continue()
+    endif()
+    set (_${_PYTHON_PREFIX}_HINTS "${_${_PYTHON_PREFIX}_PREFIX}" "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
+
+    # retrieve library
+    execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --ldflags
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE _${_PYTHON_PREFIX}_FLAGS
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+      # retrieve library directory
+      string (REGEX MATCHALL "-L[^ ]+" _${_PYTHON_PREFIX}_LIB_DIRS "${_${_PYTHON_PREFIX}_FLAGS}")
+      string (REPLACE "-L" "" _${_PYTHON_PREFIX}_LIB_DIRS "${_${_PYTHON_PREFIX}_LIB_DIRS}")
+      list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_LIB_DIRS)
+      # retrieve library name
+      string (REGEX MATCHALL "-lpython[^ ]+" _${_PYTHON_PREFIX}_LIB_NAMES "${_${_PYTHON_PREFIX}_FLAGS}")
+      string (REPLACE "-l" "" _${_PYTHON_PREFIX}_LIB_NAMES "${_${_PYTHON_PREFIX}_LIB_NAMES}")
+      list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_LIB_NAMES)
+
+      find_library (${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                    NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
+                    NAMES_PER_DIR
+                    HINTS ${_${_PYTHON_PREFIX}_HINTS} ${_${_PYTHON_PREFIX}_LIB_DIRS}
+                    PATH_SUFFIXES lib
+                    NO_SYSTEM_ENVIRONMENT_PATH
+                    NO_CMAKE_SYSTEM_PATH)
+      # retrieve runtime library
+      if (${_PYTHON_PREFIX}_LIBRARY_RELEASE)
+        get_filename_component (_${_PYTHON_PREFIX}_PATH "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
+        get_filename_component (_${_PYTHON_PREFIX}_PATH2 "${_${_PYTHON_PREFIX}_PATH}" DIRECTORY)
+        _python_find_runtime_library (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
+                                      NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
+                                      NAMES_PER_DIR
+                                      HINTS "${_${_PYTHON_PREFIX}_PATH}" "${_${_PYTHON_PREFIX}_PATH2}" ${_${_PYTHON_PREFIX}_HINTS}
+                                      PATH_SUFFIXES bin
+                                      NO_SYSTEM_ENVIRONMENT_PATH
+                                      NO_CMAKE_SYSTEM_PATH)
+      endif()
+    endif()
+
+    # retrieve include directory
+    execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --includes
+                     RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                     OUTPUT_VARIABLE _${_PYTHON_PREFIX}_FLAGS
+                     ERROR_QUIET
+                     OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+      # retrieve include directory
+      string (REGEX MATCHALL "-I[^ ]+" _${_PYTHON_PREFIX}_INCLUDE_DIRS "${_${_PYTHON_PREFIX}_FLAGS}")
+      string (REPLACE "-I" "" _${_PYTHON_PREFIX}_INCLUDE_DIRS "${_${_PYTHON_PREFIX}_INCLUDE_DIRS}")
+      list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_INCLUDE_DIRS)
+
+      find_path (${_PYTHON_PREFIX}_INCLUDE_DIR
+                 NAMES Python.h
+                 HINTS ${_${_PYTHON_PREFIX}_INCLUDE_DIRS}
+                 NO_SYSTEM_ENVIRONMENT_PATH
+                 NO_CMAKE_SYSTEM_PATH)
+    endif()
+
+    if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_INCLUDE_DIR)
+      break()
+    endif()
+  endforeach()
+
+  # Rely on HINTS and standard paths if config tool failed to locate artifacts
+  if (NOT (${_PYTHON_PREFIX}_LIBRARY_RELEASE OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG) OR NOT ${_PYTHON_PREFIX}_INCLUDE_DIR)
+    foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
+      string (REPLACE "." "" _${_PYTHON_PREFIX}_VERSION_NO_DOTS ${_${_PYTHON_PREFIX}_VERSION})
+
+      _python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_VERSION})
+
+      set (_${_PYTHON_PREFIX}_REGISTRY_PATHS
+        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
+        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_${_PYTHON_PREFIX}_VERSION}\\InstallPath]
+        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
+        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath])
+
+      if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
+        find_library (${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                      NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}
+                            python${_${_PYTHON_PREFIX}_VERSION}mu
+                            python${_${_PYTHON_PREFIX}_VERSION}m
+                            python${_${_PYTHON_PREFIX}_VERSION}u
+                            python${_${_PYTHON_PREFIX}_VERSION}
+                      NAMES_PER_DIR
+                      PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                      PATH_SUFFIXES lib/${CMAKE_LIBRARY_ARCHITECTURE} lib libs
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}mu
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}m
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}u
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config
+                      NO_CMAKE_PATH
+                      NO_CMAKE_ENVIRONMENT_PATH
+                      NO_SYSTEM_ENVIRONMENT_PATH
+                      NO_CMAKE_SYSTEM_PATH)
+      endif()
+
+      if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
+        find_library (${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                      NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}
+                            python${_${_PYTHON_PREFIX}_VERSION}mu
+                            python${_${_PYTHON_PREFIX}_VERSION}m
+                            python${_${_PYTHON_PREFIX}_VERSION}u
+                            python${_${_PYTHON_PREFIX}_VERSION}
+                      NAMES_PER_DIR
+                      HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                      PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                      PATH_SUFFIXES lib/${CMAKE_LIBRARY_ARCHITECTURE} lib libs
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}mu
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}m
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}u
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}
+                                    lib/python${_${_PYTHON_PREFIX}_VERSION}/config
+                      NO_SYSTEM_ENVIRONMENT_PATH
+                      NO_CMAKE_SYSTEM_PATH)
+      endif()
+
+      # search in HINTS locations
+      find_library (${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}
+                          python${_${_PYTHON_PREFIX}_VERSION}mu
+                          python${_${_PYTHON_PREFIX}_VERSION}m
+                          python${_${_PYTHON_PREFIX}_VERSION}u
+                          python${_${_PYTHON_PREFIX}_VERSION}
+                    NAMES_PER_DIR
+                    HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                    PATH_SUFFIXES lib/${CMAKE_LIBRARY_ARCHITECTURE} lib libs
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}mu
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}m
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}u
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config
+                    NO_SYSTEM_ENVIRONMENT_PATH
+                    NO_CMAKE_SYSTEM_PATH)
+
+      if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
+        set (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS})
+      else()
+        unset (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS)
+      endif()
+
+      if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
+        set (__${_PYTHON_PREFIX}_REGISTRY_PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS})
+      else()
+        unset (__${_PYTHON_PREFIX}_REGISTRY_PATHS)
+      endif()
+
+      # search in all default paths
+      find_library (${_PYTHON_PREFIX}_LIBRARY_RELEASE
+                    NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}
+                          python${_${_PYTHON_PREFIX}_VERSION}mu
+                          python${_${_PYTHON_PREFIX}_VERSION}m
+                          python${_${_PYTHON_PREFIX}_VERSION}u
+                          python${_${_PYTHON_PREFIX}_VERSION}
+                    NAMES_PER_DIR
+                    PATHS ${__${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                          ${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                    PATH_SUFFIXES lib/${CMAKE_LIBRARY_ARCHITECTURE} lib libs
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}mu
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}m
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}u
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config-${_${_PYTHON_PREFIX}_VERSION}
+                                  lib/python${_${_PYTHON_PREFIX}_VERSION}/config)
+      # retrieve runtime library
+      if (${_PYTHON_PREFIX}_LIBRARY_RELEASE)
+        get_filename_component (_${_PYTHON_PREFIX}_PATH "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
+        get_filename_component (_${_PYTHON_PREFIX}_PATH2 "${_${_PYTHON_PREFIX}_PATH}" DIRECTORY)
+        _python_find_runtime_library (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
+                                      NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}
+                                            python${_${_PYTHON_PREFIX}_VERSION}mu
+                                            python${_${_PYTHON_PREFIX}_VERSION}m
+                                            python${_${_PYTHON_PREFIX}_VERSION}u
+                                            python${_${_PYTHON_PREFIX}_VERSION}
+                                      NAMES_PER_DIR
+                                      HINTS "${_${_PYTHON_PREFIX}_PATH}" "${_${_PYTHON_PREFIX}_PATH2}" ${_${_PYTHON_PREFIX}_HINTS}
+                                      PATH_SUFFIXES bin)
+      endif()
+
+      if (WIN32)
+        # search for debug library
+        if (${_PYTHON_PREFIX}_LIBRARY_RELEASE)
+          # use library location as a hint
+          get_filename_component (_${_PYTHON_PREFIX}_PATH "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
+          find_library (${_PYTHON_PREFIX}_LIBRARY_DEBUG
+                        NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}_d
+                        NAMES_PER_DIR
+                        HINTS "${_${_PYTHON_PREFIX}_PATH}" ${_${_PYTHON_PREFIX}_HINTS}
+                        NO_DEFAULT_PATH)
+        else()
+          # search first in known locations
+          if (_${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
+            find_library (${_PYTHON_PREFIX}_LIBRARY_DEBUG
+                          NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}_d
+                          NAMES_PER_DIR
+                          HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                          PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                          PATH_SUFFIXES lib libs
+                          NO_SYSTEM_ENVIRONMENT_PATH
+                          NO_CMAKE_SYSTEM_PATH)
+          endif()
+          # search in all default paths
+          find_library (${_PYTHON_PREFIX}_LIBRARY_DEBUG
+                        NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}_d
+                        NAMES_PER_DIR
+                        HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                        PATHS ${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                        PATH_SUFFIXES lib libs)
+        endif()
+        if (${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+          get_filename_component (_${_PYTHON_PREFIX}_PATH "${${_PYTHON_PREFIX}_LIBRARY_DEBUG}" DIRECTORY)
+          get_filename_component (_${_PYTHON_PREFIX}_PATH2 "${_${_PYTHON_PREFIX}_PATH}" DIRECTORY)
+          _python_find_runtime_library (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG
+                                        NAMES python${_${_PYTHON_PREFIX}_VERSION_NO_DOTS}_d
+                                        NAMES_PER_DIR
+                                        HINTS "${_${_PYTHON_PREFIX}_PATH}" "${_${_PYTHON_PREFIX}_PATH2}" ${_${_PYTHON_PREFIX}_HINTS}
+                                        PATH_SUFFIXES bin)
+        endif()
+      endif()
+
+      # Don't search for include dir until library location is known
+      if (${_PYTHON_PREFIX}_LIBRARY_RELEASE OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+        unset (_${_PYTHON_PREFIX}_INCLUDE_HINTS)
+
+        if (${_PYTHON_PREFIX}_EXECUTABLE)
+          # pick up include directory from configuration
+          execute_process (COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+                                   "import sys; import sysconfig; sys.stdout.write(sysconfig.get_path('include'))"
+                           RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                           OUTPUT_VARIABLE _${_PYTHON_PREFIX}_PATH
+                           ERROR_QUIET
+                           OUTPUT_STRIP_TRAILING_WHITESPACE)
+          if (NOT _${_PYTHON_PREFIX}_RESULT)
+            file (TO_CMAKE_PATH "${_${_PYTHON_PREFIX}_PATH}" _${_PYTHON_PREFIX}_PATH)
+            list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${_${_PYTHON_PREFIX}_PATH}")
+          endif()
+        endif()
+
+        foreach (_${_PYTHON_PREFIX}_LIB IN ITEMS ${_PYTHON_PREFIX}_LIBRARY_RELEASE ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+          if (${_${_PYTHON_PREFIX}_LIB})
+            # Use the library's install prefix as a hint
+            if (${_${_PYTHON_PREFIX}_LIB} MATCHES "^(.+/Frameworks/Python.framework/Versions/[0-9.]+)")
+              list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
+            elseif (${_${_PYTHON_PREFIX}_LIB} MATCHES "^(.+)/lib(64|32)?/python[0-9.]+/config")
+              list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
+            elseif (DEFINED CMAKE_LIBRARY_ARCHITECTURE AND ${_${_PYTHON_PREFIX}_LIB} MATCHES "^(.+)/lib/${CMAKE_LIBRARY_ARCHITECTURE}")
+              list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
+            else()
+              # assume library is in a directory under root
+              get_filename_component (_${_PYTHON_PREFIX}_PREFIX "${${_${_PYTHON_PREFIX}_LIB}}" DIRECTORY)
+              get_filename_component (_${_PYTHON_PREFIX}_PREFIX "${_${_PYTHON_PREFIX}_PREFIX}" DIRECTORY)
+              list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${_${_PYTHON_PREFIX}_PREFIX}")
+            endif()
+          endif()
+        endforeach()
+        list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_INCLUDE_HINTS)
+
+        if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
+          find_path (${_PYTHON_PREFIX}_INCLUDE_DIR
+                     NAMES Python.h
+                     HINTS ${_${_PYTHON_PREFIX}_HINTS}
+                     PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                     PATH_SUFFIXES include/python${_${_PYTHON_PREFIX}_VERSION}mu
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}m
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}u
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}
+                                   include
+                     NO_CMAKE_PATH
+                     NO_CMAKE_ENVIRONMENT_PATH
+                     NO_SYSTEM_ENVIRONMENT_PATH
+                     NO_CMAKE_SYSTEM_PATH)
+        endif()
+
+        if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
+          find_path (${_PYTHON_PREFIX}_INCLUDE_DIR
+                     NAMES Python.h
+                     HINTS ${_${_PYTHON_PREFIX}_INCLUDE_HINTS} ${_${_PYTHON_PREFIX}_HINTS}
+                     PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                     PATH_SUFFIXES include/python${_${_PYTHON_PREFIX}_VERSION}mu
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}m
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}u
+                                   include/python${_${_PYTHON_PREFIX}_VERSION}
+                                   include
+                     NO_SYSTEM_ENVIRONMENT_PATH
+                     NO_CMAKE_SYSTEM_PATH)
+        endif()
+
+        find_path (${_PYTHON_PREFIX}_INCLUDE_DIR
+                   NAMES Python.h
+                   HINTS ${_${_PYTHON_PREFIX}_INCLUDE_HINTS} ${_${_PYTHON_PREFIX}_HINTS}
+                   PATHS ${__${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
+                         ${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
+                   PATH_SUFFIXES include/python${_${_PYTHON_PREFIX}_VERSION}mu
+                                 include/python${_${_PYTHON_PREFIX}_VERSION}m
+                                 include/python${_${_PYTHON_PREFIX}_VERSION}u
+                                 include/python${_${_PYTHON_PREFIX}_VERSION}
+                                 include
+                   NO_SYSTEM_ENVIRONMENT_PATH
+                   NO_CMAKE_SYSTEM_PATH)
+      endif()
+
+      if ((${_PYTHON_PREFIX}_LIBRARY_RELEASE OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG) AND ${_PYTHON_PREFIX}_INCLUDE_DIR)
+        break()
+      endif()
+    endforeach()
+
+    # search header file in standard locations
+    find_path (${_PYTHON_PREFIX}_INCLUDE_DIR
+               NAMES Python.h)
+  endif()
+
+  if (${_PYTHON_PREFIX}_INCLUDE_DIR)
+    # retrieve version from header file
+    file (STRINGS "${${_PYTHON_PREFIX}_INCLUDE_DIR}/patchlevel.h" _${_PYTHON_PREFIX}_VERSION
+          REGEX "^#define[ \t]+PY_VERSION[ \t]+\"[^\"]+\"")
+    string (REGEX REPLACE "^#define[ \t]+PY_VERSION[ \t]+\"([^\"]+)\".*" "\\1"
+                          _${_PYTHON_PREFIX}_VERSION "${_${_PYTHON_PREFIX}_VERSION}")
+    string (REGEX MATCHALL "[0-9]+" _${_PYTHON_PREFIX}_VERSIONS "${_${_PYTHON_PREFIX}_VERSION}")
+    list (GET _${_PYTHON_PREFIX}_VERSIONS 0 _${_PYTHON_PREFIX}_VERSION_MAJOR)
+    list (GET _${_PYTHON_PREFIX}_VERSIONS 1 _${_PYTHON_PREFIX}_VERSION_MINOR)
+    list (GET _${_PYTHON_PREFIX}_VERSIONS 2 _${_PYTHON_PREFIX}_VERSION_PATCH)
+
+    if (NOT ${_PYTHON_PREFIX}_Interpreter_FOUND AND NOT ${_PYTHON_PREFIX}_Compiler_FOUND)
+      # set public version information
+      set (${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_VERSION})
+      set (${_PYTHON_PREFIX}_VERSION_MAJOR ${_${_PYTHON_PREFIX}_VERSION_MAJOR})
+      set (${_PYTHON_PREFIX}_VERSION_MINOR ${_${_PYTHON_PREFIX}_VERSION_MINOR})
+      set (${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_VERSION_PATCH})
+    endif()
+  endif()
+
+  # define public variables
+  include (SelectLibraryConfigurations)
+  select_library_configurations (${_PYTHON_PREFIX})
+  if (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE)
+    set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
+  elseif (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
+    set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG}")
+  else()
+    set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${_PYTHON_PREFIX}_RUNTIME_LIBRARY-NOTFOUND")
+  endif()
+
+  _python_set_library_dirs (${_PYTHON_PREFIX}_LIBRARY_DIRS
+                            ${_PYTHON_PREFIX}_LIBRARY_RELEASE ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+  if (UNIX)
+    if (${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$"
+        OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$")
+      set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DIRS ${${_PYTHON_PREFIX}_LIBRARY_DIRS})
+    endif()
+  else()
+      _python_set_library_dirs (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DIRS
+                                ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
+  endif()
+
+  set (${_PYTHON_PREFIX}_INCLUDE_DIRS "${${_PYTHON_PREFIX}_INCLUDE_DIR}")
+
+  mark_as_advanced (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
+                    ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG
+                    ${_PYTHON_PREFIX}_INCLUDE_DIR)
+
+  if ((${_PYTHON_PREFIX}_LIBRARY_RELEASE OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+      AND ${_PYTHON_PREFIX}_INCLUDE_DIR)
+    if (${_PYTHON_PREFIX}_Interpreter_FOUND OR ${_PYTHON_PREFIX}_Compiler_FOUND)
+      # development environment must be compatible with interpreter/compiler
+      if (${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR} VERSION_EQUAL ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
+        set (${_PYTHON_PREFIX}_Development_FOUND TRUE)
+      endif()
+    elseif (${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+      set (${_PYTHON_PREFIX}_Development_FOUND TRUE)
+    endif()
+  endif()
+
+  # Restore the original find library ordering
+  if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES)
+    set (CMAKE_FIND_LIBRARY_SUFFIXES ${_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES})
+  endif()
+endif()
+
+if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS AND ${_PYTHON_PREFIX}_Interpreter_FOUND)
+  list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS ${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
+  if (${_PYTHON_PREFIX}_FIND_REQUIRED_NumPy)
+    list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
+  endif()
+  execute_process(
+      COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+              "from __future__ import print_function\ntry: import numpy; print(numpy.get_include(), end='')\nexcept:pass\n"
+      RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+      OUTPUT_VARIABLE _${_PYTHON_PREFIX}_NumPy_PATH
+      ERROR_QUIET
+      OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if (NOT _${_PYTHON_PREFIX}_RESULT)
+    find_path(${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR
+              NAMES "numpy/arrayobject.h" "numpy/numpyconfig.h"
+              HINTS "${_${_PYTHON_PREFIX}_NumPy_PATH}"
+              NO_DEFAULT_PATH)
+  endif()
+  if(${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
+    set(${_PYTHON_PREFIX}_NumPy_INCLUDE_DIRS "${${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
+    set(${_PYTHON_PREFIX}_NumPy_FOUND TRUE)
+  endif()
+  if(${_PYTHON_PREFIX}_NumPy_FOUND)
+    execute_process(
+            COMMAND "${${_PYTHON_PREFIX}_EXECUTABLE}" -c
+            "from __future__ import print_function\ntry: import numpy; print(numpy.__version__, end='')\nexcept:pass\n"
+            RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+            OUTPUT_VARIABLE _${_PYTHON_PREFIX}_NumPy_VERSION)
+    if (NOT _${_PYTHON_PREFIX}_RESULT)
+       set(${_PYTHON_PREFIX}_NumPy_VERSION "${_${_PYTHON_PREFIX}_NumPy_VERSION}")
+    endif()
+  endif()
+  # final step: mark NumPy as found only if the Development component is found as well
+  if (NOT ${_PYTHON_PREFIX}_Development_FOUND)
+    set(${_PYTHON_PREFIX}_NumPy_FOUND FALSE)
+  endif()
+endif()
+
+# final validation
+if (${_PYTHON_PREFIX}_VERSION_MAJOR AND
+    NOT ${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
+  _python_display_failure ("Could NOT find ${_PYTHON_PREFIX}: Found unsuitable major version \"${${_PYTHON_PREFIX}_VERSION_MAJOR}\", but required major version is exact version \"${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}\"")
+endif()
+
+include (FindPackageHandleStandardArgs)
+find_package_handle_standard_args (${_PYTHON_PREFIX}
+                                   REQUIRED_VARS ${_${_PYTHON_PREFIX}_REQUIRED_VARS}
+                                   VERSION_VAR ${_PYTHON_PREFIX}_VERSION
+                                   HANDLE_COMPONENTS)
+
+# Create imported targets and helper functions
+if(_${_PYTHON_PREFIX}_CMAKE_ROLE STREQUAL "PROJECT")
+  if ("Interpreter" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
+      AND ${_PYTHON_PREFIX}_Interpreter_FOUND
+      AND NOT TARGET ${_PYTHON_PREFIX}::Interpreter)
+    add_executable (${_PYTHON_PREFIX}::Interpreter IMPORTED)
+    set_property (TARGET ${_PYTHON_PREFIX}::Interpreter
+                  PROPERTY IMPORTED_LOCATION "${${_PYTHON_PREFIX}_EXECUTABLE}")
+  endif()
+
+  if ("Compiler" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
+      AND ${_PYTHON_PREFIX}_Compiler_FOUND
+      AND NOT TARGET ${_PYTHON_PREFIX}::Compiler)
+    add_executable (${_PYTHON_PREFIX}::Compiler IMPORTED)
+    set_property (TARGET ${_PYTHON_PREFIX}::Compiler
+                  PROPERTY IMPORTED_LOCATION "${${_PYTHON_PREFIX}_COMPILER}")
+  endif()
+
+  if ("Development" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
+      AND ${_PYTHON_PREFIX}_Development_FOUND AND NOT TARGET ${_PYTHON_PREFIX}::Python)
+
+    if (${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$"
+        OR ${_PYTHON_PREFIX}_LIBRARY_DEBUG MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$"
+        OR ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE OR ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
+      set (_${_PYTHON_PREFIX}_LIBRARY_TYPE SHARED)
+    else()
+      set (_${_PYTHON_PREFIX}_LIBRARY_TYPE STATIC)
+    endif()
+
+    add_library (${_PYTHON_PREFIX}::Python ${_${_PYTHON_PREFIX}_LIBRARY_TYPE} IMPORTED)
+
+    set_property (TARGET ${_PYTHON_PREFIX}::Python
+                  PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${_PYTHON_PREFIX}_INCLUDE_DIR}")
+
+    if ((${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE)
+        OR (${_PYTHON_PREFIX}_LIBRARY_DEBUG AND ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG))
+      # The system manages shared libraries in two parts: import and runtime
+      if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+        set_property (TARGET ${_PYTHON_PREFIX}::Python PROPERTY IMPORTED_CONFIGURATIONS RELEASE DEBUG)
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
+                                          IMPORTED_IMPLIB_RELEASE "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}"
+                                          IMPORTED_LOCATION_RELEASE "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
+                                          IMPORTED_IMPLIB_DEBUG "${${_PYTHON_PREFIX}_LIBRARY_DEBUG}"
+                                          IMPORTED_LOCATION_DEBUG "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG}")
+      else()
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+                                          IMPORTED_IMPLIB "${${_PYTHON_PREFIX}_LIBRARY}"
+                                          IMPORTED_LOCATION "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY}")
+      endif()
+    else()
+      if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
+        set_property (TARGET ${_PYTHON_PREFIX}::Python PROPERTY IMPORTED_CONFIGURATIONS RELEASE DEBUG)
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
+                                          IMPORTED_LOCATION_RELEASE "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
+                                          IMPORTED_LOCATION_DEBUG "${${_PYTHON_PREFIX}_LIBRARY_DEBUG}")
+      else()
+        set_target_properties (${_PYTHON_PREFIX}::Python
+                               PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+                                          IMPORTED_LOCATION "${${_PYTHON_PREFIX}_LIBRARY}")
+      endif()
+    endif()
+
+    if (_${_PYTHON_PREFIX}_CONFIG AND _${_PYTHON_PREFIX}_LIBRARY_TYPE STREQUAL "STATIC")
+      # extend link information with dependent libraries
+      execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --ldflags
+                       RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
+                       OUTPUT_VARIABLE _${_PYTHON_PREFIX}_FLAGS
+                       ERROR_QUIET
+                       OUTPUT_STRIP_TRAILING_WHITESPACE)
+      if (NOT _${_PYTHON_PREFIX}_RESULT)
+        string (REGEX MATCHALL "-[Ll][^ ]+" _${_PYTHON_PREFIX}_LINK_LIBRARIES "${_${_PYTHON_PREFIX}_FLAGS}")
+        # remove elements referring to the python library itself
+        list (FILTER _${_PYTHON_PREFIX}_LINK_LIBRARIES EXCLUDE REGEX "-lpython")
+        foreach (_${_PYTHON_PREFIX}_DIR IN LISTS ${_PYTHON_PREFIX}_LIBRARY_DIRS)
+          list (FILTER _${_PYTHON_PREFIX}_LINK_LIBRARIES EXCLUDE REGEX "-L${_${_PYTHON_PREFIX}_DIR}")
+        endforeach()
+        set_property (TARGET ${_PYTHON_PREFIX}::Python
+                      PROPERTY INTERFACE_LINK_LIBRARIES ${_${_PYTHON_PREFIX}_LINK_LIBRARIES})
+      endif()
+    endif()
+
+    #
+    # PYTHON_ADD_LIBRARY (<name> [STATIC|SHARED|MODULE] src1 src2 ... srcN)
+    # It is used to build modules for python.
+    #
+    function (__${_PYTHON_PREFIX}_ADD_LIBRARY prefix name)
+      cmake_parse_arguments (PARSE_ARGV 2 PYTHON_ADD_LIBRARY
+                             "STATIC;SHARED;MODULE" "" "")
+
+      unset (type)
+      if (NOT (PYTHON_ADD_LIBRARY_STATIC
+            OR PYTHON_ADD_LIBRARY_SHARED
+            OR PYTHON_ADD_LIBRARY_MODULE))
+        set (type MODULE)
+      endif()
+      add_library (${name} ${type} ${ARGN})
+      target_link_libraries (${name} PRIVATE ${prefix}::Python)
+
+      # customize library name to follow module name rules
+      get_property (type TARGET ${name} PROPERTY TYPE)
+      if (type STREQUAL "MODULE_LIBRARY")
+        set_property (TARGET ${name} PROPERTY PREFIX "")
+        if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
+          set_property (TARGET ${name} PROPERTY SUFFIX ".pyd")
+        endif()
+      endif()
+    endfunction()
+  endif()
+
+  if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS AND ${_PYTHON_PREFIX}_NumPy_FOUND
+      AND NOT TARGET ${_PYTHON_PREFIX}::NumPy AND TARGET ${_PYTHON_PREFIX}::Python)
+    add_library (${_PYTHON_PREFIX}::NumPy INTERFACE IMPORTED)
+    set_property (TARGET ${_PYTHON_PREFIX}::NumPy
+                  PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
+    target_link_libraries (${_PYTHON_PREFIX}::NumPy INTERFACE ${_PYTHON_PREFIX}::Python)
+  endif()
+endif()
+
+# final clean-up
+
+# Restore CMAKE_FIND_APPBUNDLE
+if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
+  set (CMAKE_FIND_APPBUNDLE ${_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE})
+  unset (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
+else()
+  unset (CMAKE_FIND_APPBUNDLE)
+endif()
+# Restore CMAKE_FIND_FRAMEWORK
+if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
+  set (CMAKE_FIND_FRAMEWORK ${_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK})
+  unset (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
+else()
+  unset (CMAKE_FIND_FRAMEWORK)
+endif()
+
+unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
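
The support file above does the actual work behind the per-version find modules:
it locates the interpreter, compiler and development artifacts, fills in the
``${_PYTHON_PREFIX}_*`` result variables and, when the CMake role is ``PROJECT``,
creates the imported targets. As a minimal sketch of a consuming
``CMakeLists.txt``, assuming the ``Python3`` prefix documented in the module
below (the ``example_ext`` target and ``example_ext.c`` source are hypothetical)::

    cmake_minimum_required(VERSION 3.5)
    project(example LANGUAGES C)

    # Requesting both components in one call keeps the interpreter and the
    # headers/libraries at a consistent version, as the module recommends.
    find_package(Python3 REQUIRED COMPONENTS Interpreter Development)

    message(STATUS "interpreter: ${Python3_EXECUTABLE} (${Python3_VERSION})")
    message(STATUS "includes:    ${Python3_INCLUDE_DIRS}")

    # The imported target carries the include directories and link libraries.
    add_library(example_ext MODULE example_ext.c)
    target_link_libraries(example_ext PRIVATE Python3::Python)
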
diff -pruN 14.2.16-2/cmake/modules/FindPython3.cmake 15.2.7-0ubuntu4/cmake/modules/FindPython3.cmake
--- 14.2.16-2/cmake/modules/FindPython3.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindPython3.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,189 @@
+# Distributed under the OSI-approved BSD 3-Clause License.  See accompanying
+# file Copyright.txt or https://cmake.org/licensing for details.
+
+#[=======================================================================[.rst:
+FindPython3
+-----------
+
+Find Python 3 interpreter, compiler and development environment (include
+directories and libraries).
+
+The following components are supported:
+
+* ``Interpreter``: search for Python 3 interpreter
+* ``Compiler``: search for Python 3 compiler. Only offered by IronPython.
+* ``Development``: search for development artifacts (include directories and
+  libraries)
+* ``NumPy``: search for NumPy include directories.
+
+If no ``COMPONENTS`` are specified, ``Interpreter`` is assumed.
+
+To ensure consistent versions between components ``Interpreter``, ``Compiler``,
+``Development`` and ``NumPy``, specify all components at the same time::
+
+  find_package (Python3 COMPONENTS Interpreter Development)
+
+This module looks only for version 3 of Python. It can be used concurrently
+with the :module:`FindPython2` module to use both Python versions.
+
+The :module:`FindPython` module can be used if the Python version does not
+matter to you.
+
+.. note::
+
+  If components ``Interpreter`` and ``Development`` are both specified, this
+  module searches only for an interpreter with the same platform architecture
+  as the one defined by the ``CMake`` configuration. This constraint does not
+  apply if only the ``Interpreter`` component is specified.
+
+Imported Targets
+^^^^^^^^^^^^^^^^
+
+This module defines the following :ref:`Imported Targets <Imported Targets>`
+(when :prop_gbl:`CMAKE_ROLE` is ``PROJECT``):
+
+``Python3::Interpreter``
+  Python 3 interpreter. Target defined if component ``Interpreter`` is found.
+``Python3::Compiler``
+  Python 3 compiler. Target defined if component ``Compiler`` is found.
+``Python3::Python``
+  Python 3 library. Target defined if component ``Development`` is found.
+``Python3::NumPy``
+  NumPy library for Python 3. Target defined if component ``NumPy`` is found.
+
+Result Variables
+^^^^^^^^^^^^^^^^
+
+This module will set the following variables in your project
+(see :ref:`Standard Variable Names <CMake Developer Standard Variable Names>`):
+
+``Python3_FOUND``
+  System has the requested Python 3 components.
+``Python3_Interpreter_FOUND``
+  System has the Python 3 interpreter.
+``Python3_EXECUTABLE``
+  Path to the Python 3 interpreter.
+``Python3_INTERPRETER_ID``
+  A short string unique to the interpreter. Possible values include:
+    * Python
+    * ActivePython
+    * Anaconda
+    * Canopy
+    * IronPython
+``Python3_STDLIB``
+  Standard platform independent installation directory.
+
+  Information returned by
+  ``distutils.sysconfig.get_python_lib(plat_specific=False,standard_lib=True)``.
+``Python3_STDARCH``
+  Standard platform dependent installation directory.
+
+  Information returned by
+  ``distutils.sysconfig.get_python_lib(plat_specific=True,standard_lib=True)``.
+``Python3_SITELIB``
+  Third-party platform independent installation directory.
+
+  Information returned by
+  ``distutils.sysconfig.get_python_lib(plat_specific=False,standard_lib=False)``.
+``Python3_SITEARCH``
+  Third-party platform dependent installation directory.
+
+  Information returned by
+  ``distutils.sysconfig.get_python_lib(plat_specific=True,standard_lib=False)``.
+``Python3_Compiler_FOUND``
+  System has the Python 3 compiler.
+``Python3_COMPILER``
+  Path to the Python 3 compiler. Only offered by IronPython.
+``Python3_COMPILER_ID``
+  A short string unique to the compiler. Possible values include:
+    * IronPython
+``Python3_Development_FOUND``
+  System has the Python 3 development artifacts.
+``Python3_INCLUDE_DIRS``
+  The Python 3 include directories.
+``Python3_LIBRARIES``
+  The Python 3 libraries.
+``Python3_LIBRARY_DIRS``
+  The Python 3 library directories.
+``Python3_RUNTIME_LIBRARY_DIRS``
+  The Python 3 runtime library directories.
+``Python3_VERSION``
+  Python 3 version.
+``Python3_VERSION_MAJOR``
+  Python 3 major version.
+``Python3_VERSION_MINOR``
+  Python 3 minor version.
+``Python3_VERSION_PATCH``
+  Python 3 patch version.
+``Python3_NumPy_FOUND``
+  System has NumPy.
+``Python3_NumPy_INCLUDE_DIRS``
+  The NumPy include directories.
+``Python3_NumPy_VERSION``
+  The NumPy version.
+
+Hints
+^^^^^
+
+``Python3_ROOT_DIR``
+  Define the root directory of a Python 3 installation.
+
+``Python3_USE_STATIC_LIBS``
+  * If not defined, search for shared libraries and static libraries in that
+    order.
+  * If set to TRUE, search **only** for static libraries.
+  * If set to FALSE, search **only** for shared libraries.
+
+``Python3_FIND_REGISTRY``
+  On Windows the ``Python3_FIND_REGISTRY`` variable determines the order
+  of preference between the registry and environment variables. It can be
+  set to empty or one of the following:
+
+  * ``FIRST``: Try to use registry before environment variables.
+    This is the default.
+  * ``LAST``: Try to use registry after environment variables.
+  * ``NEVER``: Never try to use registry.
+
+``CMAKE_FIND_FRAMEWORK``
+  On OS X the :variable:`CMAKE_FIND_FRAMEWORK` variable determines the order of
+  preference between Apple-style and unix-style package components.
+
+  .. note::
+
+    Value ``ONLY`` is not supported so ``FIRST`` will be used instead.
+
+.. note::
+
+  If a Python virtual environment is configured, set the variable
+  ``Python3_FIND_REGISTRY`` (Windows) or ``CMAKE_FIND_FRAMEWORK`` (macOS) to
+  ``LAST`` or ``NEVER`` so that the virtual environment is preferred.
+
+Commands
+^^^^^^^^
+
+This module defines the command ``Python3_add_library`` (when
+:prop_gbl:`CMAKE_ROLE` is ``PROJECT``), which has the same semantics as
+:command:`add_library`, but takes care of Python module naming rules
+(applied only if the library is of type ``MODULE``) and adds a dependency on the
+target ``Python3::Python``::
+
+  Python3_add_library (my_module MODULE src1.cpp)
+
+If library type is not specified, ``MODULE`` is assumed.
+#]=======================================================================]
+
+
+set (_PYTHON_PREFIX Python3)
+
+set (_Python3_REQUIRED_VERSION_MAJOR 3)
+
+include (${CMAKE_CURRENT_LIST_DIR}/FindPython/Support.cmake)
+
+if (COMMAND __Python3_add_library)
+  macro (Python3_add_library)
+    __Python3_add_library (Python3 ${ARGV})
+  endmacro()
+endif()
+
+unset (_PYTHON_PREFIX)
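
The new module itself is a thin wrapper: it sets the prefix and required major
version, includes the shared support file, and re-exports the helper command.
A hedged sketch combining the documented hints with ``Python3_add_library``
(the root directory, ``my_module`` target and ``src1.cpp`` source are
illustrative only)::

    # Hints must be set before the find_package() call.
    set(Python3_ROOT_DIR "/opt/python3")   # hypothetical installation prefix
    set(Python3_FIND_REGISTRY LAST)        # Windows: prefer an active virtualenv
    set(CMAKE_FIND_FRAMEWORK LAST)         # macOS: prefer an active virtualenv

    find_package(Python3 COMPONENTS Interpreter Development NumPy)

    if(Python3_Development_FOUND)
      # Same semantics as add_library(); MODULE is assumed when no type is
      # given, and Python module naming rules are applied automatically.
      Python3_add_library(my_module MODULE src1.cpp)
      if(Python3_NumPy_FOUND)
        target_link_libraries(my_module PRIVATE Python3::NumPy)
      endif()
    endif()
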
diff -pruN 14.2.16-2/cmake/modules/FindPython3Interp.cmake 15.2.7-0ubuntu4/cmake/modules/FindPython3Interp.cmake
--- 14.2.16-2/cmake/modules/FindPython3Interp.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindPython3Interp.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,148 +0,0 @@
-#.rst:
-# FindPython3Interp
-# ----------------
-#
-# Find python interpreter
-#
-# This module finds if Python interpreter is installed and determines
-# where the executables are.  This code sets the following variables:
-#
-# ::
-#
-#   PYTHON3INTERP_FOUND         - Was the Python executable found
-#   PYTHON3_EXECUTABLE          - path to the Python interpreter
-#
-#
-#
-# ::
-#
-#   PYTHON3_VERSION_STRING      - Python version found e.g. 2.5.2
-#   PYTHON3_VERSION_MAJOR       - Python major version found e.g. 2
-#   PYTHON3_VERSION_MINOR       - Python minor version found e.g. 5
-#   PYTHON3_VERSION_PATCH       - Python patch version found e.g. 2
-#
-#
-#
-# The Python3_ADDITIONAL_VERSIONS variable can be used to specify a list
-# of version numbers that should be taken into account when searching
-# for Python.  You need to set this variable before calling
-# find_package(Python3Interp).
-#
-# If calling both ``find_package(Python3Interp)`` and
-# ``find_package(Python3Libs)``, call ``find_package(Python3Interp)`` first to
-# get the currently active Python version by default with a consistent version
-# of PYTHON3_LIBRARIES.
-
-#=============================================================================
-# Copyright 2005-2010 Kitware, Inc.
-# Copyright 2011 Bjoern Ricks <bjoern.ricks@gmail.com>
-# Copyright 2012 Rolf Eike Beer <eike@sf-mail.de>
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# * Redistributions of source code must retain the above copyright
-#   notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright
-#   notice, this list of conditions and the following disclaimer in the
-#   documentation and/or other materials provided with the distribution.
-#
-# * Neither the names of Kitware, Inc., the Insight Software Consortium,
-#   nor the names of their contributors may be used to endorse or promote
-#   products derived from this software without specific prior written
-#   permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#=============================================================================
-
-unset(_Python3_NAMES)
-
-set(_PYTHON3_VERSIONS 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
-
-if(Python3Interp_FIND_VERSION)
-    if(Python3Interp_FIND_VERSION_COUNT GREATER 1)
-        set(_PYTHON3_FIND_MAJ_MIN "${Python3Interp_FIND_VERSION_MAJOR}.${Python3Interp_FIND_VERSION_MINOR}")
-        list(APPEND _Python3_NAMES
-             python${_PYTHON3_FIND_MAJ_MIN}
-             python${Python3Interp_FIND_VERSION_MAJOR})
-        unset(_PYTHON3_FIND_OTHER_VERSIONS)
-        if(NOT Python3Interp_FIND_VERSION_EXACT)
-            foreach(_PYTHON3_V ${_PYTHON${Python3Interp_FIND_VERSION_MAJOR}_VERSIONS})
-                if(NOT _PYTHON3_V VERSION_LESS _PYTHON3_FIND_MAJ_MIN)
-                    list(APPEND _PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON3_V})
-                endif()
-             endforeach()
-        endif()
-        unset(_PYTHON3_FIND_MAJ_MIN)
-    else()
-        list(APPEND _Python3_NAMES python${Python3Interp_FIND_VERSION_MAJOR})
-        set(_PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON${Python3Interp_FIND_VERSION_MAJOR}_VERSIONS})
-    endif()
-else()
-    set(_PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON3_VERSIONS})
-endif()
-find_program(PYTHON3_EXECUTABLE NAMES ${_Python3_NAMES})
-
-# Set up the versions we know about, in the order we will search. Always add
-# the user supplied additional versions to the front.
-set(_Python3_VERSIONS ${Python3_ADDITIONAL_VERSIONS})
-# If FindPython3Interp has already found the major and minor version,
-# insert that version next to get consistent versions of the interpreter and
-# library.
-if(DEFINED PYTHON3LIBS_VERSION_STRING)
-  string(REPLACE "." ";" _PYTHON3LIBS_VERSION "${PYTHON3LIBS_VERSION_STRING}")
-  list(GET _PYTHON3LIBS_VERSION 0 _PYTHON3LIBS_VERSION_MAJOR)
-  list(GET _PYTHON3LIBS_VERSION 1 _PYTHON3LIBS_VERSION_MINOR)
-  list(APPEND _Python3_VERSIONS ${_PYTHON3LIBS_VERSION_MAJOR}.${_PYTHON3LIBS_VERSION_MINOR})
-endif()
-# Search for the current active python version first
-list(APPEND _Python3_VERSIONS ";")
-list(APPEND _Python3_VERSIONS ${_PYTHON3_FIND_OTHER_VERSIONS})
-
-unset(_PYTHON3_FIND_OTHER_VERSIONS)
-unset(_PYTHON3_VERSIONS)
-
-# Search for newest python version if python executable isn't found
-if(NOT PYTHON3_EXECUTABLE)
-    foreach(_CURRENT_VERSION IN LISTS _Python3_VERSIONS)
-      set(_Python3_NAMES python${_CURRENT_VERSION})
-      if(WIN32)
-        list(APPEND _Python3_NAMES python)
-      endif()
-      find_program(PYTHON3_EXECUTABLE
-        NAMES ${_Python3_NAMES}
-        PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]
-        )
-    endforeach()
-endif()
-
-# determine python version string
-if(PYTHON3_EXECUTABLE)
-    execute_process(COMMAND "${PYTHON3_EXECUTABLE}" -c
-                            "import sys; sys.stdout.write(';'.join([str(x) for x in sys.version_info[:3]]))"
-                    OUTPUT_VARIABLE _VERSION)
-    string(REPLACE ";" "." PYTHON3_VERSION_STRING "${_VERSION}")
-    list(GET _VERSION 0 PYTHON3_VERSION_MAJOR)
-    list(GET _VERSION 1 PYTHON3_VERSION_MINOR)
-    list(GET _VERSION 2 PYTHON3_VERSION_PATCH)
-    unset(_VERSION)
-endif()
-
-# handle the QUIETLY and REQUIRED arguments and set PYTHON3INTERP_FOUND to TRUE if
-# all listed variables are TRUE
-include(FindPackageHandleStandardArgs)
-FIND_PACKAGE_HANDLE_STANDARD_ARGS(Python3Interp REQUIRED_VARS PYTHON3_EXECUTABLE VERSION_VAR PYTHON3_VERSION_STRING)
-
-mark_as_advanced(PYTHON3_EXECUTABLE)
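
The FindPython3Interp module whose removal ends above (and the FindPython3Libs module removed next) are in-tree wrappers that duplicate what CMake's built-in FindPython3 module has offered since CMake 3.12; the updated CMakeLists.txt further down in this diff calls the built-in module directly. A minimal sketch of that replacement, assuming CMake >= 3.12 (the project name here is arbitrary):

    cmake_minimum_required(VERSION 3.12)
    project(python3_probe LANGUAGES C)

    # One call locates both the interpreter and the development files that the
    # removed FindPython3Interp/FindPython3Libs pair used to find separately.
    find_package(Python3 REQUIRED COMPONENTS Interpreter Development)

    # Result variables roughly corresponding to the old PYTHON3_* cache entries.
    message(STATUS "interpreter: ${Python3_EXECUTABLE} (${Python3_VERSION})")
    message(STATUS "includes:    ${Python3_INCLUDE_DIRS}")
    message(STATUS "libraries:   ${Python3_LIBRARIES}")
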
diff -pruN 14.2.16-2/cmake/modules/FindPython3Libs.cmake 15.2.7-0ubuntu4/cmake/modules/FindPython3Libs.cmake
--- 14.2.16-2/cmake/modules/FindPython3Libs.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindPython3Libs.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,369 +0,0 @@
-#.rst:
-# FindPython3Libs
-# --------------
-#
-# Find python libraries
-#
-# This module finds if Python is installed and determines where the
-# include files and libraries are.  It also determines what the name of
-# the library is.  This code sets the following variables:
-#
-# ::
-#
-#   PYTHON3LIBS_FOUND           - have the Python libs been found
-#   PYTHON3_LIBRARIES           - path to the python library
-#   PYTHON3_INCLUDE_PATH        - path to where Python.h is found (deprecated)
-#   PYTHON3_INCLUDE_DIRS        - path to where Python.h is found
-#   PYTHON3_DEBUG_LIBRARIES     - path to the debug library (deprecated)
-#   PYTHON3LIBS_VERSION_STRING  - version of the Python libs found (since CMake 2.8.8)
-#
-#
-#
-# The Python3_ADDITIONAL_VERSIONS variable can be used to specify a list
-# of version numbers that should be taken into account when searching
-# for Python.  You need to set this variable before calling
-# find_package(Python3Libs).
-#
-# If you'd like to specify the installation of Python to use, you should
-# modify the following cache variables:
-#
-# ::
-#
-#   PYTHON3_LIBRARY             - path to the python library
-#   PYTHON3_INCLUDE_DIR         - path to where Python.h is found
-#
-# If calling both ``find_package(PythonInterp)`` and
-# ``find_package(Python3Libs)``, call ``find_package(PythonInterp)`` first to
-# get the currently active Python version by default with a consistent version
-# of PYTHON3_LIBRARIES.
-
-#=============================================================================
-# Copyright 2001-2009 Kitware, Inc.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# * Redistributions of source code must retain the above copyright
-#   notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright
-#   notice, this list of conditions and the following disclaimer in the
-#   documentation and/or other materials provided with the distribution.
-#
-# * Neither the names of Kitware, Inc., the Insight Software Consortium,
-#   nor the names of their contributors may be used to endorse or promote
-#   products derived from this software without specific prior written
-#   permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#=============================================================================
-
-# Use the executable's path as a hint
-set(_Python3_LIBRARY_PATH_HINT)
-if(PYTHON3_EXECUTABLE)
-  if(WIN32)
-    get_filename_component(_Python3_PREFIX ${PYTHON3_EXECUTABLE} PATH)
-    if(_Python3_PREFIX)
-      set(_Python3_LIBRARY_PATH_HINT ${_Python3_PREFIX}/libs)
-    endif()
-    unset(_Python3_PREFIX)
-  else()
-    get_filename_component(_Python3_PREFIX ${PYTHON3_EXECUTABLE} PATH)
-    get_filename_component(_Python3_PREFIX ${_Python3_PREFIX} PATH)
-    if(_Python3_PREFIX)
-      set(_Python3_LIBRARY_PATH_HINT ${_Python3_PREFIX}/lib)
-    endif()
-    unset(_Python3_PREFIX)
-  endif()
-endif()
-
-include(CMakeFindFrameworks)
-# Search for the python framework on Apple.
-CMAKE_FIND_FRAMEWORKS(Python)
-
-# Save CMAKE_FIND_FRAMEWORK
-if(DEFINED CMAKE_FIND_FRAMEWORK)
-  set(_Python3Libs_CMAKE_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
-else()
-  unset(_Python3Libs_CMAKE_FIND_FRAMEWORK)
-endif()
-# To avoid picking up the system Python.h pre-maturely.
-set(CMAKE_FIND_FRAMEWORK LAST)
-
-set(_PYTHON3_VERSIONS 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
-
-if(Python3Libs_FIND_VERSION)
-    if(Python3Libs_FIND_VERSION_COUNT GREATER 1)
-        set(_PYTHON3_FIND_MAJ_MIN "${Python3Libs_FIND_VERSION_MAJOR}.${Python3Libs_FIND_VERSION_MINOR}")
-        unset(_PYTHON3_FIND_OTHER_VERSIONS)
-        if(Python3Libs_FIND_VERSION_EXACT)
-            if(_PYTHON3_FIND_MAJ_MIN STREQUAL Python3Libs_FIND_VERSION)
-                set(_PYTHON3_FIND_OTHER_VERSIONS "${Python3Libs_FIND_VERSION}")
-            else()
-                set(_PYTHON3_FIND_OTHER_VERSIONS "${Python3Libs_FIND_VERSION}" "${_PYTHON3_FIND_MAJ_MIN}")
-            endif()
-        else()
-            foreach(_PYTHON3_V ${_PYTHON${Python3Libs_FIND_VERSION_MAJOR}_VERSIONS})
-                if(NOT _PYTHON3_V VERSION_LESS _PYTHON3_FIND_MAJ_MIN)
-                    list(APPEND _PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON3_V})
-                endif()
-             endforeach()
-        endif()
-        unset(_PYTHON3_FIND_MAJ_MIN)
-    else()
-        set(_PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON${Python3Libs_FIND_VERSION_MAJOR}_VERSIONS})
-    endif()
-else()
-    set(_PYTHON3_FIND_OTHER_VERSIONS ${_PYTHON3_VERSIONS})
-endif()
-
-# Set up the versions we know about, in the order we will search. Always add
-# the user supplied additional versions to the front.
-# If FindPythonInterp has already found the major and minor version,
-# insert that version between the user supplied versions and the stock
-# version list.
-set(_Python3_VERSIONS ${Python3_ADDITIONAL_VERSIONS})
-if(DEFINED PYTHON3_VERSION_MAJOR AND DEFINED PYTHON3_VERSION_MINOR)
-  list(APPEND _Python3_VERSIONS ${PYTHON3_VERSION_MAJOR}.${PYTHON3_VERSION_MINOR})
-endif()
-list(APPEND _Python3_VERSIONS ${_PYTHON3_FIND_OTHER_VERSIONS})
-
-unset(_PYTHON3_FIND_OTHER_VERSIONS)
-unset(_PYTHON3_VERSIONS)
-
-foreach(_CURRENT_VERSION ${_Python3_VERSIONS})
-  string(REPLACE "." "" _CURRENT_VERSION_NO_DOTS ${_CURRENT_VERSION})
-  if(WIN32)
-    find_library(PYTHON3_DEBUG_LIBRARY
-      NAMES python${_CURRENT_VERSION_NO_DOTS}_d python
-      HINTS ${_Python3_LIBRARY_PATH_HINT}
-      PATHS
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs
-      )
-  endif()
-
-  set(PYTHON3_FRAMEWORK_LIBRARIES)
-  if(Python3_FRAMEWORKS AND NOT PYTHON3_LIBRARY)
-    foreach(dir ${Python3_FRAMEWORKS})
-      list(APPEND PYTHON3_FRAMEWORK_LIBRARIES
-           ${dir}/Versions/${_CURRENT_VERSION}/lib)
-    endforeach()
-  endif()
-  find_library(PYTHON3_LIBRARY
-    NAMES
-      python${_CURRENT_VERSION_NO_DOTS}
-      python${_CURRENT_VERSION}mu
-      python${_CURRENT_VERSION}m
-      python${_CURRENT_VERSION}u
-      python${_CURRENT_VERSION}
-    HINTS
-      ${_Python3_LIBRARY_PATH_HINT}
-    PATHS
-      ${PYTHON3_FRAMEWORK_LIBRARIES}
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs
-    # Avoid finding the .dll in the PATH.  We want the .lib.
-    NO_SYSTEM_ENVIRONMENT_PATH
-  )
-  # Look for the static library in the Python config directory
-  find_library(PYTHON3_LIBRARY
-    NAMES python${_CURRENT_VERSION_NO_DOTS} python${_CURRENT_VERSION}
-    # Avoid finding the .dll in the PATH.  We want the .lib.
-    NO_SYSTEM_ENVIRONMENT_PATH
-    # This is where the static library is usually located
-    PATH_SUFFIXES python${_CURRENT_VERSION}/config
-  )
-
-  # Don't search for include dir until library location is known
-  if(PYTHON3_LIBRARY)
-
-    # Use the library's install prefix as a hint
-    set(_Python3_INCLUDE_PATH_HINT)
-    get_filename_component(_Python3_PREFIX ${PYTHON3_LIBRARY} PATH)
-    get_filename_component(_Python3_PREFIX ${_Python3_PREFIX} PATH)
-    if(_Python3_PREFIX)
-      set(_Python3_INCLUDE_PATH_HINT ${_Python3_PREFIX}/include)
-    endif()
-    unset(_Python3_PREFIX)
-
-    # Add framework directories to the search paths
-    set(PYTHON3_FRAMEWORK_INCLUDES)
-    if(Python3_FRAMEWORKS AND NOT PYTHON3_INCLUDE_DIR)
-      foreach(dir ${Python3_FRAMEWORKS})
-        list(APPEND PYTHON3_FRAMEWORK_INCLUDES
-          ${dir}/Versions/${_CURRENT_VERSION}/include)
-      endforeach()
-    endif()
-
-    find_path(PYTHON3_INCLUDE_DIR
-      NAMES Python.h
-      HINTS
-        ${_Python3_INCLUDE_PATH_HINT}
-      PATHS
-        ${PYTHON3_FRAMEWORK_INCLUDES}
-        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/include
-        [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/include
-      PATH_SUFFIXES
-        python${_CURRENT_VERSION}mu
-        python${_CURRENT_VERSION}m
-        python${_CURRENT_VERSION}u
-        python${_CURRENT_VERSION}
-    )
-  endif()
-
-  # For backward compatibility, set PYTHON3_INCLUDE_PATH.
-  set(PYTHON3_INCLUDE_PATH "${PYTHON3_INCLUDE_DIR}")
-
-  if(PYTHON3_INCLUDE_DIR AND EXISTS "${PYTHON3_INCLUDE_DIR}/patchlevel.h")
-    file(STRINGS "${PYTHON3_INCLUDE_DIR}/patchlevel.h" python3_version_str
-         REGEX "^#define[ \t]+PY_VERSION[ \t]+\"[^\"]+\"")
-    string(REGEX REPLACE "^#define[ \t]+PY_VERSION[ \t]+\"([^\"]+)\".*" "\\1"
-                         PYTHON3LIBS_VERSION_STRING "${python3_version_str}")
-    unset(python3_version_str)
-  endif()
-
-  if(PYTHON3_LIBRARY AND PYTHON3_INCLUDE_DIR)
-    break()
-  endif()
-endforeach()
-
-unset(_Python3_INCLUDE_PATH_HINT)
-unset(_Python3_LIBRARY_PATH_HINT)
-
-mark_as_advanced(
-  PYTHON3_DEBUG_LIBRARY
-  PYTHON3_LIBRARY
-  PYTHON3_INCLUDE_DIR
-)
-
-# We use PYTHON3_INCLUDE_DIR, PYTHON3_LIBRARY and PYTHON3_DEBUG_LIBRARY for the
-# cache entries because they are meant to specify the location of a single
-# library. We now set the variables listed by the documentation for this
-# module.
-set(PYTHON3_INCLUDE_DIRS "${PYTHON3_INCLUDE_DIR}")
-set(PYTHON3_DEBUG_LIBRARIES "${PYTHON3_DEBUG_LIBRARY}")
-
-# These variables have been historically named in this module different from
-# what SELECT_LIBRARY_CONFIGURATIONS() expects.
-set(PYTHON3_LIBRARY_DEBUG "${PYTHON3_DEBUG_LIBRARY}")
-set(PYTHON3_LIBRARY_RELEASE "${PYTHON3_LIBRARY}")
-include(SelectLibraryConfigurations)
-SELECT_LIBRARY_CONFIGURATIONS(PYTHON3)
-# SELECT_LIBRARY_CONFIGURATIONS() sets ${PREFIX}_FOUND if it has a library.
-# Unset this, this prefix doesn't match the module prefix, they are different
-# for historical reasons.
-unset(PYTHON3_FOUND)
-
-# Restore CMAKE_FIND_FRAMEWORK
-if(DEFINED _Python3Libs_CMAKE_FIND_FRAMEWORK)
-  set(CMAKE_FIND_FRAMEWORK ${_Python3Libs_CMAKE_FIND_FRAMEWORK})
-  unset(_Python3Libs_CMAKE_FIND_FRAMEWORK)
-else()
-  unset(CMAKE_FIND_FRAMEWORK)
-endif()
-
-include(FindPackageHandleStandardArgs)
-FIND_PACKAGE_HANDLE_STANDARD_ARGS(Python3Libs
-                                  REQUIRED_VARS PYTHON3_LIBRARIES PYTHON3_INCLUDE_DIRS
-                                  VERSION_VAR PYTHON3LIBS_VERSION_STRING)
-
-# PYTHON3_ADD_MODULE(<name> src1 src2 ... srcN) is used to build modules for python.
-# PYTHON3_WRITE_MODULES_HEADER(<filename>) writes a header file you can include
-# in your sources to initialize the static python modules
-function(PYTHON3_ADD_MODULE _NAME )
-  get_property(_TARGET_SUPPORTS_SHARED_LIBS
-    GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS)
-  option(PYTHON3_ENABLE_MODULE_${_NAME} "Add module ${_NAME}" TRUE)
-  option(PYTHON3_MODULE_${_NAME}_BUILD_SHARED
-    "Add module ${_NAME} shared" ${_TARGET_SUPPORTS_SHARED_LIBS})
-
-  # Mark these options as advanced
-  mark_as_advanced(PYTHON3_ENABLE_MODULE_${_NAME}
-    PYTHON3_MODULE_${_NAME}_BUILD_SHARED)
-
-  if(PYTHON3_ENABLE_MODULE_${_NAME})
-    if(PYTHON3_MODULE_${_NAME}_BUILD_SHARED)
-      set(PY_MODULE_TYPE MODULE)
-    else()
-      set(PY_MODULE_TYPE STATIC)
-      set_property(GLOBAL  APPEND  PROPERTY  PY_STATIC_MODULES_LIST ${_NAME})
-    endif()
-
-    set_property(GLOBAL  APPEND  PROPERTY  PY_MODULES_LIST ${_NAME})
-    add_library(${_NAME} ${PY_MODULE_TYPE} ${ARGN})
-#    target_link_libraries(${_NAME} ${PYTHON3_LIBRARIES})
-
-    if(PYTHON3_MODULE_${_NAME}_BUILD_SHARED)
-      set_target_properties(${_NAME} PROPERTIES PREFIX "${PYTHON3_MODULE_PREFIX}")
-      if(WIN32 AND NOT CYGWIN)
-        set_target_properties(${_NAME} PROPERTIES SUFFIX ".pyd")
-      endif()
-    endif()
-
-  endif()
-endfunction()
-
-function(PYTHON3_WRITE_MODULES_HEADER _filename)
-
-  get_property(PY_STATIC_MODULES_LIST  GLOBAL  PROPERTY PY_STATIC_MODULES_LIST)
-
-  get_filename_component(_name "${_filename}" NAME)
-  string(REPLACE "." "_" _name "${_name}")
-  string(TOUPPER ${_name} _nameUpper)
-  set(_filename ${CMAKE_CURRENT_BINARY_DIR}/${_filename})
-
-  set(_filenameTmp "${_filename}.in")
-  file(WRITE ${_filenameTmp} "/*Created by cmake, do not edit, changes will be lost*/\n")
-  file(APPEND ${_filenameTmp}
-"#ifndef ${_nameUpper}
-#define ${_nameUpper}
-
-#include <Python.h>
-
-#ifdef __cplusplus
-extern \"C\" {
-#endif /* __cplusplus */
-
-")
-
-  foreach(_currentModule ${PY_STATIC_MODULES_LIST})
-    file(APPEND ${_filenameTmp} "extern void init${PYTHON3_MODULE_PREFIX}${_currentModule}(void);\n\n")
-  endforeach()
-
-  file(APPEND ${_filenameTmp}
-"#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-")
-
-
-  foreach(_currentModule ${PY_STATIC_MODULES_LIST})
-    file(APPEND ${_filenameTmp} "int ${_name}_${_currentModule}(void) \n{\n  static char name[]=\"${PYTHON3_MODULE_PREFIX}${_currentModule}\"; return PyImport_AppendInittab(name, init${PYTHON3_MODULE_PREFIX}${_currentModule});\n}\n\n")
-  endforeach()
-
-  file(APPEND ${_filenameTmp} "void ${_name}_LoadAllPythonModules(void)\n{\n")
-  foreach(_currentModule ${PY_STATIC_MODULES_LIST})
-    file(APPEND ${_filenameTmp} "  ${_name}_${_currentModule}();\n")
-  endforeach()
-  file(APPEND ${_filenameTmp} "}\n\n")
-  file(APPEND ${_filenameTmp} "#ifndef EXCLUDE_LOAD_ALL_FUNCTION\nvoid CMakeLoadAllPythonModules(void)\n{\n  ${_name}_LoadAllPythonModules();\n}\n#endif\n\n#endif\n")
-
-# with configure_file() cmake complains that you may not use a file created using file(WRITE) as input file for configure_file()
-  execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${_filenameTmp}" "${_filename}" OUTPUT_QUIET ERROR_QUIET)
-
-endfunction()
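
Beyond variable discovery, the removed FindPython3Libs module also provided the PYTHON3_ADD_MODULE and PYTHON3_WRITE_MODULES_HEADER helpers. The built-in FindPython3 module offers a rough counterpart for the first of these; a hedged sketch, assuming CMake >= 3.12 and a hypothetical extension source file spam.c:

    find_package(Python3 REQUIRED COMPONENTS Interpreter Development)

    # Python3_add_library() is defined by FindPython3 once the Development
    # component is found; MODULE builds a loadable extension and links it
    # against the located Python development files.
    Python3_add_library(spam MODULE spam.c)

There is no built-in equivalent of PYTHON3_WRITE_MODULES_HEADER (the generated inittab header for statically linked modules); code that relied on it would need its own replacement.
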
diff -pruN 14.2.16-2/cmake/modules/FindSanitizers.cmake 15.2.7-0ubuntu4/cmake/modules/FindSanitizers.cmake
--- 14.2.16-2/cmake/modules/FindSanitizers.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindSanitizers.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -44,6 +44,8 @@ if(Sanitizers_COMPILE_OPTIONS)
     "-fno-omit-frame-pointer")
 endif()
 
+string (REPLACE ";" " " Sanitizers_COMPILE_OPTIONS "${Sanitizers_COMPILE_OPTIONS}")
+
 include(CheckCXXSourceCompiles)
 set(CMAKE_REQUIRED_FLAGS ${Sanitizers_COMPILE_OPTIONS})
 set(CMAKE_REQUIRED_LIBRARIES ${Sanitizers_COMPILE_OPTIONS})
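
The added string(REPLACE ...) reflects that CMAKE_REQUIRED_FLAGS is a single command-line string, not a CMake list: assigning the semicolon-separated options list directly would put literal semicolons on the compiler command line and break the subsequent compile checks. A minimal sketch of the pattern, using an illustrative -fsanitize=address flag:

    include(CheckCXXSourceCompiles)

    set(sanitizer_flags "-fsanitize=address;-fno-omit-frame-pointer")
    # CMAKE_REQUIRED_FLAGS wants one space-separated string of flags ...
    string(REPLACE ";" " " CMAKE_REQUIRED_FLAGS "${sanitizer_flags}")
    # ... while CMAKE_REQUIRED_LIBRARIES is a list appended to the link line.
    set(CMAKE_REQUIRED_LIBRARIES "-fsanitize=address")
    check_cxx_source_compiles("int main() { return 0; }" COMPILER_SUPPORTS_ASAN)
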
diff -pruN 14.2.16-2/cmake/modules/FindStdFilesystem.cmake 15.2.7-0ubuntu4/cmake/modules/FindStdFilesystem.cmake
--- 14.2.16-2/cmake/modules/FindStdFilesystem.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/FindStdFilesystem.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -2,16 +2,8 @@ set(_std_filesystem_test_src
   ${CMAKE_CURRENT_LIST_DIR}/FindStdFilesystem_test.cc)
 
 macro(try_std_filesystem_library _library _result)
-  if(CMAKE_VERSION VERSION_LESS "3.8")
-    # abuse the definition flags, because they are quite
-    # the same as CMAKE_C_FLAGS: they are passed to the
-    # compiler.
-    set(_std_filesystem_try_compile_arg
-      COMPILE_DEFINITIONS "-std=c++17")
-  else()
-    set(_std_filesystem_try_compile_arg
-      CXX_STANDARD 17)
-  endif()
+  set(_std_filesystem_try_compile_arg
+    CXX_STANDARD 17)
   try_compile(_std_filesystem_compiles
     ${CMAKE_CURRENT_BINARY_DIR}
     SOURCES ${_std_filesystem_test_src}
diff -pruN 14.2.16-2/cmake/modules/Findxio.cmake 15.2.7-0ubuntu4/cmake/modules/Findxio.cmake
--- 14.2.16-2/cmake/modules/Findxio.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/Findxio.cmake	1970-01-01 00:00:00.000000000 +0000
@@ -1,24 +0,0 @@
-# - Find libxio
-# Find libxio transport library
-#
-# XIO_INCLUDE_DIR -  libxio include dir
-# XIO_LIBRARIES - List of libraries
-# XIO_FOUND - True if libxio found.
-
-if(WITH_XIO AND EXISTS ${WITH_XIO})
-  find_path(XIO_INCLUDE_DIR libxio.h HINTS "${WITH_XIO}/include")
-  find_library(XIO_LIBRARY xio HINTS "${WITH_XIO}/lib")
-else()
-  find_path(XIO_INCLUDE_DIR libxio.h)
-  find_library(XIO_LIBRARY xio)
-endif()
-
-set(XIO_LIBRARIES ${XIO_LIBRARY})
-
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(xio DEFAULT_MSG XIO_LIBRARY XIO_INCLUDE_DIR)
-
-mark_as_advanced(
-  XIO_LIBRARY
-  XIO_INCLUDE_DIR
-  )
diff -pruN 14.2.16-2/cmake/modules/GetGitRevisionDescription.cmake 15.2.7-0ubuntu4/cmake/modules/GetGitRevisionDescription.cmake
--- 14.2.16-2/cmake/modules/GetGitRevisionDescription.cmake	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/GetGitRevisionDescription.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -53,12 +53,16 @@ function(get_git_head_revision _refspecv
 		endif()
 		set(GIT_DIR "${GIT_PARENT_DIR}/.git")
 	endwhile()
-	# check if this is a submodule
+	# check if this is a submodule or git-worktree
 	if(NOT IS_DIRECTORY ${GIT_DIR})
-		file(READ ${GIT_DIR} submodule)
-		string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" GIT_DIR_RELATIVE ${submodule})
-		get_filename_component(SUBMODULE_DIR ${GIT_DIR} PATH)
-		get_filename_component(GIT_DIR ${SUBMODULE_DIR}/${GIT_DIR_RELATIVE} ABSOLUTE)
+		file(READ ${GIT_DIR} gitdirfile)
+		string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" GIT_DIR_PATH ${gitdirfile})
+		if(IS_ABSOLUTE ${GIT_DIR_PATH})
+			get_filename_component(GIT_DIR ${GIT_DIR_PATH} ABSOLUTE)
+		else()
+			get_filename_component(LINKED_DIR ${GIT_DIR} PATH)
+			get_filename_component(GIT_DIR ${LINKED_DIR}/${GIT_DIR_PATH} ABSOLUTE)
+		endif()
 	endif()
 	set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data")
 	if(NOT EXISTS "${GIT_DATA}")
diff -pruN 14.2.16-2/cmake/modules/GetGitRevisionDescription.cmake.in 15.2.7-0ubuntu4/cmake/modules/GetGitRevisionDescription.cmake.in
--- 14.2.16-2/cmake/modules/GetGitRevisionDescription.cmake.in	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/GetGitRevisionDescription.cmake.in	2020-11-30 19:58:30.000000000 +0000
@@ -18,21 +18,30 @@ set(HEAD_HASH)
 file(READ "@HEAD_FILE@" HEAD_CONTENTS LIMIT 1024)
 
 string(STRIP "${HEAD_CONTENTS}" HEAD_CONTENTS)
+set(GIT_DIR "@GIT_DIR@")
+# handle git-worktree
+if(EXISTS "${GIT_DIR}/commondir")
+	file(READ "${GIT_DIR}/commondir" GIT_DIR_NEW LIMIT 1024)
+	string(STRIP "${GIT_DIR_NEW}" GIT_DIR_NEW)
+	if(NOT IS_ABSOLUTE "${GIT_DIR_NEW}")
+		get_filename_component(GIT_DIR_NEW ${GIT_DIR}/${GIT_DIR_NEW} ABSOLUTE)
+	endif()
+	if(EXISTS "${GIT_DIR_NEW}")
+		set(GIT_DIR "${GIT_DIR_NEW}")
+	endif()
+endif()
 if(HEAD_CONTENTS MATCHES "ref")
 	# named branch
 	string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}")
-	if(EXISTS "@GIT_DIR@/${HEAD_REF}")
-		configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
-	else()
-		configure_file("@GIT_DIR@/packed-refs" "@GIT_DATA@/packed-refs" COPYONLY)
-		file(READ "@GIT_DATA@/packed-refs" PACKED_REFS)
-		if(${PACKED_REFS} MATCHES "([0-9a-z]*) ${HEAD_REF}")
-			set(HEAD_HASH "${CMAKE_MATCH_1}")
-		endif()
+	if(EXISTS "${GIT_DIR}/${HEAD_REF}")
+		configure_file("${GIT_DIR}/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
+	elseif(EXISTS "${GIT_DIR}/logs/${HEAD_REF}")
+		configure_file("${GIT_DIR}/logs/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
+		set(HEAD_HASH "${HEAD_REF}")
 	endif()
 else()
 	# detached HEAD
-	configure_file("@GIT_DIR@/HEAD" "@GIT_DATA@/head-ref" COPYONLY)
+	configure_file("${GIT_DIR}/HEAD" "@GIT_DATA@/head-ref" COPYONLY)
 endif()
 
 if(NOT HEAD_HASH)
diff -pruN 14.2.16-2/cmake/modules/patch-dpdk-conf.sh 15.2.7-0ubuntu4/cmake/modules/patch-dpdk-conf.sh
--- 14.2.16-2/cmake/modules/patch-dpdk-conf.sh	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/modules/patch-dpdk-conf.sh	2020-11-30 19:58:30.000000000 +0000
@@ -17,6 +17,8 @@ machine=$1
 shift
 arch=$1
 shift
+numa=$1
+shift
 
 setconf CONFIG_RTE_MACHINE "${machine}"
 setconf CONFIG_RTE_ARCH "${arch}"
@@ -52,4 +54,4 @@ setconf CONFIG_RTE_TEST_PMD n
 setconf CONFIG_RTE_MBUF_REFCNT_ATOMIC n
 
 # balanced allocation of hugepages
-setconf CONFIG_RTE_EAL_NUMA_AWARE_HUGEPAGES n
+setconf CONFIG_RTE_EAL_NUMA_AWARE_HUGEPAGES "${numa}"
diff -pruN 14.2.16-2/cmake/toolchains/mingw32.cmake 15.2.7-0ubuntu4/cmake/toolchains/mingw32.cmake
--- 14.2.16-2/cmake/toolchains/mingw32.cmake	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/cmake/toolchains/mingw32.cmake	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,24 @@
+set(CMAKE_SYSTEM_NAME Windows)
+set(TOOLCHAIN_PREFIX x86_64-w64-mingw32)
+set(CMAKE_SYSTEM_PROCESSOR x86_64)
+
+# We'll need to use posix threads in order to use
+# C++11 features, such as std::thread.
+set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc-posix)
+set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++-posix)
+set(CMAKE_RC_COMPILER ${TOOLCHAIN_PREFIX}-windres)
+
+set(CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX} /usr/lib/gcc/${TOOLCHAIN_PREFIX}/7.3-posix)
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+# TODO: consider switching this to "ONLY". The issue with
+# that is that all our libs should then be under
+# CMAKE_FIND_ROOT_PATH and CMAKE_PREFIX_PATH would be ignored.
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH)
+
+# Some functions (e.g. localtime_r) will not be available unless we set
+# the following flag.
+add_definitions(-D_POSIX=1)
+add_definitions(-D_POSIX_C_SOURCE=1)
+add_definitions(-D_POSIX_=1)
+add_definitions(-D_POSIX_THREADS=1)
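
This toolchain file is selected at configure time (for instance with -DCMAKE_TOOLCHAIN_FILE=cmake/toolchains/mingw32.cmake); once it is active, the usual cross-compiling variables become available to project code, which is what the MINGW linker-flag hunk in CMakeLists.txt below relies on. A small sketch of what consuming CMake code can assume:

    # With CMAKE_SYSTEM_NAME set to Windows and a mingw cross compiler chosen,
    # CMake defines WIN32, MINGW and CMAKE_CROSSCOMPILING for the project.
    if(MINGW AND CMAKE_CROSSCOMPILING)
      message(STATUS "cross-building for ${CMAKE_SYSTEM_NAME}/${CMAKE_SYSTEM_PROCESSOR}")
    endif()
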
diff -pruN 14.2.16-2/CMakeLists.txt 15.2.7-0ubuntu4/CMakeLists.txt
--- 14.2.16-2/CMakeLists.txt	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/CMakeLists.txt	2020-11-30 19:58:30.000000000 +0000
@@ -1,29 +1,24 @@
-cmake_minimum_required(VERSION 3.5.1)
+cmake_minimum_required(VERSION 3.10.2)
+# remove cmake/modules/FindPython* once 3.12 is required
 
-project(ceph CXX C ASM)
-set(VERSION 14.2.16)
+project(ceph
+  VERSION 15.2.0
+  LANGUAGES CXX C ASM)
+
+foreach(policy
+    CMP0028
+    CMP0046
+    CMP0048
+    CMP0051
+    CMP0054
+    CMP0056
+    CMP0065
+    CMP0075)
+  if(POLICY ${policy})
+    cmake_policy(SET ${policy} NEW)
+  endif()
+endforeach()
 
-if(POLICY CMP0028)
-  cmake_policy(SET CMP0028 NEW)
-endif()
-if(POLICY CMP0046)
-  cmake_policy(SET CMP0046 NEW)
-endif()
-if(POLICY CMP0054)
-  cmake_policy(SET CMP0054 NEW)
-endif()
-if(POLICY CMP0056)
-  cmake_policy(SET CMP0056 NEW)
-endif()
-if(POLICY CMP0065)
-  cmake_policy(SET CMP0065 NEW)
-endif()
-if(POLICY CMP0051)
-  cmake_policy(SET CMP0051 NEW)
-endif()
-if(POLICY CMP0075)
-  cmake_policy(SET CMP0075 NEW)
-endif()
 list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
 
 if(CMAKE_SYSTEM_NAME MATCHES "Linux")
@@ -34,25 +29,31 @@ elseif(CMAKE_SYSTEM_NAME MATCHES "FreeBS
   FIND_PACKAGE(Threads)
 endif(CMAKE_SYSTEM_NAME MATCHES "Linux")
 
-if(CMAKE_CXX_COMPILER_ID STREQUAL GNU)
-  if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7)
-    message(FATAL_ERROR "GCC 7+ required due to C++17 requirements")
-  endif()
+if(WIN32)
+  # The Windows headers (e.g. coming from mingw or the Windows SDK) check
+  # the targeted Windows version. The availability of certain functions and
+  # structures will depend on it.
+  set(WIN32_WINNT "0x0A00" CACHE STRING "Targeted Windows version.")
+  add_definitions(-D_WIN32_WINNT=${WIN32_WINNT})
+endif()
+
+if(MINGW)
+  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-allow-multiple-definition")
+  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-allow-multiple-definition")
 endif()
 
 option(WITH_CCACHE "Build with ccache.")
 if(WITH_CCACHE)
   find_program(CCACHE_FOUND ccache)
-  if(CCACHE_FOUND)
-    message(STATUS "Building with ccache: ${CCACHE_FOUND}, CCACHE_DIR=$ENV{CCACHE_DIR}")
-    set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
-    # ccache does not accelerate link (ld), but let it handle it. by passing it
-    # along with cc to python's distutils, we are able to workaround
-    # https://bugs.python.org/issue8027.
-    set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
-  else(CCACHE_FOUND)
+  if(NOT CCACHE_FOUND)
     message(FATAL_ERROR "Can't find ccache. Is it installed?")
-  endif(CCACHE_FOUND)
+  endif()
+  message(STATUS "Building with ccache: ${CCACHE_FOUND}, CCACHE_DIR=$ENV{CCACHE_DIR}")
+  set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
+  # ccache does not accelerate link (ld), but let it handle it. by passing it
+  # along with cc to python's distutils, we are able to workaround
+  # https://bugs.python.org/issue8027.
+  set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
 endif(WITH_CCACHE)
 
 option(WITH_MANPAGE "Build man pages." ON)
@@ -79,97 +80,13 @@ if(FREEBSD)
   list(APPEND CMAKE_REQUIRED_INCLUDES /usr/local/include)
 endif(FREEBSD)
 
-#Check Includes
-include(CheckIncludeFiles)
-include(CheckIncludeFileCXX)
-include(CheckFunctionExists)
 
 #put all the libs and binaries in one place
 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
 set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
 set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
 
-
-CHECK_FUNCTION_EXISTS(fallocate CEPH_HAVE_FALLOCATE)
-CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
-CHECK_FUNCTION_EXISTS(posix_fallocate HAVE_POSIX_FALLOCATE)
-CHECK_FUNCTION_EXISTS(syncfs HAVE_SYS_SYNCFS)
-CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
-CHECK_FUNCTION_EXISTS(pwritev HAVE_PWRITEV)
-CHECK_FUNCTION_EXISTS(splice CEPH_HAVE_SPLICE)
-CHECK_FUNCTION_EXISTS(getgrouplist HAVE_GETGROUPLIST)
-if(NOT APPLE)
-  CHECK_FUNCTION_EXISTS(fdatasync HAVE_FDATASYNC)
-endif()
-CHECK_FUNCTION_EXISTS(strerror_r HAVE_Strerror_R)
-CHECK_FUNCTION_EXISTS(name_to_handle_at HAVE_NAME_TO_HANDLE_AT)
-CHECK_FUNCTION_EXISTS(pipe2 HAVE_PIPE2)
-CHECK_FUNCTION_EXISTS(accept4 HAVE_ACCEPT4)
-
-include(CMakePushCheckState)
-cmake_push_check_state(RESET)
-set(CMAKE_REQUIRED_LIBRARIES pthread)
-CHECK_FUNCTION_EXISTS(pthread_spin_init HAVE_PTHREAD_SPINLOCK)
-CHECK_FUNCTION_EXISTS(pthread_set_name_np HAVE_PTHREAD_SET_NAME_NP)
-CHECK_FUNCTION_EXISTS(pthread_get_name_np HAVE_PTHREAD_GET_NAME_NP)
-CHECK_FUNCTION_EXISTS(pthread_setname_np HAVE_PTHREAD_SETNAME_NP)
-CHECK_FUNCTION_EXISTS(pthread_getname_np HAVE_PTHREAD_GETNAME_NP)
-CHECK_FUNCTION_EXISTS(pthread_rwlockattr_setkind_np HAVE_PTHREAD_RWLOCKATTR_SETKIND_NP)
-cmake_pop_check_state()
-
-CHECK_FUNCTION_EXISTS(eventfd HAVE_EVENTFD)
-CHECK_FUNCTION_EXISTS(getprogname HAVE_GETPROGNAME)
-
-CHECK_INCLUDE_FILES("linux/types.h" HAVE_LINUX_TYPES_H)
-CHECK_INCLUDE_FILES("linux/version.h" HAVE_LINUX_VERSION_H)
-CHECK_INCLUDE_FILES("arpa/nameser_compat.h" HAVE_ARPA_NAMESER_COMPAT_H)
-CHECK_INCLUDE_FILES("sys/mount.h" HAVE_SYS_MOUNT_H)
-CHECK_INCLUDE_FILES("sys/param.h" HAVE_SYS_PARAM_H)
-CHECK_INCLUDE_FILES("sys/types.h" HAVE_SYS_TYPES_H)
-CHECK_INCLUDE_FILES("sys/vfs.h" HAVE_SYS_VFS_H)
-CHECK_INCLUDE_FILES("sys/prctl.h" HAVE_SYS_PRCTL_H)
-CHECK_INCLUDE_FILES("execinfo.h" HAVE_EXECINFO_H)
-if(LINUX)
-  CHECK_INCLUDE_FILES("sched.h" HAVE_SCHED)
-endif(LINUX)
-CHECK_INCLUDE_FILES("valgrind/helgrind.h" HAVE_VALGRIND_HELGRIND_H)
-
-include(CheckTypeSize)
-set(CMAKE_EXTRA_INCLUDE_FILES "linux/types.h")
-CHECK_TYPE_SIZE(__be16 __BE16) 
-CHECK_TYPE_SIZE(__be32 __BE32) 
-CHECK_TYPE_SIZE(__be64 __BE64) 
-CHECK_TYPE_SIZE(__le16 __LE16) 
-CHECK_TYPE_SIZE(__le32 __LE32) 
-CHECK_TYPE_SIZE(__le64 __LE64) 
-CHECK_TYPE_SIZE(__u8 __U8) 
-CHECK_TYPE_SIZE(__u16 __U16) 
-CHECK_TYPE_SIZE(__u32 __U32) 
-CHECK_TYPE_SIZE(__u64 __U64) 
-CHECK_TYPE_SIZE(__s8 __S8) 
-CHECK_TYPE_SIZE(__s16 __S16) 
-CHECK_TYPE_SIZE(__s32 __S32) 
-CHECK_TYPE_SIZE(__s64 __S64) 
-unset(CMAKE_EXTRA_INCLUDE_FILES)
-
-include(CheckSymbolExists)
-CHECK_SYMBOL_EXISTS(res_nquery "resolv.h" HAVE_RES_NQUERY)
-CHECK_SYMBOL_EXISTS(F_SETPIPE_SZ "linux/fcntl.h" CEPH_HAVE_SETPIPE_SZ)
-CHECK_SYMBOL_EXISTS(__func__ "" HAVE_FUNC)
-CHECK_SYMBOL_EXISTS(__PRETTY_FUNCTION__ "" HAVE_PRETTY_FUNC)
-CHECK_SYMBOL_EXISTS(getentropy "unistd.h" HAVE_GETENTROPY)
-
-include(CheckCXXSourceCompiles)
-CHECK_CXX_SOURCE_COMPILES("
-  #include <string.h>
-  int main() { char x = *strerror_r(0, &x, sizeof(x)); return 0; }
-  " STRERROR_R_CHAR_P)
-
-include(CheckStructHasMember)
-CHECK_STRUCT_HAS_MEMBER("struct stat" st_mtim.tv_nsec sys/stat.h
-  HAVE_STAT_ST_MTIM_TV_NSEC LANGUAGE C)
-CHECK_STRUCT_HAS_MEMBER("struct stat" st_mtimespec.tv_nsec sys/stat.h
-  HAVE_STAT_ST_MTIMESPEC_TV_NSEC LANGUAGE C)
+include(CephChecks)
 
 set(CEPH_MAN_DIR "share/man" CACHE STRING "Install location for man pages (relative to prefix).")
 
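
The long run of CHECK_FUNCTION_EXISTS/CHECK_INCLUDE_FILES/CHECK_TYPE_SIZE probes removed in this hunk is presumably relocated behind the new include(CephChecks) rather than dropped; CephChecks.cmake itself is not part of this diff. As a generic sketch of how such probes are typically consumed (config.h.in here is a hypothetical template):

    include(CheckFunctionExists)
    include(CheckIncludeFiles)

    # Each probe records its result in a HAVE_* cache variable ...
    check_function_exists(posix_fadvise HAVE_POSIX_FADVISE)
    check_include_files("sys/prctl.h" HAVE_SYS_PRCTL_H)

    # ... which a template header turns into preprocessor macros, e.g. a
    # config.h.in line reading "#cmakedefine HAVE_POSIX_FADVISE 1".
    configure_file(config.h.in ${CMAKE_BINARY_DIR}/include/config.h)
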
@@ -183,15 +100,14 @@ set(CMAKE_POSITION_INDEPENDENT_CODE ${EN
 
 option(WITH_STATIC_LIBSTDCXX "Link against libstdc++ statically" OFF)
 if(WITH_STATIC_LIBSTDCXX)
-  if(CMAKE_COMPILER_IS_GNUCXX)
-    set(static_linker_flags "-static-libstdc++ -static-libgcc")
-    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${static_linker_flags}")
-    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${static_linker_flags}")
-    unset(static_linker_flags)
-    set(GPERFTOOLS_USE_STATIC_LIBS TRUE)
-  else()
+  if(NOT CMAKE_COMPILER_IS_GNUCXX)
     message(FATAL_ERROR "Please use GCC to enable WITH_STATIC_LIBSTDCXX")
   endif()
+  set(static_linker_flags "-static-libstdc++ -static-libgcc")
+  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${static_linker_flags}")
+  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${static_linker_flags}")
+  unset(static_linker_flags)
+  set(GPERFTOOLS_USE_STATIC_LIBS TRUE)
 endif()
 include(CheckCxxAtomic)
 if(NOT HAVE_CXX11_ATOMIC)
@@ -210,33 +126,23 @@ endif()
 
 find_package(Backtrace)
 
-# remote block storage
-option(WITH_RBD "Remote block storage is here" ON)
+option(WITH_RBD "Enable RADOS Block Device related targets" ON)
 
 if(LINUX)
   find_package(udev REQUIRED)
   set(HAVE_UDEV ${UDEV_FOUND})
   find_package(blkid REQUIRED)
   set(HAVE_BLKID ${BLKID_FOUND})
-  if(WITH_RBD)
-    find_package(genl REQUIRED)
-    set(HAVE_GENL $GENL_FOUND)
-  endif()
   find_package(keyutils REQUIRED)
   set(HAVE_KEYUTILS ${KEYUTILS_FOUND})
 elseif(FREEBSD)
   set(HAVE_UDEV OFF)
   set(HAVE_LIBAIO OFF)
   set(HAVE_BLKID OFF)
-  set(HAVE_GENL OFF)
   set(HAVE_KEYUTILS OFF)
 else()
   set(HAVE_UDEV OFF)
-  message(STATUS "Not using udev")
   set(HAVE_BLKID OFF)
-  message(STATUS "Not using BLKID")
-  set(HAVE_GENL OFF)
-  message(STATUS "Not using GENL")
 endif(LINUX)
 
 option(WITH_OPENLDAP "OPENLDAP is here" ON)
@@ -280,7 +186,26 @@ if(WITH_BLUESTORE)
   endif()
 endif()
 
-option(WITH_SPDK "Enable SPDK" OFF)
+include(CMakeDependentOption)
+CMAKE_DEPENDENT_OPTION(WITH_LIBURING "Build with liburing library support" OFF
+  "WITH_BLUESTORE;HAVE_LIBAIO" OFF)
+set(HAVE_LIBURING ${WITH_LIBURING})
+
+CMAKE_DEPENDENT_OPTION(WITH_BLUESTORE_PMEM "Enable PMDK libraries" OFF
+  "WITH_BLUESTORE" OFF)
+
+CMAKE_DEPENDENT_OPTION(WITH_RBD_RWL "Enable librbd persistent write back cache" OFF
+  "WITH_RBD" OFF)
+
+CMAKE_DEPENDENT_OPTION(WITH_SYSTEM_PMDK "Require and build with system PMDK" OFF
+  "WITH_RBD_RWL OR WITH_BLUESTORE_PMEM" OFF)
+
+if(WITH_BLUESTORE_PMEM)
+  set(HAVE_BLUESTORE_PMEM ON)
+endif()
+
+CMAKE_DEPENDENT_OPTION(WITH_SPDK "Enable SPDK" OFF
+  "CMAKE_SYSTEM_PROCESSOR MATCHES i386|i686|amd64|x86_64|AMD64|aarch64" OFF)
 if(WITH_SPDK)
   if(NOT WITH_BLUESTORE)
     message(SEND_ERROR "Please enable WITH_BLUESTORE for using SPDK")
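
Several of the new build switches use CMAKE_DEPENDENT_OPTION: the option is only offered (with the given default) while every condition in the fourth argument holds, and is otherwise forced to the fifth argument, so a dependent feature such as WITH_LIBURING cannot be enabled without its prerequisites. A minimal sketch of those semantics, reduced to a single dependency:

    include(CMakeDependentOption)

    option(WITH_BLUESTORE "Enable the BlueStore backend" ON)

    # Offered as a regular OFF-by-default option only while WITH_BLUESTORE is
    # true; otherwise WITH_LIBURING is forced to OFF (the last argument).
    cmake_dependent_option(WITH_LIBURING "Build with liburing support" OFF
      "WITH_BLUESTORE" OFF)

    if(WITH_LIBURING)
      message(STATUS "liburing support requested")
    endif()
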
@@ -290,19 +215,11 @@ if(WITH_SPDK)
   set(HAVE_SPDK TRUE)
 endif(WITH_SPDK)
 
-option(WITH_PMEM "Enable PMEM" OFF)
-if(WITH_PMEM)
-  set(HAVE_PMEM ON)
-  if(NOT WITH_BLUESTORE)
-    message(SEND_ERROR "Please enable WITH_BLUESTORE for using PMEM")
-  endif()
-endif()
-
 if(WITH_BLUESTORE)
-  if(NOT AIO_FOUND AND NOT HAVE_POSIXAIO AND NOT WITH_SPDK AND NOT WITH_PMEM)
+  if(NOT AIO_FOUND AND NOT HAVE_POSIXAIO AND NOT WITH_SPDK AND NOT WITH_BLUESTORE_PMEM)
     message(SEND_ERROR "WITH_BLUESTORE is ON, "
       "but none of the bluestore backends is enabled. "
-      "Please install libaio, or enable WITH_SPDK or WITH_PMEM (experimental)")
+      "Please install libaio, or enable WITH_SPDK or WITH_BLUESTORE_PMEM (experimental)")
   endif()
 endif()
 
@@ -320,11 +237,10 @@ option(WITH_LIBCEPHFS "libcephfs client
 # key-value store
 option(WITH_KVS "Key value store is here" ON)
 
-# KERNEL remote block storage
-option(WITH_KRBD "Kernel Remote block storage is here" ON)
+option(WITH_KRBD "Enable Linux krbd support of 'rbd' utility" ON)
 
-if(WITH_KRBD AND WITHOUT_RBD)
-  message(FATAL_ERROR "Cannot have WITH_KRBD with WITH_RBD.")
+if(WITH_KRBD AND NOT WITH_RBD)
+  message(FATAL_ERROR "Cannot have WITH_KRBD without WITH_RBD.")
 endif()
 if(LINUX)
   if(WITH_LIBCEPHFS OR WITH_KRBD)
@@ -399,29 +315,22 @@ else()
   set(EXE_LINKER_USE_PIE ${ENABLE_SHARED})
 endif()
 
+if (HAVE_LIBTCMALLOC AND TCMALLOC_VERSION_STRING VERSION_LESS 2.6.2)
+  set(LIBTCMALLOC_MISSING_ALIGNED_ALLOC ON)
+endif()
+
 find_package(CURL REQUIRED)
 set(CMAKE_REQUIRED_INCLUDES ${CURL_INCLUDE_DIRS})
 set(CMAKE_REQUIRED_LIBRARIES ${CURL_LIBRARIES})
 CHECK_SYMBOL_EXISTS(curl_multi_wait curl/curl.h HAVE_CURL_MULTI_WAIT)
 
-find_package(NSS REQUIRED)
-find_package(NSPR REQUIRED)
 find_package(OpenSSL REQUIRED)
-# TODO: use NSS only for validation of the OpenSSL-based implementations
-set(USE_NSS 1)
-set(USE_OPENSSL 1)
-set(CRYPTO_LIBS ${NSS_LIBRARIES} ${NSPR_LIBRARIES} OpenSSL::Crypto)
-
-option(WITH_XIO "Enable XIO messaging" OFF)
-if(WITH_XIO)
-  find_package(xio REQUIRED)
-  set(HAVE_XIO ${XIO_FOUND})
-endif()
+set(CRYPTO_LIBS OpenSSL::Crypto)
 
 option(WITH_DPDK "Enable DPDK messaging" OFF)
 if(WITH_DPDK)
   find_package(dpdk)
-  if(NOT DPDK_FOUND AND NOT TARGET dpdk-ext)
+  if(NOT DPDK_FOUND)
     include(BuildDPDK)
     build_dpdk(${CMAKE_BINARY_DIR}/src/dpdk)
   endif()
@@ -471,68 +380,65 @@ if(WITH_RADOSGW)
     message(WARNING "unable to run curl-config; rgw cannot make ssl requests to external systems reliably")
   endif()
 
-  if (WITH_RADOSGW_BEAST_FRONTEND AND WITH_RADOSGW_BEAST_OPENSSL)
-    find_package(OpenSSL REQUIRED)
-  else()
-    find_package(OpenSSL)
+  if (NOT NO_CURL_SSL_LINK)
+    message(STATUS "libcurl is linked with openssl: explicitly setting locks")
+    set(WITH_CURL_OPENSSL ON)
+  endif() # CURL_SSL_LINK
+  execute_process(
+    COMMAND
+      "sh" "-c"
+      "objdump -p ${OPENSSL_SSL_LIBRARY} | sed -n 's/^  SONAME  *//p'"
+    OUTPUT_VARIABLE LIBSSL_SONAME
+    ERROR_VARIABLE OBJDUMP_ERRORS
+    RESULT_VARIABLE OBJDUMP_RESULTS
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if (OBJDUMP_RESULTS)
+    message(FATAL_ERROR "can't run objdump: ${OBJDUMP_RESULTS}")
   endif()
-
-  if (OPENSSL_FOUND)
-    if (NOT NO_CURL_SSL_LINK)
-      message(STATUS "libcurl is linked with openssl: explicitly setting locks")
-      set(WITH_CURL_OPENSSL ON)
-    endif() # CURL_SSL_LINK
-    execute_process(
-      COMMAND
-	"sh" "-c"
-	"objdump -p ${OPENSSL_SSL_LIBRARY} | sed -n 's/^  SONAME  *//p'"
-      OUTPUT_VARIABLE LIBSSL_SONAME
-      ERROR_VARIABLE OBJDUMP_ERRORS
-      RESULT_VARIABLE OBJDUMP_RESULTS
-      OUTPUT_STRIP_TRAILING_WHITESPACE)
-    if (OBJDUMP_RESULTS)
-      message(FATAL_ERROR "can't run objdump: ${OBJDUMP_RESULTS}")
-    endif()
-    if (NOT OBJDUMP_ERRORS STREQUAL "")
-      message(WARNING "message from objdump: ${OBJDUMP_ERRORS}")
-    endif()
-    execute_process(
-      COMMAND
-	"sh" "-c"
-	"objdump -p ${OPENSSL_CRYPTO_LIBRARY} | sed -n 's/^  SONAME  *//p'"
-      OUTPUT_VARIABLE LIBCRYPTO_SONAME
-      ERROR_VARIABLE OBJDUMP_ERRORS
-      RESULT_VARIABLE OBJDUMP_RESULTS
-      OUTPUT_STRIP_TRAILING_WHITESPACE)
-    if (OBJDUMP_RESULTS)
-      message(FATAL_ERROR "can't run objdump: ${OBJDUMP_RESULTS}")
-    endif()
-    if (NOT OBJDUMP_ERRORS STREQUAL "")
-      message(WARNING "message from objdump: ${OBJDUMP_ERRORS}")
-    endif()
-    message(STATUS "ssl soname: ${LIBSSL_SONAME}")
-    message(STATUS "crypto soname: ${LIBCRYPTO_SONAME}")
-  else()
-    message(WARNING "ssl not found: rgw civetweb may fail to dlopen libssl libcrypto")
-  endif()  # OPENSSL_FOUND
+  if (NOT OBJDUMP_ERRORS STREQUAL "")
+    message(WARNING "message from objdump: ${OBJDUMP_ERRORS}")
+  endif()
+  execute_process(
+    COMMAND
+      "sh" "-c"
+      "objdump -p ${OPENSSL_CRYPTO_LIBRARY} | sed -n 's/^  SONAME  *//p'"
+    OUTPUT_VARIABLE LIBCRYPTO_SONAME
+    ERROR_VARIABLE OBJDUMP_ERRORS
+    RESULT_VARIABLE OBJDUMP_RESULTS
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if (OBJDUMP_RESULTS)
+    message(FATAL_ERROR "can't run objdump: ${OBJDUMP_RESULTS}")
+  endif()
+  if (NOT OBJDUMP_ERRORS STREQUAL "")
+    message(WARNING "message from objdump: ${OBJDUMP_ERRORS}")
+  endif()
+  message(STATUS "ssl soname: ${LIBSSL_SONAME}")
+  message(STATUS "crypto soname: ${LIBCRYPTO_SONAME}")
 endif (WITH_RADOSGW)
 
 #option for CephFS
 option(WITH_CEPHFS "CephFS is enabled" ON)
 
-#option for Mgr
+# Please specify 3.[0-7] if you want to build with a certain version of python3.
+set(WITH_PYTHON3 "3" CACHE STRING "build with specified python3 version")
+if(NOT WITH_PYTHON3)
+  message(FATAL_ERROR "WITH_PYTHON3 should always be enabled")
+elseif(WITH_PYTHON3 MATCHES "^(1|ON|YES|TRUE|Y)$")
+  set(WITH_PYTHON3 "3")
+  message(NOTICE "Please specify a Python3 version instead of a BOOLEAN")
+elseif(NOT WITH_PYTHON3 STREQUAL "3")
+  set(find_python3_exact "EXACT")
+endif()
+find_package(Python3 ${WITH_PYTHON3} ${find_python3_exact} REQUIRED
+  COMPONENTS Interpreter Development)
+unset(find_python3_exact)
+
 option(WITH_MGR "ceph-mgr is enabled" ON)
 if(WITH_MGR)
-  # Please specify 3 or 3.[0-7] if you want to build with python3 support.
-  # FindPyhonInterp and FindPythonLibs think they belong to different families.
-  set(MGR_PYTHON_VERSION "2.7" CACHE
-    STRING "minimal required version of python runtime for running mgr plugins. ")
-  find_package(PythonInterp ${MGR_PYTHON_VERSION} REQUIRED)
-  find_package(PythonLibs ${MGR_PYTHON_VERSION} REQUIRED)
-  set(MGR_PYTHON_EXECUTABLE ${PYTHON_EXECUTABLE})
-  set(MGR_PYTHON_LIBRARIES ${PYTHON_LIBRARIES})
-  set(MGR_PYTHON_VERSION_MAJOR ${PYTHON_VERSION_MAJOR})
-  set(MGR_PYTHON_VERSION_MINOR ${PYTHON_VERSION_MINOR})
+  set(MGR_PYTHON_EXECUTABLE ${Python3_EXECUTABLE})
+  set(MGR_PYTHON_LIBRARIES ${Python3_LIBRARIES})
+  set(MGR_PYTHON_VERSION_MAJOR ${Python3_VERSION_MAJOR})
+  set(MGR_PYTHON_VERSION_MINOR ${Python3_VERSION_MINOR})
   # Boost dependency check deferred to Boost section
 endif(WITH_MGR)
 
@@ -552,7 +458,9 @@ set(HAVE_LIBROCKSDB 1)
 find_package(ZLIB REQUIRED)
 
 #option for EventTrace
-option(WITH_EVENTTRACE "Event tracing support" OFF)
+CMAKE_DEPENDENT_OPTION(
+  WITH_EVENTTRACE "Event tracing support, requires WITH_LTTNG"
+  OFF "USE_LTTNG" OFF)
 
 #option for LTTng
 option(WITH_LTTNG "LTTng tracing is enabled" ON)
@@ -592,7 +500,10 @@ option(WITH_SYSTEM_FIO "require and buil
 if(WITH_SYSTEM_FIO)
   find_package(fio REQUIRED)
 elseif(WITH_FIO)
-  set(FIO_INCLUDE_DIR ${CMAKE_BINARY_DIR}/src/fio)
+  if (NOT FIO_INCLUDE_DIR)
+    # Use local external fio if include directory is not set
+    set(FIO_INCLUDE_DIR ${CMAKE_BINARY_DIR}/src/fio)
+  endif()
   include(BuildFIO)
   build_fio()
 endif()
@@ -619,7 +530,7 @@ endif()
 
 option(WITH_UBSAN "build with UBSAN" OFF)
 if(WITH_UBSAN)
-  list(APPEND sanitizers "undefined-behavior")
+  list(APPEND sanitizers "undefined_behavior")
 endif()
 
 if(sanitizers)
@@ -633,7 +544,7 @@ endif()
 # Rocksdb
 option(WITH_SYSTEM_ROCKSDB "require and build with system rocksdb" OFF)
 if (WITH_SYSTEM_ROCKSDB)
-  find_package(RocksDB 5.8 REQUIRED)
+  find_package(RocksDB 5.14 REQUIRED)
 endif()
 
 option(WITH_SEASTAR "Build seastar components")
@@ -695,10 +606,13 @@ set(DASHBOARD_FRONTEND_LANGS "" CACHE ST
   "List of comma separated ceph-dashboard frontend languages to build. \
   Use value `ALL` to build all languages")
 
+# TODO: make this an option and set it to the same value as WITH_MGR_DASHBOARD_FRONTEND
+set(WITH_MGR_ROOK_CLIENT WITH_MGR_DASHBOARD_FRONTEND)
+
 include_directories(SYSTEM ${PROJECT_BINARY_DIR}/include)
 
 find_package(Threads REQUIRED)
-
+find_package(StdFilesystem)
 
 option(WITH_SELINUX "build SELinux policy" OFF)
 if(WITH_SELINUX)
@@ -721,6 +635,8 @@ add_custom_target(check
 
 add_subdirectory(src)
 
+add_subdirectory(qa)
+
 add_subdirectory(doc)
 if(WITH_MANPAGE)
   add_subdirectory(man)
@@ -748,3 +664,7 @@ add_tags(ctags
   EXCLUDE_OPTS ${CTAG_EXCLUDES}
   EXCLUDES "*.js" "*.css")
 add_custom_target(tags DEPENDS ctags)
+
+find_package(CppCheck)
+find_package(IWYU)
+set(VERSION 15.2.7)
diff -pruN 14.2.16-2/CodingStyle 15.2.7-0ubuntu4/CodingStyle
--- 14.2.16-2/CodingStyle	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/CodingStyle	2020-11-30 19:58:30.000000000 +0000
@@ -9,7 +9,7 @@ C code
 
 For C code, we conform by the Linux kernel coding standards:
 
-    https://www.kernel.org/doc/Documentation/CodingStyle
+    https://www.kernel.org/doc/Documentation/process/coding-style.rst
 
 
 C++ code
@@ -71,7 +71,7 @@ by section.
 * Comments > File Comments:
 
    Don't sweat it, unless the license varies from that of the project
-   (LGPL2.1) or the code origin isn't reflected by the git history.
+   (LGPL2.1 or LGPL3.0) or the code origin isn't reflected by the git history.
 
 * Formatting > Tabs:
   Indent width is two spaces.  When runs of 8 spaces can be compressed
@@ -79,7 +79,7 @@ by section.
   header is:
 
 // -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
-// vim: ts=8 sw=2 smarttab
+// vim: ts=8 sw=2 smarttab ft=cpp
 
 * Formatting > Conditionals:
 
@@ -105,7 +105,7 @@ by section.
 * Header Files -> The `#define` Guard:
 
   `#pragma once` is allowed for simplicity at the expense of
-   portability sinces `#pragma once` is wildly supported and is known
+   portability since `#pragma once` is widely supported and is known
    to work on GCC and Clang.
 
 
diff -pruN 14.2.16-2/COPYING 15.2.7-0ubuntu4/COPYING
--- 14.2.16-2/COPYING	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/COPYING	2020-11-30 19:58:30.000000000 +0000
@@ -5,7 +5,13 @@ Source: http://ceph.com/
 
 Files: *
 Copyright: (c) 2004-2010 by Sage Weil <sage@newdream.net>
-License: LGPL2.1 (see COPYING-LGPL2.1)
+License: LGPL-2.1 or LGPL-3 (see COPYING-LGPL2.1 and COPYING-LGPL3)
+
+Files: cmake/modules/FindLTTngUST.cmake
+Copyright:
+    Copyright 2016 Kitware, Inc.
+    Copyright 2016 Philippe Proulx <pproulx@efficios.com>
+License: BSD 3-clause
 
 Files: doc/*
 Copyright: (c) 2010-2012 New Dream Network and contributors
@@ -16,7 +22,7 @@ License: GPL3
 
 Files: src/mount/canonicalize.c
 Copyright: Copyright (C) 1993 Rick Sladkey <jrs@world.std.com>
-License: LGPL2 or later
+License: LGPL-2 or later
 
 Files: src/os/btrfs_ioctl.h
 Copyright: Copyright (C) 2007 Oracle.  All rights reserved.
@@ -35,6 +41,12 @@ Copyright:
     Copyright 2012-2013 Intel Corporation All Rights Reserved.
 License: BSD 3-clause
 
+Files: src/common/deleter.h
+Copyright:
+    Copyright (C) 2014 Cloudius Systems, Ltd.
+License:
+    Apache-2.0
+
 Files: src/common/sctp_crc32.c: 
 Copyright:
     Copyright (c) 2001-2007, by Cisco Systems, Inc. All rights reserved.
@@ -66,6 +78,18 @@ License:
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
   THE POSSIBILITY OF SUCH DAMAGE.
 
+Files: src/common/sstring.hh
+Copyright:
+    Copyright 2014 Cloudius Systems
+License:
+    Apache-2.0
+
+Files: src/include/cpp-btree
+Copyright:
+    Copyright 2013 Google Inc. All Rights Reserved.
+License:
+    Apache-2.0
+
 Files: src/json_spirit
 Copyright:
 	Copyright John W. Wilkinson 2007 - 2011
@@ -95,6 +119,10 @@ License:
   FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
   OTHER DEALINGS IN THE SOFTWARE.
 
+Files: src/test/common/Throttle.cc src/test/filestore/chain_xattr.cc
+Copyright: Copyright (C) 2013 Cloudwatt <libre.licensing@cloudwatt.com>
+License: LGPL-2.1 or later
+
 Files: src/osd/ErasureCodePluginJerasure/*.{c,h}
 Copyright: Copyright (c) 2011, James S. Plank <plank@cs.utk.edu>
 License:
@@ -130,7 +158,14 @@ License:
 Packaging:
     Copyright (C) 2004-2009 by Sage Weil <sage@newdream.net>
     Copyright (C) 2010 Canonical, Ltd.
-    Licensed under LGPL-2.1
+    Licensed under LGPL-2.1 or LGPL-3.0
+
+Files: src/test/perf_local.cc
+Copyright:
+  (c) 2011-2014 Stanford University
+  (c) 2011 Facebook
+License:
+  The MIT License
 
 File: qa/workunits/erasure-code/jquery.js
   Copyright 2012 jQuery Foundation and other contributors
@@ -154,3 +189,6 @@ Files: src/ceph-volume/plugin/zfs/*
 Copyright: 2018, Willem Jan Withagen
 License: BSD 3-clause
 
+Files: src/include/function2.hpp
+Copyright: 2015-2018, Denis Blank
+License: Boost Software License, Version 1.0
\ No newline at end of file
diff -pruN 14.2.16-2/COPYING-LGPL3 15.2.7-0ubuntu4/COPYING-LGPL3
--- 14.2.16-2/COPYING-LGPL3	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/COPYING-LGPL3	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,165 @@
+                   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff -pruN 14.2.16-2/debian/calc-max-parallel.sh 15.2.7-0ubuntu4/debian/calc-max-parallel.sh
--- 14.2.16-2/debian/calc-max-parallel.sh	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/calc-max-parallel.sh	2020-12-04 09:23:59.000000000 +0000
@@ -8,25 +8,20 @@
 
 total_ram=$(grep MemTotal /proc/meminfo | awk '{ print $2 }')
 
-sixtyfour_g=$((64*1024*1024))
-fourtyheight_g=$((48*1024*1024))
-thirtytwo_g=$((32*1024*1024))
 sixteen_g=$((16*1024*1024))
 eight_g=$((8*1024*1024))
 four_g=$((4*1024*1024))
 
 if [ ${total_ram} -le ${four_g} ]; then
-    echo "--max-parallel=1"
+    if dpkg-architecture -q DEB_BUILD_ARCH | grep -q -e arm -e mips -e powerpc -e ppc; then
+        echo "--max-parallel=1"
+    else
+        echo "--max-parallel=2"
+    fi
 elif [ ${total_ram} -le ${eight_g} ]; then
     echo "--max-parallel=2"
 elif [ ${total_ram} -le ${sixteen_g} ]; then
     echo "--max-parallel=3"
-elif [ ${total_ram} -le ${thirtytwo_g} ]; then
-    echo "--max-parallel=6"
-elif [ ${total_ram} -le ${fourtyheight_g} ]; then
-    echo "--max-parallel=8"
-elif [ ${total_ram} -le ${sixtyfour_g} ]; then
-    echo "--max-parallel=12"
 else
-    echo "--max-parallel=16"
+    echo "--max-parallel=4"
 fi
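
MemTotal in /proc/meminfo is reported in kB, so the remaining thresholds are 4 GiB (4*1024*1024 = 4194304 kB), 8 GiB (8388608 kB) and 16 GiB (16777216 kB). A builder reporting roughly 12 GiB (~12582912 kB), for example, is above eight_g but within sixteen_g and gets --max-parallel=3. With this change anything larger than 16 GiB is capped at --max-parallel=4 rather than scaling up to 16, and only arm/mips/powerpc machines at or below 4 GiB are restricted to a single job.
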
diff -pruN 14.2.16-2/debian/cephadm.install 15.2.7-0ubuntu4/debian/cephadm.install
--- 14.2.16-2/debian/cephadm.install	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/cephadm.install	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,2 @@
+etc/sudoers.d/cephadm
+usr/sbin/cephadm
diff -pruN 14.2.16-2/debian/cephadm.postinst 15.2.7-0ubuntu4/debian/cephadm.postinst
--- 14.2.16-2/debian/cephadm.postinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/cephadm.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,67 @@
+#!/bin/sh
+# vim: set noet ts=8:
+# postinst script for cephadm
+#
+# see: dh_installdeb(1)
+
+set -e
+
+# summary of how this script can be called:
+#
+# 	postinst configure <most-recently-configured-version>
+# 	old-postinst abort-upgrade <new-version>
+# 	conflictor's-postinst abort-remove in-favour <package> <new-version>
+# 	postinst abort-remove
+# 	deconfigured's-postinst abort-deconfigure in-favour <failed-install-package> <version> [<removing conflicting-package> <version>]
+#
+
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+
+case "$1" in
+    configure)
+       # create cephadm user
+       # 1. create user if not existing
+       if ! getent passwd | grep -q "^cephadm:"; then
+         echo -n "Adding system user cephadm.."
+         adduser --quiet --system --disabled-password --gecos 'Ceph-daemon user for cephadm' --shell /bin/bash cephadm 2>/dev/null || true
+         echo "..done"
+       fi
+
+       # 2. make sure user is unlocked
+       if [ -f /etc/shadow ]; then
+           usermod -U -e '' cephadm
+       else
+           usermod -U cephadm
+       fi
+
+       # set up (initially empty) .ssh/authorized_keys file
+       if ! test -d /home/cephadm/.ssh; then
+	   mkdir /home/cephadm/.ssh
+	   chown --reference /home/cephadm /home/cephadm/.ssh
+	   chmod 0700 /home/cephadm/.ssh
+       fi
+       if ! test -e /home/cephadm/.ssh/authorized_keys; then
+	   touch /home/cephadm/.ssh/authorized_keys
+	   chown --reference /home/cephadm /home/cephadm/.ssh/authorized_keys
+	   chmod 0600 /home/cephadm/.ssh/authorized_keys
+       fi
+
+    ;;
+    abort-upgrade|abort-remove|abort-deconfigure)
+	:
+    ;;
+
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
+
+#DEBHELPER#
+
+exit 0
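
On configure this script creates a cephadm system account with a real login shell and seeds an
empty, correctly-permissioned ~/.ssh/authorized_keys, so that the cephadm orchestrator can later
install its SSH key there. A quick way to check the result on an installed system (hedged
example; exact output will vary):

    getent passwd cephadm     # system user with shell /bin/bash and home /home/cephadm
    stat -c '%a %U %n' /home/cephadm/.ssh /home/cephadm/.ssh/authorized_keys
    # expected modes: 700 for .ssh and 600 for authorized_keys,
    # both owned the same way as /home/cephadm itself
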
diff -pruN 14.2.16-2/debian/cephadm.postrm 15.2.7-0ubuntu4/debian/cephadm.postrm
--- 14.2.16-2/debian/cephadm.postrm	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/cephadm.postrm	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,44 @@
+#!/bin/sh
+# postrm script for cephadm
+#
+# see: dh_installdeb(1)
+
+set -e
+
+# summary of how this script can be called:
+#        * <postrm> `remove'
+#        * <postrm> `purge'
+#        * <old-postrm> `upgrade' <new-version>
+#        * <new-postrm> `failed-upgrade' <old-version>
+#        * <new-postrm> `abort-install'
+#        * <new-postrm> `abort-install' <old-version>
+#        * <new-postrm> `abort-upgrade' <old-version>
+#        * <disappearer's-postrm> `disappear' <overwriter>
+#          <overwriter-version>
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+
+case "$1" in
+    remove)
+	deluser --remove-home cephadm
+    ;;
+
+    purge)
+    ;;
+
+    upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
+    ;;
+
+    *)
+        echo "postrm called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/ceph-base.postrm 15.2.7-0ubuntu4/debian/ceph-base.postrm
--- 14.2.16-2/debian/ceph-base.postrm	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-base.postrm	2020-12-04 09:23:59.000000000 +0000
@@ -1,11 +1,47 @@
 #!/bin/sh
+# postrm script for ceph
+#
+# see: dh_installdeb(1)
 
 set -e
 
-if [ "${1}" = "purge" ] ; then
+# summary of how this script can be called:
+#        * <postrm> `remove'
+#        * <postrm> `purge'
+#        * <old-postrm> `upgrade' <new-version>
+#        * <new-postrm> `failed-upgrade' <old-version>
+#        * <new-postrm> `abort-install'
+#        * <new-postrm> `abort-install' <old-version>
+#        * <new-postrm> `abort-upgrade' <old-version>
+#        * <disappearer's-postrm> `disappear' <overwriter>
+#          <overwriter-version>
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+
+case "$1" in
+    remove)
+    ;;
+
+    purge)
 	rm -rf /var/log/ceph 
 	rm -rf /etc/ceph
-fi
+    ;;
+
+    upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
+    ;;
+
+    *)
+        echo "postrm called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+dpkg-maintscript-helper rm_conffile \
+    /etc/logrotate.d/ceph-base 10.2.3-0ubuntu5~ -- "$@"
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
 
 #DEBHELPER#
 
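
The behaviour of the old two-line script is preserved (state under /etc/ceph and logs under
/var/log/ceph are only deleted on purge); the rewrite just spells out every way postrm can be
invoked and adds the conffile cleanup. In practice (hedged example):

    apt-get remove ceph-base    # postrm "remove": /etc/ceph and /var/log/ceph are kept
    apt-get purge ceph-base     # postrm "purge": both directories are removed
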
diff -pruN 14.2.16-2/debian/ceph-base.preinst 15.2.7-0ubuntu4/debian/ceph-base.preinst
--- 14.2.16-2/debian/ceph-base.preinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-base.preinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+set -e
+
+dpkg-maintscript-helper rm_conffile \
+    /etc/logrotate.d/ceph-base 10.2.3-0ubuntu5~ -- "$@"
+
+#DEBHELPER#
+
+exit 0
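
The same dpkg-maintscript-helper call appears in several of the maintainer scripts added in this
upload (ceph-base.preinst and .postrm above, ceph.preinst/.postinst/.postrm below): the helper
has to be invoked from preinst, postinst and postrm for the conffile removal to cover every
upgrade path. The general form, as documented for dpkg, is:

    # dpkg-maintscript-helper rm_conffile <conffile> <prior-version> [<package>] -- "$@"
    dpkg-maintscript-helper rm_conffile \
        /etc/logrotate.d/ceph-base 10.2.3-0ubuntu5~ -- "$@"
    # On upgrade from a version earlier than 10.2.3-0ubuntu5~ the obsolete conffile is removed,
    # or kept with a .dpkg-bak suffix if the administrator had modified it.
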
diff -pruN 14.2.16-2/debian/ceph-common.postinst 15.2.7-0ubuntu4/debian/ceph-common.postinst
--- 14.2.16-2/debian/ceph-common.postinst	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-common.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -67,7 +67,6 @@ case "$1" in
                  --disabled-password \
 	         --uid $SERVER_UID \
 	         --gid $SERVER_GID \
-	         --home $SERVER_HOME \
                  $SERVER_USER 2>/dev/null || true
 	 echo "..done"
        fi
diff -pruN 14.2.16-2/debian/ceph-immutable-object-cache.install 15.2.7-0ubuntu4/debian/ceph-immutable-object-cache.install
--- 14.2.16-2/debian/ceph-immutable-object-cache.install	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-immutable-object-cache.install	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,3 @@
+lib/systemd/system/ceph-immutable-object-cache*
+usr/bin/ceph-immutable-object-cache
+usr/share/man/man8/ceph-immutable-object-cache.8
diff -pruN 14.2.16-2/debian/ceph-mgr-cephadm.install 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.install
--- 14.2.16-2/debian/ceph-mgr-cephadm.install	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.install	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1 @@
+usr/share/ceph/mgr/cephadm
diff -pruN 14.2.16-2/debian/ceph-mgr-cephadm.postinst 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.postinst
--- 14.2.16-2/debian/ceph-mgr-cephadm.postinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,41 @@
+#!/bin/sh
+# vim: set noet ts=8:
+# postinst script for ceph-mgr-cephadm
+#
+# see: dh_installdeb(1)
+
+set -e
+
+# summary of how this script can be called:
+#
+# 	postinst configure <most-recently-configured-version>
+# 	old-postinst abort-upgrade <new-version>
+# 	conflictor's-postinst abort-remove in-favour <package> <new-version>
+# 	postinst abort-remove
+# 	deconfigured's-postinst abort-deconfigure in-favour <failed-install-package> <version> [<removing conflicting-package> <version>]
+#
+
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+case "$1" in
+    configure)
+	# attempt to load the plugin if the mgr is running
+	deb-systemd-invoke try-restart ceph-mgr.target
+    ;;
+    abort-upgrade|abort-remove|abort-deconfigure)
+	:
+    ;;
+
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
+
+#DEBHELPER#
+
+exit 0
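
The configure step uses deb-systemd-invoke, which honours policy-rc.d (so it is a no-op inside
chroots and most container builds), and try-restart only bounces ceph-mgr if it is already
running; a stopped manager is not started just to load the new cephadm module. To check the
outcome (hedged example):

    systemctl is-active ceph-mgr.target
    # "active": the running mgr was restarted and the module is available to load
    # "inactive": nothing was restarted; the module is picked up on the next mgr start
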
diff -pruN 14.2.16-2/debian/ceph-mgr-cephadm.prerm 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.prerm
--- 14.2.16-2/debian/ceph-mgr-cephadm.prerm	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-mgr-cephadm.prerm	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,8 @@
+#!/bin/sh
+# vim: set noet ts=8:
+
+set -e
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/ceph-mgr.install 15.2.7-0ubuntu4/debian/ceph-mgr.install
--- 14.2.16-2/debian/ceph-mgr.install	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-mgr.install	2020-12-04 09:23:59.000000000 +0000
@@ -1,28 +1,4 @@
 lib/systemd/system/ceph-mgr*
 usr/bin/ceph-mgr
-usr/share/ceph/mgr/ansible
-usr/share/ceph/mgr/balancer
-usr/share/ceph/mgr/crash
-usr/share/ceph/mgr/deepsea
-usr/share/ceph/mgr/devicehealth
-usr/share/ceph/mgr/influx
-usr/share/ceph/mgr/insights
-usr/share/ceph/mgr/iostat
-usr/share/ceph/mgr/localpool
 usr/share/ceph/mgr/mgr_module.*
 usr/share/ceph/mgr/mgr_util.*
-usr/share/ceph/mgr/orchestrator.*
-usr/share/ceph/mgr/orchestrator_cli
-usr/share/ceph/mgr/osd_perf_query
-usr/share/ceph/mgr/pg_autoscaler
-usr/share/ceph/mgr/progress
-usr/share/ceph/mgr/prometheus
-usr/share/ceph/mgr/rbd_support
-usr/share/ceph/mgr/restful
-usr/share/ceph/mgr/selftest
-usr/share/ceph/mgr/status
-usr/share/ceph/mgr/telegraf
-usr/share/ceph/mgr/telemetry
-usr/share/ceph/mgr/test_orchestrator
-usr/share/ceph/mgr/volumes
-usr/share/ceph/mgr/zabbix
diff -pruN 14.2.16-2/debian/ceph-mgr-modules-core.install 15.2.7-0ubuntu4/debian/ceph-mgr-modules-core.install
--- 14.2.16-2/debian/ceph-mgr-modules-core.install	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-mgr-modules-core.install	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,23 @@
+usr/share/ceph/mgr/alerts
+usr/share/ceph/mgr/balancer
+usr/share/ceph/mgr/crash
+usr/share/ceph/mgr/devicehealth
+usr/share/ceph/mgr/influx
+usr/share/ceph/mgr/insights
+usr/share/ceph/mgr/iostat
+usr/share/ceph/mgr/localpool
+usr/share/ceph/mgr/orchestrator
+usr/share/ceph/mgr/osd_perf_query
+usr/share/ceph/mgr/osd_support
+usr/share/ceph/mgr/pg_autoscaler
+usr/share/ceph/mgr/progress
+usr/share/ceph/mgr/prometheus
+usr/share/ceph/mgr/rbd_support
+usr/share/ceph/mgr/restful
+usr/share/ceph/mgr/selftest
+usr/share/ceph/mgr/status
+usr/share/ceph/mgr/telegraf
+usr/share/ceph/mgr/telemetry
+usr/share/ceph/mgr/test_orchestrator
+usr/share/ceph/mgr/volumes
+usr/share/ceph/mgr/zabbix
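
Together with the trimmed ceph-mgr.install above, this moves the pure-Python manager modules out
of the architecture-dependent ceph-mgr package and into the new Architecture: all
ceph-mgr-modules-core package (cephadm itself lives in the separate ceph-mgr-cephadm package).
A quick way to confirm where a given module now ships (hedged example; version labels are
illustrative):

    dpkg -S /usr/share/ceph/mgr/pg_autoscaler
    # 15.2.x: ceph-mgr-modules-core: /usr/share/ceph/mgr/pg_autoscaler
    # 14.2.x: ceph-mgr: /usr/share/ceph/mgr/pg_autoscaler
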
diff -pruN 14.2.16-2/debian/ceph-osd.postinst 15.2.7-0ubuntu4/debian/ceph-osd.postinst
--- 14.2.16-2/debian/ceph-osd.postinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph-osd.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,45 @@
+# vim: set noet ts=8:
+# postinst script for ceph-osd
+#
+# see: dh_installdeb(1)
+
+set -e
+
+# summary of how this script can be called:
+#
+# 	postinst configure <most-recently-configured-version>
+# 	old-postinst abort-upgrade <new-version>
+# 	conflictor's-postinst abort-remove in-favour <package> <new-version>
+# 	postinst abort-remove
+# 	deconfigured's-postinst abort-deconfigure in-favour <failed-install-package> <version> [<removing conflicting-package> <version>]
+#
+
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+[ -f "/etc/default/ceph" ] && . /etc/default/ceph
+[ -z "$SERVER_USER" ] && SERVER_USER=ceph
+[ -z "$SERVER_GROUP" ] && SERVER_GROUP=ceph
+
+case "$1" in
+    configure)
+	:
+    ;;
+    abort-upgrade|abort-remove|abort-deconfigure)
+	:
+    ;;
+
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
+
+#DEBHELPER#
+
+exit 0
+
+
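
This new ceph-osd.postinst does no configuration work yet (both the configure and abort-*
branches are ':'); it only establishes the convention of sourcing /etc/default/ceph so the
daemon user and group names can be overridden. The override it anticipates would look like
(hedged sketch of an optional admin-provided file):

    # /etc/default/ceph
    SERVER_USER=ceph
    SERVER_GROUP=ceph
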
diff -pruN 14.2.16-2/debian/ceph.postinst 15.2.7-0ubuntu4/debian/ceph.postinst
--- 14.2.16-2/debian/ceph.postinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+set -e
+
+dpkg-maintscript-helper rm_conffile \
+	/etc/logrotate.d/ceph 10.2.3-0ubuntu5~ -- "$@"
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/ceph.postrm 15.2.7-0ubuntu4/debian/ceph.postrm
--- 14.2.16-2/debian/ceph.postrm	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph.postrm	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+set -e
+
+dpkg-maintscript-helper rm_conffile \
+	/etc/logrotate.d/ceph 10.2.3-0ubuntu5~ -- "$@"
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/ceph.preinst 15.2.7-0ubuntu4/debian/ceph.preinst
--- 14.2.16-2/debian/ceph.preinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/ceph.preinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+set -e
+
+dpkg-maintscript-helper rm_conffile \
+	/etc/logrotate.d/ceph 10.2.3-0ubuntu5~ -- "$@"
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/changelog 15.2.7-0ubuntu4/debian/changelog
--- 14.2.16-2/debian/changelog	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/changelog	2021-01-18 12:06:58.000000000 +0000
@@ -1,197 +1,227 @@
-ceph (14.2.16-2) unstable; urgency=medium
+ceph (15.2.7-0ubuntu4) hirsute; urgency=medium
 
-  * Add fix-ceph-osd-systemd-target.patch.
+  * Try to clean up obj build-dir on riscv64 to not run out of disk space
+    during build.
 
- -- Thomas Goirand <zigo@debian.org>  Thu, 28 Jan 2021 16:45:23 +0100
+ -- Dimitri John Ledkov <xnox@ubuntu.com>  Mon, 18 Jan 2021 12:06:58 +0000
 
-ceph (14.2.16-1) unstable; urgency=medium
+ceph (15.2.7-0ubuntu3) hirsute; urgency=medium
 
-  * Add allow-binding-on-lo.patch.
+  * Fix ftbfs in hirsute, due to missing atomic include.
+
+ -- Dimitri John Ledkov <xnox@ubuntu.com>  Mon, 04 Jan 2021 14:47:39 +0000
+
+ceph (15.2.7-0ubuntu2) hirsute; urgency=medium
+
+  * No-change rebuild for boost soname change.
+
+ -- Matthias Klose <doko@ubuntu.com>  Sat, 12 Dec 2020 13:04:13 +0100
+
+ceph (15.2.7-0ubuntu1) hirsute; urgency=medium
+
+  [ Corey Bryant ]
+  * d/p/python3.9.patch: Renamed from patch3.9.diff as Vcs .gitignore
+    will otherwise ignore.
+
+  [ James Page ]
   * New upstream release.
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 15 Jan 2021 12:26:14 +0100
+ -- James Page <james.page@ubuntu.com>  Fri, 04 Dec 2020 09:23:59 +0000
 
-ceph (14.2.15-4) unstable; urgency=medium
+ceph (15.2.5-0ubuntu4) hirsute; urgency=medium
 
-  * Add upstream 3 patches for libboost 1.74 (Closes: #977243).
+  * Make Python 3.9 a known Python3 version.
 
- -- Thomas Goirand <zigo@debian.org>  Sun, 13 Dec 2020 16:33:57 +0100
+ -- Matthias Klose <doko@ubuntu.com>  Tue, 24 Nov 2020 09:20:31 +0100
 
-ceph (14.2.15-3) unstable; urgency=medium
-
-  [ Adrian Bunk ]
-  * [197afaf] Merge branch 'debian/unstable' into 'debian/unstable'
-    Portability fixes
-    See merge request ceph-team/ceph!6
-
- -- Thomas Goirand <zigo@debian.org>  Thu, 03 Dec 2020 21:03:06 +0100
-
-ceph (14.2.15-2) unstable; urgency=medium
-
-  * Do not build with clang, instead, set --max-parallel=1, as it seems it
-    worked for Ubuntu. If this doesn't work, we'll disable the non-64 bits
-    arch completely.
-
- -- Thomas Goirand <zigo@debian.org>  Mon, 30 Nov 2020 21:10:12 +0100
-
-ceph (14.2.15-1) unstable; urgency=medium
-
-  * New upstream release (Closes: #956750):
-    - Fix FTBFS with GCC-10 (Closes: #957079).
-    - Fix CVE-2020-10753 (Closes: #963760).
-    - Fix CVE-2020-25660 (Closes: #975275).
-  * Refreshed patches:
-    - 32bit-avoid-size_t.patch
-    - add-option-to-disable-ceph-dencoder.patch
-    - bluefs-use-uint64_t-for-len.patch
-    - disable-crypto.patch
-    - mds-purgequeue-use_uint64_t.patch
-  * Raw wrap-and-sort -bastk.
-  * Added myself as uploader.
-  * Added librdkafka-dev as build-depends.
-  * Fixed debian/libcephfs-dev.install.
-  * debian/calc-max-parallel.sh: allow for more values of --max-parallel so
-    that ceph builds faster on more powerful machines.
-  * Add a patch to make Ceph aware of Python 3.9:
-    - make-ceph-python-3.9-aware.patch
-  * Add a debian/source/options to ignore CRLF to CR changes.
-  * Use --home in ceph-common.postinst when creating the Ceph system user.
-  * Updated debian/libcephfs2.symbols (added 3 new symbols).
-  * Package: ceph-resource-agents, switch Priority: to optional.
-  * Add debian/source.lintian-overrides to allow .js which shipped by upstream
-    in both compiled and source version.
-  * Removed now useless dpkg-maintscript-helper rm_conffile: they have been
-    around for more than one release.
-  * debian/ceph-osd.postinst: remove as it's doing nothing.
-  * Fix debian/lib{rbd1,rados2}.symbols (3 missing symbols).
-
- -- Thomas Goirand <zigo@debian.org>  Fri, 27 Nov 2020 23:58:00 +0100
-
-ceph (14.2.9-1) unstable; urgency=high
-
-  * [dc4e7cf] Update upstream source from tag 'upstream/14.2.9'
-    Update to upstream version '14.2.9'
-    with Debian dir 544321a5823a0e5b826198888c79cb3ed4dd9b2e
-    Closing #956142 / CVE-2020-1760
-
- -- Bernd Zeimetz <bzed@debian.org>  Fri, 24 Apr 2020 22:43:18 +0200
-
-ceph (14.2.8-2) unstable; urgency=medium
-
-  * [eed9184] Fix 32bit issues in src/mds/PurgeQueue.cc
-    mips64el (as reported in the bug report) built fine.
-    s390x is a buildd issue, gets stuck sometimes for unknown (and not
-    reproducible) reasons.
-    Other build issues are fixed hopefully.
-    Thanks to Ivo De Decker (Closes: #953749)
-
- -- Bernd Zeimetz <bzed@debian.org>  Mon, 23 Mar 2020 00:14:25 +0100
-
-ceph (14.2.8-1) unstable; urgency=medium
-
-  * [e14a030] Update upstream source from tag 'upstream/14.2.8'
-    Update to upstream version '14.2.8'
-    with Debian dir 6c28e7789e84694b28409f0aceb9bfe6f2acdade
-    Closes: #953364
-  * [6bc660e] Refreshing patches
-  * [9e935e3] Fix FTBFS on riscv64.
-    Thanks to Aurelien Jarno (Closes: #953003)
-  * [4287e84] fix lintian override
-
- -- Bernd Zeimetz <bzed@debian.org>  Sun, 08 Mar 2020 22:31:55 +0100
-
-ceph (14.2.7-1) unstable; urgency=medium
-
-  * [a68d12f] Update upstream source from tag 'upstream/14.2.7'
-    Update to upstream version '14.2.7'
-    with Debian dir 1125b03b88e8da85cf70f7cc540c1c30fa95d456
-    This also contains a fix for
-    [CVE-2020-1700]: Fixed a flaw in RGW beast frontend that
-    could lead to denial of service from an unauthenticated client.
-    CVE-2020-1699 was patched since 14.2.6-3.
-  * [1474595] Removing patches applied upstream
-
- -- Bernd Zeimetz <bzed@debian.org>  Sat, 01 Feb 2020 00:47:27 +0100
-
-ceph (14.2.6-6) unstable; urgency=medium
-
-  * [c18d632] Remove broken patch (Option::TYPE_SIZE as uint64_t).
-    This patch actually results in a segfault while parsing options.
-    Revert it and then see what it was actually needed for on 32bit.
-    Thanks to Martin Mlynář (Closes: #949743)
-  * [b5c7be8] Update patch with a non-segfaulting version.
-    Took me some time to figure out that in option.cc size_t is not what you
-    expect, but a struct instead, just with the same name.
-    So to make clang happy we'll use static_cast<std::size_t> now, although
-    this will for sure show various other issues on 32bit as not all
-    possible config values will fit into 32bit numbers.
-    Fixing this will need a lot of upstream work unfortunately.
-
- -- Bernd Zeimetz <bzed@debian.org>  Sun, 26 Jan 2020 15:39:29 +0100
-
-ceph (14.2.6-5) unstable; urgency=medium
-
-  * [966df1a] Removing cython from Build-deps.
-    Thanks to Sandro Tosi (Closes: #936282)
-  * [38fdd89] clang ist not available on sh4
-  * [3c97474] Replace findstring by filter where needed.
-    Thanks jrtc27 for the hint.
-  * [c694d0d] Pass -DHAVE_NEON=0 to cmake on armel.
-    Instead of "fixing" CMakeCache.txt.
-  * [825a942] Revert "Don't build ceph on mipsel."
-    This reverts commit 424ea9b82f956daa8fa9c0539d0752ccfdc7caf6.
-    Thanks to peter green (Closes: #949528)
-  * [79aef26] Remove merge fail
-
- -- Bernd Zeimetz <bzed@debian.org>  Tue, 21 Jan 2020 21:21:17 +0100
-
-ceph (14.2.6-4) unstable; urgency=high
-
-  * Really uploading to unstable now.
-
- -- Bernd Zeimetz <bzed@debian.org>  Sat, 18 Jan 2020 19:58:32 +0100
-
-ceph (14.2.6-3) experimental; urgency=high
-
-  * Uploading to unstable, including changes to make ceph
-    build on mipsel again (Closes: #948722).
-  * [1bac6f0] mgr/dashboard: fix improper URL checking.
-    This change disables up-level references beyond the HTTP base directory.
-    [CVE-2020-1699]
-    Upstream commit 0443e40c11280ba3b7efcba61522afa70c4f8158
-    Thanks to Salvatore Bonaccorso (Closes: #949206)
-  * [720ce76] Updating changelog (from experimental)
-
- -- Bernd Zeimetz <bzed@debian.org>  Sat, 18 Jan 2020 19:11:22 +0100
-
-ceph (14.2.6-2) experimental; urgency=medium
-
-  * [fc4df2f] d/rules: make sure mips doesn't match on mipsel/mips64el
-  * [3d9ed86] Add patch to disable dencoder build if needed.
-  * [ce0c9bb] Don't build ceph-dencoder on mipsel.
-    Its a debugging tool and does not build on mipsel due to g++/clang++
-    running out of memory.
-    Replacing it by a shellscript that prints an error message.
-  * [0d83e22] Configure gbp for experimental
+ceph (15.2.5-0ubuntu3) hirsute; urgency=medium
 
- -- Bernd Zeimetz <bzed@debian.org>  Thu, 16 Jan 2020 23:59:37 +0100
+  * No-change rebuild to build with python3.9 as default.
 
-ceph (14.2.6-1) unstable; urgency=medium
+ -- Matthias Klose <doko@ubuntu.com>  Thu, 19 Nov 2020 20:18:25 +0100
+
+ceph (15.2.5-0ubuntu2) hirsute; urgency=medium
+
+  * Also install ceph_ll_client.h needed by libcephfs.h header file
+
+ -- Gianfranco Costamagna <locutusofborg@debian.org>  Sun, 01 Nov 2020 09:51:15 +0100
+
+ceph (15.2.5-0ubuntu1) groovy; urgency=medium
+
+  [ Corey Bryant ]
+  * d/control: Remove Breaks/Replaces that are older than Focal (LP: #1878419).
+  * d/p/enable-strsignal.patch: Enable HAVE_REENTRANT_STRSIGNAL for groovy
+    to fix FTBFS with glibc 2.32.
+  * d/p/rules: Enable RelWithDebInfo (LP: #1894453).
 
   [ James Page ]
-  * [2e50d5b] Fix misnamed package Recommends brbd1 -> librbd1
-  * [11df8ed] Add missing debhelper misc:Depends for python3-ceph
-  * [08c3c8b] Add missing Depends on python3-{distutils,routes} to ceph-mgr-dashboard package
+  * New upstream point release (LP: #1898200).
+  * d/p/fix-crash-in-committed-osd-maps.patch: Drop, included upstream.
+  * d/p/*: Refresh.
+  * d/libcephfs-dev.install: Drop ceph_statx.h.
 
-  [ Bernd Zeimetz ]
-  * [a87f434] Update upstream source from tag 'upstream/14.2.6'
-    Update to upstream version '14.2.6'
-    with Debian dir f37aa9f99ec09cc88d8e5e468c1f642fa7f77ef1
-  * [e91626b] Revert "Fix ceph-mgr - indefinite queue growth hangs"
-    This reverts commit 010db9a30458a6417ff667c3c11a3870edb8ee0c.
-    Patches were applied upstream for 14.2.6
+  [ Dimitri John Ledkov ]
+  * d/rules,control: Enable boost context usage on s390x (LP: #1694926).
+
+ -- James Page <james.page@ubuntu.com>  Tue, 06 Oct 2020 12:01:52 +0100
+
+ceph (15.2.3-0ubuntu3) groovy; urgency=medium
+
+  * d/control: Drop BD on obsolete cython (LP: #1891820).
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Mon, 17 Aug 2020 13:46:06 -0400
+
+ceph (15.2.3-0ubuntu2) groovy; urgency=medium
+
+  * d/p/fix-crash-in-committed-osd-maps.patch: Fix ceph-osd crash
+    when processing osd map updates (LP: #1891567).
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 14 Aug 2020 11:30:12 -0400
+
+ceph (15.2.3-0ubuntu1) groovy; urgency=medium
+
+  [ Chris MacNaughton ]
+  * d/control: Add python3-yaml to the Ceph dashboard depends (LP: #1876325)
+
+  [ James Page ]
+  * New upstream release (LP: #1880084).
+  * d/p/revert-rgw-move-frontends-initial-init-to-after-glob.patch:
+    Dropped, fix upstream.
+  * d/p/32bit-fixes.patch: Misc updates for 32bit arch build failures.
+  * d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 08 Jun 2020 09:20:21 +0100
+
+ceph (15.2.1-0ubuntu2) focal; urgency=medium
+
+  * Link with -pthread to fix FTBFS on riscv64.
+
+ -- William Grant <wgrant@ubuntu.com>  Sun, 19 Apr 2020 13:58:15 +1000
+
+ceph (15.2.1-0ubuntu1) focal; urgency=high
+
+  * New upstream point release for Octopus (LP: #1873193):
+    - CVE-2020-1759: Fixed nonce reuse in msgr V2 secure mode.
+    - CVE-2020-1760: Fixed XSS due to RGW GetObject header-splitting.
+  * d/copyright: Restore excluded files.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 17 Apr 2020 20:08:37 +0100
+
+ceph (15.2.0-0ubuntu2) focal; urgency=high
+
+  * d/p/revert-rgw-move-frontends-initial-init-to-after-glob.patch:
+    Revert change to initialize global ceph context before determination
+    of the frontend in use, ensuring that privs are not dropped before
+    any frontend port binding to ports < 1024 has been completed
+    (LP: #1869324).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 08 Apr 2020 10:39:39 +0100
+
+ceph (15.2.0-0ubuntu1) focal; urgency=medium
+
+  * New upstream release for Ceph Octopus.
+  * d/control: Fix dependencies for ceph-mds package to avoid issues
+    with missing C++ symbols (LP: #1862850).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 25 Mar 2020 08:29:08 +0000
+
+ceph (15.1.1-0ubuntu1) focal; urgency=medium
+
+  * New upstream release candidate for Octopus.
+  * d/p/*: Refresh
+  * d/ceph-mgr.install: Drop ansible module.
+  * d/ceph-mgr.install: Drop deepsea module.
+  * d/control: Make cephadm Recommend (but not require) docker.io.
+  * d/control: Add python3-jsonpatch to ceph-mgr-rook Depends.
+  * d/ceph-mgr.install: Rename orchestrator_cli to orchestrator.
+  * d/ceph-mgr.install: Add osd_support module.
+  * d/control,ceph-mgr-modules-core.install: Add new arch:all package
+    for mgr modules, rename plugin->module as needed.
+  * d/ceph-mgr.install: Drop modules that have moved to
+    ceph-mgr-modules-core.
+  * d/control: fix upgrade path for move of modules to -core package.
+  * d/control: Move python3-openssl Depends to -modules-core package.
+  * d/control: Add missing Depends on python3-dateutil to
+    ceph-mgr-modules-core.
+  * d/*.symbols: Refresh for new RC.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 23 Mar 2020 11:19:04 +0000
+
+ceph (15.1.0-0ubuntu3) focal; urgency=medium
+
+  * d/*.symbols: Update symbols files for new release.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 21 Feb 2020 08:51:34 +0000
+
+ceph (15.1.0-0ubuntu2) focal; urgency=medium
+
+  * d/rules,control: Disable Kafka endpoint support for RGW.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 10 Feb 2020 11:53:47 +0000
+
+ceph (15.1.0-0ubuntu1) focal; urgency=medium
+
+  * SECURITY UPDATE: New upstream release (LP: #1861789) including
+    fixes for:
+    - CVE-2020-1699
+    - CVE-2020-1700
+  * d/p/32466.patch-*: Drop, included in release.
+  * New upstream release candidate for Octopus.
+  * d/p/32bit-avoid-overloading.patch,32bit-avoid-size_t.patch,bluefs-
+    use-uint64_t-for-len.patch,fix-clang-build.patch: Drop, no longer
+    needed.
+  * Sync packaging changes with upstream:
+    - d/control,rules,ceph-mgr-ssh.*: Drop ceph-mgr-ssh package.
+    - d/control,rules,{ceph-mgr-}cephadm.*: Add new ceph-mgr-cephadm
+      and cephadm packages, install sudoers file.
+    - d/control,ceph-immutable-object-cache.*: Add new
+      ceph-immutable-object-cache package.
+    - d/control,python3-ceph-common.install: Add new python3-ceph-common
+      package.
+  * d/*: wrap-and-sort -bast.
+  * d/control: Add librdkafka-dev to BD's.
+  * d/p/32bit-fixes.patch: Misc fixes for compilation failures on 32 bit
+    architectures.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 07 Feb 2020 11:24:29 +0000
+
+ceph (14.2.5-3ubuntu5) focal; urgency=medium
+
+  * No change rebuild against new boost1.71 ABI
+
+ -- Dimitri John Ledkov <xnox@ubuntu.com>  Mon, 03 Feb 2020 20:03:18 +0000
+
+ceph (14.2.5-3ubuntu4) focal; urgency=medium
+
+  * d/rules: Don't switch to using clang for armhf on Ubuntu as g++
+    is just fine and does not produce an over linked set of binaries.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 27 Jan 2020 09:20:28 +0000
+
+ceph (14.2.5-3ubuntu3) focal; urgency=medium
+
+  * No-change rebuild to build with python3.8.
+
+ -- Matthias Klose <doko@ubuntu.com>  Sat, 25 Jan 2020 05:46:31 +0000
+
+ceph (14.2.5-3ubuntu2) focal; urgency=medium
+
+  * d/control: Recommends->Suggests on ceph-fuse for ceph-mds
+    package to avoid main inclusion.
+  * d/rules: Skip radosgw, ceph-common and rbd-mirror packages on i386.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 14 Jan 2020 09:27:03 +0000
+
+ceph (14.2.5-3ubuntu1) focal; urgency=medium
+
+  * Merge from Debian unstable, remaining changes:
+    - d/control: Add missing Depends on python3-{distutils,routes} to
+      ceph-mgr-dashboard package (LP: #1858304).
+  * All other changes merged into Debian packaging (Thanks Bernd).
+  * d/control: Fix misnamed package Recommends brbd1 -> librbd1.
+  * d/control: Add missing debhelper misc:Depends for python3-ceph.
 
- -- Bernd Zeimetz <bzed@debian.org>  Sun, 12 Jan 2020 23:13:27 +0100
+ -- James Page <james.page@ubuntu.com>  Fri, 10 Jan 2020 09:22:49 +0000
 
 ceph (14.2.5-3) unstable; urgency=medium
 
@@ -513,6 +543,224 @@ ceph (14.2.4-1) unstable; urgency=medium
 
  -- Bernd Zeimetz <bzed@debian.org>  Mon, 18 Nov 2019 14:18:10 +0100
 
+ceph (14.2.4-0ubuntu3) focal; urgency=medium
+
+  * Drop uninstallable and unneeded server binaries on i386.
+
+ -- Steve Langasek <steve.langasek@ubuntu.com>  Sat, 21 Dec 2019 17:05:10 -0600
+
+ceph (14.2.4-0ubuntu2) focal; urgency=medium
+
+  [ Dariusz Gadomski ]
+  * d/p/issue37490.patch: Cherry pick fix to optimize LVM queries in
+    ceph-volume, resolving performance issues in systems under heavy load
+    or with large numbers of disks (LP: #1850754).
+
+  [ James Page ]
+  * d/p/issue40114.patch: Cherry pick endian fixes to resolve issues
+    using Ceph on big-endian architectures such as s390x (LP: #1851290).
+
+ -- Dariusz Gadomski <dariusz.gadomski@canonical.com>  Wed, 06 Nov 2019 11:08:53 +0100
+
+ceph (14.2.4-0ubuntu1) focal; urgency=medium
+
+  * New upstream release (LP: #1850901):
+    - d/p/more-py3-compat.patch,ceph-volume-wait-for-lvs.patch,
+      ceph-volume-wait-for-lvs.patch: Drop, included upstream.
+    - d/p/bluefs-use-uint64_t-for-len.patch: Cherry pick fix to resolve
+      FTBFS on 32 bit architectures.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 04 Nov 2019 14:10:20 +0000
+
+ceph (14.2.2-0ubuntu4) focal; urgency=medium
+
+  * No-change rebuild to build with python3.8.
+
+ -- Matthias Klose <doko@ubuntu.com>  Fri, 18 Oct 2019 18:38:16 +0000
+
+ceph (14.2.2-0ubuntu3) eoan; urgency=medium
+
+  * d/rules: Disable SPDK support as this generates a build which
+    has a minimum CPU baseline of 'corei7' on x86_64 which is not
+    compatible with older CPU's (LP: #1842020).
+  * d/p/issue40781.patch: Cherry pick fix for py3 compatibility in ceph-
+    crash.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 03 Sep 2019 14:52:38 +0100
+
+ceph (14.2.2-0ubuntu2) eoan; urgency=medium
+
+  [ Eric Desrochers ]
+  * Ensure that daemons are not automatically restarted during package
+    upgrades (LP: #1840347):
+    - d/rules: Use "--no-restart-after-upgrade" and "--no-stop-on-upgrade"
+      instead of "--no-restart-on-upgrade".
+    - d/rules: Drop exclusion for ceph-[osd,mon,mds] for restarts.
+
+  [ Jesse Williamson ]
+  * d/p/civetweb-755-1.8-somaxconn-configurable*.patch: Backport changes
+    to civetweb to allow tuning of SOMAXCONN in Ceph RADOS Gateway
+    deployments (LP: #1838109).
+
+  [ James Page ]
+  * d/p/ceph-volume-wait-for-lvs.patch: Cherry pick inflight fix to
+    ensure that required wal and db devices are present before
+    activating OSD's (LP: #1828617).
+
+  [ Steve Beattie ]
+  * SECURITY UPDATE: RADOS gateway remote denial of service
+    - d/p/CVE-2019-10222.patch: rgw: asio: check the remote endpoint
+      before processing requests.
+    - CVE-2019-10222
+
+ -- James Page <james.page@ubuntu.com>  Thu, 29 Aug 2019 13:54:25 +0100
+
+ceph (14.2.2-0ubuntu1) eoan; urgency=medium
+
+  * New upstream release.
+  * d/p/fix-py3-encoding-fsid.patch: Drop, no longer required.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 28 Aug 2019 08:42:02 +0100
+
+ceph (14.2.1-0ubuntu3) eoan; urgency=medium
+
+  * d/p/pybind-auto-encode-decode-cstr.patch: Drop, reverted upstream.
+  * d/p/fix-py3-encoding-fsid.patch: Cherry pick correct fix to resolve
+    FSID encoding issues under Python 3 (LP: #1833079).
+
+ -- James Page <james.page@ubuntu.com>  Tue, 25 Jun 2019 08:02:09 +0100
+
+ceph (14.2.1-0ubuntu2) eoan; urgency=medium
+
+  * d/p/pybind-auto-encode-decode-cstr.patch: Cherry pick fix to ensure
+    that encoding/decoding of strings is correctly performed under
+    Python 3 (LP: #1833079).
+
+ -- James Page <james.page@ubuntu.com>  Fri, 21 Jun 2019 07:08:20 +0100
+
+ceph (14.2.1-0ubuntu1) eoan; urgency=medium
+
+  * New upstream release.
+  * d/p/misc-32-bit-fixes.patch: Drop, included upstream.
+  * d/p/py37-compat.patch: Drop, included upstream.
+  * d/p/collections.abc-compat.patch: Drop, included in release.
+  * d/p/*: Refresh.
+  * d/*: Re-sync packaging with upstream for Nautilus release.
+  * d/control,ceph-test.*,rules: Disable build of test binaries, drop
+    ceph-test binary package (reduce build size).
+  * d/control,rules: Use system boost libraries (reduce build time).
+  * d/control: Add dependency on smartmontools, suggest use of nvme-cli
+    for ceph-osd package.
+  * d/p/32bit-*.patch: Fix misc 32 bit related issues which cause
+    compilation failures on armhf and i386 architectures.
+  * d/control: Add Breaks/Replaces on ceph-common for ceph-argparse to
+    deal with move of Python module.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 29 May 2019 05:25:27 +0100
+
+ceph (13.2.4+dfsg1-0ubuntu2) disco; urgency=medium
+
+  * Rebuild for libleveldb1v5 -> libleveldb1d transition.
+
+ -- Adam Conrad <adconrad@ubuntu.com>  Fri, 12 Apr 2019 12:37:05 -0600
+
+ceph (13.2.4+dfsg1-0ubuntu1) disco; urgency=medium
+
+  * New upstream release (LP: #1810766).
+  * d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 08 Jan 2019 11:40:47 +0000
+
+ceph (13.2.2+dfsg1-0ubuntu4) disco; urgency=medium
+
+  * d/p/more-py3-compat.patch: Add more py3 fixes.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 07 Dec 2018 10:31:26 +0000
+
+ceph (13.2.2+dfsg1-0ubuntu3) disco; urgency=medium
+
+  * d/p/more-py3-compat.patch: Misc Python 3 fixes in ceph-create-keys.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 05 Dec 2018 16:37:07 +0000
+
+ceph (13.2.2+dfsg1-0ubuntu2) disco; urgency=medium
+
+  * d/tests/python-ceph: Fix python3 test support resolving
+    autopkgtest failure.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 30 Nov 2018 13:00:55 +0000
+
+ceph (13.2.2+dfsg1-0ubuntu1) disco; urgency=medium
+
+  * New upstream point release.
+  * d/p/*: Refresh.
+  * d/control,python-*.install,rules: Drop Python 2 support.
+  * d/tests: Update for Python 2 removal.
+  * d/p/misc-32-bit-fixes.patch: Update type of rgw_max_attr_name_len,
+    resolving SIGABRT in radosgw (LP: #1805145).
+  * d/p/boost-py37-compat.patch: Fix compilation issue with boost
+    imports conflicting with ceph's assert.h header.
+  * d/p/collections.abc-compat.patch: Selective cherry-pick of upstream
+    fix for future compatibility with Python 3.8, avoiding deprecation
+    warnings under Python 3.7.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 29 Nov 2018 09:34:25 +0000
+
+ceph (13.2.1+dfsg1-0ubuntu3) disco; urgency=medium
+
+  * No-change rebuild for python3.7 as the default python3.
+
+ -- Matthias Klose <doko@ubuntu.com>  Tue, 30 Oct 2018 19:25:09 +0100
+
+ceph (13.2.1+dfsg1-0ubuntu2) cosmic; urgency=medium
+
+  * d/ceph-mds.install: Install missing systemd configuration
+    (LP: #1789927).
+
+ -- James Page <james.page@ubuntu.com>  Thu, 30 Aug 2018 16:07:10 +0100
+
+ceph (13.2.1+dfsg1-0ubuntu1) cosmic; urgency=medium
+
+  * Re-instate 32bit architectures.
+    - d/control: Switch back to linux-any
+    - d/p/misc-32-bit-fixes.patch: Misc fixes for compilation
+      failures under 32 bit architectures.
+    - d/rules: Disable SPDK integration under i386.
+  * Repack upstream tarball, excluding non-DFSG sources (LP: #1750848):
+    - d/copyright: Purge upstream tarball of minified js files, which
+      are neither shipped in binaries or required for package build.
+    - d/watch: Add dversionmangle for +dfsg\d version suffix.
+  * d/control,rules: Drop requirement for gcc-7 for arm64.
+  * d/ceph-osd.udev: Add udev rules for sample LVM layout for OSD's,
+    ensuring that LV's have ceph:ceph ownership (LP: #1767087).
+
+ -- James Page <james.page@ubuntu.com>  Thu, 09 Aug 2018 14:20:08 +0100
+
+ceph (13.2.1-0ubuntu1) cosmic; urgency=medium
+
+  * d/copyright,source.lintian-overrides: Exclude jsonchecker component
+    of rapidjson avoiding license-problem-json-evil non-free issue.
+  * New upstream point release.
+  * d/control: Remove obsolete X{S}-* fields.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 01 Aug 2018 11:13:12 +0100
+
+ceph (13.2.0-0ubuntu1) cosmic; urgency=medium
+
+  * New upstream release.
+  * Sync with changes in upstream packaging:
+    - d/*.install,rules: Use generated systemd unit files for install
+    - d/ceph-test.install: Drop binaries removed upstream.
+  * d/p/*: Refresh and drop as needed.
+  * d/*.symbols: Refresh for new release.
+  * d/rules,calc-max-parallel.sh: Automatically calculate the maximum
+    number of parallel compilation units based on total memory.
+  * d/control: Drop support for 32 bit architectures.
+  * d/control: Update Vcs-* fields for Ubuntu.
+  * d/control: Drop min python version field.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 24 Jul 2018 11:01:39 +0100
+
 ceph (12.2.11+dfsg1-2.1) unstable; urgency=medium
 
   * Non-maintainer upload.
@@ -680,6 +928,159 @@ ceph (12.2.8+dfsg1-1) unstable; urgency=
 
  -- Gaudenz Steinlin <gaudenz@debian.org>  Sun, 28 Oct 2018 23:43:10 +0100
 
+ceph (12.2.4-0ubuntu1.1) bionic; urgency=medium
+
+  * d/p/update-java-source-target-flags.patch: Use --release instead
+    of -source/-target and set release to 7 as that is the minimum
+    required for OpenJDK 11 (LP: #1766998).
+  * d/p/replace-javah-usage.patch: call -h during compilation time to
+    generate native header files and remove the javah call as the binary
+    is no longer part of openjdk-10 - javah has been deprecated since
+    openjdk-9 (LP: #1766995).
+
+ -- Tiago Stürmer Daitx <tiago.daitx@ubuntu.com>  Wed, 25 Apr 2018 01:45:34 +0000
+
+ceph (12.2.4-0ubuntu1) bionic; urgency=medium
+
+  [ James Page ]
+  * New upstream point release (LP: #1750826, #1731819, #1718134).
+  * d/ceph-osd.install: Add ceph-volume tools (LP: #1750376).
+  * d/*: wrap-and-sort -bast.
+  * d/control,compat: Bump debhelper compat level to 10.
+  * d/control: Switch to using python3-sphinx.
+  * d/rules: Switch to using WITH_BOOST_CONTEXT for rgw beast frontend
+    enablement.
+  * d/rules,control: Switch to using vendored boost as 1.66 is required.
+  * d/control: Add python-jinja2 to Depends of ceph-mgr (LP: #1752308).
+
+  [ Tiago Stürmer Daitx ]
+  * Update java source and target flags from 1.5 to 1.8. Allows it to run
+    using OpenJDK 8 or earlier and to be build with OpenJDK 9, 10, and 11
+    (LP: #1756854).
+
+  [ James Page ]
+  * d/ceph*.prerm: Drop, no longer needed as only use for removed upstart
+    and init.d methods of managing ceph daemons (LP: #1754585).
+
+ -- James Page <james.page@ubuntu.com>  Tue, 20 Mar 2018 09:28:22 +0000
+
+ceph (12.2.2-0ubuntu2) bionic; urgency=medium
+
+  * d/control: Re-order Recommends to prefer chrony over time-daemon
+    (chrony/openntp) and ntp for Ubuntu (LP: #1744072).
+
+ -- Christian Ehrhardt <christian.ehrhardt@canonical.com>  Fri, 16 Feb 2018 09:19:21 +0100
+
+ceph (12.2.2-0ubuntu1) bionic; urgency=medium
+
+  * New upstream point release (LP: #1739002).
+    - d/p/armhf-ftbfs.patch: Cherry pick upstream fix to resolve
+      FTBFS on armhf.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 20 Dec 2017 08:44:40 +0000
+
+ceph (12.2.1-0ubuntu1) bionic; urgency=medium
+
+  * New upstream point release (LP: #1728576).
+
+ -- James Page <james.page@ubuntu.com>  Tue, 31 Oct 2017 09:13:22 +0000
+
+ceph (12.2.0-0ubuntu2) bionic; urgency=medium
+
+  * No-change rebuild for boost soname change.
+
+ -- Matthias Klose <doko@ubuntu.com>  Thu, 26 Oct 2017 17:10:59 +0000
+
+ceph (12.2.0-0ubuntu1) artful; urgency=medium
+
+  * New upstream stable release.
+  * d/p/32bit-compat-service-daemon.patch: Dropped, accepted upstream.
+  * d/rules: Install ceph-volume systemd configuration.
+  * d/*.symbols: Refresh for new release.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 30 Aug 2017 17:27:07 +0100
+
+ceph (12.1.2-0ubuntu2) artful; urgency=medium
+
+  * d/p/rocksdb-fallthrough-i386.patch: Mark intentional fallthroughs
+    for compatibility with gcc-7.
+  * d/p/32bit-compat-service-daemon.patch: Fix implicit type conversion
+    for Boost variant types on 32 bit architectures (LP: #1709396).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 09 Aug 2017 16:43:46 +0100
+
+ceph (12.1.2-0ubuntu1) artful; urgency=medium
+
+  * New release candidate for next stable release.
+  * d/ceph-base.install: Increase scope of install wildcards to ensure
+    that all ceph modules are included in the binary package.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 07 Aug 2017 14:23:59 +0100
+
+ceph (12.1.1-0ubuntu1) artful; urgency=medium
+
+  * New release candidate for next stable release.
+  * d/p/ec-isa-module.patch: Drop, included upstream.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 26 Jul 2017 10:43:04 +0100
+
+ceph (12.1.0-0ubuntu1) artful; urgency=medium
+
+  * New release candidate for next stable release.
+  * Sync packaging changes with upstream:
+    - d/control,python3-*,rules: Add Python 3 support
+    - d/control,python-rgw.*: Add Python rgw module.
+    - d/control,ceph-mgr.*,rules: Add ceph-mgr binary packages.
+    - d/*.symbols: Update for new release.
+    - d/control,rules,*.install: Switch buildsystem to cmake.
+    - d/control,rados-objclass-dev.*: Add rados-objclass-dev
+      binary package.
+    - d/control,ceph-fs-common.install: Drop ceph-fs-common, fold
+      binaries into ceph-common.
+    - d/control,ceph-common.install: Move radosgw-admin to
+      ceph-common package.
+    - d/ceph-common.install: Install crypto modules on amd64 only.
+    - d/*: wrap-and-sort.
+    - d/p/*: Drop existing patches, either upstream or obsolete due
+      to switch to cmake build.
+    - d/control,rules: Drop use of dh-autoreconf.
+  * Use distro provided boost libraries:
+    - d/rules: Enable use of system boost instead of vendored copy.
+    - d/control: Add required boost dependencies >= 1.61.
+  * Support build on s390x:
+    - d/rules: Disable RGW Beast frontend on s390x architecture.
+    - d/control: Scope libboost-{context,coroutine}-dev to exclude
+      s390x.
+  * Support build on i386:
+    - d/p/0001-CoreLocalArray-class.patch,
+      d/p/0002-core-local-array-type-conversions.patch,
+      d/p/0003-Core-local-statistics.patch: Cherry pick rocksdb commits
+      to resolve compatibility with gcc-6 on i386.
+  * d/p/ec-isa-module.patch: Drop versioning of libec_isa.so to ensure
+    that it gets installed as part of the package.
+  * d/control,rules: Workaround arm64 compilation segfault by forcing
+    use of gcc-7 toolchain for this architecture.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 07 Jul 2017 08:02:02 +0000
+
+ceph (10.2.7-0ubuntu1) artful; urgency=medium
+
+  * New upstream point release (LP: #1684527):
+    - d/p/disable-openssl-linking.patch: Dropped, no longer required.
+    - d/control: Add BD on libssl-dev to support optional runtime
+      loading of openssl in the radosgw.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 21 Apr 2017 09:20:08 +0100
+
+ceph (10.2.6-0ubuntu1) zesty; urgency=medium
+
+  * New upstream stable point release (LP: #1671117):
+    - d/p/osd-limit-omap-data-in-push-op.patch,rgw_rados-creation_time.patch:
+      Dropped, included upstream.
+    - d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 09 Mar 2017 09:23:49 +0000
+
 ceph (10.2.5-7.2) unstable; urgency=medium
 
   * Non-maintainer upload.
@@ -878,6 +1279,302 @@ ceph (10.2.5-1) unstable; urgency=medium
 
  -- Gaudenz Steinlin <gaudenz@debian.org>  Sat, 17 Dec 2016 22:40:22 +0100
 
+ceph (10.2.5-0ubuntu2) zesty; urgency=medium
+
+  * d/rules: Install upstream provided systemd targets and ensure they
+    are enabled and started on install to ensure that integrations aligned
+    to upstream packaging work with Ubuntu packages (LP: #1646583).
+  * d/rules,d/p/powerpc_libatomic.patch: Ensure linking with -latomic,
+    resolving FTBFS on powerpc architecture.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 17 Jan 2017 11:10:40 +0000
+
+ceph (10.2.5-0ubuntu1) zesty; urgency=medium
+
+  * New upstream stable release (LP: #1649856).
+    - d/p/32bit-ftbfs.patch: Drop, no longer needed.
+    - d/p/*: Refresh.
+    - d/ceph-common.install: Switch to RSA keys for drop.ceph.com.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 15 Dec 2016 07:51:11 +0000
+
+ceph (10.2.3-0ubuntu5) zesty; urgency=medium
+
+  * d/ceph.{postinst,preinst,postrm}: Ensure that ceph logrotate
+    configuration is purged on upgrade from pre-yakkety installs
+    (LP: #1635844).
+  * d/ceph-base.*,d/*.logrotate: Install logrotate configuration
+    in ceph-common, ensuring that all daemons get log rotation on
+    log files, deal with removal of logrotate configuration in
+    ceph-base for upgrades (LP: #1609866).
+
+ -- James Page <james.page@ubuntu.com>  Thu, 01 Dec 2016 08:54:21 +0000
+
+ceph (10.2.3-0ubuntu4) zesty; urgency=high
+
+  * No change rebuild against boost1.62.
+
+ -- Dimitri John Ledkov <xnox@ubuntu.com>  Tue, 01 Nov 2016 16:12:26 +0000
+
+ceph (10.2.3-0ubuntu3) zesty; urgency=medium
+
+  * rgw: Fixes for creation times for buckets (LP: #1587261).
+    - d/p/rgw_rados-creation_time.patch: Backport fix from upstream master.
+      - Fix logic error that leads to creation time being 0 instead of current
+        time when creating buckets.
+
+ -- Frode Nordahl <frode.nordahl@canonical.com>  Wed, 26 Oct 2016 14:39:55 +0200
+
+ceph (10.2.3-0ubuntu2) yakkety; urgency=medium
+
+  * d/control: Add Pre-Depends on ceph-common to ceph-osd to ensure
+    that ceph user and group are created prior to unpacking of udev
+    rules (LP: #1631328).
+  * d/control: Drop surplus Recommends on ceph-common for ceph-mon, as
+    it already has a in-direct Depends via ceph-base.
+
+ -- James Page <james.page@ubuntu.com>  Sat, 08 Oct 2016 16:16:00 +0100
+
+ceph (10.2.3-0ubuntu1) yakkety; urgency=medium
+
+  * New upstream point release (LP: #1628809):
+    - d/p/rocksdb-flags.patch: Dropped, included upstream.
+    - d/p/*: Refreshed.
+    - d/p/32bit-ftbfs.patch: Cherry pick fix for 32bit arch compat.
+    - d/ceph-{fs-common,fuse}.install: Fix install locations
+      for mount{.fuse}.ceph.
+  * Limit the amount of data per chunk in omap push operations to 64k,
+    ensuring that OSD threads don't hit timeouts during recovery
+    operations (LP: #1628750):
+    - d/p/osd-limit-omap-data-in-push-op.patch: Cherry pick fix from
+      upstream master branch.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 29 Sep 2016 21:44:33 +0100
+
+ceph (10.2.2-0ubuntu5) yakkety; urgency=medium
+
+  * No-change rebuild for boost soname change.
+
+ -- Matthias Klose <doko@ubuntu.com>  Thu, 04 Aug 2016 08:13:41 +0000
+
+ceph (10.2.2-0ubuntu4) yakkety; urgency=medium
+
+  * d/rules: Drop override_dh_strip from .PHONY to ensure files actually
+    get stripped.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 20 Jul 2016 09:26:41 +0100
+
+ceph (10.2.2-0ubuntu3) yakkety; urgency=medium
+
+  * d/rules,control: Drop -dbg packages and let Ubuntu builds take care of
+    ddeb generation instead.
+  * d/ceph-mon.install: Only install .py files for ceph_rest_api module.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 19 Jul 2016 10:36:32 +0100
+
+ceph (10.2.2-0ubuntu2) yakkety; urgency=medium
+
+  * d/ceph.install,d/etc/*: Drop pm based power management scripts as they
+    are not used under systemd (LP: #1455097).
+  * d/*: Split ceph-osd and ceph-mon into separate binary packages and add
+    new ceph-base binary package inline with upstream packaging changes
+    (LP: #1596063).
+  * d/rules,*.{postinst,prerm}: Drop install and management of upstart
+    configuration files; this package version onwards will only ever
+    target >= Xenial, which is a systemd baseline.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 14 Jul 2016 10:51:10 +0100
+
+ceph (10.2.2-0ubuntu1) yakkety; urgency=medium
+
+  * New upstream version (LP: #1585660):
+    - d/ceph.install: Drop install of 60-ceph-partuuid-workaround.rules
+      as no longer part of upstream codebase.
+  * d/control: Bumped Standards-Version to 3.9.8, no changes.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 16 Jun 2016 17:40:04 +0100
+
+ceph (10.2.1-0ubuntu2) yakkety; urgency=medium
+
+  * d/ceph-common.{preinst,postinst,postrm}: Ensure that rename of
+    /etc/default/ceph/ceph -> /etc/default/ceph is handled correctly
+    and that any end-user changes are preserved (LP: #1587516).
+
+ -- James Page <james.page@ubuntu.com>  Mon, 13 Jun 2016 20:53:52 +0100
+
+ceph (10.2.1-0ubuntu1) yakkety; urgency=medium
+
+  * New upstream version (LP: #1585660).
+    - d/p/drop-user-group-osd-prestart.patch: Dropped, included upstream.
+  * d/rules,ceph-common.install: Ensure that /etc/default/ceph is a file
+    and not a directory (LP: #1587516).
+
+ -- James Page <james.page@ubuntu.com>  Mon, 06 Jun 2016 09:23:35 +0100
+
+ceph (10.2.0-0ubuntu1) yakkety; urgency=medium
+
+  * Ceph Jewel stable release (LP: #1569249).
+
+ -- James Page <james.page@ubuntu.com>  Thu, 21 Apr 2016 19:54:54 +0100
+
+ceph (10.1.2-0ubuntu1) xenial; urgency=medium
+
+  * New upstream release candidate for Ceph Jewel:
+    - FFe: http://pad.lv/1563714.
+    - d/p/32bit-compat.patch,tasksmax-infinity.patch: Dropped,
+      included upstream.
+  * d/ceph-common.postinst: Silence output of usermod call (LP: #1569249).
+
+ -- James Page <james.page@ubuntu.com>  Thu, 14 Apr 2016 14:46:58 +0100
+
+ceph (10.1.1-0ubuntu1) xenial; urgency=medium
+
+  * New upstream release candidate for Ceph Jewel:
+    - FFe: http://pad.lv/1563714.
+    - d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 08 Apr 2016 16:30:43 +0100
+
+ceph (10.1.0-0ubuntu1) xenial; urgency=medium
+
+  * New upstream release candidate for Ceph Jewel
+    (see http://pad.lv/1563714 for FFe):
+    - d/control,rules,librgw*: Add new binary packages for librgw2.
+    - d/p/fix-systemd-escaping.patch,pybind-flags.patch: Dropped,
+      included upstream.
+    - d/p/*: Refresh remaining patches.
+    - d/control: Add BD on libldap2-dev for rados gateway.
+    - d/p/disable-openssl-linking.patch: Disable build time linking
+      with OpenSSL due to licensing incompatibilities.
+    - d/*.symbols: Add new symbols for RC.
+    - d/python-*.install: Correct wildcards for python module install.
+    - d/p/32bit-compat.patch: Cherry pick upstream fix for 32 bit
+      compatibility, resolving FTBFS on armhf/i386.
+  * d/rules: Strip rbd-mirror package correctly.
+  * d/rules: Install upstart and systemd configurations for rbd-mirror.
+  * d/copyright: Ensure that jerasure and gf-complete are not stripped
+    from the upstream release tarball.
+  * d/p/drop-user-group-osd-prestart.patch: Drop --setuser/--setgroup
+    arguments from call to ceph-osd-prestart.sh; they are not supported
+    and generate spurious non-fatal warning messages (LP: #1557461).
+  * d/p/tasksmax-infinity.patch: Drop systemd limitation of number of
+    processes and threads to long running ceph processes; the default
+    of 512 tasks is way to low for even a modest Ceph cluster
+    (LP: #1564917).
+  * d/rules: Ensure that dh_systemd_start does not insert maintainer
+    script snippets for ceph-mon and ceph-create-keys - service restart
+    should be handled outside of the packaging as it is under upstart
+    and for all other systemd unit files installed (LP: #1563330).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 06 Apr 2016 09:17:59 +0100
+
+ceph (10.0.5-0ubuntu1) xenial; urgency=medium
+
+  * New upstream point release, in preparation for Ceph Jewel.
+    - d/p/*: Refresh patches
+    - d/control: Enable rbd-mirror(-dbg) packages.
+    - d/control: Add BD on libboost-iostreams-dev.
+    - d/p/skip-setup.py-makefiles.patch,rules: Avoid use of virtualenv
+      to install ceph-disk and ceph-detect-init python modules.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 23 Mar 2016 14:07:58 +0000
+
+ceph (10.0.3-0ubuntu1) xenial; urgency=medium
+
+  * New upstream point release, in preparation for Ceph Jewel.
+    - d/p/*: Refresh patches
+    - d/rules,d/p/rocksdb-flags.patch: Enable rocksdb build for
+      experimental bluestore support, add patch to set g++ flags
+      correctly across all Ubuntu archs.
+    - d/rules: Enable gperftools use on arm64 architecture.
+    - d/ceph.install: Add ceph-bluefs-tool to install.
+  * d/*: wrap-and-sort.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 18 Mar 2016 10:41:37 +0000
+
+ceph (10.0.2-0ubuntu1) xenial; urgency=medium
+
+  * New upstream release, in preparation for Ceph Jewel stable release:
+    - d/control: Add python-dev to BD's.
+    - d/p/pybind-flags.patch: Ensure that python flags are correct
+      set for cython rbd build.
+    - d/python-rbd.install: Switch rbd python binding to cython.
+    - d/p/modules.patch: Dropped, no longer required as upstream.
+    - d/control,rbd-nbd.*,rules: Add rbd-nbd package.
+    - d/p/*: Tidy old redundant patches.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 18 Feb 2016 08:07:30 +0000
+
+ceph (9.2.0-0ubuntu6) xenial; urgency=medium
+
+  * d/ceph-mds.dirs: Actually create /var/lib/ceph/mds prior to changing
+    permissions (LP: #1544647).
+  * d/ceph.init: Restore link to init-ceph, resolving un-install failures
+    due to missing init script (LP: #1546112).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 17 Feb 2016 10:33:24 +0000
+
+ceph (9.2.0-0ubuntu5) xenial; urgency=medium
+
+  [ guessi ]
+  * d/ceph-mds.postinst: Fixup syntax error (LP: #1544647).
+
+ -- James Page <james.page@ubuntu.com>  Tue, 16 Feb 2016 12:54:16 +0000
+
+ceph (9.2.0-0ubuntu4) xenial; urgency=medium
+
+  * d/p/fix-systemd-escaping.patch: Ensure that leading '/' is stripped
+    from block device paths when escaping for use in systemd unit
+    names.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 09 Feb 2016 11:03:03 +0000
+
+ceph (9.2.0-0ubuntu3) xenial; urgency=medium
+
+  * d/ceph{-common}.install: Move ceph_daemon module to common package
+    as its required to use the ceph command.
+  * d/rules: Disable parallel builds on arm64, resolving FTBFS due to
+    memory constraints in builders.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 15 Jan 2016 10:00:47 +0200
+
+ceph (9.2.0-0ubuntu2) xenial; urgency=medium
+
+  * d/control: Fixup broken Breaks/Replaces for backports to 14.04.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 06 Jan 2016 10:46:19 +0000
+
+ceph (9.2.0-0ubuntu1) xenial; urgency=medium
+
+  * [754935] Imported Upstream version 9.2.0
+    - [df85c3] Resync relevant packaging changes with upstream.
+    - [be5f82] Refresh patches.
+    - [d1f3fe] Add python-setuptools to BD's for ceph-detect-init.
+    - [b2f926] Add lsb-release to BD's to ensure that python modules are
+               installed to correct locations.
+    - [e4d702] Add python-sphinx to BD's to ensure man pages get generated
+               and installed.
+    - [3ead6e] Correct install location for ceph-monstore-update tool.
+    - [269754] [177b7a] Update symbols for new release.
+  * [6f322e5] Imported Upstream version 10.0.2
+    - [ba06deb] Add python-dev to BD's.
+    - [62c26dc] d/p/fix-systemd-escaping.patch: Ensure that leading '/' is
+                stripped from block device paths when escaping for use in
+                systemd unit names.
+    - [e02b2a1] d/p/pybind-flags.patch: Ensure that python flags are correct
+                set for cython rbd build.
+    - [d9dad09] Switch rbd python binding to cython
+  * [4c45629] Update NEWS file for infernalis changes.
+  * [940491e] Limit number of parallel builds to 2 to reduce memory footprint
+              on builders.
+  * [23e78f3] d/ceph-mds.postinst: Fix syntax error.
+  * [26182b5] d/ceph-mds.dirs: Actually create /var/lib/ceph/mds prior
+              to changing permissions (LP: #1544647).
+  * [414f4c1] d/ceph.init: Restore link to init-ceph, resolving un-install
+              failures due to missing init script (LP: #1546112).
+
+ -- James Page <james.page@ubuntu.com>  Wed, 09 Dec 2015 18:02:30 +0000
+
 ceph (0.94.5-1) experimental; urgency=medium
 
   * [2d330d6] New upstream release:
@@ -1460,3 +2157,4 @@ ceph (0.23.1-1) experimental; urgency=lo
   * Initial release (Closes: #506040)
 
  -- Sage Weil <sage@newdream.net>  Sun, 21 Nov 2010 15:22:21 -0800
+
diff -pruN 14.2.16-2/debian/control 15.2.7-0ubuntu4/debian/control
--- 14.2.16-2/debian/control	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/control	2020-12-04 09:23:59.000000000 +0000
@@ -1,13 +1,17 @@
 Source: ceph
 Section: admin
 Priority: optional
-Maintainer: Ceph Packaging Team <team+ceph@tracker.debian.org>
+Maintainer: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
+XSBC-Original-Maintainer: Ceph Packaging Team <team+ceph@tracker.debian.org>
 Uploaders:
  James Page <jamespage@debian.org>,
  Gaudenz Steinlin <gaudenz@debian.org>,
  Bernd Zeimetz <bzed@debian.org>,
- Thomas Goirand <zigo@debian.org>,
+Homepage: http://ceph.com/
+Vcs-Git: https://salsa.debian.org/ceph-team/ceph.git
+Vcs-Browser: https://salsa.debian.org/ceph-team/ceph
 Build-Depends:
+ clang [armhf mipsel armel m68k sh4],
  cmake,
  cython3,
  debhelper (>= 10~),
@@ -22,18 +26,18 @@ Build-Depends:
  libbabeltrace-ctf-dev,
  libbabeltrace-dev,
  libblkid-dev (>= 2.17),
- libboost-atomic-dev (>= 1.67.0),
- libboost-chrono-dev (>= 1.67.0),
- libboost-context-dev (>= 1.67.0) [!s390x !mips64el !ia64 !m68k !ppc64 !riscv64 !sh4 !sparc64 !x32 !alpha],
- libboost-coroutine-dev (>= 1.67.0) [!s390x !mips64el !ia64 !m68k !ppc64 !riscv64 !sh4 !sparc64 !x32 !alpha],
- libboost-date-time-dev (>= 1.67.0),
- libboost-iostreams-dev (>= 1.67.0),
- libboost-program-options-dev (>= 1.67.0),
- libboost-python-dev (>= 1.67.0),
- libboost-random-dev (>= 1.67.0),
- libboost-regex-dev (>= 1.67.0),
- libboost-system-dev (>= 1.67.0),
- libboost-thread-dev (>= 1.67.0),
+ libboost-atomic-dev (>= 1.71.0),
+ libboost-chrono-dev (>= 1.71.0),
+ libboost-context-dev (>= 1.71.0) [!mips64el !ia64 !m68k !ppc64 !riscv64 !sh4 !sparc64 !x32],
+ libboost-coroutine-dev (>= 1.71.0) [!mips64el !ia64 !m68k !ppc64 !riscv64 !sh4 !sparc64 !x32],
+ libboost-date-time-dev (>= 1.71.0),
+ libboost-iostreams-dev (>= 1.71.0),
+ libboost-program-options-dev (>= 1.71.0),
+ libboost-python-dev (>= 1.71.0),
+ libboost-random-dev (>= 1.71.0),
+ libboost-regex-dev (>= 1.71.0),
+ libboost-system-dev (>= 1.71.0),
+ libboost-thread-dev (>= 1.71.0),
  libbz2-dev,
  libcap-ng-dev,
  libcunit1-dev,
@@ -53,7 +57,6 @@ Build-Depends:
  libnss3-dev,
  liboath-dev,
  librabbitmq-dev,
- librdkafka-dev,
  librdmacm-dev,
  libsnappy-dev,
  libssl-dev,
@@ -77,12 +80,9 @@ Build-Depends:
 Build-Conflicts:
  libcrypto++-dev,
 Standards-Version: 4.2.1
-Vcs-Git: https://salsa.debian.org/ceph-team/ceph.git
-Vcs-Browser: https://salsa.debian.org/ceph-team/ceph
-Homepage: http://ceph.com/
 
 Package: ceph
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-mgr (= ${binary:Version}),
  ceph-mon (= ${binary:Version}),
@@ -96,7 +96,7 @@ Description: distributed storage and fil
  block and file system storage.
 
 Package: ceph-base
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  binutils,
  ceph-common (= ${binary:Version}),
@@ -111,16 +111,6 @@ Depends:
  ${shlibs:Depends},
 Pre-Depends:
  ${misc:Pre-Depends},
-Breaks:
- ceph (<< 10.2.2-0ubuntu2~),
- ceph-common (<< 9.2.0-0~),
- ceph-test (<< 12.2.8+dfsg1-1~),
- python-ceph (<< 0.94.1-1~),
-Replaces:
- ceph (<< 12.2.8+dfsg1-1~),
- ceph-common (<< 9.2.0-0~),
- ceph-test (<< 12.2.8+dfsg1-1~),
- python-ceph (<< 0.94.1-1~),
 Recommends:
  ceph-mds (= ${binary:Version}),
  chrony | time-daemon | ntp,
@@ -138,9 +128,11 @@ Description: common ceph daemon librarie
  are necessary for creating, running, and administering a Ceph storage cluster.
 
 Package: ceph-common
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  librbd1 (= ${binary:Version}),
+ python3-ceph-argparse (= ${binary:Version}),
+ python3-ceph-common (= ${binary:Version}),
  python3-cephfs (= ${binary:Version}),
  python3-prettytable,
  python3-rados (= ${binary:Version}),
@@ -152,23 +144,9 @@ Depends:
 Conflicts:
  ceph-client-tools,
 Breaks:
- ceph (<< 9.2.0-0~),
- ceph-base (<< 12.2.10+dfsg1-1~),
- ceph-fs-common (<< 12.2.10+dfsg1-1~),
  ceph-mds (<< 14.2.5-3~),
- ceph-test (<< 9.2.0-0~),
- librbd1 (<< 0.94.1-1~),
- python-ceph (<< 0.94.1-1~),
- radosgw (<< 12.0.3-0~),
 Replaces:
- ceph (<< 9.2.0-0~),
- ceph-client-tools,
- ceph-fs-common (<< 12.2.8+dfsg1-1~),
  ceph-mds (<< 14.2.5-3~),
- ceph-test (<< 9.2.0-1~),
- librbd1 (<< 0.94.1-1~),
- python-ceph (<< 0.94.1-1~),
- radosgw (<< 12.0.3-0~),
 Suggests:
  ceph,
  ceph-mds,
@@ -198,16 +176,29 @@ Description: FUSE-based client for the C
  (insufficient privileges, older kernel, etc.), then the FUSE client will
  do.
 
+Package: ceph-immutable-object-cache
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
+Depends:
+ librados2 (= ${binary:Version}),
+ ${misc:Depends},
+ ${shlibs:Depends},
+Description: Ceph daemon for immutable object cache
+ Ceph is a massively scalable, open-source, distributed
+ storage system that runs on commodity hardware and delivers object,
+ block and file system storage.  This is a daemon for immutable
+ object cache.
+
 Package: ceph-mds
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
- ceph,
+ ceph-base (= ${binary:Version}),
+ librados2 (= ${binary:Version}),
  ${misc:Depends},
  ${shlibs:Depends},
 Recommends:
- ceph-common,
+ libcephfs2 (= ${binary:Version}),
+Suggests:
  ceph-fuse,
- libcephfs2,
 Breaks:
  ceph (<< 0.67.3-1),
 Replaces:
@@ -220,13 +211,13 @@ Description: metadata server for the cep
  create a distributed file system on top of the ceph storage cluster.
 
 Package: ceph-mgr
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-base (= ${binary:Version}),
+ ceph-mgr-modules-core (= ${binary:Version}),
  python3-bcrypt,
  python3-cherrypy3,
  python3-jwt,
- python3-openssl,
  python3-pecan,
  python3-werkzeug,
  ${misc:Depends},
@@ -237,11 +228,11 @@ Replaces:
 Breaks:
  ceph (<< 0.93-417),
 Suggests:
+ ceph-mgr-cephadm,
  ceph-mgr-dashboard,
  ceph-mgr-diskprediction-cloud,
  ceph-mgr-diskprediction-local,
- ceph-mgr-rook,
- ceph-mgr-ssh,
+ ceph-mgr-k8sevents,
 Description: manager for the ceph distributed file system
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
@@ -250,6 +241,24 @@ Description: manager for the ceph distri
  This package contains the manager daemon, which is used to expose high
  level management and monitoring functionality.
 
+Package: ceph-mgr-cephadm
+Architecture: all
+Depends:
+ ceph-mgr (= ${binary:Version}),
+ cephadm,
+ openssh-client,
+ python3-six,
+ ${misc:Depends},
+ ${python:Depends},
+Description: cephadm orchestrator module for ceph-mgr
+ Ceph is a massively scalable, open-source, distributed
+ storage system that runs on commodity hardware and delivers object,
+ block and file system storage.
+ .
+ This package contains the CEPHADM module for ceph-mgr's orchestration
+ functionality, to allow ceph-mgr to perform orchestration functions
+ over a standard SSH connection.
+
 Package: ceph-mgr-dashboard
 Architecture: all
 Depends:
@@ -261,15 +270,16 @@ Depends:
  python3-openssl,
  python3-routes,
  python3-werkzeug,
+ python3-yaml,
  ${misc:Depends},
  ${python3:Depends},
  ${shlibs:Depends},
-Description: dashboard plugin for ceph-mgr
+Description: dashboard module for ceph-mgr
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package provides a ceph-mgr plugin, providing a web-based
+ This package provides a ceph-mgr module, providing a web-based
  application to monitor and manage many aspects of a Ceph cluster and
  related components.
  .
@@ -283,12 +293,12 @@ Depends:
  ${misc:Depends},
  ${python3:Depends},
  ${shlibs:Depends},
-Description: diskprediction-cloud plugin for ceph-mgr
+Description: diskprediction-cloud module for ceph-mgr
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package contains the diskprediction_cloud plugin for the ceph-mgr
+ This package contains the diskprediction_cloud module for the ceph-mgr
  daemon, which helps predict disk failures.
 
 Package: ceph-mgr-diskprediction-local
@@ -301,12 +311,12 @@ Depends:
  ${misc:Depends},
  ${python3:Depends},
  ${shlibs:Depends},
-Description: diskprediction-local plugin for ceph-mgr
+Description: diskprediction-local module for ceph-mgr
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package contains the diskprediction_local plugin for the ceph-mgr
+ This package contains the diskprediction_local module for the ceph-mgr
  daemon, which helps predict disk failures.
 
 Package: ceph-mgr-k8sevents
@@ -316,50 +326,57 @@ Depends:
  python3-kubernetes,
  ${misc:Depends},
  ${python:Depends},
-Description: kubernetes events plugin for ceph-mgr
+Description: kubernetes events module for ceph-mgr
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package contains the k8sevents plugin, to allow ceph-mgr to send
+ This package contains the k8sevents module, to allow ceph-mgr to send
  ceph related events to the kubernetes events API, and track all events
  that occur within the rook-ceph namespace.
 
-Package: ceph-mgr-rook
+Package: ceph-mgr-modules-core
 Architecture: all
 Depends:
- ceph-mgr (>= ${binary:Version}),
- python3-six,
+ python3-dateutil,
+ python3-openssl,
  ${misc:Depends},
  ${python3:Depends},
  ${shlibs:Depends},
-Description: rook plugin for ceph-mgr
+Suggests:
+ ceph-mgr-rook,
+Replaces:
+ ceph-mgr (<< 15.1.1-0ubuntu1~),
+Breaks:
+ ceph-mgr (<< 15.1.1-0ubuntu1~),
+Description: ceph manager modules which are always enabled
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package contains the rook plugin for ceph-mgr's orchestration
- functionality, to allow ceph-mgr to install and configure ceph using
- Rook.
+ This package contains a set of core ceph-mgr modules which are always
+ enabled.
 
-Package: ceph-mgr-ssh
+Package: ceph-mgr-rook
 Architecture: all
 Depends:
  ceph-mgr (>= ${binary:Version}),
+ python3-jsonpatch,
  python3-six,
  ${misc:Depends},
  ${python3:Depends},
-Description: ssh orchestrator plugin for ceph-mgr
+ ${shlibs:Depends},
+Description: rook module for ceph-mgr
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
  block and file system storage.
  .
- This package contains the SSH plugin for ceph-mgr's orchestration
- functionality, to allow ceph-mgr to perform orchestration functions
- over a standard SSH connection.
+ This package contains the rook module for ceph-mgr's orchestration
+ functionality, to allow ceph-mgr to install and configure ceph using
+ Rook.
 
 Package: ceph-mon
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-base (= ${binary:Version}),
  ${misc:Depends},
@@ -380,7 +397,7 @@ Description: monitor server for the ceph
  membership, configuration, and state.
 
 Package: ceph-osd
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-base (= ${binary:Version}),
  lvm2,
@@ -410,7 +427,7 @@ Description: OSD server for the ceph sto
 
 Package: ceph-resource-agents
 Architecture: all
-Priority: optional
+Priority: extra
 Recommends:
  pacemaker,
 Depends:
@@ -425,6 +442,21 @@ Description: OCF-compliant resource agen
  Ceph with OCF-compliant cluster resource managers,
  such as Pacemaker.
 
+Package: cephadm
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
+Depends:
+ lvm2,
+ ${python3:Depends},
+Recommends:
+ docker.io,
+Description: cephadm utility to bootstrap ceph daemons with systemd and containers
+ Ceph is a massively scalable, open-source, distributed
+ storage system that runs on commodity hardware and delivers object,
+ block and file system storage.
+ .
+ The cephadm utility is used to bootstrap a Ceph cluster and to manage
+ ceph daemons deployed with systemd and containers.
+
 Package: cephfs-shell
 Architecture: all
 Depends:
@@ -440,7 +472,7 @@ Description: interactive shell for the C
  This package contains a CLI for interacting with the CephFS.
 
 Package: libcephfs-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  libcephfs2 (= ${binary:Version}),
@@ -477,7 +509,7 @@ Description: Java library for the Ceph F
  File System.
 
 Package: libcephfs-jni
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Depends:
  libcephfs2 (= ${binary:Version}),
@@ -491,7 +523,7 @@ Description: Java Native Interface libra
  with the Ceph File System.
 
 Package: libcephfs2
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Conflicts:
  libceph,
@@ -513,7 +545,7 @@ Description: Ceph distributed file syste
  file system via a POSIX-like interface.
 
 Package: librados-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  librados2 (= ${binary:Version}),
@@ -535,7 +567,7 @@ Description: RADOS distributed object st
  link against librados2.
 
 Package: librados2
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Conflicts:
  librados,
@@ -555,7 +587,7 @@ Description: RADOS distributed object st
  store using a simple file-like interface.
 
 Package: libradospp-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  librados-dev (= ${binary:Version}),
@@ -571,7 +603,7 @@ Description: RADOS distributed object st
  link against librados.
 
 Package: libradosstriper-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  libradosstriper1 (= ${binary:Version}),
@@ -585,7 +617,7 @@ Description: RADOS striping interface (d
  link against libradosstriper.
 
 Package: libradosstriper1
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Depends:
  librados2 (= ${binary:Version}),
@@ -597,7 +629,7 @@ Description: RADOS striping interface
  an interface very similar to the rados one.
 
 Package: librbd-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  librados-dev,
@@ -617,7 +649,7 @@ Description: RADOS block device client l
  link against librbd1.
 
 Package: librbd1
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Depends:
  librados2 (= ${binary:Version}),
@@ -632,7 +664,7 @@ Description: RADOS block device client l
  shared library allowing applications to manage these block devices.
 
 Package: librgw-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  librados-dev (= ${binary:Version}),
@@ -648,7 +680,7 @@ Description: RADOS client library (devel
  that link against librgw2.
 
 Package: librgw2
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libs
 Depends:
  librados2 (= ${binary:Version}),
@@ -688,10 +720,6 @@ Section: python
 Depends:
  ${misc:Depends},
  ${python3:Depends},
-Breaks:
- ceph-common (<< 14.2.1-0~),
-Replaces:
- ceph-common (<< 14.2.1-0~),
 Description: Python 3 utility libraries for Ceph CLI
  Ceph is a massively scalable, open-source, distributed
  storage system that runs on commodity hardware and delivers object,
@@ -700,8 +728,22 @@ Description: Python 3 utility libraries
  This package contains types and routines for Python 3 used by the
  Ceph CLI as well as the RESTful interface.
 
+Package: python3-ceph-common
+Architecture: all
+Section: python
+Depends:
+ ${misc:Depends},
+ ${python3:Depends},
+Description: Python 3 utility libraries for Ceph
+ Ceph is a massively scalable, open-source, distributed
+ storage system that runs on commodity hardware and delivers object,
+ block and file system storage.
+ .
+ This package contains data structures, classes and functions used by Ceph.
+ It also contains utilities used for the cephadm orchestrator.
+
 Package: python3-cephfs
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: python
 Depends:
  libcephfs2 (= ${binary:Version}),
@@ -719,7 +761,7 @@ Description: Python 3 libraries for the
  CephFS file system client library.
 
 Package: python3-rados
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: python
 Depends:
  librados2 (= ${binary:Version}),
@@ -735,7 +777,7 @@ Description: Python 3 libraries for the
  RADOS object storage.
 
 Package: python3-rbd
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: python
 Depends:
  librbd1 (>= ${binary:Version}),
@@ -751,7 +793,7 @@ Description: Python 3 libraries for the
  RBD block device library.
 
 Package: python3-rgw
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: python
 Depends:
  librgw2 (>= ${binary:Version}),
@@ -768,7 +810,7 @@ Description: Python 3 libraries for the
  RGW library.
 
 Package: rados-objclass-dev
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Section: libdevel
 Depends:
  librados-dev (= ${binary:Version}),
@@ -778,7 +820,7 @@ Description: RADOS object class developm
  This package contains development files needed for building RADOS object class plugins.
 
 Package: radosgw
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-common (= ${binary:Version}),
  librgw2 (= ${binary:Version}),
@@ -795,7 +837,7 @@ Description: REST gateway for RADOS dist
  This package contains the proxy daemon and related tools only.
 
 Package: rbd-fuse
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ${misc:Depends},
  ${shlibs:Depends},
@@ -807,7 +849,7 @@ Description: FUSE-based rbd client for t
  FUSE-based client that allows one to map Ceph rbd images as files.
 
 Package: rbd-mirror
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ceph-common (= ${binary:Version}),
  librados2 (= ${binary:Version}),
@@ -821,7 +863,7 @@ Description: Ceph daemon for mirroring R
  Ceph clusters, streaming changes asynchronously.
 
 Package: rbd-nbd
-Architecture: linux-any
+Architecture: alpha amd64 arm64 armel armhf hppa i386 ia64 m68k mips64el powerpc ppc64 ppc64el riscv64 s390x sh4 sparc64 x32
 Depends:
  ${misc:Depends},
  ${shlibs:Depends},
diff -pruN 14.2.16-2/debian/copyright 15.2.7-0ubuntu4/debian/copyright
--- 14.2.16-2/debian/copyright	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/copyright	2020-12-04 09:23:59.000000000 +0000
@@ -1,7 +1,22 @@
-Format-Specification: http://anonscm.debian.org/viewvc/dep/web/deps/dep5/copyright-format.xml?revision=279&view=markup
-Name: ceph
-Maintainer: Sage Weil <sage@newdream.net>
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: ceph
+Upstream-Contact: Sage Weil <sage@newdream.net>
 Source: http://ceph.com/
+Files-Excluded:
+ debian
+ src/rapidjson/bin/jsonchecker/*
+ */DotZLib.chm
+ src/rocksdb/docs/*
+ qa/workunits/erasure-code/jquery.flot.js
+ */jquery.js
+ src/civetweb/src/third_party/duktape*
+ src/civetweb/examples/_obsolete/websocket_client/ssl/server.key.orig
+ src/civetweb/resources/cert/client.key.orig
+ src/civetweb/resources/cert/server.key.orig
+ src/civetweb/resources/cert/server_bkup.key.orig
+ src/erasure-code/jerasure/jerasure/Examples/makefile.orig
+ src/erasure-code/jerasure/jerasure/include/config.h.in~
+ src/erasure-code/jerasure/jerasure/makefile.orig
 
 Files: *
 Copyright: 2004-2014 Sage Weil <sage@newdream.net>
diff -pruN 14.2.16-2/debian/gbp.conf 15.2.7-0ubuntu4/debian/gbp.conf
--- 14.2.16-2/debian/gbp.conf	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/gbp.conf	2020-12-04 09:23:59.000000000 +0000
@@ -1,3 +1,3 @@
 [DEFAULT]
-debian-branch = debian/unstable
+debian-branch = ubuntu/hirsute
 pristine-tar = True
diff -pruN 14.2.16-2/debian/libcephfs2.symbols 15.2.7-0ubuntu4/debian/libcephfs2.symbols
--- 14.2.16-2/debian/libcephfs2.symbols	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/libcephfs2.symbols	2020-12-04 09:23:59.000000000 +0000
@@ -62,6 +62,7 @@ libcephfs.so.2 libcephfs2 #MINVER#
  ceph_get_pool_name@Base 12.0.3
  ceph_get_pool_replication@Base 12.0.3
  ceph_get_stripe_unit_granularity@Base 12.0.3
+ ceph_getaddrs@Base 15.1.0
  ceph_getcwd@Base 12.0.3
  ceph_getdents@Base 12.0.3
  ceph_getdnames@Base 12.0.3
@@ -69,8 +70,8 @@ libcephfs.so.2 libcephfs2 #MINVER#
  ceph_init@Base 12.0.3
  ceph_is_mounted@Base 12.0.3
  ceph_lazyio@Base 14.2.0
- ceph_lazyio_propagate@Base 14.2.15
- ceph_lazyio_synchronize@Base 14.2.15
+ ceph_lazyio_propagate@Base 15.1.0
+ ceph_lazyio_synchronize@Base 15.1.0
  ceph_lchown@Base 12.0.3
  ceph_lgetxattr@Base 12.0.3
  ceph_link@Base 12.0.3
@@ -107,7 +108,6 @@ libcephfs.so.2 libcephfs2 #MINVER#
  ceph_ll_read_block@Base 12.0.3
  ceph_ll_readlink@Base 12.0.3
  ceph_ll_readv@Base 12.0.3
- ceph_ll_register_callbacks@Base 14.2.15
  ceph_ll_releasedir@Base 12.0.3
  ceph_ll_removexattr@Base 12.0.3
  ceph_ll_rename@Base 12.0.3
diff -pruN 14.2.16-2/debian/libcephfs-dev.install 15.2.7-0ubuntu4/debian/libcephfs-dev.install
--- 14.2.16-2/debian/libcephfs-dev.install	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/libcephfs-dev.install	2020-12-04 09:23:59.000000000 +0000
@@ -1,2 +1,3 @@
-usr/include/cephfs/*.h
+usr/include/cephfs/libcephfs.h
+usr/include/cephfs/ceph_ll_client.h
 usr/lib/*/libcephfs.so
diff -pruN 14.2.16-2/debian/librados2.symbols 15.2.7-0ubuntu4/debian/librados2.symbols
--- 14.2.16-2/debian/librados2.symbols	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/librados2.symbols	2020-12-04 09:23:59.000000000 +0000
@@ -1,4 +1,4 @@
-libceph-common.so.0 librados2 #MINVER#
+libceph-common.so.2 librados2 #MINVER#
  (regex|c++)"^_.*" 12.0.3
  (regex)"^ceph_ver__[0-9a-f]{40}@Base$" 12.0.3
  MDS_GID_NONE@Base 12.0.3
@@ -47,14 +47,7 @@ libceph-common.so.0 librados2 #MINVER#
  ceph_crc32c_sctp@Base 12.0.3
  ceph_crc32c_zeros@Base 12.1.4
  ceph_options@Base 12.1.4
- ceph_os_fgetxattr@Base 12.0.3
- ceph_os_flistxattr@Base 12.0.3
- ceph_os_fremovexattr@Base 12.0.3
- ceph_os_fsetxattr@Base 12.0.3
- ceph_os_getxattr@Base 12.0.3
- ceph_os_listxattr@Base 12.0.3
- ceph_os_removexattr@Base 12.0.3
- ceph_os_setxattr@Base 12.0.3
+ ceph_strerror_r@Base 15.1.0
  ceph_unarmor@Base 12.0.3
  check_for_control_characters@Base 12.0.3
  check_for_control_characters_cstr@Base 12.0.3
@@ -129,24 +122,8 @@ libceph-common.so.0 librados2 #MINVER#
  current_maxid@Base 12.0.3
  decode_utf8@Base 12.0.3
  encode_utf8@Base 12.0.3
- g_assert_condition@Base 14.2.0
- g_assert_file@Base 14.2.0
- g_assert_func@Base 14.2.0
- g_assert_line@Base 14.2.0
- g_assert_msg@Base 14.2.0
- g_assert_thread@Base 14.2.0
- g_assert_thread_name@Base 14.2.0
- g_ceph_context@Base 12.0.3
  g_code_env@Base 12.0.3
- g_eio@Base 14.2.0
- g_eio_devname@Base 14.2.0
- g_eio_error@Base 14.2.0
- g_eio_iotype@Base 14.2.0
- g_eio_length@Base 14.2.0
- g_eio_offset@Base 14.2.0
- g_eio_path@Base 14.2.0
  g_lockdep@Base 12.0.3
- g_process_name@Base 14.2.0
  get_linux_version@Base 12.0.3
  get_process_name@Base 12.0.3
  is_control_character@Base 12.0.3
@@ -187,12 +164,14 @@ librados.so.2 librados2 #MINVER#
  rados_aio_append@Base 0.72.2
  rados_aio_cancel@Base 0.87
  rados_aio_cmpext@Base 12.0.3
+ rados_aio_create_completion2@Base 15.1.0
  rados_aio_create_completion@Base 0.72.2
  rados_aio_exec@Base 12.0.3
  rados_aio_flush@Base 0.72.2
  rados_aio_flush_async@Base 0.72.2
  rados_aio_get_return_value@Base 0.72.2
  rados_aio_get_version@Base 12.0.3
+ rados_getaddrs@Base 15.1.0
  rados_aio_getxattr@Base 12.0.3
  rados_aio_getxattrs@Base 12.0.3
  rados_aio_ioctx_selfmanaged_snap_create@Base 12.0.3
@@ -256,7 +235,6 @@ librados.so.2 librados2 #MINVER#
  rados_get_last_version@Base 0.72.2
  rados_get_min_compatible_client@Base 13.2.0
  rados_get_min_compatible_osd@Base 14.2.0
- rados_getaddrs@Base 14.2.15
  rados_getxattr@Base 0.72.2
  rados_getxattrs@Base 0.72.2
  rados_getxattrs_end@Base 0.72.2
@@ -296,6 +274,7 @@ librados.so.2 librados2 #MINVER#
  rados_lock_exclusive@Base 0.72.2
  rados_lock_shared@Base 0.72.2
  rados_mgr_command@Base 12.0.3
+ rados_mgr_command_target@Base 15.1.0
  rados_mon_command@Base 0.72.2
  rados_mon_command_target@Base 0.72.2
  rados_monitor_log2@Base 12.1.4
@@ -370,6 +349,7 @@ librados.so.2 librados2 #MINVER#
  rados_set_alloc_hint2@Base 12.0.3
  rados_set_alloc_hint@Base 0.79
  rados_set_osdmap_full_try@Base 13.2.0
+ rados_set_pool_full_try@Base 15.1.0
  rados_setxattr@Base 0.72.2
  rados_shutdown@Base 0.72.2
  rados_stat@Base 0.72.2
@@ -382,6 +362,7 @@ librados.so.2 librados2 #MINVER#
  rados_trunc@Base 0.72.2
  rados_unlock@Base 0.72.2
  rados_unset_osdmap_full_try@Base 13.2.0
+ rados_unset_pool_full_try@Base 15.1.0
  rados_unwatch2@Base 0.93
  rados_unwatch@Base 0.72.2
  rados_version@Base 0.72.2
@@ -405,6 +386,7 @@ librados.so.2 librados2 #MINVER#
  rados_write_op_omap_cmp@Base 0.79
  rados_write_op_omap_rm_keys2@Base 13.2.0
  rados_write_op_omap_rm_keys@Base 0.79
+ rados_write_op_omap_rm_range2@Base 15.1.0
  rados_write_op_omap_set2@Base 13.2.0
  rados_write_op_omap_set@Base 0.79
  rados_write_op_operate2@Base 10.1.0
diff -pruN 14.2.16-2/debian/librbd1.symbols 15.2.7-0ubuntu4/debian/librbd1.symbols
--- 14.2.16-2/debian/librbd1.symbols	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/librbd1.symbols	2020-12-04 09:23:59.000000000 +0000
@@ -9,6 +9,7 @@ librbd.so.1 librbd1 #MINVER#
  rbd_aio_get_return_value@Base 0.72.2
  rbd_aio_is_complete@Base 0.72.2
  rbd_aio_mirror_image_demote@Base 12.0.3
+ rbd_aio_mirror_image_get_global_status@Base 15.1.0
  rbd_aio_mirror_image_get_info@Base 12.0.3
  rbd_aio_mirror_image_get_status@Base 12.0.3
  rbd_aio_mirror_image_promote@Base 12.0.3
@@ -23,7 +24,6 @@ librbd.so.1 librbd1 #MINVER#
  rbd_aio_wait_for_complete@Base 0.72.2
  rbd_aio_write2@Base 0.93
  rbd_aio_write@Base 0.72.2
- rbd_aio_write_zeroes@Base 14.2.15
  rbd_aio_writesame@Base 12.0.3
  rbd_aio_writev@Base 12.0.3
  rbd_break_lock@Base 0.72.2
@@ -53,6 +53,8 @@ librbd.so.1 librbd1 #MINVER#
  rbd_diff_iterate2@Base 9.2.0
  rbd_diff_iterate@Base 0.72.2
  rbd_discard@Base 0.72.2
+ rbd_features_from_string@Base 15.1.0
+ rbd_features_to_string@Base 15.1.0
  rbd_flatten@Base 0.72.2
  rbd_flatten_with_progress@Base 12.0.3
  rbd_flush@Base 0.72.2
@@ -137,12 +139,22 @@ librbd.so.1 librbd1 #MINVER#
  rbd_migration_prepare@Base 14.2.0
  rbd_migration_status@Base 14.2.0
  rbd_migration_status_cleanup@Base 14.2.0
+ rbd_mirror_image_create_snapshot@Base 15.1.0
  rbd_mirror_image_demote@Base 10.1.0
  rbd_mirror_image_disable@Base 10.1.0
+ rbd_mirror_image_enable2@Base 15.1.0
  rbd_mirror_image_enable@Base 10.1.0
+ rbd_mirror_image_get_global_status@Base 15.1.0
  rbd_mirror_image_get_info@Base 10.1.0
+ rbd_mirror_image_get_info_cleanup@Base 15.1.0
  rbd_mirror_image_get_instance_id@Base 14.2.0
+ rbd_mirror_image_get_mode@Base 15.1.0
  rbd_mirror_image_get_status@Base 12.0.3
+ rbd_mirror_image_global_status_cleanup@Base 15.1.0
+ rbd_mirror_image_global_status_list@Base 15.1.0
+ rbd_mirror_image_global_status_list_cleanup@Base 15.1.0
+ rbd_mirror_image_info_list@Base 15.1.0
+ rbd_mirror_image_info_list_cleanup@Base 15.1.0
  rbd_mirror_image_instance_id_list@Base 14.2.0
  rbd_mirror_image_instance_id_list_cleanup@Base 14.2.0
  rbd_mirror_image_promote@Base 10.1.0
@@ -162,8 +174,18 @@ librbd.so.1 librbd1 #MINVER#
  rbd_mirror_peer_set_attributes@Base 14.2.0
  rbd_mirror_peer_set_client@Base 10.1.0
  rbd_mirror_peer_set_cluster@Base 10.1.0
+ rbd_mirror_peer_site_add@Base 15.1.0
+ rbd_mirror_peer_site_get_attributes@Base 15.1.0
+ rbd_mirror_peer_site_list@Base 15.1.0
+ rbd_mirror_peer_site_list_cleanup@Base 15.1.0
+ rbd_mirror_peer_site_remove@Base 15.1.0
+ rbd_mirror_peer_site_set_attributes@Base 15.1.0
+ rbd_mirror_peer_site_set_client_name@Base 15.1.0
+ rbd_mirror_peer_site_set_direction@Base 15.1.0
+ rbd_mirror_peer_site_set_name@Base 15.1.0
  rbd_mirror_site_name_get@Base 14.2.5
  rbd_mirror_site_name_set@Base 14.2.5
+ rbd_mirror_uuid_get@Base 15.1.1
  rbd_namespace_create@Base 14.2.0
  rbd_namespace_exists@Base 14.2.0
  rbd_namespace_list@Base 14.2.0
@@ -195,8 +217,12 @@ librbd.so.1 librbd1 #MINVER#
  rbd_resize_with_progress@Base 0.72.2
  rbd_set_image_notification@Base 10.1.0
  rbd_snap_create@Base 0.72.2
+ rbd_snap_exists@Base 15.1.0
  rbd_snap_get_group_namespace@Base 13.2.0
+ rbd_snap_get_id@Base 15.1.0
  rbd_snap_get_limit@Base 12.0.3
+ rbd_snap_get_mirror_namespace@Base 15.1.1
+ rbd_snap_get_name@Base 15.1.0
  rbd_snap_get_namespace_type@Base 13.2.0
  rbd_snap_get_timestamp@Base 12.0.3
  rbd_snap_get_trash_namespace@Base 14.2.0
@@ -204,6 +230,7 @@ librbd.so.1 librbd1 #MINVER#
  rbd_snap_is_protected@Base 0.72.2
  rbd_snap_list@Base 0.72.2
  rbd_snap_list_end@Base 0.72.2
+ rbd_snap_mirror_namespace_cleanup@Base 15.1.1
  rbd_snap_protect@Base 0.72.2
  rbd_snap_remove2@Base 12.0.3
  rbd_snap_remove@Base 0.72.2
@@ -238,5 +265,4 @@ librbd.so.1 librbd1 #MINVER#
  rbd_watchers_list_cleanup@Base 13.2.0
  rbd_write2@Base 0.93
  rbd_write@Base 0.72.2
- rbd_write_zeroes@Base 14.2.15
  rbd_writesame@Base 12.0.3
diff -pruN 14.2.16-2/debian/patches/32bit-avoid-overloading.patch 15.2.7-0ubuntu4/debian/patches/32bit-avoid-overloading.patch
--- 14.2.16-2/debian/patches/32bit-avoid-overloading.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/32bit-avoid-overloading.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-Description: Avoid overloading on 32 bit architectures
- unsigned and size_t are equivalent on 32 bit architectures,
- so only define the size_t based overload of advance on 64
- bit architectures.
- https://wiki.debian.org/ArchitectureSpecificsMemo
-Author: James Page <james.page@ubuntu.com>, Bernd Zeimetz <bzed@debian.org>
-Forwarded: no
-
---- a/src/include/buffer.h
-+++ b/src/include/buffer.h
-@@ -737,7 +737,12 @@ inline namespace v14_2_0 {
- 
-       void advance(int o) = delete;
-       void advance(unsigned o);
-+
-+// unsigned and size_t are equivalent on 32bit architectures.
-+// so casting is only needed when not on 32bit.
-+#if defined(UINTPTR_MAX) && UINTPTR_MAX > 0xffffffff
-       void advance(size_t o) { advance(static_cast<unsigned>(o)); }
-+#endif
-       void seek(unsigned o);
-       char operator*() const;
-       iterator_impl& operator++();
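The dropped 32bit-avoid-overloading.patch above guards the size_t overload because, on ILP32 targets, size_t and unsigned are the same underlying type, so declaring both advance() overloads is a redefinition. A minimal sketch of the idea follows; it is illustrative only (the struct and main() are made up, not Ceph's buffer.h code):

    #include <cstddef>
    #include <cstdint>

    struct iterator_impl {
      void advance(unsigned o) { pos += o; }
    // size_t is wider than unsigned only on 64-bit builds; on ILP32 targets
    // size_t *is* unsigned int, so this second declaration would collide
    // with the one above.
    #if defined(UINTPTR_MAX) && UINTPTR_MAX > 0xffffffff
      void advance(std::size_t o) { advance(static_cast<unsigned>(o)); }
    #endif
      unsigned pos = 0;
    };

    int main() {
      iterator_impl it;
      it.advance(16u);              // always the unsigned overload
      it.advance(std::size_t{32});  // size_t overload on 64-bit, unsigned on 32-bit
      return it.pos == 48 ? 0 : 1;
    }
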
diff -pruN 14.2.16-2/debian/patches/32bit-avoid-size_t.patch 15.2.7-0ubuntu4/debian/patches/32bit-avoid-size_t.patch
--- 14.2.16-2/debian/patches/32bit-avoid-size_t.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/32bit-avoid-size_t.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,112 +0,0 @@
-Description: Avoid use of size_t when necessary
- On 32 bit architectures size_t is not a 64 bit type, which
- causes comparison mismatch failures during compilation.
-Author: James Page <james.page@ubuntu.com>, Bernd Zeimetz <bzed@debian.org>
-Forwarded: no
-
-Index: ceph/src/osd/PrimaryLogPG.cc
-===================================================================
---- ceph.orig/src/osd/PrimaryLogPG.cc
-+++ ceph/src/osd/PrimaryLogPG.cc
-@@ -1611,7 +1611,7 @@ int PrimaryLogPG::do_scrub_ls(MOSDOp *m,
- 
- void PrimaryLogPG::calc_trim_to()
- {
--  size_t target = cct->_conf->osd_min_pg_log_entries;
-+  uint64_t target = cct->_conf->osd_min_pg_log_entries;
-   if (is_degraded() ||
-       state_test(PG_STATE_RECOVERING |
-                  PG_STATE_RECOVERY_WAIT |
-@@ -1627,15 +1627,15 @@ void PrimaryLogPG::calc_trim_to()
-   if (limit != eversion_t() &&
-       limit != pg_trim_to &&
-       pg_log.get_log().approx_size() > target) {
--    size_t num_to_trim = std::min(pg_log.get_log().approx_size() - target,
--                             cct->_conf->osd_pg_log_trim_max);
-+    uint64_t num_to_trim = std::min(pg_log.get_log().approx_size() - target,
-+                                    cct->_conf->osd_pg_log_trim_max);
-     if (num_to_trim < cct->_conf->osd_pg_log_trim_min &&
-         cct->_conf->osd_pg_log_trim_max >= cct->_conf->osd_pg_log_trim_min) {
-       return;
-     }
-     list<pg_log_entry_t>::const_iterator it = pg_log.get_log().log.begin();
-     eversion_t new_trim_to;
--    for (size_t i = 0; i < num_to_trim; ++i) {
-+    for (uint64_t i = 0; i < num_to_trim; ++i) {
-       new_trim_to = it->version;
-       ++it;
-       if (new_trim_to > limit) {
-Index: ceph/src/os/bluestore/BlueFS.h
-===================================================================
---- ceph.orig/src/os/bluestore/BlueFS.h
-+++ ceph/src/os/bluestore/BlueFS.h
-@@ -69,7 +69,7 @@ public:
-    * @params
-    * alloc_size - allocation unit size to check
-    */
--  virtual size_t available_freespace(uint64_t alloc_size) = 0;
-+  virtual uint64_t available_freespace(uint64_t alloc_size) = 0;
- };
- 
- class BlueFSVolumeSelector {
-Index: ceph/src/os/bluestore/BlueStore.cc
-===================================================================
---- ceph.orig/src/os/bluestore/BlueStore.cc
-+++ ceph/src/os/bluestore/BlueStore.cc
-@@ -5911,12 +5911,12 @@ int BlueStore::allocate_bluefs_freespace
-   return 0;
- }
- 
--size_t BlueStore::available_freespace(uint64_t alloc_size) {
--  size_t total = 0;
--  auto iterated_allocation = [&](size_t off, size_t len) {
-+uint64_t BlueStore::available_freespace(uint64_t alloc_size) {
-+  uint64_t total = 0;
-+  auto iterated_allocation = [&](uint64_t off, uint64_t len) {
-     //only count in size that is alloc_size aligned
--    size_t dist_to_alignment;
--    size_t offset_in_block = off & (alloc_size - 1);
-+    uint64_t dist_to_alignment;
-+    uint64_t offset_in_block = off & (alloc_size - 1);
-     if (offset_in_block == 0)
-       dist_to_alignment = 0;
-     else
-Index: ceph/src/os/bluestore/BlueStore.h
-===================================================================
---- ceph.orig/src/os/bluestore/BlueStore.h
-+++ ceph/src/os/bluestore/BlueStore.h
-@@ -3081,7 +3081,7 @@ private:
-     PExtentVector& extents) override {
-     return allocate_bluefs_freespace(min_size, size, &extents);
-   };
--  size_t available_freespace(uint64_t alloc_size) override;
-+  uint64_t available_freespace(uint64_t alloc_size) override;
- 
- public:
-   struct sb_info_t {
-Index: ceph/src/common/config_values.h
-===================================================================
---- ceph.orig/src/common/config_values.h
-+++ ceph/src/common/config_values.h
-@@ -50,7 +50,7 @@ public:
- #define OPTION_OPT_U32(name) uint64_t name;
- #define OPTION_OPT_U64(name) uint64_t name;
- #define OPTION_OPT_UUID(name) uuid_d name;
--#define OPTION_OPT_SIZE(name) size_t name;
-+#define OPTION_OPT_SIZE(name) uint64_t name;
- #define OPTION(name, ty)       \
-   public:                      \
-     OPTION_##ty(name)          
-Index: ceph/src/common/options.cc
-===================================================================
---- ceph.orig/src/common/options.cc
-+++ ceph/src/common/options.cc
-@@ -189,7 +189,7 @@ int Option::parse_value(
-     }
-     *out = uuid;
-   } else if (type == Option::TYPE_SIZE) {
--    Option::size_t sz{strict_iecstrtoll(val.c_str(), error_message)};
-+    Option::size_t sz{static_cast<std::size_t>(strict_iecstrtoll(val.c_str(), error_message))};
-     if (!error_message->empty()) {
-       return -EINVAL;
-     }
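The comparison-mismatch failures the dropped patch describes come largely from std::min/std::max, whose single-template form needs both arguments to have the same type. A rough sketch of the problem and the widening fix, under the assumption of a 32-bit size_t; the variable names are invented and are not the PrimaryLogPG code:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    int main() {
      std::size_t   approx_size = 200;  // container sizes are size_t (32-bit on i386/armhf)
      std::uint64_t target      = 100;  // ceph config options are 64-bit integers
      std::uint64_t trim_max    = 50;

      // Does not compile on 32-bit targets: size_t and uint64_t are different
      // types there, so std::min cannot deduce a single template parameter T.
      //   auto bad = std::min(approx_size, trim_max);

      // Widening the locals to uint64_t (what the patch did) or naming T
      // explicitly both sidestep the mismatch.
      std::uint64_t num_to_trim =
          std::min<std::uint64_t>(approx_size - target, trim_max);
      return num_to_trim == 50 ? 0 : 1;
    }
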
diff -pruN 14.2.16-2/debian/patches/32bit-fixes.patch 15.2.7-0ubuntu4/debian/patches/32bit-fixes.patch
--- 14.2.16-2/debian/patches/32bit-fixes.patch	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/32bit-fixes.patch	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,69 @@
+Description: Misc fixes for 32 bit architecture builds.
+Author: James Page <james.page@ubuntu.com>
+Forwarded: no
+
+--- a/src/mds/PurgeQueue.cc
++++ b/src/mds/PurgeQueue.cc
+@@ -501,7 +501,8 @@ void PurgeQueue::_execute_item(
+ 
+   in_flight[expire_to] = item;
+   logger->set(l_pq_executing, in_flight.size());
+-  files_high_water = std::max(files_high_water, in_flight.size());
++  files_high_water = std::max(files_high_water,
++                              static_cast<uint64_t>(in_flight.size()));
+   logger->set(l_pq_executing_high_water, files_high_water);
+   auto ops = _calculate_ops(item);
+   ops_in_flight += ops;
+@@ -579,7 +580,8 @@ void PurgeQueue::_execute_item(
+     logger->set(l_pq_executing_ops_high_water, ops_high_water);
+     in_flight.erase(expire_to);
+     logger->set(l_pq_executing, in_flight.size());
+-    files_high_water = std::max(files_high_water, in_flight.size());
++    files_high_water = std::max(files_high_water,
++                                static_cast<uint64_t>(in_flight.size()));
+     logger->set(l_pq_executing_high_water, files_high_water);
+     return;
+   }
+@@ -657,7 +659,8 @@ void PurgeQueue::_execute_item_complete(
+ 
+   in_flight.erase(iter);
+   logger->set(l_pq_executing, in_flight.size());
+-  files_high_water = std::max(files_high_water, in_flight.size());
++  files_high_water = std::max(files_high_water,
++                              static_cast<uint64_t>(in_flight.size()));
+   logger->set(l_pq_executing_high_water, files_high_water);
+   dout(10) << "in_flight.size() now " << in_flight.size() << dendl;
+ 
+--- a/src/tools/rbd_mirror/image_replayer/snapshot/Replayer.cc
++++ b/src/tools/rbd_mirror/image_replayer/snapshot/Replayer.cc
+@@ -234,7 +234,8 @@ bool Replayer<I>::get_replay_status(std:
+ 
+   json_spirit::mObject root_obj;
+   root_obj["replay_state"] = replay_state;
+-  root_obj["remote_snapshot_timestamp"] = remote_snap_info->timestamp.sec();
++  root_obj["remote_snapshot_timestamp"] = static_cast<uint64_t>(
++    remote_snap_info->timestamp.sec());
+ 
+   auto matching_remote_snap_id = util::compute_remote_snap_id(
+     m_state_builder->local_image_ctx->image_lock,
+@@ -248,8 +249,8 @@ bool Replayer<I>::get_replay_status(std:
+     // use the timestamp from the matching remote image since
+     // the local snapshot would just be the time the snapshot was
+     // synced and not the consistency point in time.
+-    root_obj["local_snapshot_timestamp"] =
+-      matching_remote_snap_it->second.timestamp.sec();
++    root_obj["local_snapshot_timestamp"] = static_cast<uint64_t>(
++      matching_remote_snap_it->second.timestamp.sec());
+   }
+ 
+   matching_remote_snap_it = m_state_builder->remote_image_ctx->snap_info.find(
+@@ -257,7 +258,8 @@ bool Replayer<I>::get_replay_status(std:
+   if (m_remote_snap_id_end != CEPH_NOSNAP &&
+       matching_remote_snap_it !=
+         m_state_builder->remote_image_ctx->snap_info.end()) {
+-    root_obj["syncing_snapshot_timestamp"] = remote_snap_info->timestamp.sec();
++    root_obj["syncing_snapshot_timestamp"] = static_cast<uint64_t>(
++        remote_snap_info->timestamp.sec());
+     root_obj["syncing_percent"] = static_cast<uint64_t>(
+         100 * m_local_mirror_snap_ns.last_copied_object_number /
+         static_cast<float>(std::max<uint64_t>(1U, m_local_object_count)));
diff -pruN 14.2.16-2/debian/patches/add-option-to-disable-ceph-dencoder.patch 15.2.7-0ubuntu4/debian/patches/add-option-to-disable-ceph-dencoder.patch
--- 14.2.16-2/debian/patches/add-option-to-disable-ceph-dencoder.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/add-option-to-disable-ceph-dencoder.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-Index: ceph/src/tools/CMakeLists.txt
-===================================================================
---- ceph.orig/src/tools/CMakeLists.txt
-+++ ceph/src/tools/CMakeLists.txt
-@@ -126,4 +126,6 @@ if(WITH_RBD)
-   endif()
- endif(WITH_RBD)
- 
-+if(NOT DISABLE_DENCODER)
- add_subdirectory(ceph-dencoder)
-+endif(DISABLE_DENCODER)
diff -pruN 14.2.16-2/debian/patches/allow-binding-on-lo.patch 15.2.7-0ubuntu4/debian/patches/allow-binding-on-lo.patch
--- 14.2.16-2/debian/patches/allow-binding-on-lo.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/allow-binding-on-lo.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-Description: Allow binding on lo
- Commit 5cf0fa8 solves the issue that
- the osd can't restart after setting a virtual local loopback IP. However,
- that commit also prevents a BGP-to-the-host setup over unnumbered IPv6
- link-local addressing, where OSDs are typically bound to the lo interface.
- .
- To solve this, this single char patch simply checks against "lo:" to
- match only virtual interfaces instead of anything that starts with "lo".
-Author: Thomas Goirand <zigo@debian.org>
-Forwarded: https://github.com/ceph/ceph/pull/38925
-Last-Update: 2021-01-15
-
---- ceph-14.2.15.orig/src/common/ipaddr.cc
-+++ ceph-14.2.15/src/common/ipaddr.cc
-@@ -56,7 +56,7 @@ const struct ifaddrs *find_ipv4_in_subne
-     if (addrs->ifa_addr == NULL)
-       continue;
- 
--    if (strcmp(addrs->ifa_name, "lo") == 0)
-+    if (strcmp(addrs->ifa_name, "lo:") == 0)
-       continue;
- 
-     if (numa_node >= 0 && !match_numa_node(addrs->ifa_name, numa_node))
diff -pruN 14.2.16-2/debian/patches/another-cmakelists-fix.patch 15.2.7-0ubuntu4/debian/patches/another-cmakelists-fix.patch
--- 14.2.16-2/debian/patches/another-cmakelists-fix.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/another-cmakelists-fix.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,31 +0,0 @@
-Description: Another cmakelists fix
- This fixes the remaining Boost 1.74 compatibility problems.
-Author: Thomas Goirand <zigo@debian.org>
-Forwarded: no
-Last-Update: 2021-01-08
-
---- ceph-14.2.15.orig/CMakeLists.txt
-+++ ceph-14.2.15/CMakeLists.txt
-@@ -27,6 +27,9 @@ endif()
- if(POLICY CMP0075)
-   cmake_policy(SET CMP0075 NEW)
- endif()
-+if(POLICY CMP0093)
-+  cmake_policy(SET CMP0093 NEW)
-+endif()
- list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
- 
- if(CMAKE_SYSTEM_NAME MATCHES "Linux")
---- ceph-14.2.15.orig/src/rgw/CMakeLists.txt
-+++ ceph-14.2.15/src/rgw/CMakeLists.txt
-@@ -19,6 +19,10 @@ function(gperf_generate input output)
-     )
- endfunction()
- 
-+if(Boost_VERSION VERSION_GREATER_EQUAL 1.74)
-+  add_definitions(-DBOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT)
-+endif()
-+
- set(librgw_common_srcs
-   services/svc_finisher.cc
-   services/svc_notify.cc
diff -pruN 14.2.16-2/debian/patches/bluefs-use-uint64_t-for-len.patch 15.2.7-0ubuntu4/debian/patches/bluefs-use-uint64_t-for-len.patch
--- 14.2.16-2/debian/patches/bluefs-use-uint64_t-for-len.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/bluefs-use-uint64_t-for-len.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,58 +0,0 @@
-From 10a953afc8f803e50c96354470fb114b33e62599 Mon Sep 17 00:00:00 2001
-From: Kefu Chai <kchai@redhat.com>
-Date: Fri, 28 Jun 2019 11:35:54 +0800
-Subject: [PATCH] os/bluestore/BlueFS: use uint64_t for `len`
-
-change the type of parameter `len` of `BlueFS::_read_random()` from
-`size_t` to `uint64_t`.
-
-i think the type of `size_t` comes from
-`rocksdb::RandomAccessFile::Read(uint64_t offset, size_t n,
-rocksdb::Slice* result, char* scratch)`. and when we implement this
-method, we continued using `n`'s type. but, we are using it with
-`std::min()`, for instance, where the template parameter type deduction
-fails if the lhs and rhs parameters' types are different. so probably the
-better solution is to use `uint64_t` directly to avoid the cast and
-specializing the template.
-
-Signed-off-by: Kefu Chai <kchai@redhat.com>
----
- src/os/bluestore/BlueFS.cc | 4 ++--
- src/os/bluestore/BlueFS.h  | 2 +-
- 2 files changed, 3 insertions(+), 3 deletions(-)
-
-Index: ceph/src/os/bluestore/BlueFS.cc
-===================================================================
---- ceph.orig/src/os/bluestore/BlueFS.cc
-+++ ceph/src/os/bluestore/BlueFS.cc
-@@ -1526,7 +1526,7 @@ void BlueFS::_drop_link(FileRef file)
- int BlueFS::_read_random(
-   FileReader *h,         ///< [in] read from here
-   uint64_t off,          ///< [in] offset
--  size_t len,            ///< [in] this many bytes
-+  uint64_t len,          ///< [in] this many bytes
-   char *out)             ///< [out] optional: or copy it here
- {
-   auto* buf = &h->buf;
-@@ -1556,7 +1556,7 @@ int BlueFS::_read_random(
-       s_lock.unlock();
-       uint64_t x_off = 0;
-       auto p = h->file->fnode.seek(off, &x_off);
--      uint64_t l = std::min(p->length - x_off, static_cast<uint64_t>(len));
-+      uint64_t l = std::min(p->length - x_off, len);
-       dout(20) << __func__ << " read random 0x"
- 	       << std::hex << x_off << "~" << l << std::dec
- 	       << " of " << *p << dendl;
-Index: ceph/src/os/bluestore/BlueFS.h
-===================================================================
---- ceph.orig/src/os/bluestore/BlueFS.h
-+++ ceph/src/os/bluestore/BlueFS.h
-@@ -412,7 +412,7 @@ private:
-   int _read_random(
-     FileReader *h,   ///< [in] read from here
-     uint64_t offset, ///< [in] offset
--    size_t len,      ///< [in] this many bytes
-+    uint64_t len,    ///< [in] this many bytes
-     char *out);      ///< [out] optional: or copy it here
- 
-   void _invalidate_cache(FileRef f, uint64_t offset, uint64_t length);
diff -pruN 14.2.16-2/debian/patches/cmake_add_1.74_to_known_versions.patch 15.2.7-0ubuntu4/debian/patches/cmake_add_1.74_to_known_versions.patch
--- 14.2.16-2/debian/patches/cmake_add_1.74_to_known_versions.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/cmake_add_1.74_to_known_versions.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,52 +0,0 @@
-Description: cmake: add 1.74 to known versions
-Author: Kefu Chai <kchai@redhat.com>
-Bug-Debian: https://bugs.debian.org/977243
-Origin: upstream, https://github.com/ceph/ceph/commit/b6a94da6149e50bdd43752919d7c01b04c59f79e.patch
-Last-Update: 2020-12-13
-
---- ceph-14.2.15.orig/cmake/modules/FindBoost.cmake
-+++ ceph-14.2.15/cmake/modules/FindBoost.cmake
-@@ -437,10 +437,23 @@ if (NOT Boost_NO_BOOST_CMAKE)
-     endif()
-   endif()
- 
-+  set(_boost_FIND_PACKAGE_ARGS "")
-+  if(Boost_NO_SYSTEM_PATHS)
-+    list(APPEND _boost_FIND_PACKAGE_ARGS NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH)
-+  endif()
-+
-   # Do the same find_package call but look specifically for the CMake version.
-   # Note that args are passed in the Boost_FIND_xxxxx variables, so there is no
-   # need to delegate them to this find_package call.
--  find_package(Boost QUIET NO_MODULE)
-+  cmake_policy(PUSH)
-+  if(BOOST_ROOT AND NOT Boost_ROOT)
-+    if(POLICY CMP0074)
-+      cmake_policy(SET CMP0074 NEW)
-+    endif()
-+    set(Boost_ROOT "${BOOST_ROOT}")
-+  endif()
-+  find_package(Boost QUIET NO_MODULE ${_boost_FIND_PACKAGE_ARGS})
-+  cmake_policy(POP)
-   mark_as_advanced(Boost_DIR)
- 
-   # If we found a boost cmake package, then we're done. Print out what we found.
-@@ -1157,7 +1170,7 @@ function(_Boost_COMPONENT_DEPENDENCIES c
-     set(_Boost_TIMER_DEPENDENCIES chrono)
-     set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic)
-     set(_Boost_WSERIALIZATION_DEPENDENCIES serialization)
--    if(NOT Boost_VERSION_STRING VERSION_LESS 1.73.0)
-+    if(NOT Boost_VERSION_STRING VERSION_LESS 1.75.0)
-       message(WARNING "New Boost version may have incorrect or missing dependencies and imported targets")
-     endif()
-   endif()
-@@ -1429,7 +1442,8 @@ else()
-   # _Boost_COMPONENT_HEADERS.  See the instructions at the top of
-   # _Boost_COMPONENT_DEPENDENCIES.
-   set(_Boost_KNOWN_VERSIONS ${Boost_ADDITIONAL_VERSIONS}
--    "1.72.0" "1.72" "1.71.0" "1.71" "1.70.0" "1.70" "1.69.0" "1.69"
-+    "1.74.0" "1.74"
-+    "1.73.0" "1.73" "1.72.0" "1.72" "1.71.0" "1.71" "1.70.0" "1.70" "1.69.0" "1.69"
-     "1.68.0" "1.68" "1.67.0" "1.67" "1.66.0" "1.66" "1.65.1" "1.65.0" "1.65"
-     "1.64.0" "1.64" "1.63.0" "1.63" "1.62.0" "1.62" "1.61.0" "1.61" "1.60.0" "1.60"
-     "1.59.0" "1.59" "1.58.0" "1.58" "1.57.0" "1.57" "1.56.0" "1.56" "1.55.0" "1.55"
diff -pruN 14.2.16-2/debian/patches/cmake_define_BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT_for_Boost.Asio_users.patch 15.2.7-0ubuntu4/debian/patches/cmake_define_BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT_for_Boost.Asio_users.patch
--- 14.2.16-2/debian/patches/cmake_define_BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT_for_Boost.Asio_users.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/cmake_define_BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT_for_Boost.Asio_users.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,40 +0,0 @@
-Description: cmake: define BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT for
- Boost.Asio users
- .
- see also
- https://www.boost.org/doc/libs/1_74_0/doc/html/boost_asio/std_executors.html#boost_asio.std_executors.polymorphic_i_o_executor
- .
- we could use `asio::any_io_executor` later on though for better
- performance.
- .
- also, define CMP0093, so FindBoost reports Boost_VERSION in x.y.z
- format. it is simpler to use `VERSION_GREATER_EQUAL` to compare its
- version with 1.74 instead of its C macro version ("107000").
-Signed-off-by: Kefu Chai <kchai@redhat.com>
-Author: Kefu Chai <kchai@redhat.com>
-Origin: upstream, https://github.com/ceph/ceph/commit/3d708219092d0e89a1434c30ffc8a4999f062cc0.patch
-Bug-Debian: https://bugs.debian.org/977243
-Last-Update: 2020-12-13
-
---- ceph-14.2.15.orig/src/librbd/CMakeLists.txt
-+++ ceph-14.2.15/src/librbd/CMakeLists.txt
-@@ -1,3 +1,7 @@
-+if(Boost_VERSION VERSION_GREATER_EQUAL 1.74)
-+  add_definitions(-DBOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT)
-+endif()
-+
- add_library(rbd_types STATIC
-   journal/Types.cc
-   mirroring_watcher/Types.cc
---- ceph-14.2.15.orig/CMakeLists.txt	2020-12-14 09:42:50.543215302 +0100
-+++ ceph-14.2.15/CMakeLists.txt	2020-12-14 09:44:07.827084724 +0100
-@@ -21,6 +21,9 @@
- if(POLICY CMP0051)
-   cmake_policy(SET CMP0051 NEW)
- endif()
-+if(POLICY CMP0074)
-+  cmake_policy(SET CMP0074 NEW)
-+endif()
- if(POLICY CMP0075)
-   cmake_policy(SET CMP0075 NEW)
- endif()
diff -pruN 14.2.16-2/debian/patches/disable-crypto.patch 15.2.7-0ubuntu4/debian/patches/disable-crypto.patch
--- 14.2.16-2/debian/patches/disable-crypto.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/disable-crypto.patch	2020-12-04 09:23:59.000000000 +0000
@@ -1,8 +1,6 @@
-Index: ceph/src/os/CMakeLists.txt
-===================================================================
---- ceph.orig/src/os/CMakeLists.txt
-+++ ceph/src/os/CMakeLists.txt
-@@ -110,8 +110,9 @@ endif()
+--- a/src/os/CMakeLists.txt
++++ b/src/os/CMakeLists.txt
+@@ -113,8 +113,9 @@ endif()
  target_link_libraries(os kv)
  
  add_dependencies(os compressor_plugins)
diff -pruN 14.2.16-2/debian/patches/enable-strsignal.patch 15.2.7-0ubuntu4/debian/patches/enable-strsignal.patch
--- 14.2.16-2/debian/patches/enable-strsignal.patch	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/enable-strsignal.patch	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,16 @@
+Description: This defines HAVE_REENTRANT_STRSIGNAL as sys_siglist no longer
+  exists with glibc 2.32 and all programs should use strsignal instead.
+Forwarded: no
+Last-Update: 2020-09-21
+
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -447,7 +447,7 @@ if(WITH_THREAD_SAFE_RES_QUERY)
+   set(HAVE_THREAD_SAFE_RES_QUERY 1 CACHE INTERNAL "Thread safe res_query supported.")
+ endif()
+ 
+-option(WITH_REENTRANT_STRSIGNAL "strsignal is reentrant" OFF)
++option(WITH_REENTRANT_STRSIGNAL "strsignal is reentrant" ON)
+ if(WITH_REENTRANT_STRSIGNAL)
+   set(HAVE_REENTRANT_STRSIGNAL 1 CACHE INTERNAL "Reentrant strsignal is supported.")
+ endif()
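The new enable-strsignal.patch above turns WITH_REENTRANT_STRSIGNAL on because glibc 2.32 removed the sys_siglist array. A minimal sketch of the replacement call; illustrative only, not Ceph's actual signal handler:

    #include <csignal>
    #include <cstdio>
    #include <string.h>   // strsignal(3) is POSIX, declared in <string.h>

    int main() {
      // Where code previously indexed sys_siglist[sig], strsignal(sig)
      // returns the same human-readable description of the signal.
      std::printf("SIGSEGV -> %s\n", strsignal(SIGSEGV));
      std::printf("SIGTERM -> %s\n", strsignal(SIGTERM));
      return 0;
    }
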
diff -pruN 14.2.16-2/debian/patches/fix-ceph-osd-systemd-target.patch 15.2.7-0ubuntu4/debian/patches/fix-ceph-osd-systemd-target.patch
--- 14.2.16-2/debian/patches/fix-ceph-osd-systemd-target.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/fix-ceph-osd-systemd-target.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-Description: Fix systemd ceph-osd.target
- This helps when rebooting.
-Author: Thomas Goirand <zigo@debian.org>
-Forwarded: no
-Last-Update: 2021-01-28
-
---- ceph-14.2.16.orig/systemd/ceph-osd.target
-+++ ceph-14.2.16/systemd/ceph-osd.target
-@@ -1,7 +1,7 @@
- [Unit]
- Description=ceph target allowing to start/stop all ceph-osd@.service instances at once
- PartOf=ceph.target
--After=ceph-mon.target
-+After=ceph-mon.target systemd-udev-settle.service
- Before=ceph.target
- Wants=ceph.target ceph-mon.target
- 
diff -pruN 14.2.16-2/debian/patches/fix-ftbfs-c++17.patch 15.2.7-0ubuntu4/debian/patches/fix-ftbfs-c++17.patch
--- 14.2.16-2/debian/patches/fix-ftbfs-c++17.patch	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/fix-ftbfs-c++17.patch	2021-01-04 14:47:36.000000000 +0000
@@ -0,0 +1,14 @@
+Description: add missing atomic include, otherwise atomic is undefined.
+ Fixes FTBFS in hirsute.
+Index: ceph-15.2.7/src/tools/rbd/action/Bench.cc
+===================================================================
+--- ceph-15.2.7.orig/src/tools/rbd/action/Bench.cc
++++ ceph-15.2.7/src/tools/rbd/action/Bench.cc
+@@ -9,6 +9,7 @@
+ #include "common/ceph_mutex.h"
+ #include "include/types.h"
+ #include "global/signal_handler.h"
++#include <atomic>
+ #include <iostream>
+ #include <boost/accumulators/accumulators.hpp>
+ #include <boost/accumulators/statistics/stats.hpp>
diff -pruN 14.2.16-2/debian/patches/make-ceph-python-3.9-aware.patch 15.2.7-0ubuntu4/debian/patches/make-ceph-python-3.9-aware.patch
--- 14.2.16-2/debian/patches/make-ceph-python-3.9-aware.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/make-ceph-python-3.9-aware.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,28 +0,0 @@
-Description: Make Ceph Python 3.9 aware
- Add versions of interpreters Ceph didn't know about.
-Author: Thomas Goirand <zigo@debian.org>
-Forwarded: no
-Last-Update: 2020-11-28
-
---- ceph-14.2.15.orig/cmake/modules/FindPython3Interp.cmake
-+++ ceph-14.2.15/cmake/modules/FindPython3Interp.cmake
-@@ -69,7 +69,7 @@
- 
- unset(_Python3_NAMES)
- 
--set(_PYTHON3_VERSIONS 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
-+set(_PYTHON3_VERSIONS 3.9 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
- 
- if(Python3Interp_FIND_VERSION)
-     if(Python3Interp_FIND_VERSION_COUNT GREATER 1)
---- ceph-14.2.15.orig/cmake/modules/FindPython3Libs.cmake
-+++ ceph-14.2.15/cmake/modules/FindPython3Libs.cmake
-@@ -101,7 +101,7 @@ endif()
- # To avoid picking up the system Python.h pre-maturely.
- set(CMAKE_FIND_FRAMEWORK LAST)
- 
--set(_PYTHON3_VERSIONS 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
-+set(_PYTHON3_VERSIONS 3.9 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
- 
- if(Python3Libs_FIND_VERSION)
-     if(Python3Libs_FIND_VERSION_COUNT GREATER 1)
diff -pruN 14.2.16-2/debian/patches/mds-purgequeue-use_uint64_t.patch 15.2.7-0ubuntu4/debian/patches/mds-purgequeue-use_uint64_t.patch
--- 14.2.16-2/debian/patches/mds-purgequeue-use_uint64_t.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/mds-purgequeue-use_uint64_t.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,31 +0,0 @@
-Index: ceph/src/mds/PurgeQueue.cc
-===================================================================
---- ceph.orig/src/mds/PurgeQueue.cc
-+++ ceph/src/mds/PurgeQueue.cc
-@@ -499,7 +499,7 @@ void PurgeQueue::_execute_item(
- 
-   in_flight[expire_to] = item;
-   logger->set(l_pq_executing, in_flight.size());
--  files_high_water = std::max(files_high_water, in_flight.size());
-+  files_high_water = std::max(files_high_water, static_cast<uint64_t>(in_flight.size()));
-   logger->set(l_pq_executing_high_water, files_high_water);
-   auto ops = _calculate_ops(item);
-   ops_in_flight += ops;
-@@ -577,7 +577,7 @@ void PurgeQueue::_execute_item(
-     logger->set(l_pq_executing_ops_high_water, ops_high_water);
-     in_flight.erase(expire_to);
-     logger->set(l_pq_executing, in_flight.size());
--    files_high_water = std::max(files_high_water, in_flight.size());
-+    files_high_water = std::max(files_high_water, static_cast<uint64_t>(in_flight.size()));
-     logger->set(l_pq_executing_high_water, files_high_water);
-     return;
-   }
-@@ -654,7 +654,7 @@ void PurgeQueue::_execute_item_complete(
- 
-   in_flight.erase(iter);
-   logger->set(l_pq_executing, in_flight.size());
--  files_high_water = std::max(files_high_water, in_flight.size());
-+  files_high_water = std::max(files_high_water, static_cast<uint64_t>(in_flight.size()));
-   logger->set(l_pq_executing_high_water, files_high_water);
-   dout(10) << "in_flight.size() now " << in_flight.size() << dendl;
- 
diff -pruN 14.2.16-2/debian/patches/python3.9.patch 15.2.7-0ubuntu4/debian/patches/python3.9.patch
--- 14.2.16-2/debian/patches/python3.9.patch	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/python3.9.patch	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,11 @@
+--- a/cmake/modules/FindPython/Support.cmake
++++ b/cmake/modules/FindPython/Support.cmake
+@@ -17,7 +17,7 @@ if (NOT DEFINED _${_PYTHON_PREFIX}_REQUI
+   message (FATAL_ERROR "FindPython: INTERNAL ERROR")
+ endif()
+ if (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL 3)
+-  set(_${_PYTHON_PREFIX}_VERSIONS 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
++  set(_${_PYTHON_PREFIX}_VERSIONS 3.9 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
+ elseif (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL 2)
+   set(_${_PYTHON_PREFIX}_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0)
+ else()
diff -pruN 14.2.16-2/debian/patches/riscv64-link-pthread.patch 15.2.7-0ubuntu4/debian/patches/riscv64-link-pthread.patch
--- 14.2.16-2/debian/patches/riscv64-link-pthread.patch	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/riscv64-link-pthread.patch	2020-12-04 09:23:59.000000000 +0000
@@ -2,9 +2,9 @@ Description: Link with -pthread instead
 Forwarded: no
 Last-Update: 2020-03-01
 
---- ceph-14.2.7.orig/CMakeLists.txt
-+++ ceph-14.2.7/CMakeLists.txt
-@@ -28,6 +28,7 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_S
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -23,6 +23,7 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_S
  
  if(CMAKE_SYSTEM_NAME MATCHES "Linux")
    set(LINUX ON)
diff -pruN 14.2.16-2/debian/patches/series 15.2.7-0ubuntu4/debian/patches/series
--- 14.2.16-2/debian/patches/series	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/patches/series	2021-01-04 14:45:45.000000000 +0000
@@ -1,22 +1,14 @@
+enable-strsignal.patch
 update-java-source-target-flags.patch
 disable-crypto.patch
-32bit-avoid-overloading.patch
-32bit-avoid-size_t.patch
 # Ubuntu: civetweb max connections
 civetweb-755-1.8-somaxconn-configurable_conf.patch
 civetweb-755-1.8-somaxconn-configurable.patch
 civetweb-755-1.8-somaxconn-configurable_test.patch
-# Upstream: py3
-# Upstream: 32bit
-bluefs-use-uint64_t-for-len.patch
+# Debian misc fixes
 debian-armel-armhf-buildflags.patch
 fix-bash-completion-location
-add-option-to-disable-ceph-dencoder.patch
+32bit-fixes.patch
 riscv64-link-pthread.patch
-mds-purgequeue-use_uint64_t.patch
-make-ceph-python-3.9-aware.patch
-cmake_define_BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT_for_Boost.Asio_users.patch
-cmake_add_1.74_to_known_versions.patch
-another-cmakelists-fix.patch
-allow-binding-on-lo.patch
-fix-ceph-osd-systemd-target.patch
+python3.9.patch
+fix-ftbfs-c++17.patch
diff -pruN 14.2.16-2/debian/python3-ceph-common.install 15.2.7-0ubuntu4/debian/python3-ceph-common.install
--- 14.2.16-2/debian/python3-ceph-common.install	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/python3-ceph-common.install	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,2 @@
+usr/lib/python3*/dist-packages/ceph
+usr/lib/python3*/dist-packages/ceph-*.egg-info
diff -pruN 14.2.16-2/debian/radosgw.install 15.2.7-0ubuntu4/debian/radosgw.install
--- 14.2.16-2/debian/radosgw.install	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/radosgw.install	2020-12-04 09:23:59.000000000 +0000
@@ -3,4 +3,5 @@ usr/bin/radosgw
 usr/bin/radosgw-es
 usr/bin/radosgw-object-expirer
 usr/bin/radosgw-token
+usr/lib/*/libradosgw.so.*
 usr/share/man/man8/radosgw.8
diff -pruN 14.2.16-2/debian/radosgw.postinst 15.2.7-0ubuntu4/debian/radosgw.postinst
--- 14.2.16-2/debian/radosgw.postinst	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/radosgw.postinst	2020-12-04 09:23:59.000000000 +0000
@@ -1,15 +1,52 @@
 #!/bin/sh
+# vim: set noet ts=8:
+# postinst script for radosgw
+#
+# see: dh_installdeb(1)
 
 set -e
 
-if [ "${1}" = "configure" ] ; then
-	[ -f "/etc/default/ceph" ] && . /etc/default/ceph
-	[ -z "$SERVER_USER" ] && SERVER_USER=ceph
-	[ -z "$SERVER_GROUP" ] && SERVER_GROUP=ceph
+# summary of how this script can be called:
+#
+# 	postinst configure <most-recently-configured-version>
+# 	old-postinst abort-upgrade <new-version>
+# 	conflictor's-postinst abort-remove in-favour <package> <new-version>
+# 	postinst abort-remove
+# 	deconfigured's-postinst abort-deconfigure in-favour <failed-install-package> <version> [<removing conflicting-package> <version>]
+#
+# On "configure", ensure that /var/lib/ceph/radosgw is owned by the
+# configured server user and group, unless a dpkg-statoverride is
+# already in place for that path.
+
+# for details, see http://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+[ -f "/etc/default/ceph" ] && . /etc/default/ceph
+[ -z "$SERVER_USER" ] && SERVER_USER=ceph
+[ -z "$SERVER_GROUP" ] && SERVER_GROUP=ceph
+
+case "$1" in
+    configure)
 	if ! dpkg-statoverride --list /var/lib/ceph/radosgw >/dev/null; then
-		chown $SERVER_USER:$SERVER_GROUP /var/lib/ceph/radosgw
+	    chown $SERVER_USER:$SERVER_GROUP /var/lib/ceph/radosgw
 	fi
-fi
+    ;;
+    abort-upgrade|abort-remove|abort-deconfigure)
+	:
+    ;;
+
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+# dh_installdeb will replace this with shell code automatically
+# generated by other debhelper scripts.
+
 #DEBHELPER#
 
 exit 0
diff -pruN 14.2.16-2/debian/radosgw.postrm 15.2.7-0ubuntu4/debian/radosgw.postrm
--- 14.2.16-2/debian/radosgw.postrm	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/radosgw.postrm	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+set -e
+
+if dpkg-maintscript-helper supports mv_conffile 2>/dev/null; then
+    dpkg-maintscript-helper mv_conffile \
+        /etc/init/radosgw.conf /etc/init/radosgw-instance.conf \
+        0.72.1-3~ radosgw -- "$@"
+fi
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/radosgw.preinst 15.2.7-0ubuntu4/debian/radosgw.preinst
--- 14.2.16-2/debian/radosgw.preinst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/radosgw.preinst	2020-12-04 09:23:59.000000000 +0000
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+set -e
+
+if dpkg-maintscript-helper supports mv_conffile 2>/dev/null; then
+    dpkg-maintscript-helper mv_conffile \
+        /etc/init/radosgw.conf /etc/init/radosgw-instance.conf \
+        0.72.1-3~ radosgw -- "$@"
+fi
+
+#DEBHELPER#
+
+exit 0
diff -pruN 14.2.16-2/debian/rules 15.2.7-0ubuntu4/debian/rules
--- 14.2.16-2/debian/rules	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/rules	2021-01-18 12:06:39.000000000 +0000
@@ -12,27 +12,35 @@ ifeq (yes,$(findstring yes,$(shell test
   extraopts += -DWITH_CCACHE=ON
 endif
 
+# Don't switch to clang for Ubuntu - armhf builder is
+# just fine using g++
+ifeq ($(shell dpkg-vendor --is Debian && echo yes), yes)
 # try to save even more memory on some architectures
 # see #849657 for hints.
-# Reduce size of debug symbols to fix FTBFS due to the
-# 2GB/3GB address space limits on 32bit
-ifeq (32,$(DEB_HOST_ARCH_BITS))
+ifneq (,$(findstring $(DEB_HOST_ARCH),armhf mipsel armel sh4 m68k))
+  export CC=clang
+  export CXX=clang++
+  extraopts += -DCMAKE_CXX_COMPILER=/usr/bin/clang++
+  extraopts += -DCMAKE_C_COMPILER=/usr/bin/clang
+else
+    # Reduce size of debug symbols to fix FTBFS due to the
+    # 2GB/3GB address space limits on 32bit
+    ifeq (32,$(DEB_HOST_ARCH_BITS))
         export DEB_CFLAGS_MAINT_APPEND = -g1
         export DEB_CXXFLAGS_MAINT_APPEND = -g1
+    endif
 endif
-
-# we don't have NEON on armel.
-ifeq ($(DEB_HOST_ARCH),armel)
-    extraopts += -DHAVE_ARM_NEON=0
-endif
-
-# disable ceph-dencoder on 32bit except i386 to avoid g++ oom
-ifneq (,$(filter $(DEB_HOST_ARCH), armel armhf hppa m68k mips mipsel powerpc sh4 x32))
-    extraopts += -DDISABLE_DENCODER=1
-endif
+else
+    # Reduce size of debug symbols to fix FTBFS due to the
+    # 2GB/3GB address space limits on 32bit
+    ifeq (32,$(DEB_HOST_ARCH_BITS))
+        export DEB_CFLAGS_MAINT_APPEND = -g1
+        export DEB_CXXFLAGS_MAINT_APPEND = -g1
+    endif
+endif # dpkg-vendor --is Debian
 
 ifeq ($(shell dpkg-vendor --is Ubuntu && echo yes) $(DEB_HOST_ARCH), yes i386)
-   skip_packages = -Nceph -Nceph-base -Nceph-mds -Nceph-mgr -Nceph-mon -Nceph-osd
+   skip_packages = -Nceph -Nceph-base -Nceph-mds -Nceph-mgr -Nceph-mon -Nceph-osd -Nceph-common -Nradosgw -Nrbd-mirror
 endif
 
 # minimise needless linking and link to libatomic
@@ -61,14 +69,16 @@ extraopts += -DWITH_MGR_DASHBOARD_FRONTE
 extraopts += -DWITH_SYSTEMD=ON -DCEPH_SYSTEMD_ENV_DIR=/etc/default
 extraopts += -DCMAKE_INSTALL_SYSCONFDIR=/etc
 extraopts += -DCMAKE_INSTALL_SYSTEMD_SERVICEDIR=/lib/systemd/system
+extraopts += -DWITH_RADOSGW_KAFKA_ENDPOINT=OFF
+extraopts += -DCMAKE_BUILD_TYPE=RelWithDebInfo
 
 ifneq (,$(filter parallel=%,$(DEB_BUILD_OPTIONS)))
   NUMJOBS = $(patsubst parallel=%,%,$(filter parallel=%,$(DEB_BUILD_OPTIONS)))
   extraopts += -DBOOST_J=$(NUMJOBS)
 endif
 
-ifneq (,$(filter $(DEB_HOST_ARCH),s390x mips64el ia64 m68k ppc64 riscv64 sh4 sparc64 x32 alpha))
-  # beast depends on libboost_{context,coroutine} which is not supported on s390x
+ifneq (,$(findstring $(DEB_HOST_ARCH),mips64el ia64 m68k ppc64 riscv64 sh4 sparc64 x32))
+  # beast depends on libboost_{context,coroutine} which is not supported on various archs
   extraopts += -DWITH_BOOST_CONTEXT=OFF
 else
   extraopts += -DWITH_BOOST_CONTEXT=ON
@@ -86,21 +96,29 @@ MAX_PARALLEL ?= $(shell ./debian/calc-ma
 override_dh_auto_configure:
 	env | sort
 	dh_auto_configure --buildsystem=cmake -- $(extraopts)
+# after trying to patch the various places where HAVE_ARM_NEON is used
+# the easiest way to get rid of it on armel seems to be to patch the cmake
+# cache file.
+ifeq ($(DEB_HOST_ARCH),armel)
+	sed 's,^HAVE_ARM_NEON.*,HAVE_ARM_NEON:INTERNAL=0,' -i obj-arm-linux-gnueabi/CMakeCache.txt
+endif
 
 override_dh_auto_install:
 	dh_auto_install --buildsystem=cmake --destdir=$(DESTDIR)
-	if [ ! -f $(DESTDIR)/usr/bin/ceph-dencoder ]; then \
-	    cp debian/workarounds/ceph-dencoder-oom $(DESTDIR)/usr/bin/ceph-dencoder ;\
-	    chmod 755 $(DESTDIR)/usr/bin/ceph-dencoder ;\
-	fi
 	install -D -m 644 udev/50-rbd.rules $(DESTDIR)/lib/udev/rules.d/50-rbd.rules
 	install -D -m 644 src/etc-rbdmap $(DESTDIR)/etc/ceph/rbdmap
 	install -D -m 644 etc/sysctl/90-ceph-osd.conf $(DESTDIR)/etc/sysctl.d/30-ceph-osd.conf
 	install -D -m 600 sudoers.d/ceph-osd-smartctl $(DESTDIR)/etc/sudoers.d/ceph-osd-smartctl
+	install -D -m 600 sudoers.d/cephadm $(DESTDIR)/etc/sudoers.d/cephadm
+
+	install -m 755 src/cephadm/cephadm $(DESTDIR)/usr/sbin/cephadm
+
 	# NOTE: ensure that any versioned erasure coding test code is dropped
 	#       from the package install - package ships unversioned modules.
 	rm -f $(CURDIR)/debian/tmp/usr/lib/*/ceph/erasure-code/libec_*.so.*
 	find $(CURDIR)/debian/tmp/usr/lib/*/ceph/erasure-code -type l -delete || :
+	# running out of disk space on riscv64
+	rm -rf $(CURDIR)/obj-riscv64-linux-gnu
 
 
 # doc/changelog is a directory, which confuses dh_installchangelogs
@@ -153,11 +171,13 @@ override_dh_python3:
 		dh_python3 -p python3-$$binding --shebang=/usr/bin/python3;      \
         done
 	dh_python3 -p python3-ceph-argparse --shebang=/usr/bin/python3
+	dh_python3 -p python3-ceph-common --shebang=/usr/bin/python3
 	dh_python3 -p ceph-common --shebang=/usr/bin/python3
 	dh_python3 -p ceph-base --shebang=/usr/bin/python3
 	dh_python3 -p ceph-osd --shebang=/usr/bin/python3
 	dh_python3 -p ceph-mgr --shebang=/usr/bin/python3
 	dh_python3 -p cephfs-shell --shebang=/usr/bin/python3
+	dh_python3 -p cephadm --shebang=/usr/bin/python3
 
 override_dh_builddeb:
 	dh_builddeb ${skip_packages}
diff -pruN 14.2.16-2/debian/source/lintian-overrides 15.2.7-0ubuntu4/debian/source/lintian-overrides
--- 14.2.16-2/debian/source/lintian-overrides	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/source/lintian-overrides	2020-12-04 09:23:59.000000000 +0000
@@ -7,5 +7,4 @@ ceph source: license-problem-json-evil s
 ceph source: source-is-missing src/pybind/mgr/dashboard/frontend/dist/*.js
 
 # regression test file is actually shipped with source and build files
-ceph source: source-contains-prebuilt-ms-help-file src/boost/tools/boost_install/test/iostreams/zlib-1.2.11/contrib/dotzlib/DotZLib.chm
 ceph source: source-contains-prebuilt-ms-help-file src/boost/libs/beast/test/extern/zlib-1.2.11/contrib/dotzlib/DotZLib.chm
diff -pruN 14.2.16-2/debian/source/options 15.2.7-0ubuntu4/debian/source/options
--- 14.2.16-2/debian/source/options	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/source/options	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googlemock/msvc/20\d\d/gmock\.sln"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googlemock/msvc/20\d\d/gmock.*vcproj"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googlemock/msvc/20\d\d/gmock.*vsprops"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googlemock/msvc/20\d\d/gmock.*vcxproj"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googlemock/msvc/20\d\d/gmock_config.props"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/codegear/gtest.*\.cbproj"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/codegear/gtest_all\.cc"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/codegear/gtest_link\.cc"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/codegear/gtest\.groupproj"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/msvc/gtest.*\.vcproj"
-extend-diff-ignore = ".*src/rapidjson/thirdparty/gtest/googletest/msvc/gtest.*\.sln"
diff -pruN 14.2.16-2/debian/source.lintian-overrides 15.2.7-0ubuntu4/debian/source.lintian-overrides
--- 14.2.16-2/debian/source.lintian-overrides	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/source.lintian-overrides	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-# This is a false positive: upstream is shipping both the compiled
-# and the source version of the .js files.
-ceph source: source-is-missing
diff -pruN 14.2.16-2/debian/workarounds/ceph-dencoder-oom 15.2.7-0ubuntu4/debian/workarounds/ceph-dencoder-oom
--- 14.2.16-2/debian/workarounds/ceph-dencoder-oom	2021-01-28 15:45:23.000000000 +0000
+++ 15.2.7-0ubuntu4/debian/workarounds/ceph-dencoder-oom	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-echo "Unfortunately it is not possible to compile ceph-dencoder" 1>&2
-echo "on this architecture, see bug #947886." 1>&2
-echo "" 1>&2
-exit 1
-
diff -pruN 14.2.16-2/doc/api/index.rst 15.2.7-0ubuntu4/doc/api/index.rst
--- 14.2.16-2/doc/api/index.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/api/index.rst	2020-11-30 19:58:30.000000000 +0000
@@ -11,8 +11,8 @@ See `Ceph Storage Cluster APIs`_.
 .. _Ceph Storage Cluster APIs: ../rados/api/
 
 
-Ceph Filesystem APIs
-====================
+Ceph File System APIs
+=====================
 
 See `libcephfs (javadoc)`_.
 
diff -pruN 14.2.16-2/doc/architecture.rst 15.2.7-0ubuntu4/doc/architecture.rst
--- 14.2.16-2/doc/architecture.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/architecture.rst	2020-11-30 19:58:30.000000000 +0000
@@ -27,9 +27,7 @@ A Ceph Storage Cluster consists of two t
 - :term:`Ceph Monitor`
 - :term:`Ceph OSD Daemon`
 
-.. ditaa::
-
-            +---------------+ +---------------+
+.. ditaa::  +---------------+ +---------------+
             |      OSDs     | |    Monitors   |
             +---------------+ +---------------+
 
@@ -53,14 +51,12 @@ Storing Data
 
 The Ceph Storage Cluster receives data from :term:`Ceph Clients`--whether it
 comes through a :term:`Ceph Block Device`, :term:`Ceph Object Storage`, the
-:term:`Ceph Filesystem` or a custom implementation you create using
+:term:`Ceph File System` or a custom implementation you create using
 ``librados``--and it stores the data as objects. Each object corresponds to a
 file in a filesystem, which is stored on an :term:`Object Storage Device`. Ceph
 OSD Daemons handle the read/write operations on the storage disks.
 
-.. ditaa::
-
-           /-----\       +-----+       +-----+
+.. ditaa:: /-----\       +-----+       +-----+
            | obj |------>| {d} |------>| {s} |
            \-----/       +-----+       +-----+
    
@@ -74,9 +70,7 @@ attributes such as the file owner, creat
 forth.
 
 
-.. ditaa::
-
-           /------+------------------------------+----------------\
+.. ditaa:: /------+------------------------------+----------------\
            | ID   | Binary Data                  | Metadata       |
            +------+------------------------------+----------------+
            | 1234 | 0101010101010100110101010010 | name1 = value1 | 
@@ -235,9 +229,7 @@ the client and the monitor share a secre
 .. note:: The ``client.admin`` user must provide the user ID and 
    secret key to the user in a secure manner. 
 
-.. ditaa::
-
-           +---------+     +---------+
+.. ditaa:: +---------+     +---------+
            | Client  |     | Monitor |
            +---------+     +---------+
                 |  request to   |
@@ -260,9 +252,7 @@ user's secret key and transmits it back
 ticket and uses it to sign requests to OSDs and metadata servers throughout the
 cluster.
 
-.. ditaa::
-
-           +---------+     +---------+
+.. ditaa:: +---------+     +---------+
            | Client  |     | Monitor |
            +---------+     +---------+
                 |  authenticate |
@@ -293,9 +283,7 @@ machine and the Ceph servers. Each messa
 subsequent to the initial authentication, is signed using a ticket that the
 monitors, OSDs and metadata servers can verify with their shared secret.
 
-.. ditaa::
-
-           +---------+     +---------+     +-------+     +-------+
+.. ditaa:: +---------+     +---------+     +-------+     +-------+
            |  Client |     | Monitor |     |  MDS  |     |  OSD  |
            +---------+     +---------+     +-------+     +-------+
                 |  request to   |              |             |
@@ -405,8 +393,7 @@ ability to leverage this computing power
    and tertiary OSDs (as many OSDs as additional replicas), and responds to the
    client once it has confirmed the object was stored successfully.
 
-.. ditaa::
-
+.. ditaa:: 
              +----------+
              |  Client  |
              |          |
@@ -456,8 +443,7 @@ Ceph Clients retrieve a `Cluster Map`_ f
 pools. The pool's ``size`` or number of replicas, the CRUSH rule and the
 number of placement groups determine how Ceph will place the data.
 
-.. ditaa::
-
+.. ditaa:: 
             +--------+  Retrieves  +---------------+
             | Client |------------>|  Cluster Map  |
             +--------+             +---------------+
@@ -502,8 +488,7 @@ rebalance dynamically when new Ceph OSD
 come online. The following diagram depicts how CRUSH maps objects to placement
 groups, and placement groups to OSDs.
 
-.. ditaa::
-
+.. ditaa:: 
            /-----\  /-----\  /-----\  /-----\  /-----\
            | obj |  | obj |  | obj |  | obj |  | obj |
            \-----/  \-----/  \-----/  \-----/  \-----/
@@ -629,8 +614,7 @@ and each OSD gets some added capacity, s
 new OSD after rebalancing is complete.
 
 
-.. ditaa::
-
+.. ditaa:: 
            +--------+     +--------+
    Before  |  OSD 1 |     |  OSD 2 |
            +--------+     +--------+
@@ -701,7 +685,6 @@ name. Chunk 1 contains ``ABC`` and is st
 
 
 .. ditaa::
-
                             +-------------------+
                        name |       NYAN        |
                             +-------------------+
@@ -756,7 +739,6 @@ three chunks are read: **OSD2** was the
 account.
 
 .. ditaa::
-
 	                         +-------------------+
 	                    name |       NYAN        |
 	                         +-------------------+
@@ -812,7 +794,7 @@ sends them to the other OSDs. It is also
 authoritative version of the placement group logs.
 
 In the following diagram, an erasure coded placement group has been created with
-``K = 2 + M = 1`` and is supported by three OSDs, two for ``K`` and one for
+``K = 2, M = 1`` and is supported by three OSDs, two for ``K`` and one for
 ``M``. The acting set of the placement group is made of **OSD 1**, **OSD 2** and
 **OSD 3**. An object has been encoded and stored in the OSDs : the chunk
 ``D1v1`` (i.e. Data chunk number 1, version 1) is on **OSD 1**, ``D2v1`` on
@@ -822,7 +804,6 @@ version 1).
 
 
 .. ditaa::
-
      Primary OSD
     
    +-------------+
@@ -953,7 +934,6 @@ object can be removed: ``D1v1`` on **OSD
 on **OSD 3**.
 
 .. ditaa::
-
      Primary OSD
     
    +-------------+
@@ -992,7 +972,6 @@ to be available on all OSDs in the previ
 will be the head of the new authoritative log.
 
 .. ditaa::
-
    +-------------+
    |    OSD 1    |
    |   (down)    |
@@ -1038,7 +1017,6 @@ the erasure coding library during scrubb
 
 
 .. ditaa::
-
      Primary OSD
     
    +-------------+
@@ -1090,8 +1068,7 @@ tier. So the cache tier and the backing
 to Ceph clients.
 
 
-.. ditaa::
-
+.. ditaa:: 
            +-------------+
            | Ceph Client |
            +------+------+
@@ -1173,8 +1150,7 @@ Cluster. Ceph packages this functionalit
 you can create your own custom Ceph Clients. The following diagram depicts the
 basic architecture.
 
-.. ditaa::
-
+.. ditaa::  
             +---------------------------------+
             |  Ceph Storage Cluster Protocol  |
             |           (librados)            |
@@ -1217,9 +1193,7 @@ notification. This enables a client to u
 synchronization/communication channel.
 
 
-.. ditaa::
-
-           +----------+     +----------+     +----------+     +---------------+
+.. ditaa:: +----------+     +----------+     +----------+     +---------------+
            | Client 1 |     | Client 2 |     | Client 3 |     | OSD:Object ID |
            +----------+     +----------+     +----------+     +---------------+
                  |                |                |                  |
@@ -1275,13 +1249,13 @@ The RAID type most similar to Ceph's str
 volume'. Ceph's striping offers the throughput of RAID 0 striping, the
 reliability of n-way RAID mirroring and faster recovery.
 
-Ceph provides three types of clients: Ceph Block Device, Ceph Filesystem, and
+Ceph provides three types of clients: Ceph Block Device, Ceph File System, and
 Ceph Object Storage. A Ceph Client converts its data from the representation 
 format it provides to its users (a block device image, RESTful objects, CephFS
 filesystem directories) into objects for storage in the Ceph Storage Cluster. 
 
 .. tip:: The objects Ceph stores in the Ceph Storage Cluster are not striped. 
-   Ceph Object Storage, Ceph Block Device, and the Ceph Filesystem stripe their 
+   Ceph Object Storage, Ceph Block Device, and the Ceph File System stripe their 
    data over multiple Ceph Storage Cluster objects. Ceph Clients that write 
    directly to the Ceph Storage Cluster via ``librados`` must perform the
    striping (and parallel I/O) for themselves to obtain these benefits.
@@ -1295,8 +1269,7 @@ take maximum advantage of Ceph's ability
 groups, and consequently doesn't improve performance very much. The following
 diagram depicts the simplest form of striping:
 
-.. ditaa::
-
+.. ditaa::              
                         +---------------+
                         |  Client Data  |
                         |     Format    |
@@ -1354,8 +1327,7 @@ set (``object set 2`` in the following d
 stripe (``stripe unit 16``) in the first object in the new object set (``object
 4`` in the diagram below).
 
-.. ditaa::
-
+.. ditaa::                 
                           +---------------+
                           |  Client Data  |
                           |     Format    |
@@ -1462,7 +1434,7 @@ Ceph Clients include a number of service
   provides RESTful APIs with interfaces that are compatible with Amazon S3
   and OpenStack Swift. 
   
-- **Filesystem**: The :term:`Ceph Filesystem` (CephFS) service provides 
+- **Filesystem**: The :term:`Ceph File System` (CephFS) service provides 
   a POSIX compliant filesystem usable with ``mount`` or as 
   a filesystem in user space (FUSE).
 
@@ -1471,7 +1443,6 @@ and high availability. The following dia
 architecture. 
 
 .. ditaa::
-
             +--------------+  +----------------+  +-------------+
             | Block Device |  | Object Storage |  |   CephFS    |
             +--------------+  +----------------+  +-------------+            
@@ -1542,21 +1513,20 @@ client. Other virtualization technologie
 Device kernel object(s). This is done with the  command-line tool ``rbd``.
 
 
-.. index:: CephFS; Ceph Filesystem; libcephfs; MDS; metadata server; ceph-mds
+.. index:: CephFS; Ceph File System; libcephfs; MDS; metadata server; ceph-mds
 
 .. _arch-cephfs:
 
-Ceph Filesystem
----------------
+Ceph File System
+----------------
 
-The Ceph Filesystem (CephFS) provides a POSIX-compliant filesystem as a
+The Ceph File System (CephFS) provides a POSIX-compliant filesystem as a
 service that is layered on top of the object-based Ceph Storage Cluster.
 CephFS files get mapped to objects that Ceph stores in the Ceph Storage
 Cluster. Ceph Clients mount a CephFS filesystem as a kernel object or as
 a Filesystem in User Space (FUSE).
 
 .. ditaa::
-
             +-----------------------+  +------------------------+
             | CephFS Kernel Object  |  |      CephFS FUSE       |
             +-----------------------+  +------------------------+            
@@ -1574,14 +1544,14 @@ a Filesystem in User Space (FUSE).
             +---------------+ +---------------+ +---------------+
 
 
-The Ceph Filesystem service includes the Ceph Metadata Server (MDS) deployed
+The Ceph File System service includes the Ceph Metadata Server (MDS) deployed
 with the Ceph Storage cluster. The purpose of the MDS is to store all the
 filesystem metadata (directories, file ownership, access modes, etc) in
 high-availability Ceph Metadata Servers where the metadata resides in memory.
 The reason for the MDS (a daemon called ``ceph-mds``) is that simple filesystem
 operations like listing a directory or changing a directory (``ls``, ``cd``)
 would tax the Ceph OSD Daemons unnecessarily. So separating the metadata from
-the data means that the Ceph Filesystem can provide high performance services
+the data means that the Ceph File System can provide high performance services
 without taxing the Ceph Storage Cluster.
 
 CephFS separates the metadata from the data, storing the metadata in the MDS,
diff -pruN 14.2.16-2/doc/cephadm/adoption.rst 15.2.7-0ubuntu4/doc/cephadm/adoption.rst
--- 14.2.16-2/doc/cephadm/adoption.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/adoption.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,151 @@
+.. _cephadm-adoption:
+
+Converting an existing cluster to cephadm
+=========================================
+
+Cephadm allows you to convert an existing Ceph cluster that
+has been deployed with ceph-deploy, ceph-ansible, DeepSea, or similar tools.
+
+Limitations
+-----------
+
+* Cephadm only works with BlueStore OSDs.  If there are FileStore OSDs
+  in your cluster, you cannot manage them with cephadm.
+
+Preparation
+-----------
+
+#. Get the ``cephadm`` command line tool on each host in the existing
+   cluster.  See :ref:`get-cephadm`.
+
+#. Prepare each host for use by ``cephadm``::
+
+     # cephadm prepare-host
+
+#. Determine which Ceph version you will use.  You can use any Octopus (15.2.z)
+   release or later.  For example, ``docker.io/ceph/ceph:v15.2.0``.  The default
+   will be the latest stable release, but if you are upgrading from an earlier
+   release at the same time be sure to refer to the upgrade notes for any
+   special steps to take while upgrading.
+
+   The image is passed to cephadm with::
+
+     # cephadm --image $IMAGE <rest of command goes here>
+
+#. Cephadm can provide a list of all Ceph daemons on the current host::
+
+     # cephadm ls
+
+   Before starting, you should see that all existing daemons have a
+   style of ``legacy`` in the resulting output.  As the adoption
+   process progresses, adopted daemons will appear as style
+   ``cephadm:v1``.
+
+
+Adoption process
+----------------
+
+#. Ensure the ceph configuration is migrated to use the cluster config database.
+   If the ``/etc/ceph/ceph.conf`` is identical on each host, then on one host::
+
+     # ceph config assimilate-conf -i /etc/ceph/ceph.conf
+
+   If there are config variations on each host, you may need to repeat
+   this command on each host.  You can view the cluster's
+   configuration to confirm that it is complete with::
+
+     # ceph config dump
+
+#. Adopt each monitor::
+
+     # cephadm adopt --style legacy --name mon.<hostname>
+
+   Each legacy monitor should stop, quickly restart as a cephadm
+   container, and rejoin the quorum.
+
+#. Adopt each manager::
+
+     # cephadm adopt --style legacy --name mgr.<hostname>
+
+#. Enable cephadm::
+
+     # ceph mgr module enable cephadm
+     # ceph orch set backend cephadm
+
+#. Generate an SSH key::
+
+     # ceph cephadm generate-key
+     # ceph cephadm get-pub-key > ceph.pub
+
+#. Install the cluster SSH key on each host in the cluster::
+
+     # ssh-copy-id -f -i ceph.pub root@<host>
+
+   .. note::
+     It is also possible to import an existing ssh key. See
+     :ref:`ssh errors <cephadm-ssh-errors>` in the troubleshooting
+     document for instructions describing how to import existing
+     ssh keys.
+
+#. Tell cephadm which hosts to manage::
+
+     # ceph orch host add <hostname> [ip-address]
+
+   This will perform a ``cephadm check-host`` on each host before
+   adding it to ensure it is working.  The IP address argument is only
+   required if DNS does not allow you to connect to each host by its
+   short name.
+
+#. Verify that the adopted monitor and manager daemons are visible::
+
+     # ceph orch ps
+
+#. Adopt all OSDs in the cluster::
+
+     # cephadm adopt --style legacy --name <name>
+
+   For example::
+
+     # cephadm adopt --style legacy --name osd.1
+     # cephadm adopt --style legacy --name osd.2
+
+#. Redeploy MDS daemons by telling cephadm how many daemons to run for
+   each file system.  You can list file systems by name with ``ceph fs
+   ls``.  Run the following command on the master nodes::
+
+     # ceph orch apply mds <fs-name> [--placement=<placement>]
+
+   For example, in a cluster with a single file system called `foo`::
+
+     # ceph fs ls
+     name: foo, metadata pool: foo_metadata, data pools: [foo_data ]
+     # ceph orch apply mds foo 2
+
+   Wait for the new MDS daemons to start with::
+
+     # ceph orch ps --daemon-type mds
+
+   Finally, stop and remove the legacy MDS daemons::
+
+     # systemctl stop ceph-mds.target
+     # rm -rf /var/lib/ceph/mds/ceph-*
+
+#. Redeploy RGW daemons.  Cephadm manages RGW daemons by zone.  For each
+   zone, deploy new RGW daemons with cephadm::
+
+     # ceph orch apply rgw <realm> <zone> [--subcluster=<subcluster>] [--port=<port>] [--ssl] [--placement=<placement>]
+
+   where *<placement>* can be a simple daemon count, or a list of
+   specific hosts (see :ref:`orchestrator-cli-placement-spec`).
+
+   Once the daemons have started and you have confirmed they are functioning,
+   stop and remove the old legacy daemons::
+
+     # systemctl stop ceph-rgw.target
+     # rm -rf /var/lib/ceph/radosgw/ceph-*
+
+   For adopting single-site systems without a realm, see also
+   :ref:`rgw-multisite-migrate-from-single-site`.
+
+#. Check the ``ceph health detail`` output for cephadm warnings about
+   stray cluster daemons or hosts that are not yet managed.
diff -pruN 14.2.16-2/doc/cephadm/client-setup.rst 15.2.7-0ubuntu4/doc/cephadm/client-setup.rst
--- 14.2.16-2/doc/cephadm/client-setup.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/client-setup.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,36 @@
+=======================
+Basic Ceph Client Setup
+=======================
+Client machines need some basic configuration in order to interact with
+a cluster. This document describes how to configure a client machine
+for cluster interaction.
+
+.. note:: Most client machines only need the `ceph-common` package and
+          its dependencies installed. That will supply the basic `ceph`
+          and `rados` commands, as well as other commands like
+          `mount.ceph` and `rbd`.
+
+Config File Setup
+=================
+Client machines can generally get away with a smaller config file than
+a full-fledged cluster member. To generate a minimal config file, log
+into a host that is already configured as a client or running a cluster
+daemon, and then run::
+
+    ceph config generate-minimal-conf
+
+This will generate a minimal config file that will tell the client how to
+reach the Ceph Monitors. The contents of this file should typically be
+installed in `/etc/ceph/ceph.conf`.
+
+Keyring Setup
+=============
+Most Ceph clusters are run with authentication enabled, and the client will
+need keys in order to communicate with cluster machines. To generate a
+keyring file with credentials for `client.fs`, log into an extant cluster
+member and run::
+
+    ceph auth get-or-create client.fs
+
+The resulting output should be put into a keyring file, typically
+`/etc/ceph/ceph.keyring`.
diff -pruN 14.2.16-2/doc/cephadm/concepts.rst 15.2.7-0ubuntu4/doc/cephadm/concepts.rst
--- 14.2.16-2/doc/cephadm/concepts.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/concepts.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,120 @@
+Cephadm Concepts
+================
+
+.. _cephadm-fqdn:
+
+Fully qualified domain names vs bare host names
+-----------------------------------------------
+
+cephadm has very minimal requirements when it comes to resolving host
+names, etc. When cephadm initiates an ssh connection to a remote host,
+the host name can be resolved in four different ways:
+
+-  a custom ssh config resolving the name to an IP
+-  via an externally maintained ``/etc/hosts``
+-  via explicitly providing an IP address to cephadm: ``ceph orch host add <hostname> <IP>``
+-  automatic name resolution via DNS.
+
+Ceph itself uses the command ``hostname`` to determine the name of the
+current host.
+
+.. note::
+
+  cephadm demands that the name of the host given via ``ceph orch host add`` 
+  equals the output of ``hostname`` on remote hosts.
+
+Otherwise cephadm can't be sure that the host names returned by
+``ceph * metadata`` match the hosts known to cephadm. This might result
+in a :ref:`cephadm-stray-host` warning.
+
+When configuring new hosts, there are two **valid** ways to set the 
+``hostname`` of a host:
+
+1. Using the bare host name. In this case:
+
+-  ``hostname`` returns the bare host name.
+-  ``hostname -f`` returns the FQDN.
+
+2. Using the fully qualified domain name as the host name. In this case:
+
+-  ``hostname`` returns the FQDN
+-  ``hostname -s`` return the bare host name
+
+Note that ``man hostname`` recommends that ``hostname`` return the bare
+host name:
+
+    The FQDN (Fully Qualified Domain Name) of the system is the
+    name that the resolver(3) returns for the host name, such as,
+    ursula.example.com. It is usually the hostname followed by the DNS
+    domain name (the part after the first dot). You can check the FQDN
+    using ``hostname --fqdn`` or the domain name using ``dnsdomainname``.
+
+    ::
+
+          You cannot change the FQDN with hostname or dnsdomainname.
+
+          The recommended method of setting the FQDN is to make the hostname
+          be an alias for the fully qualified name using /etc/hosts, DNS, or
+          NIS. For example, if the hostname was "ursula", one might have
+          a line in /etc/hosts which reads
+
+                 127.0.1.1    ursula.example.com ursula
+
+In other words, ``man hostname`` recommends that ``hostname`` return the
+bare host name. Consequently, Ceph returns the bare host names when
+executing ``ceph * metadata``, and cephadm therefore also requires the
+bare host name when adding a host to the cluster:
+``ceph orch host add <bare-name>``.
+
+..
+  TODO: This chapter needs to provide a way for users to configure
+  Grafana in the dashboard, as this is right now very hard to do.
+  
+Cephadm Scheduler
+-----------------
+
+Cephadm uses a declarative state to define the layout of the cluster. This
+state consists of a list of service specifications containing placement
+specifications (See :ref:`orchestrator-cli-service-spec` ). 
+
+Cephadm constantly compares the list of daemons actually running in the
+cluster with the desired service specifications and will add or remove
+daemons as needed.
+
+First, cephadm will select a list of candidate hosts. It first looks for
+explicit host names and will select those (see the sketch below). If there
+are no explicit hosts defined, cephadm looks for a label specification. If
+there is no label defined in the specification, cephadm will select hosts
+based on a host pattern. If there is no pattern defined, cephadm will
+finally select all known hosts as candidates.
+
+Then, cephadm will consider the existing daemons of this service and will try
+to avoid moving any of them.
+
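+For example, a placement that names hosts explicitly bypasses label and
+pattern matching entirely. A minimal sketch (the host names here are
+hypothetical):
+
+.. code-block:: yaml
+
+    service_type: mgr
+    placement:
+      hosts:
+        - host1
+        - host2
+
+With such a specification, cephadm only considers ``host1`` and ``host2``
+as candidates, regardless of labels or host patterns.
+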
+Cephadm also supports deploying a specific number of daemons for a service.
+Let's consider a service specification like so:
+
+.. code-block:: yaml
+
+    service_type: mds
+    service_name: myfs
+    placement:
+      count: 3
+      label: myfs
+
+This instructs cephadm to deploy three daemons on hosts labeled with
+``myfs`` across the cluster.
+
+Then, in case there are fewer than three daemons deployed on the candidate
+hosts, cephadm will randomly choose hosts for deploying new daemons.
+
+In case there are more than three daemons deployed, cephadm will remove
+the excess daemons.
+
+Finally, cephadm will remove daemons on hosts that are outside of the list of 
+candidate hosts.
+
+However, there is a special case that cephadm needs to consider.
+
+In case there are fewer hosts selected by the placement specification than
+demanded by ``count``, cephadm will only deploy on the selected hosts.
\ No newline at end of file
diff -pruN 14.2.16-2/doc/cephadm/drivegroups.rst 15.2.7-0ubuntu4/doc/cephadm/drivegroups.rst
--- 14.2.16-2/doc/cephadm/drivegroups.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/drivegroups.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,409 @@
+.. _drivegroups:
+
+=========================
+OSD Service Specification
+=========================
+
+:ref:`orchestrator-cli-service-spec` of type ``osd`` are a way to describe a cluster layout using the properties of disks.
+It gives the user an abstract way to tell Ceph which disks should turn into OSDs
+with which configuration, without knowing the specifics of device names and paths.
+
+Instead of doing this::
+
+  [monitor 1] # ceph orch daemon add osd *<host>*:*<path-to-device>*
+
+for each device and each host, we can define a YAML or JSON file that allows us to describe
+the layout. Here's the most basic example.
+
+Create a file called, for example, osd_spec.yml
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: default_drive_group  <- name of the drive_group (name can be custom)
+    placement:
+      host_pattern: '*'              <- which hosts to target, currently only supports globs
+    data_devices:                    <- the type of devices you are applying specs to
+      all: true                      <- a filter, check below for a full list
+
+This would translate to:
+
+Turn any available device (ceph-volume decides what 'available' is) into an OSD on all hosts that match
+the glob pattern '*'. (The glob pattern matches against the registered hosts from `host ls`.)
+There will be a more detailed section on host_pattern down below.
+
+and pass it to `osd create` like so::
+
+  [monitor 1] # ceph orch apply osd -i /path/to/osd_spec.yml
+
+This will go out on all the matching hosts and deploy these OSDs.
+
+Since we want to have more complex setups, there are more filters than just the 'all' filter.
+
+Also, there is a `--dry-run` flag that can be passed to the `apply osd` command, which gives you a synopsis
+of the proposed layout.
+
+Example::
+
+  [monitor 1] # ceph orch apply osd -i /path/to/osd_spec.yml --dry-run
+
+
+
+Filters
+=======
+
+.. note::
+   Filters are applied using an `AND` gate by default. This essentially means that a drive needs to fulfill all filter
+   criteria in order to get selected.
+   If you wish to change this behavior, you can do so by setting
+
+    `filter_logic: OR`  # valid arguments are `AND`, `OR`
+
+   in the OSD Specification (a sketch of such a spec follows below).
+
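+A minimal sketch of an OSD spec using ``filter_logic`` (the service id and
+filter values here are hypothetical):
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_or_example
+    placement:
+      host_pattern: '*'
+    filter_logic: OR
+    data_devices:
+      rotational: 1
+      model: HDD-123-foo
+
+With ``OR`` logic, a disk is selected if it matches *any* of the
+``data_devices`` filters rather than all of them.
+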
+You can assign disks to certain groups by their attributes using filters.
+
+The attributes are based on ceph-volume's disk query. You can retrieve the information
+with::
+
+  ceph-volume inventory </path/to/disk>
+
+Vendor or Model:
+-------------------
+
+You can target specific disks by their Vendor or by their Model
+
+.. code-block:: yaml
+
+    model: disk_model_name
+
+or
+
+.. code-block:: yaml
+
+    vendor: disk_vendor_name
+
+
+Size:
+--------------
+
+You can also match by disk `Size`.
+
+.. code-block:: yaml
+
+    size: size_spec
+
+Size specs:
+___________
+
+The size specification can take one of the following forms:
+
+* LOW:HIGH
+* :HIGH
+* LOW:
+* EXACT
+
+Concrete examples:
+
+Includes disks of an exact size::
+
+    size: '10G'
+
+Includes disks whose size is within the range::
+
+    size: '10G:40G'
+
+Includes disks less than or equal to 10G in size::
+
+    size: ':10G'
+
+
+Includes disks equal to or greater than 40G in size::
+
+    size: '40G:'
+
+Sizes don't have to be exclusively in Gigabytes (G).
+
+Supported units are Megabytes (M), Gigabytes (G) and Terabytes (T). Appending the (B) for byte is also supported: MB, GB, TB.
+
+
+Rotational:
+-----------
+
+This operates on the 'rotational' attribute of the disk.
+
+.. code-block:: yaml
+
+    rotational: 0 | 1
+
+`1` to match all disks that are rotational
+
+`0` to match all disks that are non-rotational (SSD, NVME etc)
+
+
+All:
+------------
+
+This will take all disks that are 'available'.
+
+Note: This is exclusive to the data_devices section.
+
+.. code-block:: yaml
+
+    all: true
+
+
+Limiter:
+--------
+
+When you have specified valid filters but want to limit the number of matching disks, you can use the 'limit' directive.
+
+.. code-block:: yaml
+
+    limit: 2
+
+For example, if you used `vendor` to match all disks that are from `VendorA` but only want to use the first two,
+you could use `limit`.
+
+.. code-block:: yaml
+
+  data_devices:
+    vendor: VendorA
+    limit: 2
+
+Note: Be aware that `limit` is really just a last resort and shouldn't be used if it can be avoided.
+
+
+Additional Options
+===================
+
+There are multiple optional settings you can use to change the way OSDs are deployed.
+You can add these options to the base level of a DriveGroup for them to take effect.
+
+This example would deploy all OSDs with encryption enabled.
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: example_osd_spec
+    placement:
+      host_pattern: '*'
+    data_devices:
+      all: true
+    encrypted: true
+
+See the full list of options in the DriveGroupSpec below.
+
+.. py:currentmodule:: ceph.deployment.drive_group
+
+.. autoclass:: DriveGroupSpec
+   :members:
+   :exclude-members: from_json
+
+Examples
+========
+
+The simple case
+---------------
+
+All nodes with the same setup::
+
+    20 HDDs
+    Vendor: VendorA
+    Model: HDD-123-foo
+    Size: 4TB
+
+    2 SSDs
+    Vendor: VendorB
+    Model: MC-55-44-ZX
+    Size: 512GB
+
+This is a common setup and can be described quite easily:
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_default
+    placement:
+      host_pattern: '*'
+    data_devices:
+      model: HDD-123-foo <- note that HDD-123 would also be valid
+    db_devices:
+      model: MC-55-44-ZX <- same here, MC-55-44 is valid
+
+However, we can improve it by reducing the filters on core properties of the drives:
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_default
+    placement:
+      host_pattern: '*'
+    data_devices:
+      rotational: 1
+    db_devices:
+      rotational: 0
+
+Now, all rotating devices are declared as 'data devices' and all non-rotating devices will be used as shared_devices (wal, db).
+
+If you know that drives larger than 2TB will always be the slower data devices, you can also filter by size:
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_default
+    placement:
+      host_pattern: '*'
+    data_devices:
+      size: '2TB:'
+    db_devices:
+      size: ':2TB'
+
+Note: All of the above DriveGroups are equally valid. Which of those you want to use depends on taste and on how much you expect your node layout to change.
+
+
+The advanced case
+-----------------
+
+Here we have two distinct setups::
+
+    20 HDDs
+    Vendor: VendorA
+    Model: HDD-123-foo
+    Size: 4TB
+
+    12 SSDs
+    Vendor: VendorB
+    Model: MC-55-44-ZX
+    Size: 512GB
+
+    2 NVMEs
+    Vendor: VendorC
+    Model: NVME-QQQQ-987
+    Size: 256GB
+
+
+* 20 HDDs should share 2 SSDs
+* 10 SSDs should share 2 NVMes
+
+This can be described with two layouts.
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_hdd
+    placement:
+      host_pattern: '*'
+    data_devices:
+      rotational: 1
+    db_devices:
+      model: MC-55-44-ZX
+      limit: 2 (db_slots is actually to be favoured here, but it's not implemented yet)
+      
+    service_type: osd
+    service_id: osd_spec_ssd
+    placement:
+      host_pattern: '*'
+    data_devices:
+      model: MC-55-44-ZX
+    db_devices:
+      vendor: VendorC
+
+This would create the desired layout by using all HDDs as data_devices with two SSDs assigned as dedicated db/wal devices.
+The remaining SSDs (10) will be data_devices that have the 'VendorC' NVMEs assigned as dedicated db/wal devices.
+
+The advanced case (with non-uniform nodes)
+------------------------------------------
+
+The examples above assumed that all nodes have the same drives. That's however not always the case.
+
+Node1-5::
+
+    20 HDDs
+    Vendor: Intel
+    Model: SSD-123-foo
+    Size: 4TB
+    2 SSDs
+    Vendor: VendorA
+    Model: MC-55-44-ZX
+    Size: 512GB
+
+Node6-10::
+
+    5 NVMEs
+    Vendor: Intel
+    Model: SSD-123-foo
+    Size: 4TB
+    20 SSDs
+    Vendor: VendorA
+    Model: MC-55-44-ZX
+    Size: 512GB
+
+You can use the 'host_pattern' key in the layout to target certain nodes. Salt target notation helps to keep things easy.
+
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_node_one_to_five
+    placement:
+      host_pattern: 'node[1-5]'
+    data_devices:
+      rotational: 1
+    db_devices:
+      rotational: 0
+      
+      
+    service_type: osd
+    service_id: osd_spec_six_to_ten
+    placement:
+      host_pattern: 'node[6-10]'
+    data_devices:
+      model: MC-55-44-ZX
+    db_devices:
+      model: SSD-123-foo
+
+This applies different OSD specs to different hosts depending on the `host_pattern` key.
+
+Dedicated wal + db
+------------------
+
+All previous cases co-located the WALs with the DBs.
+It's however possible to deploy the WAL on a dedicated device as well, if it makes sense.
+
+::
+
+    20 HDDs
+    Vendor: VendorA
+    Model: SSD-123-foo
+    Size: 4TB
+
+    2 SSDs
+    Vendor: VendorB
+    Model: MC-55-44-ZX
+    Size: 512GB
+
+    2 NVMEs
+    Vendor: VendorC
+    Model: NVME-QQQQ-987
+    Size: 256GB
+
+
+The OSD spec for this case would look like the following (using the `model` filter):
+
+.. code-block:: yaml
+
+    service_type: osd
+    service_id: osd_spec_default
+    placement:
+      host_pattern: '*'
+    data_devices:
+      model: MC-55-44-ZX
+    db_devices:
+      model: SSD-123-foo
+    wal_devices:
+      model: NVME-QQQQ-987
+
+
+This can easily be done with other filters, like `size` or `vendor` as well.
+
diff -pruN 14.2.16-2/doc/cephadm/index.rst 15.2.7-0ubuntu4/doc/cephadm/index.rst
--- 14.2.16-2/doc/cephadm/index.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/index.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,40 @@
+.. _cephadm:
+
+=======
+Cephadm
+=======
+
+Cephadm deploys and manages a Ceph cluster by connecting to hosts from the
+manager daemon via SSH to add, remove, or update Ceph daemon containers.  It
+does not rely on external configuration or orchestration tools like Ansible,
+Rook, or Salt.
+
+Cephadm manages the full lifecycle of a Ceph cluster.  It starts
+by bootstrapping a tiny Ceph cluster on a single node (one monitor and
+one manager) and then uses the orchestration interface ("day 2"
+commands) to expand the cluster to include all hosts and to provision
+all Ceph daemons and services.  This can be performed via the Ceph
+command-line interface (CLI) or dashboard (GUI).
+
+Cephadm is new in the Octopus v15.2.0 release and does not support older
+versions of Ceph.
+
+.. note::
+
+   Cephadm is new.  Please read about :ref:`cephadm-stability` before
+   using cephadm to deploy a production system.
+
+.. toctree::
+    :maxdepth: 1
+
+    stability
+    install
+    adoption
+    upgrade
+    Cephadm operations <operations>
+    Cephadm monitoring <monitoring>
+    Cephadm CLI <../mgr/orchestrator>
+    Client Setup <client-setup>
+    DriveGroups <drivegroups>
+    troubleshooting
+    concepts
\ No newline at end of file
diff -pruN 14.2.16-2/doc/cephadm/install.rst 15.2.7-0ubuntu4/doc/cephadm/install.rst
--- 14.2.16-2/doc/cephadm/install.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/install.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,430 @@
+============================
+Deploying a new Ceph cluster
+============================
+
+Cephadm creates a new Ceph cluster by "bootstrapping" on a single
+host, expanding the cluster to encompass any additional hosts, and
+then deploying the needed services.
+
+.. highlight:: console
+
+Requirements
+============
+
+- Systemd
+- Podman or Docker for running containers
+- Time synchronization (such as chrony or NTP)
+- LVM2 for provisioning storage devices
+
+Any modern Linux distribution should be sufficient.  Dependencies
+are installed automatically by the bootstrap process below.
+
+.. _get-cephadm:
+
+Install cephadm
+===============
+
+The ``cephadm`` command can (1) bootstrap a new cluster, (2)
+launch a containerized shell with a working Ceph CLI, and (3) aid in
+debugging containerized Ceph daemons.
+
+There are a few ways to install cephadm:
+
+* Use ``curl`` to fetch the most recent version of the
+  standalone script::
+
+    # curl --silent --remote-name --location https://github.com/ceph/ceph/raw/octopus/src/cephadm/cephadm
+    # chmod +x cephadm
+
+  This script can be run directly from the current directory with::
+
+    # ./cephadm <arguments...>
+
+* Although the standalone script is sufficient to get a cluster started, it is
+  convenient to have the ``cephadm`` command installed on the host.  To install
+  these packages for the current Octopus release::
+
+    # ./cephadm add-repo --release octopus
+    # ./cephadm install
+
+  Confirm that ``cephadm`` is now in your PATH with::
+
+    # which cephadm
+
+* Some commercial Linux distributions (e.g., RHEL, SLE) may already
+  include up-to-date Ceph packages.  In that case, you can install
+  cephadm directly.  For example::
+
+    # dnf install -y cephadm     # or
+    # zypper install -y cephadm
+
+
+
+Bootstrap a new cluster
+=======================
+
+You need to know which *IP address* to use for the cluster's first
+monitor daemon.  This is normally just the IP for the first host.  If there
+are multiple networks and interfaces, be sure to choose one that will
+be accessible by any host accessing the Ceph cluster.
+
+To bootstrap the cluster::
+
+  # mkdir -p /etc/ceph
+  # cephadm bootstrap --mon-ip *<mon-ip>*
+
+This command will:
+
+* Create a monitor and manager daemon for the new cluster on the local
+  host.
+* Generate a new SSH key for the Ceph cluster and add it to the root
+  user's ``/root/.ssh/authorized_keys`` file.
+* Write a minimal configuration file needed to communicate with the
+  new cluster to ``/etc/ceph/ceph.conf``.
+* Write a copy of the ``client.admin`` administrative (privileged!)
+  secret key to ``/etc/ceph/ceph.client.admin.keyring``.
+* Write a copy of the public key to
+  ``/etc/ceph/ceph.pub``.
+
+The default bootstrap behavior will work for the vast majority of
+users.  See below for a few options that may be useful for some users,
+or run ``cephadm bootstrap -h`` to see all available options:
+
+* Bootstrap writes the files needed to access the new cluster to
+  ``/etc/ceph`` for convenience, so that any Ceph packages installed
+  on the host itself (e.g., to access the command line interface) can
+  easily find them.
+
+  Daemon containers deployed with cephadm, however, do not need
+  ``/etc/ceph`` at all.  Use the ``--output-dir *<directory>*`` option
+  to put them in a different directory (like ``.``), avoiding any
+  potential conflicts with existing Ceph configuration (cephadm or
+  otherwise) on the same host.
+
+* You can pass any initial Ceph configuration options to the new
+  cluster by putting them in a standard ini-style configuration file
+  and using the ``--config *<config-file>*`` option.
+
+* You can choose the ssh user cephadm will use to connect to hosts by
+  using the ``--ssh-user *<user>*`` option. The ssh key will be added
+  to ``/home/*<user>*/.ssh/authorized_keys``. This user will require
+  passwordless sudo access.
+
+* If you are using a container image from an authenticated registry that
+  requires login, you may add the three arguments ``--registry-url <url of registry>``,
+  ``--registry-username <username of account on registry>``,
+  ``--registry-password <password of account on registry>`` OR
+  ``--registry-json <json file with login info>``. Cephadm will attempt
+  to login to this registry so it may pull your container and then store
+  the login info in its config database so other hosts added to the cluster
+  may also make use of the authenticated registry.
+
+Enable Ceph CLI
+===============
+
+Cephadm does not require any Ceph packages to be installed on the
+host.  However, we recommend enabling easy access to the ``ceph``
+command.  There are several ways to do this:
+
+* The ``cephadm shell`` command launches a bash shell in a container
+  with all of the Ceph packages installed. By default, if
+  configuration and keyring files are found in ``/etc/ceph`` on the
+  host, they are passed into the container environment so that the
+  shell is fully functional. Note that when executed on a MON host,
+  ``cephadm shell`` will infer the ``config`` from the MON container
+  instead of using the default configuration. If ``--mount <path>``
+  is given, then the host ``<path>`` (file or directory) will appear
+  under ``/mnt`` inside the container::
+
+    # cephadm shell
+
+* To execute ``ceph`` commands, you can also run commands like so::
+
+    # cephadm shell -- ceph -s
+
+* You can install the ``ceph-common`` package, which contains all of the
+  ceph commands, including ``ceph``, ``rbd``, ``mount.ceph`` (for mounting
+  CephFS file systems), etc.::
+
+    # cephadm add-repo --release octopus
+    # cephadm install ceph-common
+
+Confirm that the ``ceph`` command is accessible with::
+
+  # ceph -v
+
+Confirm that the ``ceph`` command can connect to the cluster and report
+its status with::
+
+  # ceph status
+
+
+Add hosts to the cluster
+========================
+
+To add each new host to the cluster, perform two steps:
+
+#. Install the cluster's public SSH key in the new host's root user's
+   ``authorized_keys`` file::
+
+     # ssh-copy-id -f -i /etc/ceph/ceph.pub root@*<new-host>*
+
+   For example::
+
+     # ssh-copy-id -f -i /etc/ceph/ceph.pub root@host2
+     # ssh-copy-id -f -i /etc/ceph/ceph.pub root@host3
+
+#. Tell Ceph that the new node is part of the cluster::
+
+     # ceph orch host add *newhost*
+
+   For example::
+
+     # ceph orch host add host2
+     # ceph orch host add host3
+
+
+.. _deploy_additional_monitors:
+
+Deploy additional monitors (optional)
+=====================================
+
+A typical Ceph cluster has three or five monitor daemons spread
+across different hosts.  We recommend deploying five
+monitors if there are five or more nodes in your cluster.
+
+.. _CIDR: https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation
+
+When Ceph knows what IP subnet the monitors should use, it can automatically
+deploy and scale monitors as the cluster grows (or contracts).  By default,
+Ceph assumes that other monitors should use the same subnet as the first
+monitor's IP.
+
+If your Ceph monitors (or the entire cluster) live on a single subnet,
+then by default cephadm automatically adds up to 5 monitors as you add new
+hosts to the cluster. No further steps are necessary.
+
+* If there is a specific IP subnet that should be used by monitors, you
+  can configure that in `CIDR`_ format (e.g., ``10.1.2.0/24``) with::
+
+    # ceph config set mon public_network *<mon-cidr-network>*
+
+  For example::
+
+    # ceph config set mon public_network 10.1.2.0/24
+
+  Cephadm deploys new monitor daemons only on hosts that have an IP
+  address in the configured subnet.
+
+* If you want to adjust the default of 5 monitors::
+
+    # ceph orch apply mon *<number-of-monitors>*
+
+* To deploy monitors on a specific set of hosts::
+
+    # ceph orch apply mon *<host1,host2,host3,...>*
+
+  Be sure to include the first (bootstrap) host in this list.
+
+* You can control which hosts the monitors run on by making use of
+  host labels.  To set the ``mon`` label to the appropriate
+  hosts::
+
+    # ceph orch host label add *<hostname>* mon
+
+  To view the current hosts and labels::
+
+    # ceph orch host ls
+
+  For example::
+
+    # ceph orch host label add host1 mon
+    # ceph orch host label add host2 mon
+    # ceph orch host label add host3 mon
+    # ceph orch host ls
+    HOST   ADDR   LABELS  STATUS
+    host1         mon
+    host2         mon
+    host3         mon
+    host4
+    host5
+
+  Tell cephadm to deploy monitors based on the label::
+
+    # ceph orch apply mon label:mon
+
+* You can explicitly specify the IP address or CIDR network for each monitor
+  and control where it is placed.  To disable automated monitor deployment::
+
+    # ceph orch apply mon --unmanaged
+
+  To deploy each additional monitor::
+
+    # ceph orch daemon add mon *<host1:ip-or-network1> [<host1:ip-or-network-2>...]*
+
+  For example, to deploy a second monitor on ``newhost1`` using an IP
+  address ``10.1.2.123`` and a third monitor on ``newhost2`` in
+  network ``10.1.2.0/24``::
+
+    # ceph orch apply mon --unmanaged
+    # ceph orch daemon add mon newhost1:10.1.2.123
+    # ceph orch daemon add mon newhost2:10.1.2.0/24
+
+  .. note::
+     The **apply** command can be confusing. For this reason, we recommend using
+     YAML specifications. 
+
+     Each 'ceph orch apply mon' command supersedes the one before it. 
+     This means that you must use the proper comma-separated list-based 
+     syntax when you want to apply monitors to more than one host. 
+     If you do not use the proper syntax, you will clobber your work 
+     as you go.
+
+     For example::
+        
+          # ceph orch apply mon host1
+          # ceph orch apply mon host2
+          # ceph orch apply mon host3
+
+     This results in only one host having a monitor applied to it: host3.
+
+     (The first command creates a monitor on host1. Then the second command
+     clobbers the monitor on host1 and creates a monitor on host2. Then the
+     third command clobbers the monitor on host2 and creates a monitor on 
+     host3. In this scenario, at this point, there is a monitor ONLY on
+     host3.)
+
+     To make certain that a monitor is applied to each of these three hosts,
+     run a command like this::
+       
+          # ceph orch apply mon "host1,host2,host3"
+
+     Instead of using the "ceph orch apply mon" commands, run a command like
+     this::
+
+          # ceph orch apply -i file.yaml
+
+     Here is a sample **file.yaml** file::
+
+          service_type: mon
+          placement:
+            hosts:
+             - host1
+             - host2
+             - host3
+
+
+Deploy OSDs
+===========
+
+An inventory of storage devices on all cluster hosts can be displayed with::
+
+  # ceph orch device ls
+
+A storage device is considered *available* if all of the following
+conditions are met:
+
+* The device must have no partitions.
+* The device must not have any LVM state.
+* The device must not be mounted.
+* The device must not contain a file system.
+* The device must not contain a Ceph BlueStore OSD.
+* The device must be larger than 5 GB.
+
+Ceph refuses to provision an OSD on a device that is not available.
+
+There are a few ways to create new OSDs:
+
+* Tell Ceph to consume any available and unused storage device::
+
+    # ceph orch apply osd --all-available-devices
+
+* Create an OSD from a specific device on a specific host::
+
+    # ceph orch daemon add osd *<host>*:*<device-path>*
+
+  For example::
+
+    # ceph orch daemon add osd host1:/dev/sdb
+
+* Use :ref:`drivegroups` to describe device(s) to consume
+  based on their properties, such as device type (SSD or HDD), device
+  model names, size, or the hosts on which the devices exist::
+
+    # ceph orch apply osd -i spec.yml
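+
+  A ``spec.yml`` for this purpose might look something like the following
+  sketch (the service id and device filters are illustrative only; adapt
+  them to your hardware)::
+
+    service_type: osd
+    service_id: example_drive_group
+    placement:
+      host_pattern: '*'
+    data_devices:
+      rotational: 1
+    db_devices:
+      rotational: 0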
+
+
+Deploy MDSs
+===========
+
+One or more MDS daemons are required to use the CephFS file system.
+These are created automatically if the newer ``ceph fs volume``
+interface is used to create a new file system.  For more information,
+see :ref:`fs-volumes-and-subvolumes`.
+
+To deploy metadata servers::
+
+  # ceph orch apply mds *<fs-name>* --placement="*<num-daemons>* [*<host1>* ...]"
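+
+For example, to place three MDS daemons for a file system named ``cephfs``
+on three hypothetical hosts::
+
+  # ceph orch apply mds cephfs --placement="3 host1 host2 host3"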
+
+See :ref:`orchestrator-cli-placement-spec` for details of the placement specification.
+
+Deploy RGWs
+===========
+
+Cephadm deploys radosgw as a collection of daemons that manage a
+particular *realm* and *zone*.  (For more information about realms and
+zones, see :ref:`multisite`.)
+
+Note that with cephadm, radosgw daemons are configured via the monitor
+configuration database instead of via a `ceph.conf` or the command line.  If
+that configuration isn't already in place (usually in the
+``client.rgw.<realmname>.<zonename>`` section), then the radosgw
+daemons will start up with default settings (e.g., binding to port
+80).
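+
+For example, you might change the port that the daemons for the *myorg*
+realm and *us-east-1* zone bind to with something like the following
+(a sketch only; adjust the section name and frontend options to your
+deployment)::
+
+  # ceph config set client.rgw.myorg.us-east-1 rgw_frontends "beast port=8080"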
+
+To deploy a set of radosgw daemons for a particular realm and zone::
+
+  # ceph orch apply rgw *<realm-name>* *<zone-name>* --placement="*<num-daemons>* [*<host1>* ...]"
+
+For example, to deploy 2 rgw daemons serving the *myorg* realm and the *us-east-1*
+zone on *myhost1* and *myhost2*::
+
+  # ceph orch apply rgw myorg us-east-1 --placement="2 myhost1 myhost2"
+
+Cephadm will wait for a healthy cluster and will automatically create the supplied realm and zone if they do not exist before deploying the rgw daemon(s).
+
+Alternatively, the realm, zonegroup, and zone can be manually created using ``radosgw-admin`` commands::
+
+  # radosgw-admin realm create --rgw-realm=<realm-name> --default
+
+  # radosgw-admin zonegroup create --rgw-zonegroup=<zonegroup-name>  --master --default
+
+  # radosgw-admin zone create --rgw-zonegroup=<zonegroup-name> --rgw-zone=<zone-name> --master --default
+
+  # radosgw-admin period update --rgw-realm=<realm-name> --commit
+
+See :ref:`orchestrator-cli-placement-spec` for details of the placement specification.
+
+Deploying NFS ganesha
+=====================
+
+Cephadm deploys NFS Ganesha using a pre-defined RADOS *pool*
+and an optional *namespace*.
+
+To deploy an NFS Ganesha gateway::
+
+  # ceph orch apply nfs *<svc_id>* *<pool>* *<namespace>* --placement="*<num-daemons>* [*<host1>* ...]"
+
+For example, to deploy NFS with a service id of *foo* that will use the
+RADOS pool *nfs-ganesha* and namespace *nfs-ns*::
+
+  # ceph orch apply nfs foo nfs-ganesha nfs-ns
+
+.. note::
+   Create the *nfs-ganesha* pool first if it doesn't exist.
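+
+   For example, the pool could be created with something like the following
+   (a sketch; choose a placement-group count suitable for your cluster)::
+
+     # ceph osd pool create nfs-ganesha 64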
+
+See :ref:`orchestrator-cli-placement-spec` for details of the placement specification.
+
+Deploying custom containers
+===========================
+
+It is also possible to choose different containers than the default containers to deploy Ceph. See :ref:`containers` for information about your options in this regard.
diff -pruN 14.2.16-2/doc/cephadm/monitoring.rst 15.2.7-0ubuntu4/doc/cephadm/monitoring.rst
--- 14.2.16-2/doc/cephadm/monitoring.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/monitoring.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,154 @@
+Monitoring Stack with Cephadm
+=============================
+
+Ceph Dashboard uses `Prometheus <https://prometheus.io/>`_, `Grafana
+<https://grafana.com/>`_, and related tools to store and visualize detailed
+metrics on cluster utilization and performance.  Ceph users have three options:
+
+#. Have cephadm deploy and configure these services.  This is the default
+   when bootstrapping a new cluster unless the ``--skip-monitoring-stack``
+   option is used.
+#. Deploy and configure these services manually.  This is recommended for users
+   with existing prometheus services in their environment (and in cases where
+   Ceph is running in Kubernetes with Rook).
+#. Skip the monitoring stack completely.  Some Ceph dashboard graphs will
+   not be available.
+
+The monitoring stack consists of `Prometheus <https://prometheus.io/>`_,
+Prometheus exporters (:ref:`mgr-prometheus`, `Node exporter
+<https://prometheus.io/docs/guides/node-exporter/>`_), `Prometheus Alert
+Manager <https://prometheus.io/docs/alerting/alertmanager/>`_ and `Grafana
+<https://grafana.com/>`_.
+
+.. note::
+
+  Prometheus' security model presumes that untrusted users have access to the
+  Prometheus HTTP endpoint and logs. Untrusted users have access to all the
+  (meta)data Prometheus collects that is contained in the database, plus a
+  variety of operational and debugging information.
+
+  However, Prometheus' HTTP API is limited to read-only operations.
+  Configurations can *not* be changed using the API and secrets are not
+  exposed. Moreover, Prometheus has some built-in measures to mitigate the
+  impact of denial of service attacks.
+
+  Please see `Prometheus' Security model
+  <https://prometheus.io/docs/operating/security/>`_ for more detailed
+  information.
+
+By default, bootstrap will deploy a basic monitoring stack.  If you
+did not do this (by passing ``--skip-monitoring-stack``), or if you
+converted an existing cluster to cephadm management, you can set up
+monitoring by following the steps below.
+
+#. Enable the prometheus module in the ceph-mgr daemon.  This exposes the internal Ceph metrics so that prometheus can scrape them::
+
+     ceph mgr module enable prometheus
+
+#. Deploy a node-exporter service on every node of the cluster.  The node-exporter provides host-level metrics like CPU and memory utilization::
+
+     ceph orch apply node-exporter '*'
+
+#. Deploy alertmanager::
+
+     ceph orch apply alertmanager 1
+
+#. Deploy prometheus.  A single prometheus instance is sufficient, but
+   for HA you may want to deploy two::
+
+     ceph orch apply prometheus 1    # or 2
+
+#. Deploy grafana::
+
+     ceph orch apply grafana 1
+
+Cephadm handles the prometheus, grafana, and alertmanager
+configurations automatically.
+
+It may take a minute or two for services to be deployed.  Once
+completed, you should see something like this from ``ceph orch ls``::
+
+  $ ceph orch ls
+  NAME           RUNNING  REFRESHED  IMAGE NAME                                      IMAGE ID        SPEC
+  alertmanager       1/1  6s ago     docker.io/prom/alertmanager:latest              0881eb8f169f  present
+  crash              2/2  6s ago     docker.io/ceph/daemon-base:latest-master-devel  mix           present
+  grafana            1/1  0s ago     docker.io/pcuzner/ceph-grafana-el8:latest       f77afcf0bcf6   absent
+  node-exporter      2/2  6s ago     docker.io/prom/node-exporter:latest             e5a616e4b9cf  present
+  prometheus         1/1  6s ago     docker.io/prom/prometheus:latest                e935122ab143  present
+
+Using custom images
+~~~~~~~~~~~~~~~~~~~
+
+It is possible to install or upgrade monitoring components based on other
+images.  To do so, the name of the image to be used needs to be stored in the
+configuration first.  The following configuration options are available.
+
+- ``container_image_prometheus``
+- ``container_image_grafana``
+- ``container_image_alertmanager``
+- ``container_image_node_exporter``
+
+Custom images can be set with the ``ceph config`` command::
+
+     ceph config set mgr mgr/cephadm/<option_name> <value>
+
+For example::
+
+     ceph config set mgr mgr/cephadm/container_image_prometheus prom/prometheus:v1.4.1
+
+.. note::
+
+     By setting a custom image, the default value will be overridden (but not
+     overwritten).  The default value changes when updates become available.
+     If you set a custom image, you will not be able to automatically update
+     the component for which you set it.  You will need to manually update
+     the configuration (image name and tag) to be able to install updates.
+
+     If you later choose to go back to the default, you can reset the custom
+     image you have set.  After that, the default value will be used again.
+     Use ``ceph config rm`` to reset the configuration option::
+
+          ceph config rm mgr mgr/cephadm/<option_name>
+
+     For example::
+
+          ceph config rm mgr mgr/cephadm/container_image_prometheus
+
+Disabling monitoring
+--------------------
+
+If you have deployed monitoring and would like to remove it, you can do
+so with::
+
+  ceph orch rm grafana
+  ceph orch rm prometheus --force   # this will delete metrics data collected so far
+  ceph orch rm node-exporter
+  ceph orch rm alertmanager
+  ceph mgr module disable prometheus
+
+
+Deploying monitoring manually
+-----------------------------
+
+If you have an existing prometheus monitoring infrastructure, or would like
+to manage it yourself, you need to configure it to integrate with your Ceph
+cluster.
+
+* Enable the prometheus module in the ceph-mgr daemon::
+
+     ceph mgr module enable prometheus
+
+  By default, ceph-mgr presents prometheus metrics on port 9283 on each host
+  running a ceph-mgr daemon.  Configure prometheus to scrape these (a sample
+  scrape configuration is sketched after this list).
+
+* To enable the dashboard's prometheus-based alerting, see :ref:`dashboard-alerting`.
+
+* To enable dashboard integration with Grafana, see :ref:`dashboard-grafana`.
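+
+A minimal prometheus scrape configuration for these endpoints might look
+something like the following sketch (the host names are placeholders for
+the hosts running ceph-mgr daemons)::
+
+  scrape_configs:
+    - job_name: 'ceph-mgr'
+      honor_labels: true
+      static_configs:
+        - targets: ['mgr-host1:9283', 'mgr-host2:9283']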
+
+Enabling RBD-Image monitoring
+---------------------------------
+
+For performance reasons, monitoring of RBD images is disabled by default. For more information, please see
+:ref:`prometheus-rbd-io-statistics`. If disabled, the overview and details dashboards will stay empty in Grafana
+and the metrics will not be visible in Prometheus.
diff -pruN 14.2.16-2/doc/cephadm/operations.rst 15.2.7-0ubuntu4/doc/cephadm/operations.rst
--- 14.2.16-2/doc/cephadm/operations.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/operations.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,296 @@
+==================
+Cephadm Operations
+==================
+
+Watching cephadm log messages
+=============================
+
+Cephadm logs to the ``cephadm`` cluster log channel, meaning you can
+monitor progress in realtime with::
+
+  # ceph -W cephadm
+
+By default it will show info-level events and above.  To see
+debug-level messages too::
+
+  # ceph config set mgr mgr/cephadm/log_to_cluster_level debug
+  # ceph -W cephadm --watch-debug
+
+Be careful: the debug messages are very verbose!
+
+You can see recent events with::
+
+  # ceph log last cephadm
+
+These events are also logged to the ``ceph.cephadm.log`` file on
+monitor hosts and to the monitor daemons' stderr.
+
+
+.. _cephadm-logs:
+
+Ceph daemon logs
+================
+
+Logging to stdout
+-----------------
+
+Traditionally, Ceph daemons have logged to ``/var/log/ceph``.  By
+default, cephadm daemons log to stderr and the logs are
+captured by the container runtime environment.  For most systems, by
+default, these logs are sent to journald and accessible via
+``journalctl``.
+
+For example, to view the logs for the daemon ``mon.foo`` for a cluster
+with ID ``5c5a50ae-272a-455d-99e9-32c6a013e694``, the command would be
+something like::
+
+  journalctl -u ceph-5c5a50ae-272a-455d-99e9-32c6a013e694@mon.foo
+
+This works well for normal operations when logging levels are low.
+
+To disable logging to stderr::
+
+  ceph config set global log_to_stderr false
+  ceph config set global mon_cluster_log_to_stderr false
+
+Logging to files
+----------------
+
+You can also configure Ceph daemons to log to files instead of stderr,
+just like they have in the past.  When logging to files, Ceph logs appear
+in ``/var/log/ceph/<cluster-fsid>``.
+
+To enable logging to files::
+
+  ceph config set global log_to_file true
+  ceph config set global mon_cluster_log_to_file true
+
+We recommend disabling logging to stderr (see above) or else everything
+will be logged twice::
+
+  ceph config set global log_to_stderr false
+  ceph config set global mon_cluster_log_to_stderr false
+
+By default, cephadm sets up log rotation on each host to rotate these
+files.  You can configure the logging retention schedule by modifying
+``/etc/logrotate.d/ceph.<cluster-fsid>``.
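+
+A logrotate policy generally looks something like the following sketch
+(illustrative only, not the exact file that cephadm generates)::
+
+  /var/log/ceph/<cluster-fsid>/*.log {
+      rotate 7
+      daily
+      compress
+      missingok
+  }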
+
+
+Data location
+=============
+
+Cephadm stores daemon data and logs in slightly different locations than
+older versions of Ceph:
+
+* ``/var/log/ceph/<cluster-fsid>`` contains all cluster logs.  Note
+  that by default cephadm logs via stderr and the container runtime,
+  so these logs are normally not present.
+* ``/var/lib/ceph/<cluster-fsid>`` contains all cluster daemon data
+  (besides logs).
+* ``/var/lib/ceph/<cluster-fsid>/<daemon-name>`` contains all data for
+  an individual daemon.
+* ``/var/lib/ceph/<cluster-fsid>/crash`` contains crash reports for
+  the cluster.
+* ``/var/lib/ceph/<cluster-fsid>/removed`` contains old daemon
+  data directories for stateful daemons (e.g., monitor, prometheus)
+  that have been removed by cephadm.
+
+Disk usage
+----------
+
+Because a few Ceph daemons may store a significant amount of data in
+``/var/lib/ceph`` (notably, the monitors and prometheus), we recommend
+moving this directory to its own disk, partition, or logical volume so
+that it does not fill up the root file system.
+
+
+
+SSH Configuration
+=================
+
+Cephadm uses SSH to connect to remote hosts.  SSH uses a key to authenticate
+with those hosts in a secure way.
+
+
+Default behavior
+----------------
+
+Cephadm stores an SSH key in the monitor that is used to
+connect to remote hosts.  When the cluster is bootstrapped, this SSH
+key is generated automatically and no additional configuration
+is necessary.
+
+A *new* SSH key can be generated with::
+
+  ceph cephadm generate-key
+
+The public portion of the SSH key can be retrieved with::
+
+  ceph cephadm get-pub-key
+
+The currently stored SSH key can be deleted with::
+
+  ceph cephadm clear-key
+
+You can make use of an existing key by directly importing it with::
+
+  ceph config-key set mgr/cephadm/ssh_identity_key -i <key>
+  ceph config-key set mgr/cephadm/ssh_identity_pub -i <pub>
+
+You will then need to restart the mgr daemon to reload the configuration with::
+
+  ceph mgr fail
+
+Configuring a different SSH user
+----------------------------------
+
+Cephadm must be able to log into all the Ceph cluster nodes as a user
+that has enough privileges to download container images, start containers
+and execute commands without prompting for a password. If you do not want
+to use the "root" user (default option in cephadm), you must provide
+cephadm the name of the user that is going to be used to perform all the
+cephadm operations. Use the command::
+
+  ceph cephadm set-user <user>
+
+Prior to running this, the cluster SSH key needs to be added to this user's
+``authorized_keys`` file and non-root users must have passwordless sudo access.
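+
+For example, something like this could be used to distribute the cluster's
+public key to each host for that user (a sketch; ``<user>`` and ``<host>``
+are placeholders, and ``/etc/ceph/ceph.pub`` assumes the default bootstrap
+output location)::
+
+  ssh-copy-id -f -i /etc/ceph/ceph.pub <user>@<host>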
+
+
+Customizing the SSH configuration
+---------------------------------
+
+Cephadm generates an appropriate ``ssh_config`` file that is
+used for connecting to remote hosts.  This configuration looks
+something like this::
+
+  Host *
+  User root
+  StrictHostKeyChecking no
+  UserKnownHostsFile /dev/null
+
+There are two ways to customize this configuration for your environment:
+
+#. Import a customized configuration file that will be stored
+   by the monitor with::
+
+     ceph cephadm set-ssh-config -i <ssh_config_file>
+
+   To remove a customized SSH config and revert back to the default behavior::
+
+     ceph cephadm clear-ssh-config
+
+#. You can configure a file location for the SSH configuration file with::
+
+     ceph config set mgr mgr/cephadm/ssh_config_file <path>
+
+   We do *not recommend* this approach.  The path name must be
+   visible to *any* mgr daemon, and cephadm runs all daemons as
+   containers. That means that the file either needs to be placed
+   inside a customized container image for your deployment, or
+   manually distributed to the mgr data directory
+   (``/var/lib/ceph/<cluster-fsid>/mgr.<id>`` on the host, visible at
+   ``/var/lib/ceph/mgr/ceph-<id>`` from inside the container).
+
+
+Health checks
+=============
+
+CEPHADM_PAUSED
+--------------
+
+Cephadm background work has been paused with ``ceph orch pause``.  Cephadm
+continues to perform passive monitoring activities (like checking
+host and daemon status), but it will not make any changes (like deploying
+or removing daemons).
+
+Resume cephadm work with::
+
+  ceph orch resume
+
+.. _cephadm-stray-host:
+
+CEPHADM_STRAY_HOST
+------------------
+
+One or more hosts have running Ceph daemons but are not registered as
+hosts managed by *cephadm*.  This means that those services cannot
+currently be managed by cephadm (e.g., restarted, upgraded, included
+in `ceph orch ps`).
+
+You can manage the host(s) with::
+
+  ceph orch host add *<hostname>*
+
+Note that you may need to configure SSH access to the remote host
+before this will work.
+
+Alternatively, you can manually connect to the host and ensure that
+services on that host are removed or migrated to a host that is
+managed by *cephadm*.
+
+You can also disable this warning entirely with::
+
+  ceph config set mgr mgr/cephadm/warn_on_stray_hosts false
+
+See :ref:`cephadm-fqdn` for more information about host names and
+domain names.
+
+CEPHADM_STRAY_DAEMON
+--------------------
+
+One or more Ceph daemons are running but are not managed by
+*cephadm*.  This may be because they were deployed using a different
+tool, or because they were started manually.  Those
+services cannot currently be managed by cephadm (e.g., restarted,
+upgraded, or included in `ceph orch ps`).
+
+If the daemon is a stateful one (monitor or OSD), it should be adopted
+by cephadm; see :ref:`cephadm-adoption`.  For stateless daemons, it is
+usually easiest to provision a new daemon with the ``ceph orch apply``
+command and then stop the unmanaged daemon.
+
+This warning can be disabled entirely with::
+
+  ceph config set mgr mgr/cephadm/warn_on_stray_daemons false
+
+CEPHADM_HOST_CHECK_FAILED
+-------------------------
+
+One or more hosts have failed the basic cephadm host check, which verifies
+that (1) the host is reachable and cephadm can be executed there, and (2)
+that the host satisfies basic prerequisites, like a working container
+runtime (podman or docker) and working time synchronization.
+If this test fails, cephadm will not be able to manage services on that host.
+
+You can manually run this check with::
+
+  ceph cephadm check-host *<hostname>*
+
+You can remove a broken host from management with::
+
+  ceph orch host rm *<hostname>*
+
+You can disable this health warning with::
+
+  ceph config set mgr mgr/cephadm/warn_on_failed_host_check false
+
+/etc/ceph/ceph.conf
+===================
+
+Cephadm uses a minimized ``ceph.conf`` that contains only the information
+needed to connect to the Ceph cluster.
+
+To update the configuration settings, use::
+
+  ceph config set ...
+
+
+To set up an initial configuration before calling
+`bootstrap`, create an initial ``ceph.conf`` file. For example::
+
+  cat <<EOF > /etc/ceph/ceph.conf
+  [global]
+  osd crush chooseleaf type = 0
+  EOF
+  cephadm bootstrap -c /etc/ceph/ceph.conf ...
diff -pruN 14.2.16-2/doc/cephadm/stability.rst 15.2.7-0ubuntu4/doc/cephadm/stability.rst
--- 14.2.16-2/doc/cephadm/stability.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/stability.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,41 @@
+.. _cephadm-stability:
+
+Stability
+=========
+
+Cephadm is a new feature in the Octopus release and has seen limited
+use in production and at scale.  We would like users to try cephadm,
+especially for new clusters, but please be aware that some
+functionality is still rough around the edges.  We expect fairly
+frequent updates and improvements over the first several bug fix
+releases of Octopus.
+
+Cephadm management of the following components is currently well supported:
+
+- Monitors
+- Managers
+- OSDs
+- CephFS file systems
+- rbd-mirror
+
+The following components are working with cephadm, but the
+documentation is not as complete as we would like, and there may be some
+changes in the near future:
+
+- RGW
+- dmcrypt OSDs
+
+Cephadm support for the following features is still under development:
+
+- NFS
+- iSCSI
+
+If you run into problems, you can always pause cephadm with::
+
+  ceph orch pause
+
+Or turn cephadm off completely with::
+
+  ceph orch set backend ''
+  ceph mgr module disable cephadm
+
diff -pruN 14.2.16-2/doc/cephadm/troubleshooting.rst 15.2.7-0ubuntu4/doc/cephadm/troubleshooting.rst
--- 14.2.16-2/doc/cephadm/troubleshooting.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/troubleshooting.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,178 @@
+
+Troubleshooting
+===============
+
+Sometimes there is a need to investigate why a cephadm command failed or why
+a specific service no longer runs properly.
+
+As cephadm deploys daemons as containers, troubleshooting daemons is slightly
+different. Here are a few tools and commands to help investigate issues.
+
+Pausing or disabling cephadm
+----------------------------
+
+If something goes wrong and cephadm is behaving in a way you do not
+like, you can pause most background activity with::
+
+  ceph orch pause
+
+This will stop any changes, but cephadm will still periodically check hosts to
+refresh its inventory of daemons and devices.  You can disable cephadm
+completely with::
+
+  ceph orch set backend ''
+  ceph mgr module disable cephadm
+
+This will disable all of the ``ceph orch ...`` CLI commands but the previously
+deployed daemon containers will still continue to exist and start as they
+did before.
+
+Checking cephadm logs
+---------------------
+
+You can monitor the cephadm log in real time with::
+
+  ceph -W cephadm
+
+You can see the last few messages with::
+
+  ceph log last cephadm
+
+If you have enabled logging to files, you can see a cephadm log file called
+``ceph.cephadm.log`` on monitor hosts (see :ref:`cephadm-logs`).
+
+Gathering log files
+-------------------
+
+Use journalctl to gather the log files of all daemons:
+
+.. note:: By default cephadm now stores logs in journald. This means
+   that you will no longer find daemon logs in ``/var/log/ceph/``.
+
+To read the log file of one specific daemon, run::
+
+    cephadm logs --name <name-of-daemon>
+
+Note: this only works when run on the same host where the daemon is running. To
+get logs of a daemon running on a different host, give the ``--fsid`` option::
+
+    cephadm logs --fsid <fsid> --name <name-of-daemon>
+
+where the ``<fsid>`` corresponds to the cluster ID printed by ``ceph status``.
+
+To fetch all log files of all daemons on a given host, run::
+
+    for name in $(cephadm ls | jq -r '.[].name') ; do
+      cephadm logs --fsid <fsid> --name "$name" > $name;
+    done
+
+Collecting systemd status
+-------------------------
+
+To print the state of a systemd unit, run::
+
+      systemctl status "ceph-$(cephadm shell ceph fsid)@<service name>.service";
+
+
+To fetch all state of all daemons of a given host, run::
+
+    fsid="$(cephadm shell ceph fsid)"
+    for name in $(cephadm ls | jq -r '.[].name') ; do
+      systemctl status "ceph-$fsid@$name.service" > $name;
+    done
+
+
+List all downloaded container images
+------------------------------------
+
+To list all container images that are downloaded on a host:
+
+.. note:: ``Image`` might also be called `ImageID`
+
+::
+
+    podman ps -a --format json | jq '.[].Image'
+    "docker.io/library/centos:8"
+    "registry.opensuse.org/opensuse/leap:15.2"
+
+
+Manually running containers
+---------------------------
+
+Cephadm writes small wrappers that run a container. Refer to
+``/var/lib/ceph/<cluster-fsid>/<service-name>/unit.run`` for the
+container execution command.
+
+.. _cephadm-ssh-errors:
+
+ssh errors
+----------
+
+Error message::
+
+  xxxxxx.gateway_bootstrap.HostNotFound: -F /tmp/cephadm-conf-kbqvkrkw root@10.10.1.2
+  raise OrchestratorError('Failed to connect to %s (%s).  Check that the host is reachable and accepts  connections using the cephadm SSH key' % (host, addr)) from
+  orchestrator._interface.OrchestratorError: Failed to connect to 10.10.1.2 (10.10.1.2).  Check that the host is reachable and accepts connections using the cephadm SSH key
+
+Things users can do:
+
+1. Ensure cephadm has an SSH identity key::
+      
+     [root@mon1~]# cephadm shell -- ceph config-key get mgr/cephadm/ssh_identity_key > key
+     INFO:cephadm:Inferring fsid f8edc08a-7f17-11ea-8707-000c2915dd98
+     INFO:cephadm:Using recent ceph image docker.io/ceph/ceph:v15 obtained 'mgr/cephadm/ssh_identity_key'
+     [root@mon1 ~] # chmod 0600 key
+
+ If this fails, cephadm doesn't have a key. Fix this by running the following command::
+   
+     [root@mon1 ~]# cephadm shell -- ceph cephadm generate-ssh-key
+
+ or::
+   
+     [root@mon1 ~]# cat key | cephadm shell -- ceph cephadm set-ssh-key -i -
+
+2. Ensure that the ssh config is correct::
+   
+     [root@mon1 ~]# cephadm shell -- ceph cephadm get-ssh-config > config
+
+3. Verify that we can connect to the host::
+    
+     [root@mon1 ~]# ssh -F config -i key root@mon1
+
+
+
+
+Verifying that the Public Key is Listed in the authorized_keys file
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+To verify that the public key is in the authorized_keys file, run the following commands::
+
+     [root@mon1 ~]# cephadm shell -- ceph config-key get mgr/cephadm/ssh_identity_pub > key.pub
+     [root@mon1 ~]# grep "`cat key.pub`"  /root/.ssh/authorized_keys
+
+Failed to infer CIDR network error
+----------------------------------
+
+If you see this error::
+
+   ERROR: Failed to infer CIDR network for mon ip ***; pass --skip-mon-network to configure it later
+
+Or this error::
+
+   Must set public_network config option or specify a CIDR network, ceph addrvec, or plain IP
+
+This means that you must run a command of this form::
+
+  ceph config set mon public_network <mon_network>
+
+For more detail on operations of this kind, see :ref:`deploy_additional_monitors`.
+
+Accessing the admin socket
+--------------------------
+
+Each Ceph daemon provides an admin socket that bypasses the
+MONs (See :ref:`rados-monitoring-using-admin-socket`).
+
+To access the admin socket, first enter the daemon container on the host::
+
+    [root@mon1 ~]# cephadm enter --name <daemon-name>
+    [ceph: root@mon1 /]# ceph --admin-daemon /var/run/ceph/ceph-<daemon-name>.asok config show
diff -pruN 14.2.16-2/doc/cephadm/upgrade.rst 15.2.7-0ubuntu4/doc/cephadm/upgrade.rst
--- 14.2.16-2/doc/cephadm/upgrade.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephadm/upgrade.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,118 @@
+==============
+Upgrading Ceph
+==============
+
+Cephadm is capable of safely upgrading Ceph from one bugfix release to
+another.  For example, you can upgrade from v15.2.0 (the first Octopus
+release) to the next point release v15.2.1.
+
+The automated upgrade process follows Ceph best practices.  For example:
+
+* The upgrade order starts with managers, monitors, then other daemons.
+* Each daemon is restarted only after Ceph indicates that the cluster
+  will remain available.
+
+Keep in mind that the Ceph cluster health status is likely to switch to
+`HEALTH_WARNING` during the upgrade.
+
+
+Starting the upgrade
+====================
+
+Before you start, you should verify that all hosts are currently online
+and your cluster is healthy.
+
+::
+
+  # ceph -s
+
+To upgrade (or downgrade) to a specific release::
+
+  # ceph orch upgrade start --ceph-version <version>
+
+For example, to upgrade to v15.2.1::
+
+  # ceph orch upgrade start --ceph-version 15.2.1
+
+
+Monitoring the upgrade
+======================
+
+Determine whether an upgrade is in progress and what version the cluster is
+upgrading to with::
+
+  # ceph orch upgrade status
+
+While the upgrade is underway, you will see a progress bar in the ceph
+status output.  For example::
+
+  # ceph -s
+  [...]
+    progress:
+      Upgrade to docker.io/ceph/ceph:v15.2.1 (00h 20m 12s)
+        [=======.....................] (time remaining: 01h 43m 31s)
+
+You can also watch the cephadm log with::
+
+  # ceph -W cephadm
+
+
+Canceling an upgrade
+====================
+
+You can stop the upgrade process at any time with::
+
+  # ceph orch upgrade stop
+
+
+Potential problems
+==================
+
+There are a few health alerts that can arise during the upgrade process.
+
+UPGRADE_NO_STANDBY_MGR
+----------------------
+
+Ceph requires both an active and a standby manager daemon in order to
+proceed, but there is currently no standby.
+
+You can ensure that Cephadm is configured to run 2 (or more) managers with::
+
+  # ceph orch apply mgr 2  # or more
+
+You can check the status of existing mgr daemons with::
+
+  # ceph orch ps --daemon-type mgr
+
+If an existing mgr daemon has stopped, you can try restarting it with::
+
+  # ceph orch daemon restart <name>
+
+UPGRADE_FAILED_PULL
+-------------------
+
+Ceph was unable to pull the container image for the target version.
+This can happen if you specify a version or container image that does
+not exist (e.g., 1.2.3), or if the container registry is not reachable from
+one or more hosts in the cluster.
+
+You can cancel the existing upgrade and specify a different target version with::
+
+  # ceph orch upgrade stop
+  # ceph orch upgrade start --ceph-version <version>
+
+
+Using customized container images
+=================================
+
+For most users, simply specifying the Ceph version is sufficient.
+Cephadm will locate the specific Ceph container image to use by
+combining the ``container_image_base`` configuration option (default:
+``docker.io/ceph/ceph``) with a tag of ``vX.Y.Z``.
+
+You can also upgrade to an arbitrary container image.  For example, to
+upgrade to a development build::
+
+  # ceph orch upgrade start --image quay.io/ceph-ci/ceph:recent-git-branch-name
+
+For more information about available container images, see :ref:`containers`.
diff -pruN 14.2.16-2/doc/cephfs/add-remove-mds.rst 15.2.7-0ubuntu4/doc/cephfs/add-remove-mds.rst
--- 14.2.16-2/doc/cephfs/add-remove-mds.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/add-remove-mds.rst	2020-11-30 19:58:30.000000000 +0000
@@ -15,9 +15,9 @@ Provisioning Hardware for an MDS
 ================================
 
 The present version of the MDS is single-threaded and CPU-bound for most
-activities, including responding to client requests. Even so, an MDS under the
-most aggressive client loads still uses about 2 to 3 CPU cores. This is due to
-the other miscellaneous upkeep threads working in tandem.
+activities, including responding to client requests. An MDS under the most
+aggressive client loads uses about 2 to 3 CPU cores. This is due to the other
+miscellaneous upkeep threads working in tandem.
 
 Even so, it is recommended that an MDS server be well provisioned with an
 advanced CPU with sufficient cores. Development is on-going to make better use
@@ -27,14 +27,17 @@ that the MDS server will improve perform
 The other dimension to MDS performance is the available RAM for caching. The
 MDS necessarily manages a distributed and cooperative metadata cache among all
 clients and other active MDSs. Therefore it is essential to provide the MDS
-with sufficient RAM to enable faster metadata access and mutation.
+with sufficient RAM to enable faster metadata access and mutation. The default
+MDS cache size (see also :doc:`/cephfs/cache-size-limits`) is 4GB. It is
+recommended to provision at least 8GB of RAM for the MDS to support this cache
+size.
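+
+For example, on a suitably provisioned host the cache limit can be raised
+with a command like this (a sketch; the value is in bytes, here 8GiB)::
+
+    $ ceph config set mds mds_cache_memory_limit 8589934592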
 
 Generally, an MDS serving a large cluster of clients (1000 or more) will use at
-least 64GB of cache (see also :doc:`/cephfs/cache-size-limits`). An MDS with a larger
-cache is not well explored in the largest known community clusters; there may
-be diminishing returns where management of such a large cache negatively
-impacts performance in surprising ways. It would be best to do analysis with
-expected workloads to determine if provisioning more RAM is worthwhile.
+least 64GB of cache. An MDS with a larger cache is not well explored in the
+largest known community clusters; there may be diminishing returns where
+management of such a large cache negatively impacts performance in surprising
+ways. It would be best to do analysis with expected workloads to determine if
+provisioning more RAM is worthwhile.
 
 In a bare-metal cluster, the best practice is to over-provision hardware for
 the MDS server. Even if a single MDS daemon is unable to fully utilize the
@@ -61,23 +64,23 @@ Adding an MDS
 
 #. Create an mds data point ``/var/lib/ceph/mds/ceph-${id}``. The daemon only uses this directory to store its keyring.
 
-#. Edit ``ceph.conf`` and add MDS section. ::
-
-	[mds.${id}]
-	host = {hostname}
-
-#. Create the authentication key, if you use CephX. ::
+#. Create the authentication key, if you use CephX: ::
 
 	$ sudo ceph auth get-or-create mds.${id} mon 'profile mds' mgr 'profile mds' mds 'allow *' osd 'allow *' > /var/lib/ceph/mds/ceph-${id}/keyring
 
-#. Start the service. ::
+#. Start the service: ::
 
-	$ sudo systemctl start mds.${id}
+	$ sudo systemctl start ceph-mds@${id}
 
 #. The status of the cluster should show: ::
 
 	mds: ${id}:1 {0=${id}=up:active} 2 up:standby
 
+#. Optionally, configure the file system the MDS should join (:ref:`mds-join-fs`): ::
+
+    $ ceph config set mds.${id} mds_join_fs ${fs}
+
+
 Removing an MDS
 ===============
 
@@ -92,7 +95,7 @@ the following method.
 
 #. Stop the MDS to be removed. ::
 
-	$ sudo systemctl stop mds.${id}
+	$ sudo systemctl stop ceph-mds@${id}
 
    The MDS will automatically notify the Ceph monitors that it is going down.
    This enables the monitors to perform instantaneous failover to an available
diff -pruN 14.2.16-2/doc/cephfs/administration.rst 15.2.7-0ubuntu4/doc/cephfs/administration.rst
--- 14.2.16-2/doc/cephfs/administration.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/administration.rst	2020-11-30 19:58:30.000000000 +0000
@@ -3,19 +3,19 @@
 CephFS Administrative commands
 ==============================
 
-Filesystems
------------
+File Systems
+------------
 
 .. note:: The names of the file systems, metadata pools, and data pools can
           only have characters in the set [a-zA-Z0-9\_-.].
 
-These commands operate on the CephFS filesystems in your Ceph cluster.
-Note that by default only one filesystem is permitted: to enable
-creation of multiple filesystems use ``ceph fs flag set enable_multiple true``.
+These commands operate on the CephFS file systems in your Ceph cluster.
+Note that by default only one file system is permitted: to enable
+creation of multiple file systems use ``ceph fs flag set enable_multiple true``.
 
 ::
 
-    fs new <filesystem name> <metadata pool name> <data pool name>
+    fs new <file system name> <metadata pool name> <data pool name>
 
 This command creates a new file system. The file system name and metadata pool
 name are self-explanatory. The specified data pool is the default data pool and
@@ -40,7 +40,7 @@ standby MDS daemons.
 
 ::
 
-    fs rm <filesystem name> [--yes-i-really-mean-it]
+    fs rm <file system name> [--yes-i-really-mean-it]
 
 Destroy a CephFS file system. This wipes information about the state of the
 file system from the FSMap. The metadata pool and data pools are untouched and
@@ -48,28 +48,28 @@ must be destroyed separately.
 
 ::
 
-    fs get <filesystem name>
+    fs get <file system name>
 
 Get information about the named file system, including settings and ranks. This
 is a subset of the same information from the ``fs dump`` command.
 
 ::
 
-    fs set <filesystem name> <var> <val>
+    fs set <file system name> <var> <val>
 
 Change a setting on a file system. These settings are specific to the named
 file system and do not affect other file systems.
 
 ::
 
-    fs add_data_pool <filesystem name> <pool name/id>
+    fs add_data_pool <file system name> <pool name/id>
 
 Add a data pool to the file system. This pool can be used for file layouts
 as an alternate location to store file data.
 
 ::
 
-    fs rm_data_pool <filesystem name> <pool name/id>
+    fs rm_data_pool <file system name> <pool name/id>
 
 This command removes the specified pool from the list of data pools for the
 file system.  If any files have layouts for the removed data pool, the file
@@ -261,7 +261,7 @@ Advanced
 These commands are not required in normal operation, and exist
 for use in exceptional circumstances.  Incorrect use of these
 commands may cause serious problems, such as an inaccessible
-filesystem.
+file system.
 
 ::
 
@@ -289,7 +289,7 @@ This removes a rank from the failed set.
 
 ::
 
-    fs reset <filesystem name>
+    fs reset <file system name>
 
 This command resets the file system state to defaults, except for the name and
 pools. Non-zero ranks are saved in the stopped set.
diff -pruN 14.2.16-2/doc/cephfs/app-best-practices.rst 15.2.7-0ubuntu4/doc/cephfs/app-best-practices.rst
--- 14.2.16-2/doc/cephfs/app-best-practices.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/app-best-practices.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,16 +1,16 @@
 
-Application best practices for distributed filesystems
-======================================================
+Application best practices for distributed file systems
+=======================================================
 
 CephFS is POSIX compatible, and therefore should work with any existing
-applications that expect a POSIX filesystem.  However, because it is a
-network filesystem (unlike e.g. XFS) and it is highly consistent (unlike
+applications that expect a POSIX file system.  However, because it is a
+network file system (unlike e.g. XFS) and it is highly consistent (unlike
 e.g. NFS), there are some consequences that application authors may
 benefit from knowing about.
 
-The following sections describe some areas where distributed filesystems
+The following sections describe some areas where distributed file systems
 may have noticeably different performance behaviours compared with
-local filesystems.
+local file systems.
 
 
 ls -l
@@ -56,7 +56,7 @@ Hard links
 ----------
 
 Hard links have an intrinsic cost in terms of the internal housekeeping
-that a filesystem has to do to keep two references to the same data.  In
+that a file system has to do to keep two references to the same data.  In
 CephFS there is a particular performance cost, because with normal files
 the inode is embedded in the directory (i.e. there is no extra fetch of
 the inode after looking up the path).
@@ -69,18 +69,14 @@ performance is very different for worklo
 that cache.
 
 If your workload has more files than fit in your cache (configured using 
-``mds_cache_memory_limit`` or ``mds_cache_size`` settings), then
-make sure you test it appropriately: don't test your system with a small
-number of files and then expect equivalent performance when you move
-to a much larger number of files.
+``mds_cache_memory_limit`` settings), then make sure you test it
+appropriately: don't test your system with a small number of files and then
+expect equivalent performance when you move to a much larger number of files.
 
-Do you need a filesystem?
--------------------------
+Do you need a file system?
+--------------------------
 
 Remember that Ceph also includes an object storage interface.  If your
 application needs to store huge flat collections of files where you just
 read and write whole files at once, then you might well be better off
 using the :ref:`Object Gateway <object-gateway>`
-
-
-
diff -pruN 14.2.16-2/doc/cephfs/best-practices.rst 15.2.7-0ubuntu4/doc/cephfs/best-practices.rst
--- 14.2.16-2/doc/cephfs/best-practices.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/best-practices.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,88 +0,0 @@
-
-CephFS best practices
-=====================
-
-This guide provides recommendations for best results when deploying CephFS.
-
-For the actual configuration guide for CephFS, please see the instructions
-at :doc:`/cephfs/index`.
-
-Which Ceph version?
--------------------
-
-Use at least the Jewel (v10.2.0) release of Ceph.  This is the first
-release to include stable CephFS code and fsck/repair tools.  Make sure
-you are using the latest point release to get bug fixes.
-
-Note that Ceph releases do not include a kernel, this is versioned
-and released separately.  See below for guidance of choosing an
-appropriate kernel version if you are using the kernel client
-for CephFS.
-
-Most stable configuration
--------------------------
-
-Some features in CephFS are still experimental.  See
-:doc:`/cephfs/experimental-features` for guidance on these.
-
-For the best chance of a happy healthy filesystem, use a **single active MDS** 
-and **do not use snapshots**.  Both of these are the default.
-
-Note that creating multiple MDS daemons is fine, as these will simply be
-used as standbys.  However, for best stability you should avoid
-adjusting ``max_mds`` upwards, as this would cause multiple MDS
-daemons to be active at once.
-
-Which client?
--------------
-
-The FUSE client is the most accessible and the easiest to upgrade to the
-version of Ceph used by the storage cluster, while the kernel client will
-often give better performance.
-
-The clients do not always provide equivalent functionality, for example
-the fuse client supports client-enforced quotas while the kernel client
-does not.
-
-When encountering bugs or performance issues, it is often instructive to
-try using the other client, in order to find out whether the bug was
-client-specific or not (and then to let the developers know).
-
-Which kernel version?
----------------------
-
-Because the kernel client is distributed as part of the linux kernel (not
-as part of packaged ceph releases),
-you will need to consider which kernel version to use on your client nodes.
-Older kernels are known to include buggy ceph clients, and may not support
-features that more recent Ceph clusters support.
-
-Remember that the "latest" kernel in a stable linux distribution is likely
-to be years behind the latest upstream linux kernel where Ceph development
-takes place (including bug fixes).
-
-As a rough guide, as of Ceph 10.x (Jewel), you should be using a least a
-4.x kernel.  If you absolutely have to use an older kernel, you should use
-the fuse client instead of the kernel client.
-
-This advice does not apply if you are using a linux distribution that
-includes CephFS support, as in this case the distributor will be responsible
-for backporting fixes to their stable kernel: check with your vendor.
-
-Reporting issues
-----------------
-
-If you have identified a specific issue, please report it with as much
-information as possible.  Especially important information:
-
-* Ceph versions installed on client and server
-* Whether you are using the kernel or fuse client
-* If you are using the kernel client, what kernel version?
-* How many clients are in play, doing what kind of workload?
-* If a system is 'stuck', is that affecting all clients or just one?
-* Any ceph health messages
-* Any backtraces in the ceph logs from crashes
-
-If you are satisfied that you have found a bug, please file it on
-`the tracker <http://tracker.ceph.com>`_.  For more general queries please write
-to the `ceph-users mailing list <http://lists.ceph.com/listinfo.cgi/ceph-users-ceph.com/>`_.
diff -pruN 14.2.16-2/doc/cephfs/cache-size-limits.rst 15.2.7-0ubuntu4/doc/cephfs/cache-size-limits.rst
--- 14.2.16-2/doc/cephfs/cache-size-limits.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/cache-size-limits.rst	2020-11-30 19:58:30.000000000 +0000
@@ -5,10 +5,9 @@ This section describes ways to limit MDS
 
 You can limit the size of the Metadata Server (MDS) cache by:
 
-* *A memory limit*: A new behavior introduced in the Luminous release. Use the `mds_cache_memory_limit` parameters. We recommend to use memory limits instead of inode count limits.
-* *Inode count*: Use the `mds_cache_size` parameter. By default, limiting the MDS cache by inode count is disabled.
+* *A memory limit*: A new behavior introduced in the Luminous release. Use the `mds_cache_memory_limit` parameter.
 
-In addition, you can specify a cache reservation by using the `mds_cache_reservation` parameter for MDS operations. The cache reservation is limited as a percentage of the memory or inode limit and is set to 5% by default. The intent of this parameter is to have the MDS maintain an extra reserve of memory for its cache for new metadata operations to use. As a consequence, the MDS should in general operate below its memory limit because it will recall old state from clients in order to drop unused metadata in its cache.
+In addition, you can specify a cache reservation by using the `mds_cache_reservation` parameter for MDS operations. The cache reservation is limited as a percentage of the memory and is set to 5% by default. The intent of this parameter is to have the MDS maintain an extra reserve of memory for its cache for new metadata operations to use. As a consequence, the MDS should in general operate below its memory limit because it will recall old state from clients in order to drop unused metadata in its cache.
 
 The `mds_cache_reservation` parameter replaces the `mds_health_cache_threshold` in all situations except when MDS nodes sends a health alert to the Monitors indicating the cache is too large. By default, `mds_health_cache_threshold` is 150% of the maximum cache size.
 
diff -pruN 14.2.16-2/doc/cephfs/capabilities.rst 15.2.7-0ubuntu4/doc/cephfs/capabilities.rst
--- 14.2.16-2/doc/cephfs/capabilities.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/capabilities.rst	2020-11-30 19:58:30.000000000 +0000
@@ -4,7 +4,7 @@ Capabilities in CephFS
 When a client wants to operate on an inode, it will query the MDS in various
 ways, which will then grant the client a set of **capabilities**. These
 grant the client permissions to operate on the inode in various ways. One
-of the major differences from other network filesystems (e.g NFS or SMB) is
+of the major differences from other network file systems (e.g NFS or SMB) is
 that the capabilities granted are quite granular, and it's possible that
 multiple clients can hold different capabilities on the same inodes.
 
diff -pruN 14.2.16-2/doc/cephfs/cephfs-architecture.svg 15.2.7-0ubuntu4/doc/cephfs/cephfs-architecture.svg
--- 14.2.16-2/doc/cephfs/cephfs-architecture.svg	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/cephfs-architecture.svg	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1 @@
+<svg version="1.1" viewBox="0.0 0.0 784.4199475065617 481.56692913385825" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><clipPath id="p.0"><path d="m0 0l784.4199 0l0 481.56693l-784.4199 0l0 -481.56693z" clip-rule="nonzero"/></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l784.4199 0l0 481.56693l-784.4199 0z" fill-rule="evenodd"/><path fill="#b6d7a8" d="m30.272966 79.10533l0 0c0 -31.388912 24.684267 -56.83465 55.133858 -56.83465l0 0c14.622414 0 28.64592 5.987919 38.985527 16.646484c10.3396 10.658562 16.14833 25.114677 16.14833 40.188164l0 0c0 31.388908 -24.684265 56.83464 -55.133858 56.83464l0 0c-30.449589 0 -55.133858 -25.445732 -55.133858 -56.83464z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m30.272966 79.10533l0 0c0 -31.388912 24.684267 -56.83465 55.133858 -56.83465l0 0c14.622414 0 28.64592 5.987919 38.985527 16.646484c10.3396 10.658562 16.14833 25.114677 16.14833 40.188164l0 0c0 31.388908 -24.684265 56.83464 -55.133858 56.83464l0 0c-30.449589 0 -55.133858 -25.445732 -55.133858 -56.83464z" fill-rule="evenodd"/><path fill="#000000" d="m62.688076 85.08783q0.6875 0 1.3124962 -0.375q0.640625 -0.390625 1.03125 -1.03125l0.96875 0.625q-0.578125 0.953125 -1.359375 1.4375q-0.7812462 0.46875 -1.9062462 0.46875q-1.296875 0 -2.3125 -0.640625q-1.015625 -0.640625 -1.625 -1.96875q-0.609375 -1.328125 -0.609375 -3.328125q0 -2.140625 0.65625 -3.46875q0.65625 -1.34375 1.65625 -1.921875q1.0 -0.578125 2.171875 -0.578125q1.21875 0 2.1249962 0.625q0.90625 0.609375 1.375 1.671875l-1.125 0.515625q-0.015625 0 -0.015625 0q0 -0.015625 0 -0.015625q-0.484375 -0.96875 -1.0781212 -1.359375q-0.59375 -0.390625 -1.328125 -0.390625q-1.46875 0 -2.328125 1.28125q-0.84375 1.28125 -0.84375 3.546875q0 1.4375 0.421875 2.5625q0.4375 1.109375 1.171875 1.734375q0.734375 0.609375 1.640625 0.609375zm2.1406212 -8.015625q0.03125 -0.046875 0.03125 -0.046875q0.046875 0 0.15625 0.125l-0.125 0.046875l-0.0625 -0.125zm0.21875 0.046875q0.078125 0.171875 -0.03125 0.03125l0.03125 -0.03125zm3.171875 8.90625l0 -1.078125l2.53125 0l0 -10.25l-2.421875 0l0 -1.078125l3.78125 0l0 11.328125l2.5 0l0 1.078125l-6.390625 0zm9.71875 0l0 -1.078125l2.1875076 0l0 -6.359375l-2.0625076 0l0 -1.09375l3.4062576 0l0 7.453125l2.0 0l0 1.078125l-5.5312576 0zm2.7812576 -10.3125q-0.390625 0 -0.671875 -0.28125q-0.28125 -0.28125 -0.28125 -0.671875q0 -0.40625 0.265625 -0.6875q0.28125 -0.28125 0.6875 -0.28125q0.390625 0 0.671875 0.296875q0.296875 0.28125 0.296875 0.671875q0 0.390625 -0.296875 0.671875q-0.28125 0.28125 -0.671875 0.28125zm9.765625 10.5q-1.90625 0 -3.03125 -1.15625q-1.125 -1.15625 -1.125 -3.265625q0 -1.40625 0.515625 -2.421875q0.515625 -1.015625 1.40625 -1.546875q0.890625 -0.53125 1.984375 -0.53125q1.546875 0 2.5 1.03125q0.96875 1.03125 0.96875 3.015625q0 0.203125 -0.03125 0.625l-6.0625 0q0.078125 1.5625 0.875 2.375q0.796875 0.796875 2.03125 0.796875q1.34375 0 2.203125 -0.953125l0.75 0.71875q-1.078125 1.3125 -2.984375 1.3125zm1.859375 -5.296875q0 -1.203125 -0.609375 -1.890625q-0.59375 -0.703125 -1.59375 -0.703125q-0.921875 0 -1.625 0.65625q-0.6875 0.640625 -0.859375 1.9375l4.6875 0zm3.734375 -3.421875l1.3125 0l0 1.515625q0.5 -0.78125 1.265625 -1.25q0.765625 -0.46875 1.625 -0.46875q1.125 0 1.8125 0.875q0.6875 0.859375 0.6875 2.6875l0 5.171875l-1.3125 0l0 -5.125q0 -1.28125 -0.4375 -1.859375q-0.421875 -0.578125 -1.125 -0.578125q-0.578125 0 -1.171875 
0.34375q-0.578125 0.328125 -0.96875 0.9375q-0.375 0.609375 -0.375 1.375l0 4.90625l-1.3125 0l0 -8.53125zm16.265625 7.75q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625z" fill-rule="nonzero"/><path fill="#e06666" d="m245.87401 93.125015l0 0c0 -33.441494 27.109695 -60.55118 60.551193 -60.55118l0 0c16.059174 0 31.460602 6.3794823 42.81613 17.735031c11.35556 11.355549 17.735046 26.756977 17.735046 42.81615l0 0c0 33.44149 -27.10968 60.551178 -60.551178 60.551178l0 0c-33.441498 0 -60.551193 -27.109688 -60.551193 -60.551178z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m245.87401 93.125015l0 0c0 -33.441494 27.109695 -60.55118 60.551193 -60.55118l0 0c16.059174 0 31.460602 6.3794823 42.81613 17.735031c11.35556 11.355549 17.735046 26.756977 17.735046 42.81615l0 0c0 33.44149 -27.10968 60.551178 -60.551178 60.551178l0 0c-33.441498 0 -60.551193 -27.109688 -60.551193 -60.551178z" fill-rule="evenodd"/><path fill="#000000" d="m286.12833 89.04501l-1.171875 -3.390625l-3.75 0l-1.1875 3.390625l-1.28125 0l4.296875 -11.828125l0.140625 0l4.296875 11.828125l-1.34375 0zm-4.578125 -4.40625l3.0625 0l-1.53125 -4.4375l-1.53125 4.4375zm11.609375 3.40625q1.28125 0 2.21875 -1.046875l0.78125 0.90625q-1.25 1.34375 -3.078125 1.34375q-1.234375 0 -2.203125 -0.578125q-0.96875 -0.578125 -1.515625 -1.59375q-0.546875 -1.015625 -0.546875 -2.28125q0 -1.265625 0.546875 -2.265625q0.546875 -1.015625 1.515625 -1.59375q0.96875 -0.578125 2.1875 -0.578125q1.03125 0 1.859375 0.421875q0.84375 0.40625 1.375 1.15625l-0.84375 0.828125l-0.015625 0.015625q-0.546875 -0.71875 -1.125 -1.0q-0.5625 -0.296875 -1.390625 -0.296875q-0.734375 0 -1.359375 0.40625q-0.625 0.40625 -1.0 1.140625q-0.375 0.71875 -0.375 1.671875q0 0.953125 0.375 1.71875q0.390625 0.765625 1.0625 1.203125q0.6875 0.421875 1.53125 0.421875zm2.078125 -5.25q0 -0.109375 0.15625 0.015625l-0.0625 0.078125l-0.09375 -0.09375zm0.203125 -0.015625q0.09375 0.125 0.046875 0.09375q-0.046875 -0.046875 -0.09375 -0.0625l0.046875 -0.03125zm9.9375 5.484375q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm2.90625 0.78125l0 -1.078125l2.1875 0l0 -6.359375l-2.0625 0l0 -1.09375l3.40625 0l0 7.453125l2.0 0l0 1.078125l-5.53125 0zm2.78125 -10.3125q-0.390625 0 -0.671875 -0.28125q-0.28125 -0.28125 -0.28125 -0.671875q0 -0.40625 0.265625 -0.6875q0.28125 -0.28125 0.6875 -0.28125q0.390625 0 0.671875 0.296875q0.296875 0.28125 0.296875 0.671875q0 0.390625 -0.296875 0.671875q-0.28125 0.28125 -0.671875 0.28125zm13.171875 1.78125q-0.296875 1.515625 -1.25 3.734375l-2.0625 4.796875l-1.046875 
0l-3.375 -8.53125l1.34375 0l2.625 6.6875l1.375 -3.15625q0.859375 -1.9375 1.125 -3.53125l1.265625 0zm5.921875 8.71875q-1.90625 0 -3.03125 -1.15625q-1.125 -1.15625 -1.125 -3.265625q0 -1.40625 0.515625 -2.421875q0.515625 -1.015625 1.40625 -1.546875q0.890625 -0.53125 1.984375 -0.53125q1.546875 0 2.5 1.03125q0.96875 1.03125 0.96875 3.015625q0 0.203125 -0.03125 0.625l-6.0625 0q0.078125 1.5625 0.875 2.375q0.796875 0.796875 2.03125 0.796875q1.34375 0 2.203125 -0.953125l0.75 0.71875q-1.078125 1.3125 -2.984375 1.3125zm1.859375 -5.296875q0 -1.203125 -0.609375 -1.890625q-0.59375 -0.703125 -1.59375 -0.703125q-0.921875 0 -1.625 0.65625q-0.6875 0.640625 -0.859375 1.9375l4.6875 0z" fill-rule="nonzero"/><path fill="#000000" d="m293.19864 111.04501l0 -11.625l1.03125 0l2.875 5.6875l2.921875 -5.703125l0.984375 0l0 11.640625l-1.234375 0l0 -8.765625l-2.515625 4.6875l-0.5 0l-2.34375 -4.640625l0 8.71875l-1.21875 0zm9.5625 -11.625l2.71875 0q1.4375 0 2.265625 0.390625q0.84375 0.375 1.453125 1.203125q1.140625 1.53125 1.140625 4.28125q-0.109375 2.828125 -1.3125 4.3125q-1.1875 1.46875 -3.78125 1.453125l-2.484375 0l0 -11.640625zm2.4375 10.625q3.84375 0 3.84375 -4.671875q-0.03125 -2.375 -0.890625 -3.609375q-0.84375 -1.234375 -2.75 -1.234375l-1.40625 0l0 9.515625l1.203125 0zm11.453125 -5.421875q1.65625 0.6875 2.28125 1.421875q0.640625 0.734375 0.640625 1.828125q0 0.859375 -0.421875 1.625q-0.40625 0.765625 -1.296875 1.25q-0.875 0.484375 -2.15625 0.484375q-2.265625 0 -3.640625 -1.46875l0.671875 -1.1875l0 -0.015625q0.015625 0 0.015625 0.015625q0 0 0 0q0.515625 0.671875 1.296875 1.078125q0.796875 0.40625 1.84375 0.40625q1.03125 0 1.71875 -0.578125q0.6875 -0.578125 0.6875 -1.421875q0 -0.546875 -0.21875 -0.90625q-0.21875 -0.359375 -0.78125 -0.71875q-0.546875 -0.359375 -1.671875 -0.84375q-1.71875 -0.671875 -2.453125 -1.515625q-0.734375 -0.859375 -0.734375 -1.9375q0 -1.296875 0.953125 -2.078125q0.953125 -0.78125 2.59375 -0.78125q0.953125 0 1.78125 0.390625q0.828125 0.375 1.4375 1.0625l-0.71875 0.96875l-0.015625 0.015625q-0.5625 -0.765625 -1.171875 -1.0625q-0.609375 -0.296875 -1.515625 -0.296875q-0.90625 0 -1.453125 0.5q-0.546875 0.484375 -0.546875 1.1875q0 0.546875 0.234375 0.953125q0.234375 0.390625 0.84375 0.78125q0.625 0.375 1.796875 0.84375zm1.59375 -2.875q0 -0.046875 0.078125 0q0.078125 0.03125 0.09375 0.03125l-0.046875 0.0625l-0.125 -0.0625l0 -0.03125zm0.21875 -0.03125q0.109375 0.15625 0.03125 0.109375q-0.0625 -0.046875 -0.078125 -0.046875l0.046875 -0.0625zm-5.53125 6.796875q0 0.046875 -0.078125 0.015625q-0.0625 -0.046875 -0.09375 -0.046875l0.046875 -0.0625l0.125 0.0625l0 0.03125zm-0.203125 0.046875q-0.109375 -0.140625 0.03125 -0.078125l-0.03125 0.078125z" fill-rule="nonzero"/><path fill="#6fa8dc" d="m247.85564 203.24673l100.84248 0l0 55.47885c-50.418518 0 -50.418518 21.345428 -100.84248 10.672699zm8.308182 0l0 -7.1924896l100.15378 0l0 55.82686c-3.8070374 0 -7.6194763 0.3867035 -7.6194763 0.3867035l0 -49.021072zm7.809265 -7.1924896l0 -7.018463l101.021454 0l0 55.652832c-4.3384705 0 -8.676941 0.2900238 -8.676941 0.2900238l0 -48.924393z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m247.85564 203.24673l100.84248 0l0 55.47885c-50.418518 0 -50.418518 21.345428 -100.84248 10.672699zm8.308182 0l0 -7.1924896l100.15378 0l0 55.82686c-3.8070374 0 -7.6194763 0.3867035 -7.6194763 0.3867035m-84.72504 -56.213562l0 -7.018463l101.021454 0l0 55.652832c-4.3384705 0 -8.676941 0.2900238 -8.676941 0.2900238" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m247.85564 269.3983c50.423965 10.6727295 50.423965 
-10.672699 100.84248 -10.672699l0 -6.457779c0 0 3.812439 -0.3867035 7.6194763 -0.3867035l0 -6.902466c0 0 4.3384705 -0.2900238 8.676941 -0.2900238l0 -55.652832l-101.021454 0l0 7.018463l-7.809265 0l0 7.1924896l-8.308182 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m247.85564 203.24673l100.84248 0l0 55.47885c-50.418518 0 -50.418518 21.345428 -100.84248 10.672699zm8.308182 0l0 -7.1924896l100.15378 0l0 55.82686c-3.8070374 0 -7.6194763 0.3867035 -7.6194763 0.3867035m-84.72504 -56.213562l0 -7.018463l101.021454 0l0 55.652832c-4.3384705 0 -8.676941 0.2900238 -8.676941 0.2900238" fill-rule="evenodd"/><path fill="#000000" d="m272.36282 232.7269l0 6.609375q0.03125 1.421875 -0.375 2.34375q-0.40625 0.90625 -1.109375 1.34375q-0.703125 0.421875 -1.5625 0.421875q-1.703125 0 -2.765625 -1.28125l0.734375 -0.9375l0.015625 -0.015625l0.03125 0.015625q0.515625 0.5625 0.953125 0.8125q0.4375 0.25 1.0 0.25q0.953125 0 1.375 -0.65625q0.421875 -0.671875 0.421875 -2.265625l0 -6.640625l-2.25 0l0 -1.109375l5.328125 0l0 1.109375l-1.796875 0zm-4.84375 8.421875q0 0.078125 -0.0625 0.078125q-0.0625 -0.015625 -0.109375 -0.0625l0.078125 -0.09375l0.09375 0.078125zm-0.21875 0.0625q-0.09375 -0.09375 -0.03125 -0.078125q0.0625 0.015625 0.078125 0.03125l-0.046875 0.046875zm12.328125 2.1875q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm8.78125 1.171875q-1.34375 0 -2.15625 -1.0q-0.796875 -1.0 -0.78125 -2.96875l0.03125 -4.765625l1.296875 0l0 4.765625q0 1.53125 0.53125 2.21875q0.53125 0.671875 1.40625 0.671875q0.96875 0 1.625 -0.75q0.671875 -0.765625 0.671875 -2.203125l0 -4.703125l1.3125 0l0 7.203125q0 0.453125 0.015625 0.75q0.03125 0.28125 0.171875 0.578125l-1.296875 0q-0.125 -0.28125 -0.15625 -0.578125q-0.03125 -0.296875 -0.03125 -0.734375q-0.40625 0.71875 -1.109375 1.125q-0.6875 0.390625 -1.53125 0.390625zm13.15625 -6.953125l0 0.015625q-0.546875 -0.5 -0.921875 -0.671875q-0.359375 -0.171875 -0.84375 -0.171875q-0.671875 0 -1.265625 0.34375q-0.59375 0.328125 -0.96875 1.015625q-0.375 0.671875 -0.375 1.703125l0 4.53125l-1.359375 0l0 -8.546875l1.40625 0l-0.046875 1.578125q0.359375 -0.84375 1.09375 -1.3125q0.734375 -0.46875 1.609375 -0.46875q1.375 0 2.265625 0.9375l-0.59375 1.046875zm0 0.015625q0.125 0.109375 0.0625 0.09375q-0.0625 -0.015625 -0.109375 -0.03125l0.046875 -0.0625zm-0.203125 0.0625q0 -0.0625 0.046875 -0.046875q0.0625 0 0.109375 0.046875l-0.03125 0.09375l-0.125 -0.078125l0 -0.015625zm2.921875 -1.859375l1.3125 0l0 1.515625q0.5 -0.78125 1.265625 -1.25q0.765625 -0.46875 1.625 -0.46875q1.125 0 1.8125 0.875q0.6875 0.859375 0.6875 2.6875l0 5.171875l-1.3125 0l0 -5.125q0 -1.28125 -0.4375 -1.859375q-0.421875 -0.578125 -1.125 -0.578125q-0.578125 0 -1.171875 0.34375q-0.578125 0.328125 -0.96875 0.9375q-0.375 0.609375 -0.375 1.375l0 4.90625l-1.3125 0l0 -8.53125zm12.53125 -0.1875q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 
4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm6.734375 0.828125l0 -1.078125l2.53125 0l0 -10.25l-2.421875 0l0 -1.078125l3.78125 0l0 11.328125l2.5 0l0 1.078125l-6.390625 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m306.4252 153.6762l8.062988 35.370087" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m306.4252 153.6762l6.729431 29.520172" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m311.54422 183.56346l2.619049 4.05748l0.6017761 -4.7917023z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m673.7428 143.05284l100.28345 0l0 73.44881l-100.28345 0z" fill-rule="evenodd"/><path fill="#000000" d="m683.5084 169.97284l2.46875 -11.625l1.03125 0l1.671875 5.6875l4.125 -5.703125l0.984375 0l-2.46875 11.640625l-1.234375 0l1.859375 -8.765625l-3.515625 4.6875l-0.5 0l-1.34375 -4.640625l-1.859375 8.71875l-1.21875 0zm13.59375 0.1875q-1.90625 0 -2.78125 -1.15625q-0.875 -1.15625 -0.421875 -3.265625q0.296875 -1.40625 1.015625 -2.421875q0.734375 -1.015625 1.734375 -1.546875q1.015625 -0.53125 2.109375 -0.53125q1.546875 0 2.28125 1.03125q0.75 1.03125 0.328125 3.015625q-0.046875 0.203125 -0.171875 0.625l-6.0625 0q-0.25 1.5625 0.375 2.375q0.625 0.796875 1.859375 0.796875q1.34375 0 2.40625 -0.953125l0.59375 0.71875q-1.359375 1.3125 -3.265625 1.3125zm3.0 -5.296875q0.25 -1.203125 -0.203125 -1.890625q-0.453125 -0.703125 -1.453125 -0.703125q-0.921875 0 -1.765625 0.65625q-0.828125 0.640625 -1.265625 1.9375l4.6875 0zm9.75 4.328125q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm7.28125 -7.9375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm9.40625 
1.015625q-0.90625 0 -1.609375 -0.515625q-0.6875 -0.515625 -0.96875 -1.53125q-0.28125 -1.03125 0.03125 -2.484375q0.3125 -1.484375 1.046875 -2.46875q0.734375 -0.984375 1.65625 -1.453125q0.921875 -0.484375 1.828125 -0.484375q0.859375 0 1.40625 0.390625q0.5625 0.390625 0.765625 1.078125l1.09375 -5.125l1.421875 0l-0.03125 0.125q-0.140625 0.125 -0.203125 0.25q-0.046875 0.125 -0.109375 0.453125l-2.171875 10.25q-0.09375 0.453125 -0.125 0.75q-0.03125 0.28125 0.03125 0.578125l-1.328125 0q-0.0625 -0.296875 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.75q-0.5625 0.71875 -1.296875 1.125q-0.71875 0.390625 -1.53125 0.390625zm0.46875 -1.171875q1.15625 0 1.890625 -0.90625q0.734375 -0.921875 1.0625 -2.421875q0.3125 -1.515625 -0.078125 -2.421875q-0.390625 -0.921875 -1.578125 -0.921875q-1.109375 0 -1.890625 0.84375q-0.78125 0.828125 -1.078125 2.265625q-0.34375 1.640625 0.0625 2.609375q0.421875 0.953125 1.609375 0.953125zm10.953125 -7.734375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm13.546875 0.046875q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm7.28125 -7.9375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625z" fill-rule="nonzero"/><path fill="#000000" d="m683.5084 191.97284l2.46875 -11.625l1.03125 0l1.671875 5.6875l4.125 -5.703125l0.984375 0l-2.46875 11.640625l-1.234375 0l1.859375 -8.765625l-3.515625 4.6875l-0.5 0l-1.34375 -4.640625l-1.859375 8.71875l-1.21875 0zm12.640625 0.203125q-1.34375 0 -1.9375 -1.0q-0.59375 -1.0 -0.15625 -2.96875l1.046875 -4.765625l1.296875 0l-1.015625 4.765625q-0.328125 1.53125 0.0625 2.21875q0.390625 0.671875 1.265625 0.671875q0.96875 0 1.796875 -0.75q0.828125 -0.765625 1.125 
-2.203125l1.0 -4.703125l1.3125 0l-1.53125 7.203125q-0.09375 0.453125 -0.140625 0.75q-0.03125 0.28125 0.046875 0.578125l-1.296875 0q-0.0625 -0.28125 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.734375q-0.5625 0.71875 -1.34375 1.125q-0.78125 0.390625 -1.625 0.390625zm13.703125 -0.984375q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm7.28125 -7.9375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm13.546875 0.046875q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm2.734375 0.78125l0.234375 -1.078125l2.1875 0l1.34375 -6.359375l-2.0625 0l0.234375 -1.09375l3.40625 0l-1.578125 7.453125l2.0 0l-0.234375 1.078125l-5.53125 0zm4.96875 -10.3125q-0.390625 0 -0.609375 -0.28125q-0.21875 -0.28125 -0.140625 -0.671875q0.09375 -0.40625 0.421875 -0.6875q0.328125 -0.28125 0.734375 -0.28125q0.390625 0 0.625 0.296875q0.234375 0.28125 0.140625 0.671875q-0.078125 0.390625 -0.4375 0.671875q-0.34375 0.28125 -0.734375 0.28125zm7.140625 10.46875q-1.140625 0 -1.921875 -0.5625q-0.78125 -0.5625 -1.0625 -1.5625q-0.28125 -1.015625 -0.015625 -2.296875q0.28125 -1.296875 0.984375 -2.296875q0.71875 -1.015625 1.734375 -1.578125q1.03125 -0.578125 2.171875 -0.578125q1.125 0 1.890625 0.578125q0.78125 0.5625 1.0625 1.578125q0.296875 1.0 0.015625 2.296875q-0.265625 1.28125 -0.984375 2.296875q-0.71875 1.0 -1.734375 1.5625q-1.015625 0.5625 -2.140625 0.5625zm0.234375 -1.125q0.71875 0 1.375 -0.421875q0.671875 -0.4375 1.15625 -1.1875q0.484375 -0.765625 0.6875 -1.71875q0.3125 -1.453125 -0.203125 -2.375q-0.515625 -0.921875 -1.609375 -0.921875q-1.109375 0 -2.015625 0.921875q-0.90625 0.921875 -1.21875 2.375q-0.203125 0.953125 -0.03125 1.71875q0.171875 0.75 0.640625 1.1875q0.484375 0.421875 1.21875 0.421875zm7.609375 -7.5625l1.3125 0l-0.328125 1.515625q0.671875 -0.78125 1.53125 -1.25q0.875 -0.46875 1.734375 -0.46875q1.125 0 1.625 
0.875q0.5 0.859375 0.109375 2.6875l-1.09375 5.171875l-1.3125 0l1.09375 -5.125q0.265625 -1.28125 -0.046875 -1.859375q-0.296875 -0.578125 -1.0 -0.578125q-0.578125 0 -1.234375 0.34375q-0.65625 0.328125 -1.171875 0.9375q-0.515625 0.609375 -0.671875 1.375l-1.046875 4.90625l-1.3125 0l1.8125 -8.53125z" fill-rule="nonzero"/><path fill="#000000" d="m689.08655 208.95721q1.515625 0.484375 2.046875 1.0625q0.53125 0.5625 0.34375 1.46875q-0.25 1.15625 -1.3125 1.921875q-1.0625 0.75 -2.78125 0.75q-2.171875 0 -3.328125 -1.34375l1.015625 -1.265625l0.015625 -0.015625l0.015625 0.015625q0.421875 0.75 0.953125 1.125q0.546875 0.359375 1.609375 0.359375q1.015625 0 1.671875 -0.359375q0.65625 -0.359375 0.78125 -1.0q0.109375 -0.53125 -0.28125 -0.890625q-0.390625 -0.359375 -1.515625 -0.75q-3.015625 -0.90625 -2.65625 -2.640625q0.21875 -1.0 1.15625 -1.578125q0.9375 -0.578125 2.453125 -0.578125q1.15625 0 1.875 0.328125q0.71875 0.328125 1.234375 1.015625l-0.96875 0.9375l-0.015625 0.015625q-0.265625 -0.59375 -0.921875 -0.9375q-0.640625 -0.34375 -1.390625 -0.34375q-0.796875 0 -1.375 0.28125q-0.578125 0.28125 -0.671875 0.78125q-0.109375 0.46875 0.328125 0.859375q0.453125 0.390625 1.71875 0.78125zm2.09375 -1.390625q0.015625 -0.09375 0.140625 0.03125l-0.078125 0.0625l-0.0625 -0.09375zm0.21875 -0.03125q0.03125 0.078125 0.015625 0.09375q-0.015625 0.015625 -0.046875 0q-0.03125 -0.015625 -0.046875 -0.03125l0.078125 -0.0625zm-6.09375 3.9375q-0.015625 0.078125 -0.171875 0l0.078125 -0.09375l0.09375 0.078125l0 0.015625zm-0.21875 0.0625q-0.046875 -0.09375 -0.03125 -0.09375q0.03125 0 0.078125 0.03125l-0.046875 0.0625z" fill-rule="nonzero"/><path fill="#e06666" d="m424.69684 98.478035l0 0c0 -33.441498 27.10971 -60.55118 60.551178 -60.55118l0 0c16.059174 0 31.460602 6.3794785 42.816193 17.735027c11.35553 11.355553 17.734985 26.75698 17.734985 42.816154l0 0c0 33.44149 -27.10968 60.551186 -60.551178 60.551186l0 0c-33.441467 0 -60.551178 -27.109695 -60.551178 -60.551186z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m424.69684 98.478035l0 0c0 -33.441498 27.10971 -60.55118 60.551178 -60.55118l0 0c16.059174 0 31.460602 6.3794785 42.816193 17.735027c11.35553 11.355553 17.734985 26.75698 17.734985 42.816154l0 0c0 33.44149 -27.10968 60.551186 -60.551178 60.551186l0 0c-33.441467 0 -60.551178 -27.109695 -60.551178 -60.551186z" fill-rule="evenodd"/><path fill="#000000" d="m458.16208 87.97616q1.65625 0.6875 2.28125 1.421875q0.640625 0.734375 0.640625 1.828125q0 0.859375 -0.421875 1.625q-0.40625 0.765625 -1.296875 1.25q-0.875 0.484375 -2.15625 0.484375q-2.265625 0 -3.640625 -1.46875l0.671875 -1.1875l0 -0.015625q0.015625 0 0.015625 0.015625q0 0 0 0q0.515625 0.671875 1.296875 1.078125q0.796875 0.40625 1.84375 0.40625q1.03125 0 1.71875 -0.578125q0.6875 -0.578125 0.6875 -1.421875q0 -0.546875 -0.21875 -0.90625q-0.21875 -0.359375 -0.78125 -0.71875q-0.546875 -0.359375 -1.671875 -0.84375q-1.71875 -0.671875 -2.453125 -1.515625q-0.734375 -0.859375 -0.734375 -1.9375q0 -1.296875 0.953125 -2.078125q0.953125 -0.78125 2.59375 -0.78125q0.953125 0 1.78125 0.390625q0.828125 0.375 1.4375 1.0625l-0.71875 0.96875l-0.015625 0.015625q-0.5625 -0.765625 -1.171875 -1.0625q-0.609375 -0.296875 -1.515625 -0.296875q-0.90625 0 -1.453125 0.5q-0.546875 0.484375 -0.546875 1.1875q0 0.546875 0.234375 0.953125q0.234375 0.390625 0.84375 0.78125q0.625 0.375 1.796875 0.84375zm1.59375 -2.875q0 -0.046875 0.078125 0q0.078125 0.03125 0.09375 0.03125l-0.046875 0.0625l-0.125 -0.0625l0 -0.03125zm0.21875 -0.03125q0.109375 0.15625 
0.03125 0.109375q-0.0625 -0.046875 -0.078125 -0.046875l0.046875 -0.0625zm-5.53125 6.796875q0 0.046875 -0.078125 0.015625q-0.0625 -0.046875 -0.09375 -0.046875l0.046875 -0.0625l0.125 0.0625l0 0.03125zm-0.203125 0.046875q-0.109375 -0.140625 0.03125 -0.078125l-0.03125 0.078125zm15.96875 1.703125q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm5.59375 -7.9375q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm6.609375 -7.703125l1.3125 0l0 1.515625q0.5 -0.78125 1.265625 -1.25q0.765625 -0.46875 1.625 -0.46875q1.125 0 1.8125 0.875q0.6875 0.859375 0.6875 2.6875l0 5.171875l-1.3125 0l0 -5.125q0 -1.28125 -0.4375 -1.859375q-0.421875 -0.578125 -1.125 -0.578125q-0.578125 0 -1.171875 0.34375q-0.578125 0.328125 -0.96875 0.9375q-0.375 0.609375 -0.375 1.375l0 4.90625l-1.3125 0l0 -8.53125zm12.34375 8.71875q-0.90625 0 -1.71875 -0.515625q-0.796875 -0.515625 -1.296875 -1.53125q-0.5 -1.03125 -0.5 -2.484375q0 -1.484375 0.515625 -2.46875q0.53125 -0.984375 1.34375 -1.453125q0.828125 -0.484375 1.734375 -0.484375q0.859375 0 1.5 0.390625q0.640625 0.390625 0.984375 1.078125l0 -5.125l1.421875 0l0 0.125q-0.109375 0.125 -0.140625 0.25q-0.03125 0.125 -0.03125 0.453125l0.015625 10.25q0 0.453125 0.03125 0.75q0.03125 0.28125 0.15625 0.578125l-1.328125 0q-0.125 -0.296875 -0.15625 -0.578125q-0.03125 -0.296875 -0.03125 -0.75q-0.40625 0.71875 -1.046875 1.125q-0.640625 0.390625 -1.453125 0.390625zm0.21875 -1.171875q1.15625 0 1.6875 -0.90625q0.546875 -0.921875 0.546875 -2.421875q0 -1.515625 -0.59375 -2.421875q-0.578125 -0.921875 -1.765625 -0.921875q-1.109375 0 -1.71875 0.84375q-0.59375 0.828125 -0.59375 2.265625q0 1.640625 0.625 2.609375q0.625 0.953125 1.8125 0.953125zm9.71875 1.1875q-0.765625 0 -1.421875 -0.34375q-0.65625 -0.34375 -1.109375 -0.984375l-0.453125 1.125l-0.859375 0l0 -12.40625l1.53125 0l0 0.125q-0.125 0.125 -0.15625 0.25q-0.015625 0.125 -0.015625 0.453125l0 4.359375q0.40625 -0.6875 1.109375 -1.09375q0.703125 -0.421875 1.421875 -0.421875q1.609375 0 2.5625 1.125q0.953125 1.109375 0.953125 3.265625q0 1.453125 -0.515625 2.484375q-0.5 1.03125 -1.328125 1.546875q-0.8125 0.515625 -1.71875 0.515625zm-0.171875 -1.1875q1.0 0 1.671875 -0.796875q0.671875 -0.796875 0.671875 -2.484375q0 -1.640625 -0.625 -2.46875q-0.625 -0.84375 -1.6875 -0.84375q-1.046875 0 -1.703125 0.9375q-0.640625 0.9375 -0.640625 2.40625q0 3.25 2.3125 3.25zm12.7969055 -7.546875q-0.109375 0.765625 -0.34375 
1.515625q-0.234375 0.75 -0.671875 1.984375l-2.125 6.0q-0.421875 1.203125 -1.109375 1.734375q-0.6875305 0.546875 -1.6406555 0.546875q-1.203125 0 -1.96875 -0.734375l0.578125 -0.90625l0.015625 -0.03125l0.03125 0.03125q0.296875 0.28125 0.625 0.40625q0.34375 0.125 0.78125 0.125q0.703125 0 1.1094055 -0.484375q0.40625 -0.484375 0.796875 -1.59375l-3.4844055 -8.59375l1.390625 0l2.6562805 6.828125l1.125 -3.421875q0.40625 -1.296875 0.59375 -1.96875q0.203125 -0.6875 0.296875 -1.4375l1.34375 0zm-7.0469055 10.0625q0 0.03125 -0.046875 0.03125l-0.140625 -0.03125l0.0625 -0.09375l0.125 0.078125l0 0.015625zm-0.21875 0.046875q-0.078125 -0.078125 -0.046875 -0.0625q0.046875 0.015625 0.078125 0.015625l-0.03125 0.046875z" fill-rule="nonzero"/><path fill="#000000" d="m472.02145 116.39803l0 -11.625l1.03125 0l2.875 5.6875l2.921875 -5.703125l0.984375 0l0 11.640625l-1.234375 0l0 -8.765625l-2.515625 4.6875l-0.5 0l-2.34375 -4.640625l0 8.71875l-1.21875 0zm9.5625 -11.625l2.71875 0q1.4375 0 2.265625 0.390625q0.84375 0.375 1.453125 1.203125q1.140625 1.53125 1.140625 4.28125q-0.109375 2.828125 -1.3125 4.3125q-1.1875 1.46875 -3.78125 1.453125l-2.484375 0l0 -11.640625zm2.4375 10.625q3.84375 0 3.84375 -4.671875q-0.03125 -2.375 -0.890625 -3.609375q-0.84375 -1.234375 -2.75 -1.234375l-1.40625 0l0 9.515625l1.203125 0zm11.453125 -5.421875q1.65625 0.6875 2.28125 1.421875q0.640625 0.734375 0.640625 1.828125q0 0.859375 -0.421875 1.625q-0.40625 0.765625 -1.296875 1.25q-0.875 0.484375 -2.15625 0.484375q-2.265625 0 -3.640625 -1.46875l0.671875 -1.1875l0 -0.015625q0.015625 0 0.015625 0.015625q0 0 0 0q0.515625 0.671875 1.296875 1.078125q0.796875 0.40625 1.84375 0.40625q1.03125 0 1.71875 -0.578125q0.6875 -0.578125 0.6875 -1.421875q0 -0.546875 -0.21875 -0.90625q-0.21875 -0.359375 -0.78125 -0.71875q-0.546875 -0.359375 -1.671875 -0.84375q-1.71875 -0.671875 -2.453125 -1.515625q-0.734375 -0.859375 -0.734375 -1.9375q0 -1.296875 0.953125 -2.078125q0.953125 -0.78125 2.59375 -0.78125q0.953125 0 1.78125 0.390625q0.828125 0.375 1.4375 1.0625l-0.71875 0.96875l-0.015625 0.015625q-0.5625 -0.765625 -1.171875 -1.0625q-0.609375 -0.296875 -1.515625 -0.296875q-0.90625 0 -1.453125 0.5q-0.546875 0.484375 -0.546875 1.1875q0 0.546875 0.234375 0.953125q0.234375 0.390625 0.84375 0.78125q0.625 0.375 1.796875 0.84375zm1.59375 -2.875q0 -0.046875 0.078125 0q0.078125 0.03125 0.09375 0.03125l-0.046875 0.0625l-0.125 -0.0625l0 -0.03125zm0.21875 -0.03125q0.109375 0.15625 0.03125 0.109375q-0.0625 -0.046875 -0.078125 -0.046875l0.046875 -0.0625zm-5.53125 6.796875q0 0.046875 -0.078125 0.015625q-0.0625 -0.046875 -0.09375 -0.046875l0.046875 -0.0625l0.125 0.0625l0 0.03125zm-0.203125 0.046875q-0.109375 -0.140625 0.03125 -0.078125l-0.03125 0.078125z" fill-rule="nonzero"/><path fill="#e06666" d="m603.52234 93.125015l0 0c0 -33.441494 27.10968 -60.55118 60.551147 -60.55118l0 0c16.059204 0 31.460632 6.3794823 42.816162 17.735031c11.35553 11.355549 17.735046 26.756977 17.735046 42.81615l0 0c0 33.44149 -27.10968 60.551178 -60.55121 60.551178l0 0c-33.441467 0 -60.551147 -27.109688 -60.551147 -60.551178z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m603.52234 93.125015l0 0c0 -33.441494 27.10968 -60.55118 60.551147 -60.55118l0 0c16.059204 0 31.460632 6.3794823 42.816162 17.735031c11.35553 11.355549 17.735046 26.756977 17.735046 42.81615l0 0c0 33.44149 -27.10968 60.551178 -60.55121 60.551178l0 0c-33.441467 0 -60.551147 -27.109688 -60.551147 -60.551178z" fill-rule="evenodd"/><path fill="#000000" d="m643.7766 89.04501l-1.171875 
-3.390625l-3.75 0l-1.1875 3.390625l-1.28125 0l4.296875 -11.828125l0.140625 0l4.296875 11.828125l-1.34375 0zm-4.578125 -4.40625l3.0625 0l-1.53125 -4.4375l-1.53125 4.4375zm11.609375 3.40625q1.28125 0 2.21875 -1.046875l0.78125 0.90625q-1.25 1.34375 -3.078125 1.34375q-1.234375 0 -2.203125 -0.578125q-0.96875 -0.578125 -1.515625 -1.59375q-0.546875 -1.015625 -0.546875 -2.28125q0 -1.265625 0.546875 -2.265625q0.546875 -1.015625 1.515625 -1.59375q0.96875 -0.578125 2.1875 -0.578125q1.03125 0 1.859375 0.421875q0.84375 0.40625 1.375 1.15625l-0.84375 0.828125l-0.015625 0.015625q-0.546875 -0.71875 -1.125 -1.0q-0.5625 -0.296875 -1.390625 -0.296875q-0.734375 0 -1.359375 0.40625q-0.625 0.40625 -1.0 1.140625q-0.375 0.71875 -0.375 1.671875q0 0.953125 0.375 1.71875q0.390625 0.765625 1.0625 1.203125q0.6875 0.421875 1.53125 0.421875zm2.078125 -5.25q0 -0.109375 0.15625 0.015625l-0.0625 0.078125l-0.09375 -0.09375zm0.203125 -0.015625q0.09375 0.125 0.046875 0.09375q-0.046875 -0.046875 -0.09375 -0.0625l0.046875 -0.03125zm9.9375 5.484375q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm2.90625 0.78125l0 -1.078125l2.1875 0l0 -6.359375l-2.0625 0l0 -1.09375l3.40625 0l0 7.453125l2.0 0l0 1.078125l-5.53125 0zm2.78125 -10.3125q-0.390625 0 -0.671875 -0.28125q-0.28125 -0.28125 -0.28125 -0.671875q0 -0.40625 0.265625 -0.6875q0.28125 -0.28125 0.6875 -0.28125q0.390625 0 0.671875 0.296875q0.296875 0.28125 0.296875 0.671875q0 0.390625 -0.296875 0.671875q-0.28125 0.28125 -0.671875 0.28125zm13.171875 1.78125q-0.296875 1.515625 -1.25 3.734375l-2.0625 4.796875l-1.046875 0l-3.375 -8.53125l1.34375 0l2.625 6.6875l1.375 -3.15625q0.859375 -1.9375 1.125 -3.53125l1.265625 0zm5.921875 8.71875q-1.90625 0 -3.03125 -1.15625q-1.125 -1.15625 -1.125 -3.265625q0 -1.40625 0.515625 -2.421875q0.515625 -1.015625 1.40625 -1.546875q0.890625 -0.53125 1.984375 -0.53125q1.546875 0 2.5 1.03125q0.96875 1.03125 0.96875 3.015625q0 0.203125 -0.03125 0.625l-6.0625 0q0.078125 1.5625 0.875 2.375q0.796875 0.796875 2.03125 0.796875q1.34375 0 2.203125 -0.953125l0.75 0.71875q-1.078125 1.3125 -2.984375 1.3125zm1.859375 -5.296875q0 -1.203125 -0.609375 -1.890625q-0.59375 -0.703125 -1.59375 -0.703125q-0.921875 0 -1.625 0.65625q-0.6875 0.640625 -0.859375 1.9375l4.6875 0z" fill-rule="nonzero"/><path fill="#000000" d="m650.8469 111.04501l0 -11.625l1.03125 0l2.875 5.6875l2.921875 -5.703125l0.984375 0l0 11.640625l-1.234375 0l0 -8.765625l-2.515625 4.6875l-0.5 0l-2.34375 -4.640625l0 8.71875l-1.21875 0zm9.5625 -11.625l2.71875 0q1.4375 0 2.265625 0.390625q0.84375 0.375 1.453125 1.203125q1.140625 1.53125 1.140625 4.28125q-0.109375 2.828125 -1.3125 4.3125q-1.1875 1.46875 -3.78125 1.453125l-2.484375 0l0 -11.640625zm2.4375 10.625q3.84375 0 3.84375 -4.671875q-0.03125 -2.375 -0.890625 -3.609375q-0.84375 -1.234375 -2.75 -1.234375l-1.40625 0l0 9.515625l1.203125 0zm11.453125 -5.421875q1.65625 0.6875 2.28125 1.421875q0.640625 0.734375 0.640625 1.828125q0 0.859375 -0.421875 1.625q-0.40625 0.765625 -1.296875 1.25q-0.875 0.484375 -2.15625 0.484375q-2.265625 0 -3.640625 -1.46875l0.671875 -1.1875l0 -0.015625q0.015625 
0 0.015625 0.015625q0 0 0 0q0.515625 0.671875 1.296875 1.078125q0.796875 0.40625 1.84375 0.40625q1.03125 0 1.71875 -0.578125q0.6875 -0.578125 0.6875 -1.421875q0 -0.546875 -0.21875 -0.90625q-0.21875 -0.359375 -0.78125 -0.71875q-0.546875 -0.359375 -1.671875 -0.84375q-1.71875 -0.671875 -2.453125 -1.515625q-0.734375 -0.859375 -0.734375 -1.9375q0 -1.296875 0.953125 -2.078125q0.953125 -0.78125 2.59375 -0.78125q0.953125 0 1.78125 0.390625q0.828125 0.375 1.4375 1.0625l-0.71875 0.96875l-0.015625 0.015625q-0.5625 -0.765625 -1.171875 -1.0625q-0.609375 -0.296875 -1.515625 -0.296875q-0.90625 0 -1.453125 0.5q-0.546875 0.484375 -0.546875 1.1875q0 0.546875 0.234375 0.953125q0.234375 0.390625 0.84375 0.78125q0.625 0.375 1.796875 0.84375zm1.59375 -2.875q0 -0.046875 0.078125 0q0.078125 0.03125 0.09375 0.03125l-0.046875 0.0625l-0.125 -0.0625l0 -0.03125zm0.21875 -0.03125q0.109375 0.15625 0.03125 0.109375q-0.0625 -0.046875 -0.078125 -0.046875l0.046875 -0.0625zm-5.53125 6.796875q0 0.046875 -0.078125 0.015625q-0.0625 -0.046875 -0.09375 -0.046875l0.046875 -0.0625l0.125 0.0625l0 0.03125zm-0.203125 0.046875q-0.109375 -0.140625 0.03125 -0.078125l-0.03125 0.078125z" fill-rule="nonzero"/><path fill="#6fa8dc" d="m597.18896 215.2218l100.84253 0l0 55.47882c-50.41858 0 -50.41858 21.345459 -100.84253 10.6727295zm8.3081665 0l0 -7.1924896l100.15381 0l0 55.82686c-3.8070068 0 -7.619446 0.38668823 -7.619446 0.38668823l0 -49.021057zm7.809265 -7.1924896l0 -7.0184784l101.021484 0l0 55.652863c-4.338501 0 -8.676941 0.29000854 -8.676941 0.29000854l0 -48.924393z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m597.18896 215.2218l100.84253 0l0 55.47882c-50.41858 0 -50.41858 21.345459 -100.84253 10.6727295zm8.3081665 0l0 -7.1924896l100.15381 0l0 55.82686c-3.8070068 0 -7.619446 0.38668823 -7.619446 0.38668823m-84.7251 -56.213547l0 -7.0184784l101.021484 0l0 55.652863c-4.338501 0 -8.676941 0.29000854 -8.676941 0.29000854" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m597.18896 281.37335c50.42395 10.6727295 50.42395 -10.6727295 100.84253 -10.6727295l0 -6.4577637c0 0 3.812439 -0.38668823 7.619446 -0.38668823l0 -6.902466c0 0 4.33844 -0.29000854 8.676941 -0.29000854l0 -55.652863l-101.021484 0l0 7.0184784l-7.809265 0l0 7.1924896l-8.3081665 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m597.18896 215.2218l100.84253 0l0 55.47882c-50.41858 0 -50.41858 21.345459 -100.84253 10.6727295zm8.3081665 0l0 -7.1924896l100.15381 0l0 55.82686c-3.8070068 0 -7.619446 0.38668823 -7.619446 0.38668823m-84.7251 -56.213547l0 -7.0184784l101.021484 0l0 55.652863c-4.338501 0 -8.676941 0.29000854 -8.676941 0.29000854" fill-rule="evenodd"/><path fill="#000000" d="m621.69617 244.70195l0 6.609375q0.03125 1.421875 -0.375 2.34375q-0.40625 0.90625 -1.109375 1.34375q-0.703125 0.421875 -1.5625 0.421875q-1.703125 0 -2.765625 -1.28125l0.734375 -0.9375l0.015625 -0.015625l0.03125 0.015625q0.515625 0.5625 0.953125 0.8125q0.4375 0.25 1.0 0.25q0.953125 0 1.375 -0.65625q0.421875 -0.671875 0.421875 -2.265625l0 -6.640625l-2.25 0l0 -1.109375l5.328125 0l0 1.109375l-1.796875 0zm-4.84375 8.421875q0 0.078125 -0.0625 0.078125q-0.0625 -0.015625 -0.109375 -0.0625l0.078125 -0.09375l0.09375 0.078125zm-0.21875 0.0625q-0.09375 -0.09375 -0.03125 -0.078125q0.0625 0.015625 0.078125 0.03125l-0.046875 0.046875zm12.328125 2.1875q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 
2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm8.78125 1.171875q-1.34375 0 -2.15625 -1.0q-0.796875 -1.0 -0.78125 -2.96875l0.03125 -4.765625l1.296875 0l0 4.765625q0 1.53125 0.53125 2.21875q0.53125 0.671875 1.40625 0.671875q0.96875 0 1.625 -0.75q0.671875 -0.765625 0.671875 -2.203125l0 -4.703125l1.3125 0l0 7.203125q0 0.453125 0.015625 0.75q0.03125 0.28125 0.171875 0.578125l-1.296875 0q-0.125 -0.28125 -0.15625 -0.578125q-0.03125 -0.296875 -0.03125 -0.734375q-0.40625 0.71875 -1.109375 1.125q-0.6875 0.390625 -1.53125 0.390625zm13.15625 -6.953125l0 0.015625q-0.546875 -0.5 -0.921875 -0.671875q-0.359375 -0.171875 -0.84375 -0.171875q-0.671875 0 -1.265625 0.34375q-0.59375 0.328125 -0.96875 1.015625q-0.375 0.671875 -0.375 1.703125l0 4.53125l-1.359375 0l0 -8.546875l1.40625 0l-0.046875 1.578125q0.359375 -0.84375 1.09375 -1.3125q0.734375 -0.46875 1.609375 -0.46875q1.375 0 2.265625 0.9375l-0.59375 1.046875zm0 0.015625q0.125 0.109375 0.0625 0.09375q-0.0625 -0.015625 -0.109375 -0.03125l0.046875 -0.0625zm-0.203125 0.0625q0 -0.0625 0.046875 -0.046875q0.0625 0 0.109375 0.046875l-0.03125 0.09375l-0.125 -0.078125l0 -0.015625zm2.921875 -1.859375l1.3125 0l0 1.515625q0.5 -0.78125 1.265625 -1.25q0.765625 -0.46875 1.625 -0.46875q1.125 0 1.8125 0.875q0.6875 0.859375 0.6875 2.6875l0 5.171875l-1.3125 0l0 -5.125q0 -1.28125 -0.4375 -1.859375q-0.421875 -0.578125 -1.125 -0.578125q-0.578125 0 -1.171875 0.34375q-0.578125 0.328125 -0.96875 0.9375q-0.375 0.609375 -0.375 1.375l0 4.90625l-1.3125 0l0 -8.53125zm12.53125 -0.1875q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm6.734375 0.828125l0 -1.078125l2.53125 0l0 -10.25l-2.421875 0l0 -1.078125l3.78125 0l0 11.328125l2.5 0l0 1.078125l-6.390625 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m664.0735 153.6762l-0.25195312 47.338577" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m664.0735 153.6762l-0.22003174 41.33867" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m662.2017 195.00607l1.6275635 4.546829l1.6759033 -4.529236z" fill-rule="evenodd"/><path fill="#e6b8af" d="m3.406824 307.91898l777.6063 0l0 169.13385l-777.6063 0z" fill-rule="evenodd"/><path fill="#000000" d="m344.085 381.96402l7.890625 0q4.46875 0 6.59375 1.96875q2.125 1.953125 2.125 5.46875q0 1.375 -0.609375 2.75q-0.59375 1.359375 -1.671875 
2.40625q-1.078125 1.03125 -2.484375 1.484375l5.828125 10.84375l-5.03125 0l-5.15625 -10.3125l-2.90625 0l0 10.3125l-4.578125 0l0 -24.921875zm8.171875 10.796875q1.921875 0 2.875 -0.890625q0.96875 -0.90625 0.96875 -2.46875q0 -1.75 -0.9375 -2.609375q-0.921875 -0.859375 -2.90625 -0.859375l-3.59375 0l0 6.828125l3.59375 0zm24.921875 14.125l-1.78125 -6.453125l-6.375 0l-1.8125 6.453125l-4.609375 0l8.328125 -24.921875l2.609375 0l8.328125 24.921875l-4.6875 0zm-7.265625 -9.546875l4.625 0l-2.28125 -8.28125l-2.34375 8.28125zm14.0625 -15.375l6.4375 0q3.1875 0 5.0625 0.859375q1.890625 0.84375 3.21875 2.609375q2.5625 3.484375 2.5625 9.171875q0 5.890625 -2.75 9.09375q-2.734375 3.1875 -8.578125 3.1875l-5.953125 0l0 -24.921875zm5.875 21.40625q3.5 0 5.203125 -2.203125q1.703125 -2.21875 1.703125 -6.453125q0 -9.015625 -6.5 -9.015625l-1.9375 0l0 17.671875l1.53125 0zm22.484375 4.234375q-2.953125 0 -5.078125 -1.5625q-2.109375 -1.578125 -3.234375 -4.546875q-1.125 -2.984375 -1.125 -7.125q0 -4.09375 1.125 -7.015625q1.125 -2.9375 3.234375 -4.484375q2.125 -1.546875 5.078125 -1.546875q2.9375 0 5.0625 1.546875q2.125 1.546875 3.234375 4.484375q1.125 2.921875 1.125 7.015625q0 4.140625 -1.125 7.125q-1.109375 2.96875 -3.234375 4.546875q-2.125 1.5625 -5.0625 1.5625zm0 -3.59375q1.484375 0 2.5625 -1.21875q1.078125 -1.21875 1.65625 -3.453125q0.578125 -2.234375 0.578125 -5.203125q0 -2.765625 -0.578125 -4.84375q-0.578125 -2.078125 -1.65625 -3.21875q-1.078125 -1.140625 -2.5625 -1.140625q-1.46875 0 -2.5625 1.140625q-1.09375 1.140625 -1.671875 3.21875q-0.578125 2.078125 -0.578125 4.84375q0 2.96875 0.578125 5.203125q0.578125 2.234375 1.65625 3.453125q1.09375 1.21875 2.578125 1.21875zm21.75 -11.5625q2.546875 1.21875 3.953125 2.34375q1.40625 1.109375 1.984375 2.34375q0.59375 1.234375 0.59375 2.875q0 1.984375 -0.921875 3.71875q-0.921875 1.734375 -2.90625 2.8125q-1.984375 1.0625 -5.03125 1.0625q-4.859375 0 -8.4375 -3.46875l1.953125 -3.40625l0.03125 -0.03125l0.03125 0.03125q2.90625 3.03125 6.796875 3.03125q1.875 0 2.890625 -0.875q1.03125 -0.890625 1.03125 -2.640625q0 -0.875 -0.4375 -1.59375q-0.4375 -0.71875 -1.5 -1.4375q-1.0625 -0.71875 -2.9375 -1.5625q-3.484375 -1.5 -5.046875 -3.375q-1.5625 -1.875 -1.5625 -4.359375q0 -1.796875 1.0 -3.3125q1.015625 -1.515625 2.78125 -2.390625q1.765625 -0.890625 3.984375 -0.890625q2.46875 0 4.265625 0.828125q1.796875 0.8125 3.4375 2.53125l-2.234375 3.125l-0.03125 0.03125q-0.015625 0 -0.015625 0q0 -0.015625 0 -0.015625q-0.921875 -1.46875 -2.34375 -2.140625q-1.421875 -0.671875 -3.125 -0.671875q-1.09375 0 -1.890625 0.390625q-0.78125 0.375 -1.1875 1.0q-0.390625 0.625 -0.390625 1.359375q0 0.875 0.453125 1.5625q0.453125 0.671875 1.5625 1.390625q1.125 0.703125 3.25 1.734375zm3.140625 -4.671875q0 -0.03125 0.046875 -0.03125q0.03125 0 0.15625 0.09375q0.140625 0.09375 0.234375 0.15625l-0.0625 0.09375l-0.375 -0.25l0 -0.0625zm0.546875 0.0625q0.140625 0.21875 0.140625 0.28125q0 0.015625 -0.015625 0.015625q-0.0625 0 -0.234375 -0.140625l0.109375 -0.15625zm-11.90625 12.875q0 0.03125 -0.046875 0.03125q-0.046875 0 -0.203125 -0.078125q-0.140625 -0.078125 -0.234375 -0.109375l0.078125 -0.140625l0.390625 0.234375l0.015625 0.0625zm-0.5625 -0.015625q-0.15625 -0.15625 -0.15625 -0.21875q0 0 0 0q0 -0.015625 0.015625 -0.015625l0.21875 0.09375l-0.078125 0.140625z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m168.86089 338.05414l0 0c0 7.6123962 24.68428 13.783478 55.133865 13.783478c30.449585 0 55.13385 -6.1710815 55.13385 -13.783478l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.171051 
-55.133865 -13.783478z" fill-rule="evenodd"/><path fill="#e2edf7" d="m168.86089 338.05414l0 0c0 -7.6123962 24.68428 -13.783447 55.133865 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.1710815 -55.133865 -13.783478z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m279.1286 338.05414l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.1710815 -55.133865 -13.783478l0 0c0 -7.6123962 24.68428 -13.783447 55.133865 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.171051 -55.133865 -13.783478l0 -119.078735" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m279.1286 338.05414l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.1710815 -55.133865 -13.783478l0 0c0 -7.6123962 24.68428 -13.783447 55.133865 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.133865 -6.171051 -55.133865 -13.783478l0 -119.078735" fill-rule="evenodd"/><path fill="#000000" d="m183.01819 399.78024l2.71875 0q1.4375 0 2.265625 0.390625q0.84375 0.375 1.453125 1.203125q1.140625 1.53125 1.140625 4.28125q-0.109375 2.828125 -1.3125 4.3125q-1.1875 1.46875 -3.78125 1.453125l-2.484375 0l0 -11.640625zm2.4375 10.625q3.84375 0 3.84375 -4.671875q-0.03125 -2.375 -0.890625 -3.609375q-0.84375 -1.234375 -2.75 -1.234375l-1.40625 0l0 9.515625l1.203125 0zm10.4375 -7.71875q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm13.546875 0.046875q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm5.59375 -7.9375q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 
-0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm15.6875 -10.796875l3.734375 0q1.84375 0 2.75 0.921875q0.921875 0.90625 0.921875 2.359375q0 1.4375 -0.890625 2.328125q-0.890625 0.890625 -2.6875 0.890625l-2.484375 0l0 5.125l-1.34375 0l0 -11.625zm3.6875 5.34375q1.21875 0 1.796875 -0.53125q0.578125 -0.53125 0.578125 -1.484375q0 -0.921875 -0.59375 -1.5q-0.59375 -0.59375 -1.765625 -0.59375l-2.359375 0l0 4.109375l2.34375 0zm9.21875 6.4375q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm9.328125 1.125q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm6.125 0.96875l0 -1.078125l2.53125 0l0 -10.25l-2.421875 0l0 -1.078125l3.78125 0l0 11.328125l2.5 0l0 1.078125l-6.390625 0z" fill-rule="nonzero"/><path fill="#cfe2f3" d="m500.87402 338.05414l0 0c0 7.6123962 24.684265 13.783478 55.13385 13.783478c30.449585 0 55.13385 -6.1710815 55.13385 -13.783478l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.171051 -55.13385 -13.783478z" fill-rule="evenodd"/><path fill="#e2edf7" d="m500.87402 338.05414l0 0c0 -7.6123962 24.684265 -13.783447 55.13385 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.1710815 -55.13385 -13.783478z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m611.1417 338.05414l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.1710815 -55.13385 -13.783478l0 0c0 -7.6123962 24.684265 -13.783447 55.13385 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.171051 -55.13385 -13.783478l0 -119.078735" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m611.1417 338.05414l0 0c0 7.6123962 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.1710815 -55.13385 -13.783478l0 0c0 -7.6123962 24.684265 -13.783447 55.13385 -13.783447c30.449585 0 55.13385 6.171051 55.13385 13.783447l0 119.078735c0 7.6124268 -24.684265 13.783478 -55.13385 13.783478c-30.449585 0 -55.13385 -6.171051 -55.13385 -13.783478l0 
-119.078735" fill-rule="evenodd"/><path fill="#000000" d="m519.461 400.40524l0 -11.625l1.03125 0l2.875 5.6875l2.921875 -5.703125l0.984375 0l0 11.640625l-1.234375 0l0 -8.765625l-2.515625 4.6875l-0.5 0l-2.34375 -4.640625l0 8.71875l-1.21875 0zm13.640625 0.1875q-1.90625 0 -3.03125 -1.15625q-1.125 -1.15625 -1.125 -3.265625q0 -1.40625 0.515625 -2.421875q0.515625 -1.015625 1.40625 -1.546875q0.890625 -0.53125 1.984375 -0.53125q1.546875 0 2.5 1.03125q0.96875 1.03125 0.96875 3.015625q0 0.203125 -0.03125 0.625l-6.0625 0q0.078125 1.5625 0.875 2.375q0.796875 0.796875 2.03125 0.796875q1.34375 0 2.203125 -0.953125l0.75 0.71875q-1.078125 1.3125 -2.984375 1.3125zm1.859375 -5.296875q0 -1.203125 -0.609375 -1.890625q-0.59375 -0.703125 -1.59375 -0.703125q-0.921875 0 -1.625 0.65625q-0.6875 0.640625 -0.859375 1.9375l4.6875 0zm10.671875 4.328125q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm5.59375 -7.9375q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm9.625 1.015625q-0.90625 0 -1.71875 -0.515625q-0.796875 -0.515625 -1.296875 -1.53125q-0.5 -1.03125 -0.5 -2.484375q0 -1.484375 0.515625 -2.46875q0.53125 -0.984375 1.34375 -1.453125q0.828125 -0.484375 1.734375 -0.484375q0.859375 0 1.5 0.390625q0.640625 0.390625 0.984375 1.078125l0 -5.125l1.421875 0l0 0.125q-0.109375 0.125 -0.140625 0.25q-0.03125 0.125 -0.03125 0.453125l0.015625 10.25q0 0.453125 0.03125 0.75q0.03125 0.28125 0.15625 0.578125l-1.328125 0q-0.125 -0.296875 -0.15625 -0.578125q-0.03125 -0.296875 -0.03125 -0.75q-0.40625 0.71875 -1.046875 1.125q-0.640625 0.390625 -1.453125 0.390625zm0.21875 -1.171875q1.15625 0 1.6875 -0.90625q0.546875 -0.921875 0.546875 -2.421875q0 -1.515625 -0.59375 -2.421875q-0.578125 -0.921875 -1.765625 -0.921875q-1.109375 0 -1.71875 0.84375q-0.59375 0.828125 -0.59375 2.265625q0 1.640625 0.625 2.609375q0.625 0.953125 1.8125 0.953125zm9.296875 -7.734375q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 
0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625zm13.546875 0.046875q-1.234375 0.90625 -2.703125 0.90625q-1.421875 0 -2.015625 -0.84375q-0.578125 -0.859375 -0.578125 -2.8125q0 -0.3125 0.03125 -1.09375l0.171875 -2.796875l-1.875 0l0 -1.109375l1.953125 0l0.125 -2.265625l1.5 -0.25l0.1875 -0.015625l0.015625 0.109375q-0.125 0.1875 -0.203125 0.328125q-0.0625 0.125 -0.078125 0.40625l-0.203125 1.6875l2.828125 0l0 1.109375l-2.90625 0l-0.171875 2.890625q-0.03125 0.75 -0.03125 0.984375q0 1.5 0.34375 2.03125q0.34375 0.53125 1.109375 0.53125q0.5625 0 1.03125 -0.203125q0.46875 -0.21875 1.0625 -0.65625l0.40625 1.0625zm5.59375 -7.9375q1.828125 0 2.78125 0.953125q0.96875 0.953125 0.96875 3.234375l0 4.53125l-1.453125 0l0 -1.3125q-0.78125 1.515625 -2.90625 1.515625q-1.421875 0 -2.21875 -0.640625q-0.796875 -0.640625 -0.796875 -1.703125q0 -0.90625 0.578125 -1.578125q0.578125 -0.671875 1.546875 -1.03125q0.984375 -0.359375 2.15625 -0.359375q1.0625 0 1.921875 0.109375q-0.109375 -1.4375 -0.75 -2.015625q-0.640625 -0.59375 -1.921875 -0.59375q-0.625 0 -1.21875 0.25q-0.578125 0.234375 -1.0625 0.703125l-0.65625 -0.859375q1.1875 -1.203125 3.03125 -1.203125zm-0.484375 7.890625q1.390625 0 2.1875 -0.796875q0.796875 -0.8125 0.875 -2.34375q-0.84375 -0.140625 -1.8125 -0.140625q-1.40625 0 -2.234375 0.46875q-0.828125 0.46875 -0.828125 1.421875q0 1.390625 1.8125 1.390625z" fill-rule="nonzero"/><path fill="#000000" d="m538.4454 410.78024l3.734375 0q1.84375 0 2.75 0.921875q0.921875 0.90625 0.921875 2.359375q0 1.4375 -0.890625 2.328125q-0.890625 0.890625 -2.6875 0.890625l-2.484375 0l0 5.125l-1.34375 0l0 -11.625zm3.6875 5.34375q1.21875 0 1.796875 -0.53125q0.578125 -0.53125 0.578125 -1.484375q0 -0.921875 -0.59375 -1.5q-0.59375 -0.59375 -1.765625 -0.59375l-2.359375 0l0 4.109375l2.34375 0zm9.21875 6.4375q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm9.328125 1.125q-1.140625 0 -2.046875 -0.5625q-0.890625 -0.5625 -1.390625 -1.5625q-0.5 -1.015625 -0.5 -2.296875q0 -1.296875 0.5 -2.296875q0.5 -1.015625 1.390625 -1.578125q0.90625 -0.578125 2.046875 -0.578125q1.125 0 2.015625 0.578125q0.90625 0.5625 1.40625 1.578125q0.5 1.0 0.5 2.296875q0 1.28125 -0.5 2.296875q-0.5 1.0 -1.40625 1.5625q-0.890625 0.5625 -2.015625 0.5625zm0 -1.125q0.71875 0 1.28125 -0.421875q0.578125 -0.4375 0.90625 -1.1875q0.328125 -0.765625 0.328125 -1.71875q0 -1.453125 -0.71875 -2.375q-0.703125 -0.921875 -1.796875 -0.921875q-1.109375 0 -1.828125 0.921875q-0.703125 0.921875 -0.703125 2.375q0 0.953125 0.328125 1.71875q0.328125 0.75 0.890625 1.1875q0.578125 0.421875 1.3125 0.421875zm6.125 0.96875l0 -1.078125l2.53125 0l0 -10.25l-2.421875 0l0 -1.078125l3.78125 0l0 11.328125l2.5 0l0 1.078125l-6.390625 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m85.40682 135.93997l138.58267 188.34647" 
fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m85.40682 135.93997l135.0268 183.51367" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m219.10321 320.43256l4.0198975 2.676361l-1.3590851 -4.6341553z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m47.26509 193.37305l76.28346 0l0 73.44882l-76.28346 0z" fill-rule="evenodd"/><path fill="#000000" d="m76.1412 213.54305l0 0.015625q-0.453125 -0.5 -0.78125 -0.671875q-0.328125 -0.171875 -0.8125 -0.171875q-0.671875 0 -1.34375 0.34375q-0.65625 0.328125 -1.1875 1.015625q-0.515625 0.671875 -0.734375 1.703125l-0.953125 4.53125l-1.359375 0l1.8125 -8.546875l1.40625 0l-0.375 1.578125q0.53125 -0.84375 1.359375 -1.3125q0.84375 -0.46875 1.71875 -0.46875q1.375 0 2.0625 0.9375l-0.8125 1.046875zm0 0.015625q0.09375 0.109375 0.03125 0.09375q-0.046875 -0.015625 -0.09375 -0.03125l0.0625 -0.0625zm-0.21875 0.0625q0.015625 -0.0625 0.0625 -0.046875q0.046875 0 0.09375 0.046875l-0.0625 0.09375l-0.09375 -0.078125l0 -0.015625zm5.1875 6.859375q-1.90625 0 -2.78125 -1.15625q-0.875 -1.15625 -0.421875 -3.265625q0.296875 -1.40625 1.015625 -2.421875q0.734375 -1.015625 1.734375 -1.546875q1.015625 -0.53125 2.109375 -0.53125q1.546875 0 2.28125 1.03125q0.75 1.03125 0.328125 3.015625q-0.046875 0.203125 -0.171875 0.625l-6.0625 0q-0.25 1.5625 0.375 2.375q0.625 0.796875 1.859375 0.796875q1.34375 0 2.40625 -0.953125l0.59375 0.71875q-1.359375 1.3125 -3.265625 1.3125zm3.0 -5.296875q0.25 -1.203125 -0.203125 -1.890625q-0.453125 -0.703125 -1.453125 -0.703125q-0.921875 0 -1.765625 0.65625q-0.828125 0.640625 -1.265625 1.9375l4.6875 0zm7.703125 -3.609375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm9.40625 1.015625q-0.90625 0 -1.609375 -0.515625q-0.6875 -0.515625 -0.96875 -1.53125q-0.28125 -1.03125 0.03125 -2.484375q0.3125 -1.484375 1.046875 -2.46875q0.734375 -0.984375 1.65625 -1.453125q0.921875 -0.484375 1.828125 -0.484375q0.859375 0 1.40625 0.390625q0.5625 0.390625 0.765625 1.078125l1.09375 -5.125l1.421875 0l-0.03125 0.125q-0.140625 0.125 -0.203125 0.25q-0.046875 0.125 -0.109375 0.453125l-2.171875 10.25q-0.09375 0.453125 -0.125 0.75q-0.03125 0.28125 0.03125 0.578125l-1.328125 0q-0.0625 -0.296875 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.75q-0.5625 0.71875 -1.296875 1.125q-0.71875 0.390625 -1.53125 0.390625zm0.46875 -1.171875q1.15625 0 1.890625 -0.90625q0.734375 -0.921875 1.0625 -2.421875q0.3125 -1.515625 -0.078125 -2.421875q-0.390625 -0.921875 -1.578125 -0.921875q-1.109375 0 -1.890625 0.84375q-0.78125 0.828125 -1.078125 2.265625q-0.34375 1.640625 0.0625 2.609375q0.421875 0.953125 1.609375 0.953125z" fill-rule="nonzero"/><path fill="#000000" d="m64.195885 233.7618l1.171875 0l-0.0625 6.859375l2.734375 -6.046875l0.8125 0l0.53125 6.015625q1.28125 -3.484375 1.625 
-4.578125q0.359375 -1.09375 0.546875 -1.96875l0.0625 -0.28125l1.25 0q-1.484375 4.328125 -3.21875 8.546875l-1.28125 0l-0.390625 -5.515625l-2.65625 5.515625l-1.21875 0l0.09375 -8.546875zm16.609375 1.78125l0 0.015625q-0.453125 -0.5 -0.78125 -0.671875q-0.328125 -0.171875 -0.8125 -0.171875q-0.671875 0 -1.34375 0.34375q-0.65625 0.328125 -1.1875 1.015625q-0.515625 0.671875 -0.734375 1.703125l-0.953125 4.53125l-1.359375 0l1.8125 -8.546875l1.40625 0l-0.375 1.578125q0.53125 -0.84375 1.359375 -1.3125q0.84375 -0.46875 1.71875 -0.46875q1.375 0 2.0625 0.9375l-0.8125 1.046875zm0 0.015625q0.09375 0.109375 0.03125 0.09375q-0.046875 -0.015625 -0.09375 -0.03125l0.0625 -0.0625zm-0.21875 0.0625q0.015625 -0.0625 0.0625 -0.046875q0.046875 0 0.09375 0.046875l-0.0625 0.09375l-0.09375 -0.078125l0 -0.015625zm2.015625 6.671875l0.234375 -1.078125l2.1875 0l1.34375 -6.359375l-2.0625 0l0.234375 -1.09375l3.40625 0l-1.578125 7.453125l2.0 0l-0.234375 1.078125l-5.53125 0zm4.96875 -10.3125q-0.390625 0 -0.609375 -0.28125q-0.21875 -0.28125 -0.140625 -0.671875q0.09375 -0.40625 0.421875 -0.6875q0.328125 -0.28125 0.734375 -0.28125q0.390625 0 0.625 0.296875q0.234375 0.28125 0.140625 0.671875q-0.078125 0.390625 -0.4375 0.671875q-0.34375 0.28125 -0.734375 0.28125zm10.953125 9.53125q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm5.90625 0.96875q-1.90625 0 -2.78125 -1.15625q-0.875 -1.15625 -0.421875 -3.265625q0.296875 -1.40625 1.015625 -2.421875q0.734375 -1.015625 1.734375 -1.546875q1.015625 -0.53125 2.109375 -0.53125q1.546875 0 2.28125 1.03125q0.75 1.03125 0.328125 3.015625q-0.046875 0.203125 -0.171875 0.625l-6.0625 0q-0.25 1.5625 0.375 2.375q0.625 0.796875 1.859375 0.796875q1.34375 0 2.40625 -0.953125l0.59375 0.71875q-1.359375 1.3125 -3.265625 1.3125zm3.0 -5.296875q0.25 -1.203125 -0.203125 -1.890625q-0.453125 -0.703125 -1.453125 -0.703125q-0.921875 0 -1.765625 0.65625q-0.828125 0.640625 -1.265625 1.9375l4.6875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m647.6129 281.37335l-91.590515 42.897644" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m647.6129 281.37335l-86.15698 40.352753" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m560.7554 320.23032l-3.4091187 3.4206238l4.8103027 -0.4290161z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m298.2796 269.3983l257.7323 54.86615" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m298.2796 269.3983l251.86383 53.616882" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m549.7995 324.63068l4.7825317 -0.6706238l-4.0947266 -2.5604248z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m621.2573 135.94116l-65.25983 188.31497" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m621.2573 135.94116l-63.295166 182.64575" fill-rule="evenodd"/><path 
fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m556.4015 318.04605l0.07470703 4.828766l3.0466309 -3.7470703z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m420.3832 219.50296l161.32285 0l0 52.503937l-161.32285 0z" fill-rule="evenodd"/><path fill="#000000" d="m460.40915 235.90733l-1.40625 6.609375q-0.265625 1.421875 -0.875 2.34375q-0.59375 0.90625 -1.390625 1.34375q-0.796875 0.421875 -1.65625 0.421875q-1.703125 0 -2.484375 -1.28125l0.921875 -0.9375l0.03125 -0.015625l0.015625 0.015625q0.40625 0.5625 0.78125 0.8125q0.390625 0.25 0.953125 0.25q0.953125 0 1.515625 -0.65625q0.5625 -0.671875 0.90625 -2.265625l1.40625 -6.640625l-2.25 0l0.234375 -1.109375l5.328125 0l-0.234375 1.109375l-1.796875 0zm-6.640625 8.421875q-0.015625 0.078125 -0.078125 0.078125q-0.046875 -0.015625 -0.09375 -0.0625l0.109375 -0.09375l0.0625 0.078125zm-0.21875 0.0625q-0.078125 -0.09375 -0.03125 -0.078125q0.0625 0.015625 0.078125 0.03125l-0.046875 0.046875zm11.859375 2.1875q-1.140625 0 -1.921875 -0.5625q-0.78125 -0.5625 -1.0625 -1.5625q-0.28125 -1.015625 -0.015625 -2.296875q0.28125 -1.296875 0.984375 -2.296875q0.71875 -1.015625 1.734375 -1.578125q1.03125 -0.578125 2.171875 -0.578125q1.125 0 1.890625 0.578125q0.78125 0.5625 1.0625 1.578125q0.296875 1.0 0.015625 2.296875q-0.265625 1.28125 -0.984375 2.296875q-0.71875 1.0 -1.734375 1.5625q-1.015625 0.5625 -2.140625 0.5625zm0.234375 -1.125q0.71875 0 1.375 -0.421875q0.671875 -0.4375 1.15625 -1.1875q0.484375 -0.765625 0.6875 -1.71875q0.3125 -1.453125 -0.203125 -2.375q-0.515625 -0.921875 -1.609375 -0.921875q-1.109375 0 -2.015625 0.921875q-0.90625 0.921875 -1.21875 2.375q-0.203125 0.953125 -0.03125 1.71875q0.171875 0.75 0.640625 1.1875q0.484375 0.421875 1.21875 0.421875zm8.53125 1.171875q-1.34375 0 -1.9375 -1.0q-0.59375 -1.0 -0.15625 -2.96875l1.046875 -4.765625l1.296875 0l-1.015625 4.765625q-0.328125 1.53125 0.0625 2.21875q0.390625 0.671875 1.265625 0.671875q0.96875 0 1.796875 -0.75q0.828125 -0.765625 1.125 -2.203125l1.0 -4.703125l1.3125 0l-1.53125 7.203125q-0.09375 0.453125 -0.140625 0.75q-0.03125 0.28125 0.046875 0.578125l-1.296875 0q-0.0625 -0.28125 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.734375q-0.5625 0.71875 -1.34375 1.125q-0.78125 0.390625 -1.625 0.390625zm14.640625 -6.953125l0 0.015625q-0.453125 -0.5 -0.78125 -0.671875q-0.328125 -0.171875 -0.8125 -0.171875q-0.671875 0 -1.34375 0.34375q-0.65625 0.328125 -1.1875 1.015625q-0.515625 0.671875 -0.734375 1.703125l-0.953125 4.53125l-1.359375 0l1.8125 -8.546875l1.40625 0l-0.375 1.578125q0.53125 -0.84375 1.359375 -1.3125q0.84375 -0.46875 1.71875 -0.46875q1.375 0 2.0625 0.9375l-0.8125 1.046875zm0 0.015625q0.09375 0.109375 0.03125 0.09375q-0.046875 -0.015625 -0.09375 -0.03125l0.0625 -0.0625zm-0.21875 0.0625q0.015625 -0.0625 0.0625 -0.046875q0.046875 0 0.09375 0.046875l-0.0625 0.09375l-0.09375 -0.078125l0 -0.015625zm3.3125 -1.859375l1.3125 0l-0.328125 1.515625q0.671875 -0.78125 1.53125 -1.25q0.875 -0.46875 1.734375 -0.46875q1.125 0 1.625 0.875q0.5 0.859375 0.109375 2.6875l-1.09375 5.171875l-1.3125 0l1.09375 -5.125q0.265625 -1.28125 -0.046875 -1.859375q-0.296875 -0.578125 -1.0 -0.578125q-0.578125 0 -1.234375 0.34375q-0.65625 0.328125 -1.171875 0.9375q-0.515625 0.609375 -0.671875 1.375l-1.046875 4.90625l-1.3125 0l1.8125 -8.53125zm12.578125 -0.1875q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 
-1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm6.5625 0.828125l0.234375 -1.078125l2.53125 0l2.1719055 -10.25l-2.4219055 0l0.234375 -1.078125l3.7812805 0l-2.40625 11.328125l2.5 0l-0.234375 1.078125l-6.3906555 0zm18.640656 0l2.46875 -11.640625l6.703125 0l-0.234375 1.140625l-5.390625 0l-0.78125 3.65625l4.34375 0l-0.234375 1.140625l-4.34375 0l-1.21875 5.703125l-1.3125 0zm9.34375 0l0.234375 -1.078125l2.53125 0l2.171875 -10.25l-2.421875 0l0.234375 -1.078125l3.78125 0l-2.40625 11.328125l2.5 0l-0.234375 1.078125l-6.390625 0zm11.9375 0.203125q-1.34375 0 -1.9375 -1.0q-0.59375 -1.0 -0.15625 -2.96875l1.046875 -4.765625l1.296875 0l-1.015625 4.765625q-0.328125 1.53125 0.0625 2.21875q0.390625 0.671875 1.265625 0.671875q0.96875 0 1.796875 -0.75q0.828125 -0.765625 1.125 -2.203125l1.0 -4.703125l1.3125 0l-1.53125 7.203125q-0.09375 0.453125 -0.140625 0.75q-0.03125 0.28125 0.046875 0.578125l-1.296875 0q-0.0625 -0.28125 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.734375q-0.5625 0.71875 -1.34375 1.125q-0.78125 0.390625 -1.625 0.390625zm11.59375 -5.21875q1.515625 0.484375 2.046875 1.0625q0.53125 0.5625 0.34375 1.46875q-0.25 1.15625 -1.3125 1.921875q-1.0625 0.75 -2.78125 0.75q-2.171875 0 -3.328125 -1.34375l1.015625 -1.265625l0.015625 -0.015625l0.015625 0.015625q0.421875 0.75 0.953125 1.125q0.546875 0.359375 1.609375 0.359375q1.015625 0 1.671875 -0.359375q0.65625 -0.359375 0.78125 -1.0q0.109375 -0.53125 -0.28125 -0.890625q-0.390625 -0.359375 -1.515625 -0.75q-3.015625 -0.90625 -2.65625 -2.640625q0.21875 -1.0 1.15625 -1.578125q0.9375 -0.578125 2.453125 -0.578125q1.15625 0 1.875 0.328125q0.71875 0.328125 1.234375 1.015625l-0.96875 0.9375l-0.015625 0.015625q-0.265625 -0.59375 -0.921875 -0.9375q-0.640625 -0.34375 -1.390625 -0.34375q-0.796875 0 -1.375 0.28125q-0.578125 0.28125 -0.671875 0.78125q-0.109375 0.46875 0.328125 0.859375q0.453125 0.390625 1.71875 0.78125zm2.09375 -1.390625q0.015625 -0.09375 0.140625 0.03125l-0.078125 0.0625l-0.0625 -0.09375zm0.21875 -0.03125q0.03125 0.078125 0.015625 0.09375q-0.015625 0.015625 -0.046875 0q-0.03125 -0.015625 -0.046875 -0.03125l0.078125 -0.0625zm-6.09375 3.9375q-0.015625 0.078125 -0.171875 0l0.078125 -0.09375l0.09375 0.078125l0 0.015625zm-0.21875 0.0625q-0.046875 -0.09375 -0.03125 -0.09375q0.03125 0 0.078125 0.03125l-0.046875 0.0625zm10.984375 -9.96875l1.46875 0l-0.03125 0.125q-0.140625 0.125 -0.1875 0.25q-0.046875 0.125 -0.109375 0.453125l-0.984375 4.5625q0.671875 -0.78125 1.53125 -1.25q0.859375 -0.46875 1.640625 -0.46875q1.203125 0 1.703125 0.859375q0.515625 0.859375 0.125 2.703125l-1.09375 5.171875l-1.3125 0l1.09375 -5.125q0.265625 -1.28125 -0.046875 -1.859375q-0.296875 -0.578125 -1.0 -0.578125q-0.59375 0 -1.25 0.328125q-0.640625 0.328125 -1.15625 0.9375q-0.515625 0.609375 -0.671875 1.390625l-1.046875 4.90625l-1.3125 0l2.640625 -12.40625z" fill-rule="nonzero"/><path fill="#ffffff" d="m341.60367 41.995308l0 0c0 -5.917248 4.796875 -10.714127 10.714142 -10.714127l42.855194 0l0 0c2.8415833 0 5.566742 1.1288071 7.57605 3.1380959c2.0092773 2.0092888 3.138092 4.7344666 3.138092 7.5760307l0 42.855217c0 5.917244 -4.796875 
10.714119 -10.714142 10.714119l-42.855194 0c-5.917267 0 -10.714142 -4.796875 -10.714142 -10.714119z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m341.60367 41.995308l0 0c0 -5.917248 4.796875 -10.714127 10.714142 -10.714127l42.855194 0l0 0c2.8415833 0 5.566742 1.1288071 7.57605 3.1380959c2.0092773 2.0092888 3.138092 4.7344666 3.138092 7.5760307l0 42.855217c0 5.917244 -4.796875 10.714119 -10.714142 10.714119l-42.855194 0c-5.917267 0 -10.714142 -4.796875 -10.714142 -10.714119z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m371.98032 52.367798l22.11023 22.110237" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m374.66028 55.04773l16.750305 16.750374" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m372.33386 52.72135c0.64242554 -0.6424141 1.68396 -0.6424141 2.3263855 0c0.64242554 0.6424103 0.64242554 1.6839676 0 2.3263817c-0.64242554 0.6424103 -1.68396 0.6424103 -2.3263855 0c-0.642395 -0.6424141 -0.642395 -1.6839714 0 -2.3263817z" fill-rule="nonzero"/><path stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m393.737 74.12448c-0.64242554 0.6424103 -1.68396 0.6424103 -2.3263855 0c-0.642395 -0.6424103 -0.642395 -1.6839676 0 -2.3263855c0.64242554 -0.6424103 1.68396 -0.6424103 2.3263855 0c0.64242554 0.6424179 0.64242554 1.6839752 0 2.3263855z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m374.6601 52.79299l-21.259827 21.259842" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m374.6601 52.792995l-18.579865 18.579906" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m357.24338 72.536095l-2.3263855 2.3263779l-2.326355 -2.3263779l2.326355 -2.3263855l2.3263855 2.3263855z" fill-rule="nonzero"/><path fill="#ffffff" d="m702.1522 41.993996l0 0c0 -5.917248 4.796875 -10.714127 10.714111 -10.714127l42.855225 0l0 0c2.8415527 0 5.5667725 1.1288071 7.57605 3.1380959c2.0092773 2.0092888 3.1380615 4.7344666 3.1380615 7.5760307l0 42.855217c0 5.917244 -4.796875 10.714119 -10.714111 10.714119l-42.855225 0c-5.9172363 0 -10.714111 -4.796875 -10.714111 -10.714119z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m702.1522 41.993996l0 0c0 -5.917248 4.796875 -10.714127 10.714111 -10.714127l42.855225 0l0 0c2.8415527 0 5.5667725 1.1288071 7.57605 3.1380959c2.0092773 2.0092888 3.1380615 4.7344666 3.1380615 7.5760307l0 42.855217c0 5.917244 -4.796875 10.714119 -10.714111 10.714119l-42.855225 0c-5.9172363 0 -10.714111 -4.796875 -10.714111 -10.714119z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m716.458 43.87436l22.11023 22.110237" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m719.13794 46.554295l16.750366 16.750366" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m716.8116 44.227913c0.642395 -0.6424141 1.68396 -0.6424141 2.326355 0c0.642395 0.6424103 0.642395 1.6839676 0 2.3263817c-0.642395 0.6424103 -1.68396 0.6424103 -2.326355 0c-0.64245605 -0.6424141 -0.64245605 -1.6839714 0 -2.3263817z" fill-rule="nonzero"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m738.21466 65.63104c-0.642395 0.6424103 -1.68396 0.6424103 -2.326355 0c-0.642395 -0.6424103 -0.642395 
-1.6839676 0 -2.3263817c0.642395 -0.6424141 1.68396 -0.6424141 2.326355 0c0.64245605 0.6424141 0.64245605 1.6839714 0 2.3263817z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m738.14307 61.709003l-21.259888 21.259846" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m738.14307 61.709003l-18.579895 18.579914" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m720.7263 81.4521l-2.326355 2.3263855l-2.326416 -2.3263855l2.326416 -2.3263779l2.326355 2.3263779z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m735.96716 61.34155l21.259888 21.259846" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m735.96716 61.34155l18.579895 18.579914" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m755.71027 78.75827l2.326416 2.3263779l-2.326416 2.3263855l-2.326355 -2.3263855l2.326355 -2.3263779z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m361.7454 127.52721c13.486847 7.3713837 49.210907 36.856964 80.92093 44.228348c31.710052 7.371399 81.85028 8.16481 109.33923 0c27.489014 -8.164795 46.328796 -40.82402 55.594604 -48.988815" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m366.87332 130.64238l0.061187744 0.03933716c0.28182983 0.18249512 0.5683594 0.36921692 0.8595886 0.5599823c2.3294678 1.5260925 4.9551697 3.3113708 7.8257446 5.269394c5.7411804 3.9160461 12.462006 8.523178 19.75232 13.130295c14.580627 9.214233 31.439148 18.428467 47.29419 22.114166c31.709991 7.371399 81.85025 8.16481 109.3392 0c13.744507 -4.0823975 25.326721 -14.288406 34.63098 -24.494415c4.6521606 -5.102997 8.734863 -10.205994 12.233582 -14.543549c0.8746948 -1.0843811 1.7128906 -2.1209412 2.5143433 -3.0976868c0.40075684 -0.4883728 0.79229736 -0.961792 1.1746216 -1.4187622c0.19122314 -0.22849274 0.38006592 -0.45287323 0.5666504 -0.6729584l0.34716797 -0.40646362" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.7309 129.23071l-4.7360535 -0.9445038l3.0209045 3.7678375z" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m604.6718 128.2579l1.9227295 -4.4300766l-4.3204956 2.1577148z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m388.79266 166.79956l192.91339 0l0 43.338577l-192.91339 0z" fill-rule="evenodd"/><path fill="#000000" d="m406.72592 193.71956l2.46875 -11.625l1.03125 0l1.671875 5.6875l4.125 -5.703125l0.984375 0l-2.46875 11.640625l-1.234375 0l1.859375 -8.765625l-3.515625 4.6875l-0.5 0l-1.34375 -4.640625l-1.859375 8.71875l-1.21875 0zm13.59375 0.1875q-1.90625 0 -2.78125 -1.15625q-0.875 -1.15625 -0.421875 -3.265625q0.296875 -1.40625 1.015625 -2.421875q0.734375 -1.015625 1.734375 -1.546875q1.015625 -0.53125 2.109375 -0.53125q1.546875 0 2.28125 1.03125q0.75 1.03125 0.328125 3.015625q-0.046875 0.203125 -0.171875 0.625l-6.0625 0q-0.25 1.5625 0.375 2.375q0.625 0.796875 1.859375 0.796875q1.34375 0 2.40625 -0.953125l0.59375 0.71875q-1.359375 1.3125 -3.265625 1.3125zm3.0 -5.296875q0.25 -1.203125 -0.203125 -1.890625q-0.453125 -0.703125 -1.453125 -0.703125q-0.921875 0 -1.765625 0.65625q-0.828125 0.640625 -1.265625 1.9375l4.6875 0zm9.75 4.328125q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 
-1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm7.28125 -7.9375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm9.40625 1.015625q-0.90625 0 -1.609375 -0.515625q-0.6875 -0.515625 -0.96875 -1.53125q-0.28125 -1.03125 0.03125 -2.484375q0.3125 -1.484375 1.046875 -2.46875q0.734375 -0.984375 1.65625 -1.453125q0.921875 -0.484375 1.828125 -0.484375q0.859375 0 1.40625 0.390625q0.5625 0.390625 0.765625 1.078125l1.09375 -5.125l1.421875 0l-0.03125 0.125q-0.140625 0.125 -0.203125 0.25q-0.046875 0.125 -0.109375 0.453125l-2.171875 10.25q-0.09375 0.453125 -0.125 0.75q-0.03125 0.28125 0.03125 0.578125l-1.328125 0q-0.0625 -0.296875 -0.03125 -0.578125q0.03125 -0.296875 0.125 -0.75q-0.5625 0.71875 -1.296875 1.125q-0.71875 0.390625 -1.53125 0.390625zm0.46875 -1.171875q1.15625 0 1.890625 -0.90625q0.734375 -0.921875 1.0625 -2.421875q0.3125 -1.515625 -0.078125 -2.421875q-0.390625 -0.921875 -1.578125 -0.921875q-1.109375 0 -1.890625 0.84375q-0.78125 0.828125 -1.078125 2.265625q-0.34375 1.640625 0.0625 2.609375q0.421875 0.953125 1.609375 0.953125zm10.953125 -7.734375q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm13.546875 0.046875q-1.4375 0.90625 -2.90625 0.90625q-1.421875 0 -1.828125 -0.84375q-0.40625 -0.859375 0.015625 -2.8125q0.0625 -0.3125 0.265625 -1.09375l0.765625 -2.796875l-1.875 0l0.234375 -1.109375l1.953125 0l0.609375 -2.265625l1.546875 -0.25l0.1875 -0.015625l0 0.109375q-0.171875 0.1875 -0.265625 0.328125q-0.09375 0.125 -0.171875 0.40625l-0.5625 1.6875l2.828125 0l-0.234375 1.109375l-2.90625 0l-0.78125 2.890625q-0.203125 0.75 -0.25 0.984375q-0.3125 1.5 -0.09375 2.03125q0.234375 0.53125 1.0 0.53125q0.5625 0 1.078125 -0.203125q0.515625 -0.21875 1.203125 -0.65625l0.1875 1.0625zm7.28125 -7.9375q1.828125 0 2.578125 0.953125q0.765625 
0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm15.515625 0.828125l2.46875 -11.640625l7.140625 0l-0.234375 1.15625l-5.90625 0l-0.8125 3.828125l4.875 0l-0.265625 1.1875l-4.875 0l-0.90625 4.3125l5.84375 0l-0.25 1.15625l-7.078125 0zm9.109375 0l3.921875 -4.328125l-2.046875 -4.203125l1.46875 0l1.546875 3.15625l2.796875 -3.15625l1.390625 0l-3.640625 4.140625l2.1875 4.390625l-1.515625 0l-1.65625 -3.328125l-2.921875 3.328125l-1.53125 0zm14.0624695 -1.0q1.28125 0 2.4375 -1.046875l0.59375 0.90625q-1.546875 1.34375 -3.375 1.34375q-1.234375 0 -2.0780945 -0.578125q-0.84375 -0.578125 -1.171875 -1.59375q-0.328125 -1.015625 -0.0625 -2.28125q0.265625 -1.265625 1.015625 -2.265625q0.7655945 -1.015625 1.8593445 -1.59375q1.09375 -0.578125 2.3125 -0.578125q1.03125 0 1.78125 0.421875q0.75 0.40625 1.125 1.15625l-1.03125 0.828125l-0.015625 0.015625q-0.390625 -0.71875 -0.90625 -1.0q-0.5 -0.296875 -1.328125 -0.296875q-0.734375 0 -1.453125 0.40625q-0.703125 0.40625 -1.234375 1.140625q-0.53125 0.71875 -0.7343445 1.671875q-0.203125 0.953125 0.015625 1.71875q0.21871948 0.765625 0.8124695 1.203125q0.59375 0.421875 1.4375 0.421875zm3.1875 -5.25q0.03125 -0.109375 0.15625 0.015625l-0.078125 0.078125l-0.078125 -0.09375zm0.203125 -0.015625q0.078125 0.125 0.03125 0.09375q-0.03125 -0.046875 -0.078125 -0.0625l0.046875 -0.03125zm4.328125 -6.140625l1.46875 0l-0.03125 0.125q-0.140625 0.125 -0.1875 0.25q-0.046875 0.125 -0.109375 0.453125l-0.984375 4.5625q0.671875 -0.78125 1.53125 -1.25q0.859375 -0.46875 1.640625 -0.46875q1.203125 0 1.703125 0.859375q0.515625 0.859375 0.125 2.703125l-1.09375 5.171875l-1.3125 0l1.09375 -5.125q0.265625 -1.28125 -0.046875 -1.859375q-0.296875 -0.578125 -1.0 -0.578125q-0.59375 0 -1.25 0.328125q-0.640625 0.328125 -1.15625 0.9375q-0.515625 0.609375 -0.671875 1.390625l-1.046875 4.90625l-1.3125 0l2.640625 -12.40625zm11.734375 3.6875q1.828125 0 2.578125 0.953125q0.765625 0.953125 0.28125 3.234375l-0.96875 4.53125l-1.453125 0l0.28125 -1.3125q-1.109375 1.515625 -3.234375 1.515625q-1.421875 0 -2.078125 -0.640625q-0.65625 -0.640625 -0.4375 -1.703125q0.1875 -0.90625 0.90625 -1.578125q0.734375 -0.671875 1.78125 -1.03125q1.0625 -0.359375 2.234375 -0.359375q1.0625 0 1.890625 0.109375q0.203125 -1.4375 -0.3125 -2.015625q-0.515625 -0.59375 -1.796875 -0.59375q-0.625 0 -1.265625 0.25q-0.640625 0.234375 -1.21875 0.703125l-0.484375 -0.859375q1.453125 -1.203125 3.296875 -1.203125zm-2.171875 7.890625q1.390625 0 2.359375 -0.796875q0.96875 -0.8125 1.375 -2.34375q-0.8125 -0.140625 -1.78125 -0.140625q-1.40625 0 -2.34375 0.46875q-0.921875 0.46875 -1.125 1.421875q-0.296875 1.390625 1.515625 1.390625zm8.25 -7.703125l1.3125 0l-0.328125 1.515625q0.671875 -0.78125 1.53125 -1.25q0.875 -0.46875 1.734375 -0.46875q1.125 0 1.625 0.875q0.5 0.859375 0.109375 2.6875l-1.09375 5.171875l-1.3125 0l1.09375 -5.125q0.265625 -1.28125 -0.046875 -1.859375q-0.296875 -0.578125 -1.0 
-0.578125q-0.578125 0 -1.234375 0.34375q-0.65625 0.328125 -1.171875 0.9375q-0.515625 0.609375 -0.671875 1.375l-1.046875 4.90625l-1.3125 0l1.8125 -8.53125zm16.65625 0.875q-0.171875 -0.015625 -0.484375 -0.015625q-0.890625 0 -1.5625 0.390625q0.171875 0.640625 0.015625 1.40625q-0.1875 0.84375 -0.703125 1.53125q-0.5 0.671875 -1.265625 1.0625q-0.765625 0.390625 -1.65625 0.390625q-0.71875 0 -1.265625 -0.265625q-0.421875 0.40625 -0.5 0.78125q-0.09375 0.453125 0.421875 0.65625q0.53125 0.1875 2.015625 0.1875q1.8125 0 2.34375 0.5625q0.546875 0.546875 0.3125 1.609375q-0.234375 1.09375 -1.28125 1.828125q-1.046875 0.734375 -2.953125 0.734375q-1.828125 0 -2.75 -0.5625q-0.90625 -0.546875 -0.6875 -1.640625q0.15625 -0.671875 0.625 -1.15625q0.484375 -0.484375 1.125 -0.765625q-0.53125 -0.40625 -0.375 -1.125q0.15625 -0.75 1.09375 -1.578125q-0.375 -0.40625 -0.5 -0.984375q-0.109375 -0.578125 0.03125 -1.265625q0.171875 -0.828125 0.671875 -1.5q0.515625 -0.6875 1.28125 -1.078125q0.765625 -0.390625 1.65625 -0.390625q1.359375 0 2.0 0.875q0.53125 -0.40625 1.046875 -0.59375q0.515625 -0.203125 1.09375 -0.203125l0.3125 0.015625l-0.0625 1.09375zm-5.40625 3.640625q0.796875 0 1.4375 -0.53125q0.640625 -0.546875 0.8125 -1.328125q0.171875 -0.78125 -0.25 -1.328125q-0.421875 -0.546875 -1.21875 -0.546875q-0.796875 0 -1.453125 0.546875q-0.640625 0.546875 -0.8125 1.328125q-0.171875 0.78125 0.25 1.328125q0.4375 0.53125 1.234375 0.53125zm1.828125 4.859375q0.09375 -0.453125 -0.03125 -0.703125q-0.109375 -0.25 -0.546875 -0.375q-0.421875 -0.125 -1.359375 -0.125q-1.34375 0 -2.09375 -0.234375q-0.515625 0.296875 -0.765625 0.609375q-0.234375 0.3125 -0.359375 0.84375q-0.125 0.640625 0.5 0.984375q0.640625 0.359375 1.90625 0.359375q1.234375 0 1.921875 -0.375q0.6875 -0.359375 0.828125 -0.984375zm7.453125 -0.65625q-1.90625 0 -2.78125 -1.15625q-0.875 -1.15625 -0.421875 -3.265625q0.296875 -1.40625 1.015625 -2.421875q0.734375 -1.015625 1.734375 -1.546875q1.015625 -0.53125 2.109375 -0.53125q1.546875 0 2.28125 1.03125q0.75 1.03125 0.328125 3.015625q-0.046875 0.203125 -0.171875 0.625l-6.0625 0q-0.25 1.5625 0.375 2.375q0.625 0.796875 1.859375 0.796875q1.34375 0 2.40625 -0.953125l0.59375 0.71875q-1.359375 1.3125 -3.265625 1.3125zm3.0 -5.296875q0.25 -1.203125 -0.203125 -1.890625q-0.453125 -0.703125 -1.453125 -0.703125q-0.921875 0 -1.765625 0.65625q-0.828125 0.640625 -1.265625 1.9375l4.6875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m110.45702 4.5455894l171.33856 -0.031496048l0 73.44882l-171.33856 0.03149414z" fill-rule="evenodd"/><path fill="#000000" d="m182.11069 31.608667q-1.140625 2.07901E-4 -1.921875 -0.56214714q-0.78125 -0.56235695 -1.0625 -1.5623055q-0.28125 -1.0155735 -0.015625 -2.2968712q0.28125 -1.2969265 0.984375 -2.2970562q0.71875 -1.0157585 1.734375 -1.5784435q1.03125 -0.57831573 2.171875 -0.57852554q1.125 -2.0599365E-4 1.890625 0.57777786q0.78125 0.56235695 1.0625 1.5779305q0.296875 0.9999447 0.015625 2.2968712q-0.265625 1.2812996 -0.984375 2.2970562q-0.71875 1.0001316 -1.734375 1.5628185q-1.015625 0.5626869 -2.140625 0.5628948zm0.234375 -1.1250439q0.71875 -1.3160706E-4 1.375 -0.42212868q0.671875 -0.43762207 1.15625 -1.1877117q0.484375 -0.76571465 0.6875 -1.7188759q0.3125 -1.4531822 -0.203125 -2.3749638q-0.515625 -0.92177963 -1.609375 -0.92157936q-1.109375 2.040863E-4 -2.015625 0.92224693q-0.90625 0.92204094 -1.21875 2.3752232q-0.203125 0.95316315 -0.03125 1.7187557q0.171875 0.7499695 0.640625 1.1873817q0.484375 0.42178726 1.21875 0.42165184zm7.390625 -7.563858l1.359375 -2.4986267E-4l-0.359375 1.6875648q0.484375 
-0.93758774 1.265625 -1.4064827q0.796875 -0.46889687 1.734375 -0.46906853q0.96875 -1.7738342E-4 1.671875 0.49969292q0.703125 0.4998703 0.96875 1.4841976q0.28125 0.9843235 -0.015625 2.406252q-0.3125 1.4375572 -1.046875 2.4689426q-0.71875 1.0313816 -1.65625 1.5628052q-0.9375 0.51579666 -1.875 0.5159683q-0.765625 1.411438E-4 -1.359375 -0.34350014q-0.59375 -0.35926437 -0.890625 -0.99983597l-0.890625 4.234541l-1.375 2.5177002E-4l2.46875 -11.641079zm2.078125 7.5621166q1.03125 -1.8882751E-4 1.890625 -0.76597214q0.859375 -0.7814083 1.234375 -2.531477q0.34375 -1.593813 -0.09375 -2.4374828q-0.4375 -0.8592949 -1.5625 -0.859087q-1.0625 1.9454956E-4 -1.921875 0.89097786q-0.84375 0.8907795 -1.234375 2.703352q-0.28125 1.4688015 0.171875 2.2343426q0.453125 0.765543 1.515625 0.7653465zm9.34375 1.1545334q-1.90625 3.5095215E-4 -2.78125 -1.1557388q-0.875 -1.1560898 -0.421875 -3.2655468q0.296875 -1.4063053 1.015625 -2.422062q0.734375 -1.0157604 1.734375 -1.5471954q1.015625 -0.531435 2.109375 -0.5316372q1.546875 -2.8419495E-4 2.28125 1.0308304q0.75 1.0311127 0.328125 3.0155659q-0.046875 0.20313263 -0.171875 0.6250305l-6.0625 0.001115799q-0.25 1.5625458 0.375 2.3749294q0.625 0.79676056 1.859375 0.7965336q1.34375 -2.4604797E-4 2.40625 -0.9535675l0.59375 0.7186413q-1.359375 1.3127499 -3.265625 1.3131008zm3.0 -5.297426q0.25 -1.2031708 -0.203125 -1.8905888q-0.453125 -0.7030411 -1.453125 -0.702858q-0.921875 1.6975403E-4 -1.765625 0.65657425q-0.828125 0.6407776 -1.265625 1.9377327l4.6875 -8.6021423E-4zm4.453125 -3.4226952l1.3125 -2.4032593E-4l-0.328125 1.515686q0.671875 -0.781374 1.53125 -1.2502823q0.875 -0.46891022 1.734375 -0.46906853q1.125 -2.07901E-4 1.625 0.87470055q0.5 0.85928345 0.109375 2.687481l-1.09375 5.1720753l-1.3125 2.4223328E-4l1.09375 -5.125202q0.265625 -1.2812977 -0.046875 -1.8593655q-0.296875 -0.5780716 -1.0 -0.5779419q-0.578125 1.0681152E-4 -1.234375 0.34397697q-0.65625 0.32824516 -1.171875 0.93771553q-0.515625 0.60947037 -0.671875 1.375124l-1.046875 4.9064426l-1.3125 2.4032593E-4l1.8125 -8.531584z" fill-rule="nonzero"/><path fill="#000000" d="m175.32162 44.922413l1.234375 -2.2506714E-4l-0.171875 0.8437805q0.390625 -0.46882248 0.921875 -0.75016785q0.53125 -0.29697418 1.03125 -0.29706573q0.5625 -1.02996826E-4 0.90625 0.35920715q0.34375 0.35931396 0.34375 0.89056396q0.34375 -0.56256485 0.9375 -0.90642166q0.609375 -0.34386444 1.25 -0.34397888q0.84375 -1.5640259E-4 1.125 0.6091652q0.28125 0.6093254 0.046875 1.5781174l-1.390625 6.5471306l-1.234375 2.2888184E-4l1.28125 -6.0471115q0.234375 -1.062542 0.09375 -1.3750191q-0.140625 -0.3280983 -0.53125 -0.32802582q-0.34375 6.1035156E-5 -0.71875 0.3282585q-0.375 0.31256866 -0.6875 0.8126259q-0.296875 0.4844284 -0.390625 0.96881866l-1.203125 5.6408463l-1.25 2.3269653E-4l1.265625 -5.9377327q0.21875 -1.046917 0.09375 -1.4062691q-0.109375 -0.35935593 -0.65625 -0.35925293q-0.28125 4.9591064E-5 -0.625 0.25011444q-0.34375 0.25006104 -0.640625 0.71886826q-0.28125 0.4531746 -0.40625 1.0469475l-1.203125 5.6877213l-1.234375 2.2888184E-4l1.8125 -8.531586zm11.4375 4.263523l-1.34375 1.031498l-0.6875 3.234501l-1.359375 2.5177002E-4l2.640625 -12.406738l1.53125 -2.784729E-4l-0.03125 0.12500381q-0.140625 0.1250267 -0.203125 0.25003815q-0.046875 0.12500763 -0.109375 0.45314407l-1.484375 6.9690247l5.09375 -3.9540634q0.453125 0.09366608 0.984375 0.09357071l0.484375 -8.773804E-5l-4.3125 3.3757896l3.109375 5.0931816l-1.75 0.078445435l-2.5625 -4.343281zm9.015625 4.4514694q-0.90625 1.6784668E-4 -1.609375 -0.51533127q-0.6875 -0.5154953 -0.96875 -1.5310707q-0.28125 -1.0311966 0.03125 
-2.4843788q0.3125 -1.4844322 1.046875 -2.4689445q0.734375 -0.9845085 1.65625 -1.4534302q0.921875 -0.48454285 1.828125 -0.4847107q0.859375 -1.5640259E-4 1.40625 0.3903656q0.5625 0.390522 0.765625 1.0779877l1.09375 -5.125202l1.421875 -2.632141E-4l-0.03125 0.12500763q-0.140625 0.1250267 -0.203125 0.25003815q-0.046875 0.12500763 -0.109375 0.45314407l-2.171875 10.250401q-0.09375 0.45314026 -0.125 0.7500229q-0.03125 0.2812538 0.03125 0.5781174l-1.328125 2.4414062E-4q-0.0625 -0.29686356 -0.03125 -0.5781174q0.03125 -0.29688263 0.125 -0.7500229q-0.5625 0.718853 -1.296875 1.1252365q-0.71875 0.3907585 -1.53125 0.3909073zm0.46875 -1.1719627q1.15625 -2.0980835E-4 1.890625 -0.90659714q0.734375 -0.9220085 1.0625 -2.4220695q0.3125 -1.5156822 -0.078125 -2.4218597q-0.390625 -0.9218025 -1.578125 -0.9215851q-1.109375 2.0217896E-4 -1.890625 0.84409714q-0.78125 0.82826996 -1.078125 2.2658234q-0.34375 1.640686 0.0625 2.6093636q0.421875 0.9530487 1.609375 0.95282745zm6.40625 0.9832001l0.234375 -1.0781708l2.1875 -4.005432E-4l1.34375 -6.359623l-2.0625 3.8146973E-4l0.234375 -1.0937958l3.40625 -6.2561035E-4l-1.578125 7.453415l2.0 -3.6621094E-4l-0.234375 1.078167l-5.53125 0.0010185242zm4.96875 -10.313416q-0.390625 7.247925E-5 -0.609375 -0.28113556q-0.21875 -0.28121185 -0.140625 -0.6718521q0.09375 -0.40626526 0.421875 -0.6875763q0.328125 -0.28131104 0.734375 -0.2813835q0.390625 -7.247925E-5 0.625 0.29675674q0.234375 0.28120804 0.140625 0.6718521q-0.078125 0.39063644 -0.4375 0.6719551q-0.34375 0.28131104 -0.734375 0.2813835zm11.890625 3.5603142l0 0.015625q-0.453125 -0.49991608 -0.78125 -0.67173004q-0.328125 -0.17181396 -0.8125 -0.17172623q-0.671875 1.2207031E-4 -1.34375 0.34399796q-0.65625 0.32824326 -1.1875 1.0158424q-0.515625 0.67197037 -0.734375 1.7032585l-0.953125 4.5314255l-1.359375 2.5177002E-4l1.8125 -8.547211l1.40625 -2.5558472E-4l-0.375 1.5781937q0.53125 -0.8438492 1.359375 -1.3127518q0.84375 -0.4689026 1.71875 -0.46906662q1.375 -2.5177002E-4 2.0625 0.93712234l-0.8125 1.0470238zm0 0.015625q0.09375 0.10935974 0.03125 0.093746185q-0.046875 -0.015617371 -0.09375 -0.031234741l0.0625 -0.062511444zm-0.21875 0.06254196q0.015625 -0.062503815 0.0625 -0.046886444q0.046875 -1.1444092E-5 0.09375 0.046855927l-0.0625 0.093761444l-0.09375 -0.07810593l0 -0.015625z" fill-rule="nonzero"/><path fill="#000000" d="m164.94662 75.455574l0.234375 -1.0781708l2.53125 -4.6539307E-4l2.171875 -10.250397l-2.421875 4.4250488E-4l0.234375 -1.078167l3.78125 -6.942749E-4l-2.40625 11.328564l2.5 -4.5776367E-4l-0.234375 1.0781708l-6.390625 0.0011749268zm9.71875 -0.0017852783l0.234375 -1.0781708l2.1875 -4.043579E-4l1.34375 -6.359619l-2.0625 3.8146973E-4l0.234375 -1.0937958l3.40625 -6.2561035E-4l-1.578125 7.453415l2.0 -3.6621094E-4l-0.234375 1.0781631l-5.53125 0.0010223389zm4.96875 -10.313416q-0.390625 6.866455E-5 -0.609375 -0.28113556q-0.21875 -0.28121185 -0.140625 -0.6718521q0.09375 -0.40626907 0.421875 -0.6875763q0.328125 -0.28131104 0.734375 -0.28138733q0.390625 -7.247925E-5 0.625 0.29676056q0.234375 0.28120804 0.140625 0.6718521q-0.078125 0.39064026 -0.4375 0.6719513q-0.34375 0.28131104 -0.734375 0.28138733zm8.84375 5.29525q1.515625 0.4840927 2.046875 1.0621185q0.53125 0.56240845 0.34375 1.468689q-0.25 1.1562958 -1.3125 1.9221191q-1.0625 0.75019073 -2.78125 0.75051117q-2.171875 3.9672852E-4 -3.328125 -1.3431396l1.015625 -1.2658081l0.015625 -0.01563263l0.015625 0.015625q0.421875 0.7499237 0.953125 1.1248245q0.546875 0.35927582 1.609375 0.35907745q1.015625 -1.8310547E-4 1.671875 -0.35968018q0.65625 -0.35949707 0.78125 -1.000145q0.109375 -0.5312729 
-0.28125 -0.8905716q-0.390625 -0.35930634 -1.515625 -0.74972534q-3.015625 -0.90569305 -2.65625 -2.6401367q0.21875 -1.0000381 1.15625 -1.5783386q0.9375 -0.57829285 2.453125 -0.57857513q1.15625 -2.1362305E-4 1.875 0.32778168q0.71875 0.3279953 1.234375 1.0153961l-0.96875 0.9376831l-0.015625 0.015625q-0.265625 -0.5937042 -0.921875 -0.93733215q-0.640625 -0.34362793 -1.390625 -0.3434906q-0.796875 1.449585E-4 -1.375 0.28150177q-0.578125 0.2813568 -0.671875 0.7813721q-0.109375 0.4687729 0.328125 0.85931396q0.453125 0.39054108 1.71875 0.7809372zm2.09375 -1.3910141q0.015625 -0.09375 0.140625 0.031227112l-0.078125 0.06251526l-0.0625 -0.09374237zm0.21875 -0.031288147q0.03125 0.07811737 0.015625 0.09375q-0.015625 0.015625 -0.046875 7.6293945E-6q-0.03125 -0.015617371 -0.046875 -0.03124237l0.078125 -0.06251526zm-6.09375 3.9386215q-0.015625 0.078125 -0.171875 3.0517578E-5l0.078125 -0.09376526l0.09375 0.07810974l0 0.015625zm-0.21875 0.06253815q-0.046875 -0.09374237 -0.03125 -0.09374237q0.03125 -7.6293945E-6 0.078125 0.031234741l-0.046875 0.06250763zm15.4375 1.6534119q-1.4375 0.906517 -2.90625 0.90678406q-1.421875 2.670288E-4 -1.828125 -0.8434143q-0.40625 -0.8592987 0.015625 -2.8125q0.0625 -0.31251526 0.265625 -1.0937958l0.765625 -2.79702l-1.875 3.4332275E-4l0.234375 -1.1094131l1.953125 -3.5858154E-4l0.609375 -2.2657394l1.546875 -0.2502823l0.1875 -0.015663147l0 0.109375q-0.171875 0.18753052 -0.265625 0.32817078q-0.09375 0.12502289 -0.171875 0.40628815l-0.5625 1.6875992l2.828125 -5.187988E-4l-0.234375 1.1094208l-2.90625 5.340576E-4l-0.78125 2.89077q-0.203125 0.7500305 -0.25 0.9844208q-0.3125 1.5000534 -0.09375 2.0312653q0.234375 0.5312042 1.0 0.5310669q0.5625 -1.0681152E-4 1.078125 -0.20332336q0.515625 -0.21884918 1.203125 -0.65647125l0.1875 1.0624619zm5.1875 0.9677963q-0.90625 1.6784668E-4 -1.609375 -0.51532745q-0.6875 -0.5154953 -0.96875 -1.5310745q-0.28125 -1.0311966 0.03125 -2.484375q0.3125 -1.484436 1.046875 -2.4689484q0.734375 -0.9845047 1.65625 -1.4534302q0.921875 -0.48454285 1.828125 -0.4847107q0.859375 -1.5258789E-4 1.40625 0.39037323q0.5625 0.3905182 0.765625 1.07798l1.09375 -5.125202l1.421875 -2.593994E-4l-0.03125 0.12500381q-0.140625 0.1250267 -0.203125 0.25003815q-0.046875 0.12501144 -0.109375 0.4531479l-2.171875 10.250397q-0.09375 0.45314026 -0.125 0.7500229q-0.03125 0.28125763 0.03125 0.5781174l-1.328125 2.4414062E-4q-0.0625 -0.29685974 -0.03125 -0.5781174q0.03125 -0.29688263 0.125 -0.7500229q-0.5625 0.7188568 -1.296875 1.1252365q-0.71875 0.39076233 -1.53125 0.3909073zm0.46875 -1.1719589q1.15625 -2.1362305E-4 1.890625 -0.90660095q0.734375 -0.9220047 1.0625 -2.4220657q0.3125 -1.515686 -0.078125 -2.4218597q-0.390625 -0.92180634 -1.578125 -0.9215851q-1.109375 1.9836426E-4 -1.890625 0.8440933q-0.78125 0.82826996 -1.078125 2.2658234q-0.34375 1.6406937 0.0625 2.6093674q0.421875 0.9530487 1.609375 0.95282745zm6.40625 0.9832001l0.234375 -1.0781708l2.1875 -4.043579E-4l1.34375 -6.359619l-2.0625 3.8146973E-4l0.234375 -1.0937958l3.40625 -6.2561035E-4l-1.578125 7.453415l2.0 -3.6621094E-4l-0.234375 1.0781631l-5.53125 0.0010223389zm4.96875 -10.313416q-0.390625 6.866455E-5 -0.609375 -0.28113556q-0.21875 -0.28121185 -0.140625 -0.6718521q0.09375 -0.40626907 0.421875 -0.6875763q0.328125 -0.28131104 0.734375 -0.28138733q0.390625 -7.247925E-5 0.625 0.29676056q0.234375 0.28120804 0.140625 0.6718521q-0.078125 0.39064026 -0.4375 0.6719513q-0.34375 0.28131104 -0.734375 0.28138733zm11.890625 3.5603104l0 0.015625q-0.453125 -0.49991608 -0.78125 -0.67173004q-0.328125 -0.17181396 -0.8125 -0.17172241q-0.671875 1.2207031E-4 
-1.34375 0.34399414q-0.65625 0.32824707 -1.1875 1.0158463q-0.515625 0.67196655 -0.734375 1.7032547l-0.953125 4.5314255l-1.359375 2.5177002E-4l1.8125 -8.547203l1.40625 -2.593994E-4l-0.375 1.5781937q0.53125 -0.8438492 1.359375 -1.3127518q0.84375 -0.4689026 1.71875 -0.46907043q1.375 -2.5177002E-4 2.0625 0.93712616l-0.8125 1.04702zm0 0.015625q0.09375 0.10935974 0.03125 0.09375q-0.046875 -0.015617371 -0.09375 -0.031234741l0.0625 -0.06251526zm-0.21875 0.06254578q0.015625 -0.06250763 0.0625 -0.04689026q0.046875 -7.6293945E-6 0.09375 0.04685974l-0.0625 0.09375763l-0.09375 -0.07810211l0 -0.015625z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m124.98425 40.490856c3.682846 -3.459755 6.026245 -16.741034 22.097107 -20.758532c16.070877 -4.017497 54.685913 -8.257217 74.328094 -3.3464565c19.642166 4.910761 36.270782 27.342522 43.524933 32.811024" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m124.98425 40.490856c3.6828613 -3.459755 6.026245 -16.741034 22.097107 -20.758532c16.070877 -4.017497 54.68593 -8.257218 74.328094 -3.3464565c9.82106 2.4553795 18.888779 9.29101 26.407974 16.196358c3.7595825 3.4526749 7.1320496 6.922779 10.018021 9.871494c0.72146606 0.7371788 1.4125671 1.4417686 2.0716858 2.1053543l0.594635 0.594059" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m259.38858 46.373383l4.46579 1.8382454l-2.23938 -4.27874z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m220.74016 15.71573c65.288284 0 317.17804 -2.5669298 391.7296 0c74.551636 2.566928 46.31671 12.834645 55.58008 15.401574" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m220.74016 15.71573c65.288284 0 317.17804 -2.5669298 391.7296 0c37.27588 1.2834635 48.85504 4.4921255 52.273132 7.7007875c0.42730713 0.401083 0.7270508 0.802166 0.9335327 1.1994877c0.10321045 0.1986618 0.1831665 0.3963833 0.24407959 0.59269524c0.015197754 0.04907608 0.02923584 0.09806633 0.042175293 0.14696121l0.030273438 0.12522316" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m664.44135 26.04712l3.1073608 3.6968708l-0.0040893555 -4.829342z" fill-rule="evenodd"/></g></svg>
\ No newline at end of file
diff -pruN 14.2.16-2/doc/cephfs/cephfs-io-path.rst 15.2.7-0ubuntu4/doc/cephfs/cephfs-io-path.rst
--- 14.2.16-2/doc/cephfs/cephfs-io-path.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/cephfs-io-path.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,49 @@
+=========================
+ Ceph File System IO Path
+=========================
+
+All file data in CephFS is stored as RADOS objects. CephFS clients can access
+RADOS directly to operate on file data. The MDS handles only metadata operations.
+
+To read from or write to a CephFS file, a client needs 'file read/write'
+capabilities for the corresponding inode. If the client does not hold the
+required capabilities, it sends a 'cap message' to the MDS describing what
+it wants, and the MDS issues the capabilities when possible. Once the client
+holds 'file read/write' capabilities, it can access RADOS directly to read
+and write file data. File data is stored as RADOS objects named
+<inode number>.<object index>; see the 'Data Striping' section of
+`Architecture`_ for more information. If a file is opened by only one client,
+the MDS also issues 'file cache/buffer' capabilities to that client. The
+'file cache' capability means that reads can be satisfied from the client
+cache; the 'file buffer' capability means that writes can be buffered in the client cache.
+
+
+.. ditaa::
+            +---------------------+
+            |     Application     |
+            +---------------------+
+              |
+              V
+            +---------------------+  Data IOs  +--------------------+
+            |    CephFS Library   | ---------> |       LibRados     |
+            +---------------------+            +--------------------+
+              |                                  |
+              | Metadata Operations              | Objects Read/Write
+              V                                  V
+            +---------------------+            +--------------------+
+            |         MDSs        | -=-------> |         OSDs       |
+            +---------------------+            +--------------------+
+
+
+            +----------------------+           +---------------------+
+            | CephFS kernel client | Data IOs  | Ceph kernel library |
+            |      (ceph.ko)       | --------> |     (libceph.ko)    |
+            +----------------------+           +---------------------+
+              |                                  |
+              | Metadata Operations              | Objects Read/Write
+              v                                  v
+            +---------------------+            +--------------------+
+            |         MDSs        | -=-------> |         OSDs       |
+            +---------------------+            +--------------------+
+
+.. _Architecture: ../architecture
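As a minimal sketch of the ``<inode number>.<object index>`` naming described in the cephfs-io-path.rst text above (the mount point ``/mnt/cephfs``, the file name and the data pool name ``cephfs_data`` are assumptions, not taken from the patch), the RADOS objects backing one file can be listed like this:

.. code:: bash

    # Sketch: list the RADOS objects that hold a single CephFS file's data.
    inode=$(stat -c %i /mnt/cephfs/somefile)      # inode number of the file
    printf -v prefix '%x' "$inode"                # object names start with the inode in hex
    rados -p cephfs_data ls | grep "^${prefix}\." # objects are named <inode>.<object index>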
diff -pruN 14.2.16-2/doc/cephfs/cephfs-journal-tool.rst 15.2.7-0ubuntu4/doc/cephfs/cephfs-journal-tool.rst
--- 14.2.16-2/doc/cephfs/cephfs-journal-tool.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/cephfs-journal-tool.rst	2020-11-30 19:58:30.000000000 +0000
@@ -6,7 +6,7 @@ Purpose
 -------
 
 If a CephFS journal has become damaged, expert intervention may be required
-to restore the filesystem to a working state.
+to restore the file system to a working state.
 
 The ``cephfs-journal-tool`` utility provides functionality to aid experts in
 examining, modifying, and extracting data from journals.
@@ -14,7 +14,7 @@ examining, modifying, and extracting dat
 .. warning::
 
     This tool is **dangerous** because it directly modifies internal
-    data structures of the filesystem.  Make backups, be careful, and
+    data structures of the file system.  Make backups, be careful, and
     seek expert advice.  If you are unsure, do not run this tool.
 
 Syntax
@@ -133,7 +133,7 @@ Actions:
 
 * ``get`` read the events from the log
 * ``splice`` erase events or regions in the journal
-* ``apply`` extract filesystem metadata from events and attempt to apply it to the metadata store.
+* ``apply`` extract file system metadata from events and attempt to apply it to the metadata store.
 
 Filtering:
 
diff -pruN 14.2.16-2/doc/cephfs/cephfs-shell.rst 15.2.7-0ubuntu4/doc/cephfs/cephfs-shell.rst
--- 14.2.16-2/doc/cephfs/cephfs-shell.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/cephfs-shell.rst	2020-11-30 19:58:30.000000000 +0000
@@ -3,7 +3,7 @@
 CephFS Shell
 =============
 
-The File System (FS) shell includes various shell-like commands that directly interact with the :term:`Ceph Filesystem`.
+The File System (FS) shell includes various shell-like commands that directly interact with the :term:`Ceph File System`.
 
 Usage :
 
@@ -14,6 +14,18 @@ Options :
     -b, --batch FILE      Process a batch file.
     -t, --test FILE       Test against transcript(s) in FILE
 
+
+.. note::
+
+    The latest version of the cmd2 module is required to run cephfs-shell. If
+    CephFS was installed from source, execute cephfs-shell in the build
+    directory. It can also be run from a virtualenv as follows:
+
+.. code:: bash
+
+    [build]$ virtualenv -p python3 venv && source venv/bin/activate && pip3 install cmd2
+    [build]$ source vstart_environment.sh && source venv/bin/activate && python3 ../src/tools/cephfs/cephfs-shell
+
 Commands
 ========
 
@@ -35,7 +47,7 @@ Options :
 put
 ---
 
-Copy a file/directory to Ceph Filesystem from Local Filesystem.
+Copy a file/directory to the Ceph File System from the local file system.
 
 Usage : 
     
@@ -55,13 +67,13 @@ Options :
 get
 ---
  
-Copy a file from Ceph Filesystem to Local Filesystem.
+Copy a file from the Ceph File System to the local file system.
 
 Usage : 
 
     get [options] <source_path> [target_path]
 
-* source_path - remote file/directory path which is to be copied to local filesystem.
+* source_path - remote file/directory path to be copied to the local file system.
     * if `.` copies all the file/directories in the remote working directory.
                     
 * target_path - local directory path where the files/directories are to be copied to.
@@ -226,18 +238,21 @@ Usage:
 * name - name of the alias being looked up, added, or replaced
 * value - what the alias will be resolved to (if adding or replacing) this can contain spaces and does not need to be quoted
 
-pyscript
---------
+run_pyscript
+------------
 
 Runs a python script file inside the console
 
 Usage: 
     
-    pyscript <script_path> [script_arguments]
+    run_pyscript <script_path> [script_arguments]
 
 * Console commands can be executed inside this script with cmd ("your command")
-  However, you cannot run nested "py" or "pyscript" commands from within this script
-  Paths or arguments that contain spaces must be enclosed in quotes
+  However, you cannot run nested "py" or "pyscript" commands from within this
+  script. Paths or arguments that contain spaces must be enclosed in quotes.
+
+.. note:: This command is available as ``pyscript`` for cmd2 versions 0.9.13
+   and earlier.
 
 py
 --
@@ -254,6 +269,10 @@ shortcuts
 
 Lists shortcuts (aliases) available
 
+Usage :
+
+    shortcuts
+
 history
 -------
 
@@ -296,10 +315,10 @@ Usage :
 
 * Call without arguments for a list of settable parameters with their values.
 
- Options :
- -h     show this help message and exit
- -a     display read-only settings as well
- -l     describe function of parameter
+Options :
+  -h     show this help message and exit
+  -a     display read-only settings as well
+  -l     describe function of parameter
 
 edit
 ----
@@ -312,18 +331,21 @@ Usage:
 
 * file_path - path to a file to open in editor
 
-load
-----
+run_script
+----------
 
 Runs commands in script file that is encoded as either ASCII or UTF-8 text.
+Each command in the script should be separated by a newline.
 
 Usage:  
     
-    load <file_path>
+    run_script <file_path>
+
 
 * file_path - a file path pointing to a script
 
-* Script should contain one command per line, just like command would betyped in console.
+.. note:: This command is available as ``load`` for cmd2 versions 0.9.13
+   and earlier.
 
 shell
 -----
@@ -337,12 +359,85 @@ Usage:
 locate
 ------
 
-Find an item in Filesystem
+Find an item in the File System
 
 Usage:
+
      locate [options] <name>
 
 Options :
   -c       Count number of items found
   -i       Ignore case 
 
+stat
+----
+
+Display file status.
+
+Usage :
+
+     stat [-h] <file_name> [file_name ...]
+
+Options :
+  -h     Shows the help message
+
+snap
+----
+
+Create or delete a snapshot
+
+Usage:
+
+     snap {create|delete} <snap_name> <dir_name>
+
+* snap_name - Snapshot name to be created or deleted
+
+* dir_name - directory under which snapshot should be created or deleted
+
+setxattr
+--------
+
+Set extended attribute for a file
+
+Usage :
+
+     setxattr [-h] <path> <name> <value>
+
+*  path - Path to the file
+
+*  name - Extended attribute name to set
+
+*  value - Extended attribute value to be set
+
+Options:
+  -h, --help   Shows the help message
+
+getxattr
+--------
+
+Get extended attribute value for the name associated with the path
+
+Usage :
+
+     getxattr [-h] <path> <name>
+
+*  path - Path to the file
+
+*  name - Extended attribute name to get
+
+Options:
+  -h, --help   Shows the help message
+
+listxattr
+---------
+
+List extended attribute names associated with the path
+
+Usage :
+
+     listxattr [-h] <path>
+
+*  path - Path to the file
+
+Options:
+  -h, --help   Shows the help message
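A short sketch of driving cephfs-shell non-interactively with the ``-b``/``--batch`` option and the commands documented above; the batch-file path, directory names and extended-attribute name are assumptions, and it presumes cephfs-shell is on the PATH (or run from the build directory as described in the note above):

.. code:: bash

    # Write an example batch file that exercises mkdir, put, setxattr,
    # getxattr and ls; all paths and names are illustrative only.
    cat > /tmp/cephfs-shell-batch.txt <<'EOF'
    mkdir /demo
    put /etc/hostname /demo/hostname
    setxattr /demo/hostname user.demo 1
    getxattr /demo/hostname user.demo
    ls /demo
    EOF
    # Process the batch file instead of starting an interactive shell.
    cephfs-shell -b /tmp/cephfs-shell-batch.txt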
diff -pruN 14.2.16-2/doc/cephfs/client-auth.rst 15.2.7-0ubuntu4/doc/cephfs/client-auth.rst
--- 14.2.16-2/doc/cephfs/client-auth.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/client-auth.rst	2020-11-30 19:58:30.000000000 +0000
@@ -2,7 +2,7 @@
 CephFS Client Capabilities
 ================================
 
-Use Ceph authentication capabilities to restrict your filesystem clients
+Use Ceph authentication capabilities to restrict your file system clients
 to the lowest possible level of authority needed.
 
 .. note::
@@ -10,6 +10,13 @@ to the lowest possible level of authorit
     Path restriction and layout modification restriction are new features
     in the Jewel release of Ceph.
 
+.. note::
+
+   Using Erasure Coded (EC) pools with CephFS is supported only with the
+   BlueStore backend. EC pools cannot be used as metadata pools, and
+   overwrites must be enabled on the data pools.
+
+
 Path restriction
 ================
 
@@ -27,9 +34,9 @@ Syntax
 To grant rw access to the specified directory only, we mention the specified
 directory while creating key for a client using the following syntax. ::
 
- ceph fs authorize *filesystem_name* client.*client_name* /*specified_directory* rw
+ ceph fs authorize *file_system_name* client.*client_name* /*specified_directory* rw
 
-For example, to restrict client ``foo`` to writing only in the ``bar`` directory of filesystem ``cephfs_a``, use ::
+For example, to restrict client ``foo`` to writing only in the ``bar`` directory of file system ``cephfs_a``, use ::
 
  ceph fs authorize cephfs_a client.foo / r /bar rw
 
@@ -47,10 +54,10 @@ root directory ::
  ceph fs authorize cephfs_a client.foo /bar rw
 
 Note that if a client's read access is restricted to a path, they will only
-be able to mount the filesystem when specifying a readable path in the
+be able to mount the file system when specifying a readable path in the
 mount command (see below).
 
-Supplying ``all`` or ``*`` as the filesystem name will grant access to every
+Supplying ``all`` or ``*`` as the file system name will grant access to every
 file system. Note that it is usually necessary to quote ``*`` to protect it from
 the shell.
 
@@ -61,7 +68,7 @@ directory while mounting using the follo
 
  ./ceph-fuse -n client.*client_name* *mount_path* -r *directory_to_be_mounted*
 
-for example, to restrict client ``foo`` to ``mnt/bar`` directory, we will use. ::
+For example, to restrict client ``foo`` to ``mnt/bar`` directory, we will use. ::
 
  ./ceph-fuse -n client.foo mnt -r /bar
 
@@ -72,7 +79,7 @@ By default, when a client is mounting a
 will be calculated from the quota on that sub-directory, rather than reporting
 the overall amount of space used on the cluster.
 
-If you would like the client to report the overall usage of the filesystem,
+If you would like the client to report the overall usage of the file system,
 and not just the quota usage on the sub-directory mounted, then set the
 following config option on the client:
 
@@ -81,7 +88,7 @@ following config option on the client:
     client quota df = false
 
 If quotas are not enabled, or no quota is set on the sub-directory mounted,
-then the overall usage of the filesystem will be reported irrespective of
+then the overall usage of the file system will be reported irrespective of
 the value of this setting.
 
 Layout and Quota restriction (the 'p' flag)
@@ -93,7 +100,7 @@ with a "ceph." prefix, as well as restri
 these fields (such as openc operations with layouts).
 
 For example, in the following snippet client.0 can modify layouts and quotas
-on the filesystem cephfs_a, but client.1 cannot.
+on the file system cephfs_a, but client.1 cannot.
 
 ::
 
@@ -118,7 +125,7 @@ Note that when capability string also co
 appear after it (all flags except 'rw' must be specified in alphabetical order).
 
 For example, in the following snippet client.0 can create or delete snapshots
-in the ``bar`` directory of filesystem ``cephfs_a``.
+in the ``bar`` directory of file system ``cephfs_a``.
 
 ::
 
diff -pruN 14.2.16-2/doc/cephfs/client-config-ref.rst 15.2.7-0ubuntu4/doc/cephfs/client-config-ref.rst
--- 14.2.16-2/doc/cephfs/client-config-ref.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/client-config-ref.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,6 +1,36 @@
-========================
- Client Config Reference
-========================
+Client Configuration
+====================
+
+Updating Client Configuration
+-----------------------------
+
+Certain client configurations can be applied at runtime. To check if a configuration option can be applied (i.e., take effect on a client) at runtime, use the `config help` command::
+
+   ceph config help debug_client
+    debug_client - Debug level for client
+    (str, advanced)
+    Default: 0/5
+    Can update at runtime: true
+
+    The value takes the form 'N' or 'N/M' where N and M are values between 0 and 99.  N is the debug level to log (all values below this are included), and M is the level to gather and buffer in memory.  In the event of a crash, the most recent items <= M are dumped to the log file.
+
+`config help` tells whether a given configuration option can be applied at runtime, along with its default value and a description.
+
+To update a configuration option at runtime, use the `config set` command::
+
+   ceph config set client debug_client 20/20
+
+Note that this changes the given configuration for all clients; a sketch of scoping the change to a single client follows below.
+
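+As a hedged sketch (the entity name ``client.foo`` is hypothetical), the same
+option can instead be scoped to a single client entity::
+
+   ceph config set client.foo debug_client 20/20
+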
+To check configured options use the `config get` command::
+
+   ceph config get client
+    WHO    MASK LEVEL    OPTION                    VALUE     RO 
+    client      advanced debug_client              20/20          
+    global      advanced osd_pool_default_min_size 1            
+    global      advanced osd_pool_default_size     3            
+
+Client Config Reference
+------------------------
 
 ``client acl type``
 
@@ -183,6 +213,12 @@
 :Type: Integer
 :Default: ``0``
 
+``fuse disable pagecache``
+
+:Description: If set to ``true``, kernel page cache is disabled for ceph-fuse mounts. When multiple clients read from or write to a file at the same time, readers may otherwise get stale data from the page cache. Due to a limitation of FUSE, ceph-fuse cannot disable the page cache dynamically.
+:Type: Boolean
+:Default: ``false``
+
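+A minimal sketch of setting ``fuse disable pagecache`` in ``ceph.conf`` on the
+client host (assuming the standard ``[client]`` section) might be::
+
+    [client]
+        fuse disable pagecache = true
+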
 Developer Options
 #################
 
diff -pruN 14.2.16-2/doc/cephfs/createfs.rst 15.2.7-0ubuntu4/doc/cephfs/createfs.rst
--- 14.2.16-2/doc/cephfs/createfs.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/createfs.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,17 +1,17 @@
-========================
-Create a Ceph filesystem
-========================
+=========================
+Create a Ceph file system
+=========================
 
 Creating pools
 ==============
 
-A Ceph filesystem requires at least two RADOS pools, one for data and one for metadata.
+A Ceph file system requires at least two RADOS pools, one for data and one for metadata.
 When configuring these pools, you might consider:
 
 - Using a higher replication level for the metadata pool, as any data loss in
-  this pool can render the whole filesystem inaccessible.
+  this pool can render the whole file system inaccessible.
 - Using lower-latency storage such as SSDs for the metadata pool, as this will
-  directly affect the observed latency of filesystem operations on clients.
+  directly affect the observed latency of file system operations on clients.
 - The data pool used to create the file system is the "default" data pool and
   the location for storing all inode backtrace information, used for hard link
   management and disaster recovery. For this reason, all inodes created in
@@ -23,13 +23,13 @@ When configuring these pools, you might
   hierarchy of directories and files (see also :ref:`file-layouts`).
 
 Refer to :doc:`/rados/operations/pools` to learn more about managing pools.  For
-example, to create two pools with default settings for use with a filesystem, you
+example, to create two pools with default settings for use with a file system, you
 might run the following commands:
 
 .. code:: bash
 
-    $ ceph osd pool create cephfs_data <pg_num>
-    $ ceph osd pool create cephfs_metadata <pg_num>
+    $ ceph osd pool create cephfs_data
+    $ ceph osd pool create cephfs_metadata
 
 Generally, the metadata pool will have at most a few gigabytes of data. For
 this reason, a smaller PG count is usually recommended. 64 or 128 is commonly
@@ -38,10 +38,10 @@ used in practice for large clusters.
 .. note:: The names of the file systems, metadata pools, and data pools can
           only have characters in the set [a-zA-Z0-9\_-.].
 
-Creating a filesystem
-=====================
+Creating a file system
+======================
 
-Once the pools are created, you may enable the filesystem using the ``fs new`` command:
+Once the pools are created, you may enable the file system using the ``fs new`` command:
 
 .. code:: bash
 
@@ -55,7 +55,7 @@ For example:
     $ ceph fs ls
     name: cephfs, metadata pool: cephfs_metadata, data pools: [cephfs_data ]
 
-Once a filesystem has been created, your MDS(s) will be able to enter
+Once a file system has been created, your MDS(s) will be able to enter
 an *active* state.  For example, in a single MDS system:
 
 .. code:: bash
@@ -63,8 +63,8 @@ an *active* state.  For example, in a si
     $ ceph mds stat
     cephfs-1/1/1 up {0=a=up:active}
 
-Once the filesystem is created and the MDS is active, you are ready to mount
-the filesystem.  If you have created more than one filesystem, you will
+Once the file system is created and the MDS is active, you are ready to mount
+the file system.  If you have created more than one file system, you will
 choose which to use when mounting.
 
 	- `Mount CephFS`_
@@ -73,8 +73,8 @@ choose which to use when mounting.
 .. _Mount CephFS: ../../cephfs/kernel
 .. _Mount CephFS as FUSE: ../../cephfs/fuse
 
-If you have created more than one filesystem, and a client does not
-specify a filesystem when mounting, you can control which filesystem
+If you have created more than one file system, and a client does not
+specify a file system when mounting, you can control which file system
 they will see by using the `ceph fs set-default` command.
 
 Using Erasure Coded pools with CephFS
diff -pruN 14.2.16-2/doc/cephfs/dirfrags.rst 15.2.7-0ubuntu4/doc/cephfs/dirfrags.rst
--- 14.2.16-2/doc/cephfs/dirfrags.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/dirfrags.rst	2020-11-30 19:58:30.000000000 +0000
@@ -18,6 +18,8 @@ remain conservative about creating very
 have a resource cost in situations such as a CephFS client listing
 the directory, where all the fragments must be loaded at once.
 
+.. tip:: The root directory cannot be fragmented.
+
 All directories are initially created as a single fragment.  This fragment
 may be *split* to divide up the directory into more fragments, and these
 fragments may be *merged* to reduce the number of fragments in the directory.
diff -pruN 14.2.16-2/doc/cephfs/disaster-recovery-experts.rst 15.2.7-0ubuntu4/doc/cephfs/disaster-recovery-experts.rst
--- 14.2.16-2/doc/cephfs/disaster-recovery-experts.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/disaster-recovery-experts.rst	2020-11-30 19:58:30.000000000 +0000
@@ -12,7 +12,7 @@ Advanced: Metadata repair tools
     The tools mentioned here can easily cause damage as well as fixing it.
 
     It is essential to understand exactly what has gone wrong with your
-    filesystem before attempting to repair it.
+    file system before attempting to repair it.
 
     If you do not have access to professional support for your cluster,
     consult the ceph-users mailing list or the #ceph IRC channel.
@@ -68,7 +68,10 @@ truncate it like so:
 
 ::
 
-    cephfs-journal-tool journal reset
+    cephfs-journal-tool [--rank=N] journal reset
+
+Specify the MDS rank using the ``--rank`` option when the file system has or
+had multiple active MDS daemons.
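+
+For example, a minimal sketch assuming a file system named ``cephfs`` and rank
+``0`` (recent releases take the rank as ``<fs_name>:<rank>``; adjust to your
+version)::
+
+    cephfs-journal-tool --rank=cephfs:0 journal reset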
 
 .. warning::
 
@@ -98,7 +101,7 @@ also need to reset the other tables then
 MDS map reset
 -------------
 
-Once the in-RADOS state of the filesystem (i.e. contents of the metadata pool)
+Once the in-RADOS state of the file system (i.e. contents of the metadata pool)
 is somewhat recovered, it may be necessary to update the MDS map to reflect
 the contents of the metadata pool.  Use the following command to reset the MDS
 map to a single MDS:
@@ -200,8 +203,8 @@ Using an alternate metadata pool for rec
    There has not been extensive testing of this procedure. It should be
    undertaken with great care.
 
-If an existing filesystem is damaged and inoperative, it is possible to create
-a fresh metadata pool and attempt to reconstruct the filesystem metadata
+If an existing file system is damaged and inoperative, it is possible to create
+a fresh metadata pool and attempt to reconstruct the file system metadata
 into this new pool, leaving the old metadata in place. This could be used to
 make a safer attempt at recovery since the existing metadata pool would not be
 overwritten.
@@ -219,7 +222,7 @@ it with empty file system data structure
 ::
 
     ceph fs flag set enable_multiple true --yes-i-really-mean-it
-    ceph osd pool create recovery <pg-num> replicated <crush-rule-name>
+    ceph osd pool create recovery replicated <crush-rule-name>
     ceph fs new recovery-fs recovery <data pool> --allow-dangerous-metadata-overlay
     cephfs-data-scan init --force-init --filesystem recovery-fs --alternate-pool recovery
     ceph fs reset recovery-fs --yes-i-really-mean-it
@@ -232,11 +235,11 @@ results to the alternate pool:
 
 ::
 
-    cephfs-data-scan scan_extents --alternate-pool recovery --filesystem <original filesystem name> <original data pool name>
-    cephfs-data-scan scan_inodes --alternate-pool recovery --filesystem <original filesystem name> --force-corrupt --force-init <original data pool name>
+    cephfs-data-scan scan_extents --alternate-pool recovery --filesystem <original file system name> <original data pool name>
+    cephfs-data-scan scan_inodes --alternate-pool recovery --filesystem <original file system name> --force-corrupt --force-init <original data pool name>
     cephfs-data-scan scan_links --filesystem recovery-fs
 
-If the damaged filesystem contains dirty journal data, it may be recovered next
+If the damaged file system contains dirty journal data, it may be recovered next
 with:
 
 ::
@@ -247,8 +250,17 @@ with:
 After recovery, some recovered directories will have incorrect statistics.
 Ensure the parameters mds_verify_scatter and mds_debug_scatterstat are set
 to false (the default) to prevent the MDS from checking the statistics, then
-run a forward scrub to repair them. Ensure you have an MDS running and issue:
+run a forward :doc:`scrub </cephfs/scrub>` to repair them. Ensure you have an
+MDS running and issue:
 
 ::
 
     ceph tell mds.a scrub start / recursive repair
+
+.. note::
+
+    In Nautilus and later versions, the ``tell`` interface scrub command is
+    preferred over ``scrub_path``. Older versions support only the
+    ``scrub_path`` asok command. Example::
+
+        ceph daemon mds.a scrub_path / recursive repair
diff -pruN 14.2.16-2/doc/cephfs/disaster-recovery.rst 15.2.7-0ubuntu4/doc/cephfs/disaster-recovery.rst
--- 14.2.16-2/doc/cephfs/disaster-recovery.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/disaster-recovery.rst	2020-11-30 19:58:30.000000000 +0000
@@ -6,7 +6,7 @@ Disaster recovery
 Metadata damage and repair
 --------------------------
 
-If a filesystem has inconsistent or missing metadata, it is considered
+If a file system has inconsistent or missing metadata, it is considered
 *damaged*.  You may find out about damage from a health message, or in some
 unfortunate cases from an assertion in a running MDS daemon.
 
@@ -14,7 +14,7 @@ Metadata damage can result either from d
 layer (e.g. multiple disk failures that lose all copies of a PG), or from
 software bugs.
 
-CephFS includes some tools that may be able to recover a damaged filesystem,
+CephFS includes some tools that may be able to recover a damaged file system,
 but to use them safely requires a solid understanding of CephFS internals.
 The documentation for these potentially dangerous operations is on a
 separate page: :ref:`disaster-recovery-experts`.
@@ -22,7 +22,7 @@ separate page: :ref:`disaster-recovery-e
 Data pool damage (files affected by lost data PGs)
 --------------------------------------------------
 
-If a PG is lost in a *data* pool, then the filesystem will continue
+If a PG is lost in a *data* pool, then the file system will continue
 to operate normally, but some parts of some files will simply
 be missing (reads will return zeros).
 
@@ -56,6 +56,6 @@ The output will be a list of paths to po
 per line.
 
 Note that this command acts as a normal CephFS client to find all the
-files in the filesystem and read their layouts, so the MDS must be
+files in the file system and read their layouts, so the MDS must be
 up and running.
 
diff -pruN 14.2.16-2/doc/cephfs/dynamic-metadata-management.rst 15.2.7-0ubuntu4/doc/cephfs/dynamic-metadata-management.rst
--- 14.2.16-2/doc/cephfs/dynamic-metadata-management.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/dynamic-metadata-management.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,90 @@
+==================================
+CephFS Dynamic Metadata Management
+==================================
+Metadata operations usually make up more than 50 percent of all
+file system operations. Metadata also scales in a more complex
+fashion than storage, which scales I/O throughput roughly linearly,
+because file system metadata is hierarchical and interdependent. In
+CephFS the metadata workload is therefore decoupled from the data
+workload to avoid placing unnecessary strain on the RADOS cluster,
+and is handled by a cluster of Metadata Servers (MDSs).
+CephFS distributes metadata across MDSs via `Dynamic Subtree Partitioning <https://ceph.com/wp-content/uploads/2016/08/weil-mds-sc04.pdf>`__.
+
+Dynamic Subtree Partitioning
+----------------------------
+In traditional subtree partitioning, subtrees of the file system
+hierarchy are assigned to individual MDSs. This metadata distribution
+strategy provides good hierarchical locality, linear growth of the
+cache, horizontal scaling across MDSs, and a fairly good distribution
+of metadata across MDSs.
+
+.. image:: subtree-partitioning.svg
+
+The problem with traditional subtree partitioning is that workload
+growth by depth (within a single MDS) leads to a hotspot of activity.
+This prevents vertical scaling and leaves non-busy MDSs underutilized.
+
+This led to the adoption of a more dynamic way of handling
+metadata: Dynamic Subtree Partitioning, where load-intensive portions
+of the directory hierarchy are migrated from busy MDSs to less busy MDSs.
+
+This strategy ensures that activity hotspots are relieved as they
+appear and so leads to vertical scaling of the metadata workload in
+addition to horizontal scaling.
+
+Export Process During Subtree Migration
+---------------------------------------
+
+Once the exporter verifies that the subtree may be exported (the cluster is
+not degraded and the subtree root is not frozen), the subtree root
+directory is temporarily auth pinned, the subtree freeze is initiated,
+and the exporter is committed to the subtree migration, barring an
+intervening failure of the importer or itself.
+
+The MExportDiscover message is exchanged to ensure that the inode for the
+base directory being exported is open on the destination node. It is
+auth pinned by the importer to prevent it from being trimmed. This occurs
+before the exporter completes the freeze of the subtree to ensure that
+the importer is able to replicate the necessary metadata. When the
+exporter receives the MDiscoverAck, it allows the freeze to proceed by
+removing its temporary auth pin.
+
+A warning stage occurs only if the base subtree directory is open by
+nodes other than the importer and exporter. If it is not, then this
+implies that no metadata within or nested beneath the subtree is
+replicated by any node other than the importer and exporter. If it is,
+then an MExportWarning message informs any bystanders that the
+authority for the region is temporarily ambiguous, and lists both the
+exporter and importer as authoritative MDS nodes. In particular,
+bystanders who are trimming items from their cache must send
+MCacheExpire messages to both the old and new authorities. This is
+necessary to ensure that the surviving authority reliably receives all
+expirations even if the importer or exporter fails. While the subtree
+is frozen (on both the importer and exporter), expirations will not be
+immediately processed; instead, they will be queued until the region
+is unfrozen and it can be determined that the node is or is not
+authoritative.
+
+The exporter then packages an MExport message containing all metadata
+of the subtree and flags the objects as non-authoritative. The MExport message sends
+the actual subtree metadata to the importer. Upon receipt, the
+importer inserts the data into its cache, marks all objects as
+authoritative, and logs a copy of all metadata in an EImportStart
+journal message. Once that has safely flushed, it replies with an
+MExportAck. The exporter can now log an EExport journal entry, which
+ultimately specifies that the export was a success. In the presence
+of failures, it is the existence of the EExport entry only that
+disambiguates authority during recovery.
+
+Once logged, the exporter will send an MExportNotify to any
+bystanders, informing them that the authority is no longer ambiguous
+and cache expirations should be sent only to the new authority (the
+importer). Once these are acknowledged back to the exporter,
+implicitly flushing the bystander to exporter message streams of any
+stray expiration notices, the exporter unfreezes the subtree, cleans
+up its migration-related state, and sends a final MExportFinish to the
+importer. Upon receipt, the importer logs an EImportFinish(true)
+(noting locally that the export was indeed a success), unfreezes its
+subtree, processes any queued cache expirations, and cleans up its
+state.
diff -pruN 14.2.16-2/doc/cephfs/eviction.rst 15.2.7-0ubuntu4/doc/cephfs/eviction.rst
--- 14.2.16-2/doc/cephfs/eviction.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/eviction.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,14 +1,14 @@
 
-===============================
-Ceph filesystem client eviction
-===============================
+================================
+Ceph file system client eviction
+================================
 
-When a filesystem client is unresponsive or otherwise misbehaving, it
-may be necessary to forcibly terminate its access to the filesystem.  This
+When a file system client is unresponsive or otherwise misbehaving, it
+may be necessary to forcibly terminate its access to the file system.  This
 process is called *eviction*.
 
 Evicting a CephFS client prevents it from communicating further with MDS
-daemons and OSD daemons.  If a client was doing buffered IO to the filesystem,
+daemons and OSD daemons.  If a client was doing buffered IO to the file system,
 any un-flushed data will be lost.
 
 Clients may either be evicted automatically (if they fail to communicate
diff -pruN 14.2.16-2/doc/cephfs/experimental-features.rst 15.2.7-0ubuntu4/doc/cephfs/experimental-features.rst
--- 14.2.16-2/doc/cephfs/experimental-features.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/experimental-features.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,16 +1,17 @@
-
+=====================
 Experimental Features
 =====================
 
-CephFS includes a number of experimental features which are not fully stabilized
-or qualified for users to turn on in real deployments. We generally do our best
-to clearly demarcate these and fence them off so they cannot be used by mistake.
-
-Some of these features are closer to being done than others, though. We describe
-each of them with an approximation of how risky they are and briefly describe
-what is required to enable them. Note that doing so will *irrevocably* flag maps
-in the monitor as having once enabled this flag to improve debugging and
-support processes.
+CephFS includes a number of experimental features which are not fully
+stabilized or qualified for users to turn on in real deployments. We generally
+do our best to clearly demarcate these and fence them off so they cannot be
+used by mistake.
+
+Some of these features are closer to being done than others, though. We
+describe each of them with an approximation of how risky they are and briefly
+describe what is required to enable them. Note that doing so will
+*irrevocably* flag maps in the monitor as having once enabled this flag to
+improve debugging and support processes.
 
 Inline data
 -----------
@@ -23,6 +24,9 @@ failures within it are unlikely to make
 Inline data has always been off by default and requires setting
 the ``inline_data`` flag.
 
+Inline data has been declared deprecated for the Octopus release, and will
+likely be removed altogether in the Q release.
+
 Mantle: Programmable Metadata Load Balancer
 -------------------------------------------
 
@@ -39,7 +43,7 @@ writing, but there is insufficient testi
 every expansion of testing has generally revealed new issues. If you do enable
 snapshots and experience failure, manual intervention will be needed.
 
-Snapshots are known not to work properly with multiple filesystems (below) in
+Snapshots are known not to work properly with multiple file systems (below) in
 some cases. Specifically, if you share a pool for multiple FSes and delete
 a snapshot in one FS, expect to lose snapshotted file data in any other FS using
 snapshots. See the :doc:`/dev/cephfs-snapshots` page for more information.
@@ -49,25 +53,25 @@ to 400 snapshots (http://tracker.ceph.co
 
 Snapshotting was blocked off with the ``allow_new_snaps`` flag prior to Mimic.
 
-Multiple filesystems within a Ceph cluster
-------------------------------------------
+Multiple File Systems within a Ceph Cluster
+-------------------------------------------
 Code was merged prior to the Jewel release which enables administrators
-to create multiple independent CephFS filesystems within a single Ceph cluster.
-These independent filesystems have their own set of active MDSes, cluster maps,
+to create multiple independent CephFS file systems within a single Ceph cluster.
+These independent file systems have their own set of active MDSes, cluster maps,
 and data. But the feature required extensive changes to data structures which
 are not yet fully qualified, and has security implications which are not all
 apparent nor resolved.
 
 There are no known bugs, but any failures which do result from having multiple
-active filesystems in your cluster will require manual intervention and, so far,
-will not have been experienced by anybody else -- knowledgeable help will be
-extremely limited. You also probably do not have the security or isolation
+active file systems in your cluster will require manual intervention and, so
+far, will not have been experienced by anybody else -- knowledgeable help will
+be extremely limited. You also probably do not have the security or isolation
 guarantees you want or think you have upon doing so.
 
-Note that snapshots and multiple filesystems are *not* tested in combination
+Note that snapshots and multiple file systems are *not* tested in combination
 and may not work together; see above.
 
-Multiple filesystems were available starting in the Jewel release candidates
+Multiple file systems were available starting in the Jewel release candidates
 but must be turned on via the ``enable_multiple`` flag until declared stable.
 
 LazyIO
@@ -83,29 +87,24 @@ Directory Fragmentation
 -----------------------
 
 Directory fragmentation was considered experimental prior to the *Luminous*
-(12.2.x).  It is now enabled by default on new filesystems.  To enable directory
-fragmentation on filesystems created with older versions of Ceph, set
-the ``allow_dirfrags`` flag on the filesystem:
+(12.2.x).  It is now enabled by default on new file systems.  To enable
+directory fragmentation on file systems created with older versions of Ceph,
+set the ``allow_dirfrags`` flag on the file system::
 
-::
-
-    ceph fs set <filesystem name> allow_dirfrags 1
+    ceph fs set <file system name> allow_dirfrags 1
 
 Multiple active metadata servers
 --------------------------------
 
 Prior to the *Luminous* (12.2.x) release, running multiple active metadata
-servers within a single filesystem was considered experimental.  Creating
+servers within a single file system was considered experimental.  Creating
 multiple active metadata servers is now permitted by default on new
-filesystems.
-
-Filesystems created with older versions of Ceph still require explicitly
-enabling multiple active metadata servers as follows:
+file systems.
 
-::
+File systems created with older versions of Ceph still require explicitly
+enabling multiple active metadata servers as follows::
 
-    ceph fs set <filesystem name> allow_multimds 1
+    ceph fs set <file system name> allow_multimds 1
 
 Note that the default size of the active mds cluster (``max_mds``) is
 still set to 1 initially.
-
diff -pruN 14.2.16-2/doc/cephfs/fs-nfs-exports.rst 15.2.7-0ubuntu4/doc/cephfs/fs-nfs-exports.rst
--- 14.2.16-2/doc/cephfs/fs-nfs-exports.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/fs-nfs-exports.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,155 @@
+=======================
+CephFS Exports over NFS
+=======================
+
+CephFS namespaces can be exported over NFS protocol using the
+`NFS-Ganesha NFS server <https://github.com/nfs-ganesha/nfs-ganesha/wiki>`_.
+
+Requirements
+============
+
+-  Latest Ceph file system with mgr enabled
+-  'nfs-ganesha', 'nfs-ganesha-ceph', 'nfs-ganesha-rados-grace' and
+   'nfs-ganesha-rados-urls' packages (version 3.3 and above)
+
+Create NFS Ganesha Cluster
+==========================
+
+.. code:: bash
+
+    $ ceph nfs cluster create <type=cephfs> <clusterid> [<placement>]
+
+This creates a common recovery pool for all Ganesha daemons, a new user based
+on the cluster_id, and a common ganesha config RADOS object.
+
+Here ``type`` is the export type and ``placement`` specifies the cluster size
+and hosts. For more details on the placement specification refer to the
+`orchestrator doc
+<https://docs.ceph.com/docs/master/mgr/orchestrator/#placement-specification>`_.
+Currently only the CephFS export type is supported.
+
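+For example, a sketch with a hypothetical cluster name (placement is omitted,
+so the orchestrator default placement is used):
+
+.. code:: bash
+
+    $ ceph nfs cluster create cephfs mynfs
+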
+Update NFS Ganesha Cluster
+==========================
+
+.. code:: bash
+
+    $ ceph nfs cluster update <clusterid> <placement>
+
+This updates the deployed cluster according to the placement value.
+
+Delete NFS Ganesha Cluster
+==========================
+
+.. code:: bash
+
+    $ ceph nfs cluster delete <clusterid>
+
+This deletes the deployed cluster.
+
+List NFS Ganesha Cluster
+========================
+
+.. code:: bash
+
+    $ ceph nfs cluster ls
+
+This lists deployed clusters.
+
+Show NFS Ganesha Cluster Information
+====================================
+
+.. code:: bash
+
+    $ ceph nfs cluster info [<clusterid>]
+
+This displays the IP address and port of the deployed cluster.
+
+Set Customized Ganesha Configuration
+====================================
+
+.. code:: bash
+
+    $ ceph nfs cluster config set <clusterid> -i <config_file>
+
+With this, the NFS cluster will use the specified config, which takes
+precedence over the default config blocks.
+
+Reset Ganesha Configuration
+===========================
+
+.. code:: bash
+
+    $ ceph nfs cluster config reset <clusterid>
+
+This removes the user defined configuration.
+
+Create CephFS Export
+====================
+
+.. code:: bash
+
+    $ ceph nfs export create cephfs <fsname> <clusterid> <binding> [--readonly] [--path=/path/in/cephfs]
+
+This creates export RADOS objects containing the export block. Here ``binding``
+is the pseudo root name and the type is the export type.
+
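+For example, a sketch using hypothetical names (file system ``myfs``, cluster
+``mynfs``, pseudo root ``/cephfs``):
+
+.. code:: bash
+
+    $ ceph nfs export create cephfs myfs mynfs /cephfs --path=/
+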
+Delete CephFS Export
+====================
+
+.. code:: bash
+
+    $ ceph nfs export delete <clusterid> <binding>
+
+This deletes an export from the cluster based on the pseudo root name (binding).
+
+List CephFS Export
+==================
+
+.. code:: bash
+
+    $ ceph nfs export ls <clusterid> [--detailed]
+
+This lists the exports for a cluster. With the ``--detailed`` option enabled it
+shows the entire export block.
+
+Get CephFS Export
+=================
+
+.. code:: bash
+
+    $ ceph nfs export get <clusterid> <binding>
+
+This displays the export block for a cluster based on the pseudo root name (binding).
+
+Configuring NFS-Ganesha to export CephFS with vstart
+====================================================
+
+1) Using cephadm
+
+    .. code:: bash
+
+        $ MDS=1 MON=1 OSD=3 NFS=1 ../src/vstart.sh -n -d --cephadm
+
+    This deploys only a single ganesha daemon with vstart, on the default ganesha port.
+
+2) Using test orchestrator
+
+    .. code:: bash
+
+       $ MDS=1 MON=1 OSD=3 NFS=1 ../src/vstart.sh -n -d
+
+    This can deploy multiple ganesha daemons on random ports, but requires the
+    ganesha packages to be installed.
+
+Here ``NFS`` is the number of NFS-Ganesha clusters to be created.
+
+Mount
+=====
+
+After the exports are successfully created and the Ganesha daemons are no
+longer in the grace period, the exports can be mounted with
+
+.. code:: bash
+
+    $ mount -t nfs -o port=<ganesha-port> <ganesha-host-name>:<ganesha-pseudo-path> <mount-point>
+
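+For instance, a sketch with hypothetical host and pseudo path names (the
+default NFS port 2049 is assumed):
+
+.. code:: bash
+
+    $ mount -t nfs -o port=2049 ganesha.example.com:/cephfs /mnt/cephfs-nfs
+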
+.. note:: Only NFS v4.0+ is supported.
diff -pruN 14.2.16-2/doc/cephfs/fstab.rst 15.2.7-0ubuntu4/doc/cephfs/fstab.rst
--- 14.2.16-2/doc/cephfs/fstab.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/fstab.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,47 +0,0 @@
-========================================
- Mount CephFS in your File Systems Table
-========================================
-
-If you mount CephFS in your file systems table, the Ceph file system will mount
-automatically on startup. 
-
-Kernel Driver
-=============
-
-To mount CephFS in your file systems table as a kernel driver, add the
-following to ``/etc/fstab``::
-
-	{ipaddress}:{port}:/ {mount}/{mountpoint} {filesystem-name}	[name=username,secret=secretkey|secretfile=/path/to/secretfile],[{mount.options}]
-
-For example:: 
-
-	10.10.10.10:6789:/     /mnt/ceph    ceph    name=admin,noatime,_netdev    0       2
-	
-The default for the ``name=`` parameter is ``guest``. If the ``secret`` or
-``secretfile`` options are not specified then the mount helper will attempt to
-find a secret for the given ``name`` in one of the configured keyrings.
- 
-See `User Management`_ for details.
-   
-   
-FUSE
-====
-
-To mount CephFS in your file systems table as a filesystem in user space, add the
-following to ``/etc/fstab``::
-
-       #DEVICE PATH       TYPE      OPTIONS
-       none    /mnt/ceph  fuse.ceph ceph.id={user-ID}[,ceph.conf={path/to/conf.conf}],_netdev,defaults  0 0
-
-For example::
-
-       none    /mnt/ceph  fuse.ceph ceph.id=myuser,_netdev,defaults  0 0
-       none    /mnt/ceph  fuse.ceph ceph.id=myuser,ceph.conf=/etc/ceph/foo.conf,_netdev,defaults  0 0
-
-Ensure you use the ID (e.g., ``admin``, not ``client.admin``). You can pass any valid 
-``ceph-fuse`` option to the command line this way.
-
-See `User Management`_ for details.
-
-
-.. _User Management: ../../rados/operations/user-management/
diff -pruN 14.2.16-2/doc/cephfs/fs-volumes.rst 15.2.7-0ubuntu4/doc/cephfs/fs-volumes.rst
--- 14.2.16-2/doc/cephfs/fs-volumes.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/fs-volumes.rst	2020-11-30 19:58:30.000000000 +0000
@@ -5,7 +5,7 @@ FS volumes and subvolumes
 
 A  single source of truth for CephFS exports is implemented in the volumes
 module of the :term:`Ceph Manager` daemon (ceph-mgr). The OpenStack shared
-file system service (manila_), Ceph Containter Storage Interface (CSI_),
+file system service (manila_), Ceph Container Storage Interface (CSI_),
 storage administrators among others can use the common CLI provided by the
 ceph-mgr volumes module to manage the CephFS exports.
 
@@ -43,11 +43,11 @@ FS Volumes
 
 Create a volume using::
 
-    $ ceph fs volume create <vol_name>
+    $ ceph fs volume create <vol_name> [<placement>]
 
-This creates a CephFS file sytem and its data and metadata pools. It also tries
-to create MDSes for the filesytem using the enabled ceph-mgr orchestrator
-module  (see :doc:`/mgr/orchestrator_cli`) , e.g., rook.
+This creates a CephFS file system and its data and metadata pools. It also
+tries to create MDSes for the file system using the enabled ceph-mgr
+orchestrator module (see :doc:`/mgr/orchestrator`), e.g., rook.
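+
+For example, a sketch with a hypothetical volume name::
+
+    $ ceph fs volume create myvol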
 
 Remove a volume using::
 
@@ -65,7 +65,7 @@ FS Subvolume groups
 
 Create a subvolume group using::
 
-    $ ceph fs subvolumegroup create <vol_name> <group_name> [--pool_layout <data_pool_name>] [--uid <uid>] [--gid <gid>] [--mode <octal_mode>]
+    $ ceph fs subvolumegroup create <vol_name> <group_name> [--pool_layout <data_pool_name> --uid <uid> --gid <gid> --mode <octal_mode>]
 
 The command succeeds even if the subvolume group already exists.
 
@@ -91,8 +91,12 @@ List subvolume groups using::
 
     $ ceph fs subvolumegroup ls <vol_name>
 
-.. note:: Subvolume group snapshot feature is no longer supported in nautilus CephFS (existing group
-          snapshots can still be listed and deleted)
+Create a snapshot (see :doc:`/cephfs/experimental-features`) of a
+subvolume group using::
+
+    $ ceph fs subvolumegroup snapshot create <vol_name> <group_name> <snap_name>
+
+This implicitly snapshots all the subvolumes under the subvolume group.
 
 Remove a snapshot of a subvolume group using::
 
@@ -111,7 +115,7 @@ FS Subvolumes
 
 Create a subvolume using::
 
-    $ ceph fs subvolume create <vol_name> <subvol_name> [--size <size_in_bytes>] [--group_name <subvol_group_name>] [--pool_layout <data_pool_name>] [--uid <uid>] [--gid <gid>] [--mode <octal_mode>] [--namespace-isolated]
+    $ ceph fs subvolume create <vol_name> <subvol_name> [--size <size_in_bytes> --group_name <subvol_group_name> --pool_layout <data_pool_name> --uid <uid> --gid <gid> --mode <octal_mode> --namespace-isolated]
 
 
 The command succeeds even if the subvolume already exists.
@@ -126,24 +130,16 @@ its parent directory and no size limit.
 
 Remove a subvolume using::
 
-    $ ceph fs subvolume rm <vol_name> <subvol_name> [--group_name <subvol_group_name>] [--force] [--retain-snapshots]
+    $ ceph fs subvolume rm <vol_name> <subvol_name> [--group_name <subvol_group_name> --force]
 
 
 The command removes the subvolume and its contents. It does this in two steps.
-First, it moves the subvolume to a trash folder, and then asynchronously purges
+First, it moves the subvolume to a trash folder, and then asynchronously purges
 its contents.
 
 The removal of a subvolume fails if it has snapshots, or is non-existent.
 '--force' flag allows the non-existent subvolume remove command to succeed.
 
-A subvolume can be removed retaining existing snapshots of the subvolume using the
-'--retain-snapshots' option. If snapshots are retained, the subvolume is considered
-empty for all operations not involving the retained snapshots.
-
-.. note:: Snapshot retained subvolumes can be recreated using 'ceph fs subvolume create'
-
-.. note:: Retained snapshots can be used as a clone source to recreate the subvolume, or clone to a newer subvolume.
-
 Resize a subvolume using::
 
     $ ceph fs subvolume resize <vol_name> <subvol_name> <new_size> [--group_name <subvol_group_name>] [--no_shrink]
@@ -179,32 +175,17 @@ The output format is json and contains f
 * type: subvolume type indicating whether it's clone or subvolume
 * pool_namespace: RADOS namespace of the subvolume
 * features: features supported by the subvolume
-* state: current state of the subvolume
-
-If a subvolume has been removed retaining its snapshots, the output only contains fields as follows.
-
-* type: subvolume type indicating whether it's clone or subvolume
-* features: features supported by the subvolume
-* state: current state of the subvolume
 
 The subvolume "features" are based on the internal version of the subvolume and is a list containing
 a subset of the following features,
 
 * "snapshot-clone": supports cloning using a subvolumes snapshot as the source
 * "snapshot-autoprotect": supports automatically protecting snapshots, that are active clone sources, from deletion
-* "snapshot-retention": supports removing subvolume contents, retaining any existing snapshots
-
-The subvolume "state" is based on the current state of the subvolume and contains one of the following values.
-
-* "complete": subvolume is ready for all operations
-* "snapshot-retained": subvolume is removed but its snapshots are retained
 
 List subvolumes using::
 
     $ ceph fs subvolume ls <vol_name> [--group_name <subvol_group_name>]
 
-.. note:: subvolumes that are removed but have snapshots retained, are also listed.
-
 Create a snapshot of a subvolume using::
 
     $ ceph fs subvolume snapshot create <vol_name> <subvol_name> <snap_name> [--group_name <subvol_group_name>]
@@ -212,13 +193,11 @@ Create a snapshot of a subvolume using::
 
 Remove a snapshot of a subvolume using::
 
-    $ ceph fs subvolume snapshot rm <vol_name> <subvol_name> <snap_name> [--group_name <subvol_group_name>] [--force]
+    $ ceph fs subvolume snapshot rm <vol_name> <subvol_name> <snap_name> [--group_name <subvol_group_name> --force]
 
 Using the '--force' flag allows the command to succeed that would otherwise
 fail if the snapshot did not exist.
 
-.. note:: if the last snapshot within a snapshot retained subvolume is removed, the subvolume is also removed
-
 List snapshots of a subvolume using::
 
     $ ceph fs subvolume snapshot ls <vol_name> <subvol_name> [--group_name <subvol_group_name>]
diff -pruN 14.2.16-2/doc/cephfs/full.rst 15.2.7-0ubuntu4/doc/cephfs/full.rst
--- 14.2.16-2/doc/cephfs/full.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/full.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,18 +1,18 @@
 
-Handling a full Ceph filesystem
-===============================
+Handling a full Ceph file system
+================================
 
 When a RADOS cluster reaches its ``mon_osd_full_ratio`` (default
 95%) capacity, it is marked with the OSD full flag.  This flag causes
 most normal RADOS clients to pause all operations until it is resolved
 (for example by adding more capacity to the cluster).
 
-The filesystem has some special handling of the full flag, explained below.
+The file system has some special handling of the full flag, explained below.
 
 Hammer and later
 ----------------
 
-Since the hammer release, a full filesystem will lead to ENOSPC
+Since the hammer release, a full file system will lead to ENOSPC
 results from:
 
  * Data writes on the client
@@ -31,7 +31,7 @@ data made it to disk, and in a full-spac
 may be discarded after an ``fclose`` if no space is available to persist it.
 
 .. warning::
-    If an application appears to be misbehaving on a full filesystem,
+    If an application appears to be misbehaving on a full file system,
     check that it is performing ``fsync()`` calls as necessary to ensure
     data is on disk before proceeding.
 
@@ -53,7 +53,7 @@ There are two dangerous conditions to wa
 
 * If a client had pending writes to a file, then it was not possible
   for the client to release the file to the MDS for deletion: this could
-  lead to difficulty clearing space on a full filesystem
+  lead to difficulty clearing space on a full file system
 * If clients continued to create a large number of empty files, the
   resulting metadata writes from the MDS could lead to total exhaustion
   of space on the OSDs such that no further deletions could be performed.
diff -pruN 14.2.16-2/doc/cephfs/fuse.rst 15.2.7-0ubuntu4/doc/cephfs/fuse.rst
--- 14.2.16-2/doc/cephfs/fuse.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/fuse.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,52 +0,0 @@
-=======================
-Mount CephFS using FUSE
-=======================
-
-Before mounting a Ceph File System in User Space (FUSE), ensure that the client
-host has a copy of the Ceph configuration file and a keyring with CAPS for the
-Ceph metadata server.
-
-#. From your client host, copy the Ceph configuration file from the monitor host 
-   to the ``/etc/ceph`` directory. :: 
-
-	sudo mkdir -p /etc/ceph
-	sudo scp {user}@{server-machine}:/etc/ceph/ceph.conf /etc/ceph/ceph.conf
-
-#. From your client host, copy the Ceph keyring from the monitor host to 
-   to the ``/etc/ceph`` directory. :: 
-
-	sudo scp {user}@{server-machine}:/etc/ceph/ceph.keyring /etc/ceph/ceph.keyring
-
-#. Ensure that the Ceph configuration file and the keyring have appropriate 
-   permissions set on your client machine  (e.g., ``chmod 644``).
-
-For additional details on ``cephx`` configuration, see 
-`CEPHX Config Reference`_.
-
-To mount the Ceph file system as a FUSE, you may use the ``ceph-fuse`` command.
-For example::
-
-	sudo mkdir /home/username/cephfs
-	sudo ceph-fuse -m 192.168.0.1:6789 /home/username/cephfs
-
-If you have more than one filesystem, specify which one to mount using
-the ``--client_mds_namespace`` command line argument, or add a
-``client_mds_namespace`` setting to your ``ceph.conf``.
-
-See `ceph-fuse`_ for additional details.
-
-To automate mounting ceph-fuse, you may add an entry to the system fstab_.
-Additionally, ``ceph-fuse@.service`` and ``ceph-fuse.target`` systemd units are
-available. As usual, these unit files declare the default dependencies and
-recommended execution context for ``ceph-fuse``. An example ceph-fuse mount on
-``/mnt`` would be::
-
-	sudo systemctl start ceph-fuse@/mnt.service
-
-A persistent mount point can be setup via::
-
-	sudo systemctl enable ceph-fuse@/mnt.service
-
-.. _ceph-fuse: ../../man/8/ceph-fuse/
-.. _fstab: ../fstab/#fuse
-.. _CEPHX Config Reference: ../../rados/configuration/auth-config-ref
diff -pruN 14.2.16-2/doc/cephfs/hadoop.rst 15.2.7-0ubuntu4/doc/cephfs/hadoop.rst
--- 14.2.16-2/doc/cephfs/hadoop.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/hadoop.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,202 +0,0 @@
-========================
-Using Hadoop with CephFS
-========================
-
-The Ceph file system can be used as a drop-in replacement for the Hadoop File
-System (HDFS). This page describes the installation and configuration process
-of using Ceph with Hadoop.
-
-Dependencies
-============
-
-* CephFS Java Interface
-* Hadoop CephFS Plugin
-
-.. important:: Currently requires Hadoop 1.1.X stable series
-
-Installation
-============
-
-There are three requirements for using CephFS with Hadoop. First, a running
-Ceph installation is required. The details of setting up a Ceph cluster and
-the file system are beyond the scope of this document. Please refer to the
-Ceph documentation for installing Ceph.
-
-The remaining two requirements are a Hadoop installation, and the Ceph file
-system Java packages, including the Java CephFS Hadoop plugin. The high-level
-steps are two add the dependencies to the Hadoop installation ``CLASSPATH``,
-and configure Hadoop to use the Ceph file system.
-
-CephFS Java Packages
---------------------
-
-* CephFS Hadoop plugin (`hadoop-cephfs.jar <https://download.ceph.com/tarballs/hadoop-cephfs.jar>`_)
-
-Adding these dependencies to a Hadoop installation will depend on your
-particular deployment. In general the dependencies must be present on each
-node in the system that will be part of the Hadoop cluster, and must be in the
-``CLASSPATH`` searched for by Hadoop. Typically approaches are to place the
-additional ``jar`` files into the ``hadoop/lib`` directory, or to edit the
-``HADOOP_CLASSPATH`` variable in ``hadoop-env.sh``.
-
-The native Ceph file system client must be installed on each participating
-node in the Hadoop cluster.
-
-Hadoop Configuration
-====================
-
-This section describes the Hadoop configuration options used to control Ceph.
-These options are intended to be set in the Hadoop configuration file
-`conf/core-site.xml`.
-
-+---------------------+--------------------------+----------------------------+
-|Property             |Value                     |Notes                       |
-|                     |                          |                            |
-+=====================+==========================+============================+
-|fs.default.name      |Ceph URI                  |ceph://[monaddr:port]/      |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.conf.file       |Local path to ceph.conf   |/etc/ceph/ceph.conf         |
-|                     |                          |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.conf.options    |Comma separated list of   |opt1=val1,opt2=val2         |
-|                     |Ceph configuration        |                            |
-|                     |key/value pairs           |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.root.dir        |Mount root directory      |Default value: /            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.mon.address     |Monitor address           |host:port                   |
-|                     |                          |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.auth.id         |Ceph user id              |Example: admin              |
-|                     |                          |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.auth.keyfile    |Ceph key file             |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.auth.keyring    |Ceph keyring file         |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.object.size     |Default file object size  |Default value (64MB):       |
-|                     |in bytes                  |67108864                    |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.data.pools      |List of Ceph data pools   |Default value: default Ceph |
-|                     |for storing file.         |pool.                       |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-|ceph.localize.reads  |Allow reading from file   |Default value: true         |
-|                     |replica objects           |                            |
-|                     |                          |                            |
-|                     |                          |                            |
-+---------------------+--------------------------+----------------------------+
-
-Support For Per-file Custom Replication
----------------------------------------
-
-The Hadoop file system interface allows users to specify a custom replication
-factor (e.g. 3 copies of each block) when creating a file. However, object
-replication factors in the Ceph file system are controlled on a per-pool
-basis, and by default a Ceph file system will contain only a single
-pre-configured pool. Thus, in order to support per-file replication with
-Hadoop over Ceph, additional storage pools with non-default replications
-factors must be created, and Hadoop must be configured to choose from these
-additional pools.
-
-Additional data pools can be specified using the ``ceph.data.pools``
-configuration option. The value of the option is a comma separated list of
-pool names. The default Ceph pool will be used automatically if this
-configuration option is omitted or the value is empty. For example, the
-following configuration setting will consider the pools ``pool1``, ``pool2``, and
-``pool5`` when selecting a target pool to store a file. ::
-
-	<property>
-	  <name>ceph.data.pools</name>
-	  <value>pool1,pool2,pool5</value>
-	</property>
-
-Hadoop will not create pools automatically. In order to create a new pool with
-a specific replication factor use the ``ceph osd pool create`` command, and then
-set the ``size`` property on the pool using the ``ceph osd pool set`` command. For
-more information on creating and configuring pools see the `RADOS Pool
-documentation`_.
-
-.. _RADOS Pool documentation: ../../rados/operations/pools
-
-Once a pool has been created and configured the metadata service must be told
-that the new pool may be used to store file data. A pool is be made available
-for storing file system data using the ``ceph fs add_data_pool`` command.
-
-First, create the pool. In this example we create the ``hadoop1`` pool with
-replication factor 1. ::
-
-    ceph osd pool create hadoop1 100
-    ceph osd pool set hadoop1 size 1
-
-Next, determine the pool id. This can be done by examining the output of the
-``ceph osd dump`` command. For example, we can look for the newly created
-``hadoop1`` pool. ::
-
-    ceph osd dump | grep hadoop1
-
-The output should resemble::
-
-    pool 3 'hadoop1' rep size 1 min_size 1 crush_rule 0...
-
-where ``3`` is the pool id. Next we will use the pool id reference to register
-the pool as a data pool for storing file system data. ::
-
-    ceph fs add_data_pool cephfs 3
-
-The final step is to configure Hadoop to consider this data pool when
-selecting the target pool for new files. ::
-
-	<property>
-		<name>ceph.data.pools</name>
-		<value>hadoop1</value>
-	</property>
-
-Pool Selection Rules
-~~~~~~~~~~~~~~~~~~~~
-
-The following rules describe how Hadoop chooses a pool given a desired
-replication factor and the set of pools specified using the
-``ceph.data.pools`` configuration option.
-
-1. When no custom pools are specified the default Ceph data pool is used.
-2. A custom pool with the same replication factor as the default Ceph data
-   pool will override the default.
-3. A pool with a replication factor that matches the desired replication will
-   be chosen if it exists.
-4. Otherwise, a pool with at least the desired replication factor will be
-   chosen, or the maximum possible.
-
-Debugging Pool Selection
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Hadoop will produce log file entry when it cannot determine the replication
-factor of a pool (e.g. it is not configured as a data pool). The log message
-will appear as follows::
-
-    Error looking up replication of pool: <pool name>
-
-Hadoop will also produce a log entry when it wasn't able to select an exact
-match for replication. This log entry will appear as follows::
-
-    selectDataPool path=<path> pool:repl=<name>:<value> wanted=<value>
diff -pruN 14.2.16-2/doc/cephfs/health-messages.rst 15.2.7-0ubuntu4/doc/cephfs/health-messages.rst
--- 14.2.16-2/doc/cephfs/health-messages.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/health-messages.rst	2020-11-30 19:58:30.000000000 +0000
@@ -9,7 +9,7 @@ Cluster health checks
 =====================
 
 The Ceph monitor daemons will generate health messages in response
-to certain states of the filesystem map structure (and the enclosed MDS maps).
+to certain states of the file system map structure (and the enclosed MDS maps).
 
 Message: mds rank(s) *ranks* have failed
 Description: One or more MDS ranks are not currently assigned to
@@ -59,8 +59,8 @@ by the setting ``mds_log_max_segments``,
 exceeds that setting the MDS starts writing back metadata so that it
 can remove (trim) the oldest segments.  If this writeback is happening
 too slowly, or a software bug is preventing trimming, then this health
-message may appear.  The threshold for this message to appear is controlled by
-the config option ``mds_log_warn_factor``, the default is 2.0.
+message may appear.  The threshold for this message is the number of segments
+reaching double ``mds_log_max_segments``.
 
 Message: "Client *name* failing to respond to capability release"
 Code: MDS_HEALTH_CLIENT_LATE_RELEASE, MDS_HEALTH_CLIENT_LATE_RELEASE_MANY
@@ -75,11 +75,11 @@ Message: "Client *name* failing to respo
 Code: MDS_HEALTH_CLIENT_RECALL, MDS_HEALTH_CLIENT_RECALL_MANY
 Description: Clients maintain a metadata cache.  Items (such as inodes) in the
 client cache are also pinned in the MDS cache, so when the MDS needs to shrink
-its cache (to stay within ``mds_cache_size`` or ``mds_cache_memory_limit``), it
-sends messages to clients to shrink their caches too.  If the client is
-unresponsive or buggy, this can prevent the MDS from properly staying within
-its cache limits and it may eventually run out of memory and crash.  This
-message appears if a client has failed to release more than
+its cache (to stay within ``mds_cache_memory_limit``), it sends messages to
+clients to shrink their caches too.  If the client is unresponsive or buggy,
+this can prevent the MDS from properly staying within its cache limits and it
+may eventually run out of memory and crash.  This message appears if a client
+has failed to release more than
 ``mds_recall_warning_threshold`` capabilities (decaying with a half-life of
 ``mds_recall_max_decay_rate``) within the last
 ``mds_recall_warning_decay_rate`` second.
@@ -126,6 +126,6 @@ Code: MDS_HEALTH_CACHE_OVERSIZED
 Description: The MDS is not succeeding in trimming its cache to comply with the
 limit set by the administrator.  If the MDS cache becomes too large, the daemon
 may exhaust available memory and crash.  By default, this message appears if
-the actual cache size (in inodes or memory) is at least 50% greater than
-``mds_cache_size`` (default 100000) or ``mds_cache_memory_limit`` (default
-1GB). Modify ``mds_health_cache_threshold`` to set the warning ratio.
+the actual cache size (in memory) is at least 50% greater than
+``mds_cache_memory_limit`` (default 1GB). Modify ``mds_health_cache_threshold``
+to set the warning ratio.
diff -pruN 14.2.16-2/doc/cephfs/index.rst 15.2.7-0ubuntu4/doc/cephfs/index.rst
--- 14.2.16-2/doc/cephfs/index.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/index.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,133 +1,207 @@
-.. _ceph-filesystem:
+.. _ceph-file-system:
 
 =================
- Ceph Filesystem
+ Ceph File System
 =================
 
-The Ceph Filesystem (CephFS) is a POSIX-compliant filesystem that uses
-a Ceph Storage Cluster to store its data. The Ceph filesystem uses the same Ceph
-Storage Cluster system as Ceph Block Devices, Ceph Object Storage with its S3
-and Swift APIs, or native bindings (librados).
+The Ceph File System, or **CephFS**, is a POSIX-compliant file system built on
+top of Ceph's distributed object store, **RADOS**. CephFS endeavors to provide
+a state-of-the-art, multi-use, highly available, and performant file store for
+a variety of applications, including traditional use-cases like shared home
+directories, HPC scratch space, and distributed workflow shared storage.
+
+CephFS achieves these goals through the use of some novel architectural
+choices.  Notably, file metadata is stored in a separate RADOS pool from file
+data and served via a resizable cluster of *Metadata Servers*, or **MDS**,
+which may scale to support higher throughput metadata workloads.  Clients of
+the file system have direct access to RADOS for reading and writing file data
+blocks. For this reason, workloads may linearly scale with the size of the
+underlying RADOS object store; that is, there is no gateway or broker mediating
+data I/O for clients.
+
+Access to data is coordinated through the cluster of MDS which serve as
+authorities for the state of the distributed metadata cache cooperatively
+maintained by clients and MDS. Mutations to metadata are aggregated by each MDS
+into a series of efficient writes to a journal on RADOS; no metadata state is
+stored locally by the MDS. This model allows for coherent and rapid
+collaboration between clients within the context of a POSIX file system.
+
+.. image:: cephfs-architecture.svg
+
+CephFS is the subject of numerous academic papers for its novel designs and
+contributions to file system research. It is the oldest storage interface in
+Ceph and was once the primary use-case for RADOS.  Now it is joined by two
+other storage interfaces to form a modern unified storage system: RBD (Ceph
+Block Devices) and RGW (Ceph Object Storage Gateway).
+
+
+Getting Started with CephFS
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For most deployments of Ceph, setting up a CephFS file system is as simple as:
+
+.. code:: bash
+
+    ceph fs volume create <fs name>
+
+The Ceph `Orchestrator`_  will automatically create and configure MDS for
+your file system if the back-end deployment technology supports it (see
+`Orchestrator deployment table`_). Otherwise, please `deploy MDS manually
+as needed`_.
+
+Finally, to mount CephFS on your client nodes, see the `Mount CephFS:
+Prerequisites`_ page. Additionally, a command-line shell utility is available
+for interactive access or scripting via the `cephfs-shell`_.
+
+.. _Orchestrator: ../mgr/orchestrator
+.. _deploy MDS manually as needed: add-remove-mds
+.. _Orchestrator deployment table: ../mgr/orchestrator/#current-implementation-status
+.. _Mount CephFS\: Prerequisites: mount-prerequisites
+.. _cephfs-shell: cephfs-shell
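+
+As a quick illustration, assuming a file system named ``cephfs`` (the name is
+arbitrary), the file system can be created and its MDS assignment confirmed
+with:
+
+.. code:: bash
+
+    ceph fs volume create cephfs
+    ceph fs status cephfs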
 
-.. note:: If you are evaluating CephFS for the first time, please review
-          the best practices for deployment: :doc:`/cephfs/best-practices`
 
-.. ditaa::
-            +-----------------------+  +------------------------+
-            |                       |  |      CephFS FUSE       |
-            |                       |  +------------------------+
-            |                       |
-            |                       |  +------------------------+
-            |  CephFS Kernel Object |  |     CephFS Library     |
-            |                       |  +------------------------+
-            |                       |
-            |                       |  +------------------------+
-            |                       |  |        librados        |
-            +-----------------------+  +------------------------+
+.. raw:: html
 
-            +---------------+ +---------------+ +---------------+
-            |      OSDs     | |      MDSs     | |    Monitors   |
-            +---------------+ +---------------+ +---------------+
+   <!---
 
+Administration
+^^^^^^^^^^^^^^
 
-Using CephFS
-============
+.. raw:: html
 
-Using the Ceph Filesystem requires at least one :term:`Ceph Metadata Server` in
-your Ceph Storage Cluster.
+   --->
 
+.. toctree:: 
+   :maxdepth: 1
+   :hidden:
+
+    Create a CephFS file system <createfs>
+    Administrative commands <administration>
+    Provision/Add/Remove MDS(s) <add-remove-mds>
+    MDS failover and standby configuration <standby>
+    MDS Cache Size Limits <cache-size-limits>
+    MDS Configuration Settings <mds-config-ref>
+    Manual: ceph-mds <../../man/8/ceph-mds>
+    Export over NFS <nfs>
+    Export over NFS with volume nfs interface <fs-nfs-exports>
+    Application best practices <app-best-practices>
+    FS volume and subvolumes <fs-volumes>
+    CephFS Quotas <quota>
+    Health messages <health-messages>
+    Upgrading old file systems <upgrading>
 
 
 .. raw:: html
 
-	<style type="text/css">div.body h3{margin:5px 0px 0px 0px;}</style>
-	<table cellpadding="10"><colgroup><col width="33%"><col width="33%"><col width="33%"></colgroup><tbody valign="top"><tr><td><h3>Step 1: Metadata Server</h3>
+   <!---
 
-To run the Ceph Filesystem, you must have a running Ceph Storage Cluster with at
-least one :term:`Ceph Metadata Server` running.
+Mounting CephFS
+^^^^^^^^^^^^^^^
 
+.. raw:: html
+
+   --->
 
 .. toctree:: 
-	:maxdepth: 1
+   :maxdepth: 1
+   :hidden:
 
-	Provision/Add/Remove MDS(s) <add-remove-mds>
-	MDS failover and standby configuration <standby>
-	MDS Configuration Settings <mds-config-ref>
-	Client Configuration Settings <client-config-ref>
-	Journaler Configuration <journaler>
-	Manpage ceph-mds <../../man/8/ceph-mds>
-
-.. raw:: html 
-
-	</td><td><h3>Step 2: Mount CephFS</h3>
-
-Once you have a healthy Ceph Storage Cluster with at least
-one Ceph Metadata Server, you may create and mount your Ceph Filesystem.
-Ensure that your client has network connectivity and the proper
-authentication keyring.
+    Client Configuration Settings <client-config-ref>
+    Client Authentication <client-auth>
+    Mount CephFS: Prerequisites <mount-prerequisites>
+    Mount CephFS using Kernel Driver <mount-using-kernel-driver>
+    Mount CephFS using FUSE <mount-using-fuse>
+    Use the CephFS Shell <cephfs-shell>
+    Supported Features of Kernel Driver <kernel-features>
+    Manual: ceph-fuse <../../man/8/ceph-fuse>
+    Manual: mount.ceph <../../man/8/mount.ceph>
+    Manual: mount.fuse.ceph <../../man/8/mount.fuse.ceph>
 
-.. toctree:: 
-	:maxdepth: 1
 
-	Create a CephFS file system <createfs>
-	Mount CephFS <kernel>
-	Mount CephFS as FUSE <fuse>
-	Mount CephFS in fstab <fstab>
-	Use the CephFS Shell <cephfs-shell>
-	Supported Features of Kernel Driver <kernel-features>
-	Manpage ceph-fuse <../../man/8/ceph-fuse>
-	Manpage mount.ceph <../../man/8/mount.ceph>
-	Manpage mount.fuse.ceph <../../man/8/mount.fuse.ceph>
+.. raw:: html
+
+   <!---
 
+CephFS Concepts
+^^^^^^^^^^^^^^^
 
-.. raw:: html 
+.. raw:: html
 
-	</td><td><h3>Additional Details</h3>
+   --->
 
 .. toctree:: 
-    :maxdepth: 1
+   :maxdepth: 1
+   :hidden:
 
-    Deployment best practices <best-practices>
     MDS States <mds-states>
-    Administrative commands <administration>
-    Understanding MDS Cache Size Limits <cache-size-limits>
     POSIX compatibility <posix>
-    Experimental Features <experimental-features>
-    CephFS Quotas <quota>
-    Using Ceph with Hadoop <hadoop>
-    cephfs-journal-tool <cephfs-journal-tool>
+    MDS Journaling <mds-journaling>
     File layouts <file-layouts>
-    Client eviction <eviction>
-    Handling full filesystems <full>
-    Health messages <health-messages>
-    Troubleshooting <troubleshooting>
-    Disaster recovery <disaster-recovery>
-    Client authentication <client-auth>
-    Upgrading old filesystems <upgrading>
-    Configuring directory fragmentation <dirfrags>
-    Configuring multiple active MDS daemons <multimds>
-    Export over NFS <nfs>
-    Application best practices <app-best-practices>
-    Scrub <scrub>
+    Distributed Metadata Cache <mdcache>
+    Dynamic Metadata Management in CephFS <dynamic-metadata-management>
+    CephFS IO Path <cephfs-io-path>
     LazyIO <lazyio>
-    FS volume and subvolumes <fs-volumes>
+    Directory fragmentation <dirfrags>
+    Multiple active MDS daemons <multimds>
+
+
+.. raw:: html
+
+   <!---
+
+Troubleshooting and Disaster Recovery
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+   --->
 
 .. toctree:: 
    :hidden:
 
-    Advanced: Metadata repair <disaster-recovery-experts>
+    Client eviction <eviction>
+    Scrubbing the File System <scrub>
+    Handling full file systems <full>
+    Metadata repair <disaster-recovery-experts>
+    Troubleshooting <troubleshooting>
+    Disaster recovery <disaster-recovery>
+    cephfs-journal-tool <cephfs-journal-tool>
+
 
 .. raw:: html
 
-	</td></tr></tbody></table>
+   <!---
 
-For developers
-==============
+Developer Guides
+^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+   --->
 
 .. toctree:: 
-    :maxdepth: 1
+   :maxdepth: 1
+   :hidden:
 
+    Journaler Configuration <journaler>
     Client's Capabilities <capabilities>
-    libcephfs <../../api/libcephfs-java/>
+    libcephfs for Java <../../api/libcephfs-java/>
     Mantle <mantle>
 
+
+.. raw:: html
+
+   <!---
+
+Additional Details
+^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+   --->
+
+.. toctree::
+   :maxdepth: 1
+   :hidden:
+
+    Experimental Features <experimental-features>
diff -pruN 14.2.16-2/doc/cephfs/kernel-features.rst 15.2.7-0ubuntu4/doc/cephfs/kernel-features.rst
--- 14.2.16-2/doc/cephfs/kernel-features.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/kernel-features.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,13 +1,20 @@
 
-Supported Features of Kernel Driver
+Supported Features of the Kernel Driver
 ========================================
+The kernel driver is developed separately from the core Ceph code, and as
+such it sometimes differs from the FUSE driver in feature implementation.
+The following details the implementation status of various CephFS features
+in the kernel driver.
 
 Inline data
 -----------
-Inline data was introduced by the Firefly release. Linux kernel clients >= 3.19
-can read inline data, can convert existing inline data to RADOS objects when
-file data is modified. At present, Linux kernel clients do not store file data
-as inline data.
+Inline data was introduced by the Firefly release. This feature is being
+deprecated in mainline CephFS, and may be removed from a future kernel
+release.
+
+Linux kernel clients >= 3.19 can read inline data and convert existing
+inline data to RADOS objects when file data is modified. At present,
+Linux kernel clients do not store file data as inline data.
 
 See `Experimental Features`_ for more information.
 
@@ -19,8 +26,8 @@ quota. At present, no Linux kernel clien
 
 See `Quotas`_ for more information.
 
-Multiple filesystems within a Ceph cluster
-------------------------------------------
+Multiple file systems within a Ceph cluster
+-------------------------------------------
 The feature was introduced by the Jewel release. Linux kernel clients >= 4.7
 can support it.
 
diff -pruN 14.2.16-2/doc/cephfs/kernel.rst 15.2.7-0ubuntu4/doc/cephfs/kernel.rst
--- 14.2.16-2/doc/cephfs/kernel.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/kernel.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,41 +0,0 @@
-====================================
- Mount CephFS with the Kernel Driver
-====================================
-
-To mount the Ceph file system you may use the ``mount`` command if you know the
-monitor host IP address(es), or use the ``mount.ceph`` utility to resolve the 
-monitor host name(s) into IP address(es) for you. For example:: 
-
-	sudo mkdir /mnt/mycephfs
-	sudo mount -t ceph 192.168.0.1:6789:/ /mnt/mycephfs
-
-To mount the Ceph file system with ``cephx`` authentication enabled, the kernel
-must authenticate with the cluster. The default ``name=`` option is ``guest``.
-The mount.ceph helper will automatically attempt to find a secret key in the
-keyring.
-
-The secret can also be specified manually with the ``secret=`` option. ::
-
-	sudo mount -t ceph 192.168.0.1:6789:/ /mnt/mycephfs -o name=admin,secret=AQATSKdNGBnwLhAAnNDKnH65FmVKpXZJVasUeQ==
-
-The foregoing usage leaves the secret in the Bash history. A more secure
-approach reads the secret from a file. For example::
-
-	sudo mount -t ceph 192.168.0.1:6789:/ /mnt/mycephfs -o name=admin,secretfile=/etc/ceph/admin.secret
-
-See `User Management`_ for details on cephx.
-	
-If you have more than one file system, specify which one to mount using
-the ``mds_namespace`` option, e.g. ``-o mds_namespace=myfs``.
-
-To unmount the Ceph file system, you may use the ``umount`` command. For example:: 
-
-	sudo umount /mnt/mycephfs
-
-.. tip:: Ensure that you are not within the file system directories before
-   executing this command.
-
-See `mount.ceph`_ for details.
-
-.. _mount.ceph: ../../man/8/mount.ceph/
-.. _User Management: ../../rados/operations/user-management/
diff -pruN 14.2.16-2/doc/cephfs/lazyio.rst 15.2.7-0ubuntu4/doc/cephfs/lazyio.rst
--- 14.2.16-2/doc/cephfs/lazyio.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/lazyio.rst	2020-11-30 19:58:30.000000000 +0000
@@ -19,5 +19,58 @@ LazyIO can be enabled by following ways.
 - ``ceph_lazyio(...)`` and ``ceph_ll_lazyio(...)`` enable LAZY_IO for file handle
   in libcephfs.
 
-- ``ioctl(fd, CEPH_IOC_LAZYIO, 1UL)`` enables LAZY_IO for file handle in
-   ceph-fuse mount.
+Using LazyIO
+============
+
+LazyIO includes two methods ``lazyio_propagate()`` and ``lazyio_synchronize()``.
+With LazyIO enabled, writes may not be visible to other clients until
+``lazyio_propagate()`` is called. Reads may come from local cache (irrespective of
+changes to the file by other clients) until ``lazyio_synchronize()`` is called.
+
+- ``lazyio_propagate(int fd, loff_t offset, size_t count)`` - Ensures that any
+  buffered writes of the client, in the specified region (offset to offset+count),
+  have been propagated to the shared file. If offset and count are both 0, the
+  operation is performed on the entire file; at present this is the only mode
+  supported.
+
+- ``lazyio_synchronize(int fd, loff_t offset, size_t count)`` - Ensures that the
+  client is, in a subsequent read call, able to read the updated file with all
+  the propagated writes of the other clients. In CephFS this is achieved by
+  invalidating the file caches pertaining to the inode, which forces the client
+  to refetch/recache the data from the updated file. Additionally, if the write
+  cache of the calling client is dirty (not propagated), ``lazyio_synchronize()``
+  flushes it as well.
+
+An example usage (utilizing libcephfs) is given below. This is a sample I/O loop for a
+particular client/file descriptor in a parallel application:
+
+::
+
+        /* Client a (ca) opens the shared file shared_file.txt */
+        int fda = ceph_open(ca, "shared_file.txt", O_CREAT|O_RDWR, 0644);
+
+        /* Enable LazyIO for fda */
+        ceph_lazyio(ca, fda, 1);
+
+        for(i = 0; i < num_iters; i++) {
+            char out_buf[] = "fooooooooo";
+            
+            ceph_write(ca, fda, out_buf, sizeof(out_buf), i);
+            /* Propagate the writes associated with fda to the backing storage */
+            ceph_lazyio_propagate(ca, fda, 0, 0);
+
+            /* The barrier makes sure changes associated with all file descriptors
+            are propagated so that there is certainty that the backing file
+            is up to date */
+            application_specific_barrier();
+
+            char in_buf[40];
+            /* Calling ceph_lazyio_synchronize here ensures that ca reads the
+            updated file with the propagated changes rather than stale cached
+            data */
+            ceph_lazyio_synchronize(ca, fda, 0, 0);
+            ceph_read(ca, fda, in_buf, sizeof(in_buf), 0);
+
+            /* A barrier is required here before returning to the next write
+            phase so as to avoid overwriting the portion of the shared file still
+            being read by another file descriptor */
+            application_specific_barrier();
+        }
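+
+The example above assumes ``ca`` is an already created and mounted libcephfs
+handle. A minimal sketch of obtaining one (error checking omitted; the
+configuration path is only an example) might look like:
+
+::
+
+        #include <cephfs/libcephfs.h>
+
+        struct ceph_mount_info *ca;
+
+        /* Create the handle and read the cluster configuration */
+        ceph_create(&ca, NULL);
+        ceph_conf_read_file(ca, "/etc/ceph/ceph.conf");
+
+        /* Mount the file system at its root */
+        ceph_mount(ca, "/");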
diff -pruN 14.2.16-2/doc/cephfs/mdcache.rst 15.2.7-0ubuntu4/doc/cephfs/mdcache.rst
--- 14.2.16-2/doc/cephfs/mdcache.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mdcache.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,77 @@
+=================================
+CephFS Distributed Metadata Cache
+=================================
+While the data for inodes in a Ceph file system is stored in RADOS and
+accessed by the clients directly, inode metadata and directory
+information is managed by the Ceph metadata server (MDS). The MDSs
+act as mediators for all metadata-related activity, storing the resulting
+information in a separate RADOS pool from the file data.
+
+CephFS clients can request that the MDS fetch or change inode metadata
+on its behalf, but an MDS can also grant the client **capabilities**
+(aka **caps**) for each inode (see :doc:`/cephfs/capabilities`).
+
+A capability grants the client the ability to cache and possibly
+manipulate some portion of the data or metadata associated with the
+inode. When another client needs access to the same information, the MDS
+will revoke the capability and the client will eventually return it,
+along with an updated version of the inode's metadata (in the event that
+it made changes to it while it held the capability).
+
+Clients can request capabilities and will generally get them, but when
+there is competing access or memory pressure on the MDS, they may be
+**revoked**. When a capability is revoked, the client is responsible for
+returning it as soon as it is able. Clients that fail to do so in a
+timely fashion may end up **blacklisted** and unable to communicate with
+the cluster.
+
+Since the cache is distributed, the MDS must take great care to ensure
+that no client holds capabilities that may conflict with other clients'
+capabilities, or operations that it does itself. This allows cephfs
+clients to rely on much greater cache coherence than a filesystem like
+NFS, where the client may cache data and metadata beyond the point where
+it has changed on the server.
+
+Client Metadata Requests
+------------------------
+When a client needs to query/change inode metadata or perform an
+operation on a directory, it has two options. It can make a request to
+the MDS directly, or serve the information out of its cache. With
+CephFS, the latter is only possible if the client has the necessary
+caps.
+
+Clients can send simple requests to the MDS to query or request changes
+to certain metadata. The replies to these requests may also grant the
+client a certain set of caps for the inode, allowing it to perform
+subsequent requests without consulting the MDS.
+
+Clients can also request caps directly from the MDS, which is necessary
+in order to read or write file data.
+
+Distributed Locks in an MDS Cluster
+-----------------------------------
+When an MDS wants to read or change information about an inode, it must
+gather the appropriate locks for it. The MDS cluster may have a series
+of different types of locks on the given inode and each MDS may have
+disjoint sets of locks.
+
+If there are outstanding caps that would conflict with these locks, then
+they must be revoked before the lock can be acquired. Once the competing
+caps are returned to the MDS, then it can get the locks and do the
+operation.
+
+On a file system served by multiple MDS', the metadata cache is also
+distributed among the MDS' in the cluster. For every inode, at any given
+time, only one MDS in the cluster is considered **authoritative**. Any
+requests to change that inode must be done by the authoritative MDS,
+though non-authoritative MDS can forward requests to the authoritative
+one.
+
+Non-auth MDS' can also obtain read locks that prevent the auth MDS from
+changing the data until the lock is dropped, so that they can serve
+inode info to the clients.
+
+The auth MDS for an inode can change over time as well. The MDS' will
+actively balance responsibility for the inode cache amongst
+themselves, but this can be overridden by **pinning** certain subtrees
+to a single MDS.
diff -pruN 14.2.16-2/doc/cephfs/mds-config-ref.rst 15.2.7-0ubuntu4/doc/cephfs/mds-config-ref.rst
--- 14.2.16-2/doc/cephfs/mds-config-ref.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mds-config-ref.rst	2020-11-30 19:58:30.000000000 +0000
@@ -5,9 +5,8 @@
 ``mds cache memory limit``
 
 :Description: The memory limit the MDS should enforce for its cache.
-              Administrators should use this instead of ``mds cache size``.
 :Type:  64-bit Integer Unsigned
-:Default: ``1073741824``
+:Default: ``4G``
 
 ``mds cache reservation``
 
@@ -18,14 +17,6 @@
 :Type:  Float
 :Default: ``0.05``
 
-``mds cache size``
-
-:Description: The number of inodes to cache. A value of 0 indicates an
-              unlimited number. It is recommended to use
-              ``mds_cache_memory_limit`` to limit the amount of memory the MDS
-              cache uses.
-:Type:  32-bit Integer
-:Default: ``0``
 
 ``mds cache mid``
 
diff -pruN 14.2.16-2/doc/cephfs/mds-journaling.rst 15.2.7-0ubuntu4/doc/cephfs/mds-journaling.rst
--- 14.2.16-2/doc/cephfs/mds-journaling.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mds-journaling.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,90 @@
+MDS Journaling
+==============
+
+CephFS Metadata Pool
+--------------------
+
+CephFS uses a separate (metadata) pool for managing file metadata (inodes and
+dentries) in a Ceph File System. The metadata pool has all the information about
+files in a Ceph File System including the File System hierarchy. Additionally,
+CephFS maintains meta information related to other entities in a file system
+such as file system journals, open file table, session map, etc.
+
+This document describes how Ceph Metadata Servers use and rely on journaling.
+
+CephFS MDS Journaling
+---------------------
+
+CephFS metadata servers stream a journal of metadata events into RADOS in the metadata
+pool prior to executing a file system operation. Active MDS daemon(s) manage metadata
+for files and directories in CephFS.
+
+CephFS uses journaling for a couple of reasons:
+
+#. Consistency: On an MDS failover, the journal events can be replayed to reach a
+   consistent file system state. Also, metadata operations that require multiple
+   updates to the backing store need to be journaled for crash consistency (along
+   with other consistency mechanisms such as locking, etc..).
+
+#. Performance: Journal updates are (mostly) sequential, hence updates to journals
+   are fast. Furthermore, updates can be batched into a single write, thereby
+   saving the disk seek time involved in updates to different parts of a file.
+   Having a large journal also helps a standby MDS to warm its cache, which
+   indirectly helps during MDS failover.
+
+Each active metadata server maintains its own journal in the metadata pool. Journals
+are striped over multiple objects. Journal entries which are not required (deemed as
+old) are trimmed by the metadata server.
+
+Journal Events
+--------------
+
+Apart from journaling file system metadata updates, CephFS journals various other events,
+such as client session info and directory import/export state. These events are used by
+the metadata server to reestablish correct state as required, e.g., the Ceph MDS tries to
+reconnect clients on restart when journal replay encounters an event recording that a
+client had a session with the MDS before it was restarted.
+
+To examine the list of such events recorded in the journal, CephFS provides a
+command-line utility `cephfs-journal-tool`, which can be used as follows:
+
+::
+
+   cephfs-journal-tool --rank=<fs>:<rank> event get list
+
+`cephfs-journal-tool` is also used to discover and repair a damaged Ceph File System.
+(See :doc:`/cephfs/cephfs-journal-tool` for more details)
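+
+For example, assuming a file system named ``cephfs`` with a single active MDS
+(rank 0), the journal events of that rank can be listed with:
+
+::
+
+   cephfs-journal-tool --rank=cephfs:0 event get list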
+
+Journal Event Types
+-------------------
+
+Following are various event types that are journaled by the MDS.
+
+#. `EVENT_COMMITTED`: Mark a request (id) as committed.
+
+#. `EVENT_EXPORT`: Maps directories to an MDS rank.
+
+#. `EVENT_FRAGMENT`: Tracks various stages of directory fragmentation (split/merge).
+
+#. `EVENT_IMPORTSTART`: Logged when an MDS rank starts importing directory fragments.
+
+#. `EVENT_IMPORTFINISH`: Logged when an MDS rank finishes importing directory fragments.
+
+#. `EVENT_NOOP`: No operation event type for skipping over a journal region.
+
+#. `EVENT_OPEN`: Tracks which inodes have open file handles.
+
+#. `EVENT_RESETJOURNAL`: Used to mark a journal as `reset` post truncation.
+
+#. `EVENT_SESSION`: Tracks open client sessions.
+
+#. `EVENT_SLAVEUPDATE`: Logs various stages of an operation that has been forwarded to a (slave) MDS.
+
+#. `EVENT_SUBTREEMAP`: Map of directory inodes to directory contents (subtree partition).
+
+#. `EVENT_TABLECLIENT`: Logs transition states of the MDS's view of client tables (snap/anchor).
+
+#. `EVENT_TABLESERVER`: Logs transition states of the MDS's view of server tables (snap/anchor).
+
+#. `EVENT_UPDATE`: Log file operations on an inode.
diff -pruN 14.2.16-2/doc/cephfs/mds-state-diagram.dot 15.2.7-0ubuntu4/doc/cephfs/mds-state-diagram.dot
--- 14.2.16-2/doc/cephfs/mds-state-diagram.dot	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mds-state-diagram.dot	2020-11-30 19:58:30.000000000 +0000
@@ -5,13 +5,20 @@ node [shape=circle,style=unfilled,fixeds
 node [color=blue,peripheries=1];
 N0 [label="up:boot"]
 
+node [color=green,peripheries=1];
+S1 [label="up:standby"]
+N0 -> S1 [color=green,penwidth=2.0];
+S2 [label="up:standby_replay"]
+S1 -> S2 [color=green,penwidth=2.0];
+
 node [color=orange,peripheries=2];
 N1 [label="up:creating"]
-N0 -> N1 [color=orange,penwidth=2.0];
+S1 -> N1 [color=orange,penwidth=2.0];
 N2 [label="up:starting"]
-N0 -> N2 [color=orange,penwidth=2.0];
+S1 -> N2 [color=orange,penwidth=2.0];
 N3 [label="up:replay"]
-N0 -> N3 [color=orange,penwidth=2.0];
+S1 -> N3 [color=orange,penwidth=2.0];
+S2 -> N3 [color=orange,penwidth=2.0];
 N4 [label="up:resolve"]
 N3 -> N4 [color=orange,penwidth=2.0];
 N5 [label="up:reconnect"]
@@ -28,11 +35,6 @@ N7 -> S0 [color=green,penwidth=2.0];
 N1 -> S0 [color=green,penwidth=2.0];
 N2 -> S0 [color=green,penwidth=2.0];
 N6 -> S0 [color=green,penwidth=2.0];
-node [color=green,peripheries=1];
-S1 [label="up:standby"]
-N0 -> S1 [color=green,penwidth=2.0];
-S2 [label="up:standby_replay"]
-N0 -> S2 [color=green,penwidth=2.0];
 
 // going down but still accessible by clients
 node [color=purple,peripheries=2];
diff -pruN 14.2.16-2/doc/cephfs/mds-state-diagram.svg 15.2.7-0ubuntu4/doc/cephfs/mds-state-diagram.svg
--- 14.2.16-2/doc/cephfs/mds-state-diagram.svg	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mds-state-diagram.svg	2020-11-30 19:58:30.000000000 +0000
@@ -4,308 +4,314 @@
 <!-- Generated by graphviz version 2.40.1 (20161225.0304)
  -->
 <!-- Title: %3 Pages: 1 -->
-<svg width="783pt" height="1808pt"
- viewBox="0.00 0.00 783.00 1808.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
-<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1804)">
+<svg width="756pt" height="1980pt"
+ viewBox="0.00 0.00 756.37 1980.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1976)">
 <title>%3</title>
-<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1804 779,-1804 779,4 -4,4"/>
+<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1976 752.3688,-1976 752.3688,4 -4,4"/>
 <!-- N0 -->
 <g id="node1" class="node">
 <title>N0</title>
-<ellipse fill="none" stroke="#0000ff" cx="375" cy="-1728" rx="72" ry="72"/>
-<text text-anchor="middle" x="375" y="-1724.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:boot</text>
+<ellipse fill="none" stroke="#0000ff" cx="529.3688" cy="-1900" rx="72" ry="72"/>
+<text text-anchor="middle" x="529.3688" y="-1896.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:boot</text>
 </g>
-<!-- N1 -->
+<!-- S1 -->
 <g id="node2" class="node">
-<title>N1</title>
-<ellipse fill="none" stroke="#ffa500" cx="375" cy="-1544" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="375" cy="-1544" rx="76" ry="76"/>
-<text text-anchor="middle" x="375" y="-1540.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:creating</text>
+<title>S1</title>
+<ellipse fill="none" stroke="#00ff00" cx="529.3688" cy="-1720" rx="72" ry="72"/>
+<text text-anchor="middle" x="529.3688" y="-1716.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:standby</text>
 </g>
-<!-- N0&#45;&gt;N1 -->
+<!-- N0&#45;&gt;S1 -->
 <g id="edge1" class="edge">
-<title>N0&#45;&gt;N1</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M375,-1655.8064C375,-1647.5034 375,-1638.9744 375,-1630.5077"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="378.5001,-1630.2303 375,-1620.2304 371.5001,-1630.2304 378.5001,-1630.2303"/>
+<title>N0&#45;&gt;S1</title>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M529.3688,-1827.8243C529.3688,-1819.4801 529.3688,-1810.9229 529.3688,-1802.4555"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="532.8689,-1802.1919 529.3688,-1792.1919 525.8689,-1802.192 532.8689,-1802.1919"/>
 </g>
-<!-- N2 -->
+<!-- S2 -->
 <g id="node3" class="node">
-<title>N2</title>
-<ellipse fill="none" stroke="#ffa500" cx="205" cy="-1544" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="205" cy="-1544" rx="76" ry="76"/>
-<text text-anchor="middle" x="205" y="-1540.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:starting</text>
+<title>S2</title>
+<ellipse fill="none" stroke="#00ff00" cx="402.3688" cy="-1540" rx="72" ry="72"/>
+<text text-anchor="middle" x="402.3688" y="-1536.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:standby_replay</text>
 </g>
-<!-- N0&#45;&gt;N2 -->
+<!-- S1&#45;&gt;S2 -->
 <g id="edge2" class="edge">
-<title>N0&#45;&gt;N2</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M325.829,-1674.7796C306.3584,-1653.7056 283.8178,-1629.3086 263.5164,-1607.3354"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="266.082,-1604.9547 256.7251,-1599.9848 260.9405,-1609.705 266.082,-1604.9547"/>
+<title>S1&#45;&gt;S2</title>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M487.7842,-1661.0613C475.7237,-1643.9675 462.4609,-1625.1699 450.0576,-1607.5904"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="452.6717,-1605.2243 444.0468,-1599.0711 446.952,-1609.2599 452.6717,-1605.2243"/>
 </g>
-<!-- N3 -->
+<!-- N1 -->
 <g id="node4" class="node">
-<title>N3</title>
-<ellipse fill="none" stroke="#ffa500" cx="98" cy="-900" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="98" cy="-900" rx="76" ry="76"/>
-<text text-anchor="middle" x="98" y="-896.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:replay</text>
+<title>N1</title>
+<ellipse fill="none" stroke="#ffa500" cx="568.3688" cy="-1356" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="568.3688" cy="-1356" rx="76" ry="76"/>
+<text text-anchor="middle" x="568.3688" y="-1352.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:creating</text>
 </g>
-<!-- N0&#45;&gt;N3 -->
+<!-- S1&#45;&gt;N1 -->
 <g id="edge3" class="edge">
-<title>N0&#45;&gt;N3</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M303.6813,-1718.116C244.3606,-1705.8591 162.9365,-1678.8371 120,-1620 50.549,-1524.8294 96,-1473.8172 96,-1356 96,-1356 96,-1356 96,-1168 96,-1107.3668 96.5182,-1039.0195 97.0271,-986.5224"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="100.5296,-986.2755 97.1286,-976.2414 93.53,-986.2063 100.5296,-986.2755"/>
+<title>S1&#45;&gt;N1</title>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M537.0739,-1648.0853C543.4139,-1588.912 552.4184,-1504.87 559.1421,-1442.1155"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="562.6577,-1442.1568 560.243,-1431.8408 555.6975,-1441.411 562.6577,-1442.1568"/>
 </g>
-<!-- S1 -->
-<g id="node10" class="node">
-<title>S1</title>
-<ellipse fill="none" stroke="#00ff00" cx="541" cy="-1544" rx="72" ry="72"/>
-<text text-anchor="middle" x="541" y="-1540.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:standby</text>
+<!-- N2 -->
+<g id="node5" class="node">
+<title>N2</title>
+<ellipse fill="none" stroke="#ffa500" cx="672.3688" cy="-604" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="672.3688" cy="-604" rx="76" ry="76"/>
+<text text-anchor="middle" x="672.3688" y="-600.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:starting</text>
 </g>
-<!-- N0&#45;&gt;S1 -->
-<g id="edge13" class="edge">
-<title>N0&#45;&gt;S1</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M423.4603,-1674.285C443.0634,-1652.5563 465.795,-1627.3597 486.014,-1604.9483"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="488.6666,-1607.2332 492.7664,-1597.4637 483.4691,-1602.5441 488.6666,-1607.2332"/>
+<!-- S1&#45;&gt;N2 -->
+<g id="edge4" class="edge">
+<title>S1&#45;&gt;N2</title>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M569.2023,-1659.7191C611.5148,-1589.7402 672.3688,-1469.3866 672.3688,-1356 672.3688,-1356 672.3688,-1356 672.3688,-980 672.3688,-880.6681 672.3688,-766.5754 672.3688,-690.3655"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="675.8689,-690.3343 672.3688,-680.3343 668.8689,-690.3344 675.8689,-690.3343"/>
 </g>
-<!-- S2 -->
-<g id="node11" class="node">
-<title>S2</title>
-<ellipse fill="none" stroke="#00ff00" cx="703" cy="-1544" rx="72" ry="72"/>
-<text text-anchor="middle" x="703" y="-1540.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:standby_replay</text>
+<!-- N3 -->
+<g id="node6" class="node">
+<title>N3</title>
+<ellipse fill="none" stroke="#ffa500" cx="398.3688" cy="-1356" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="398.3688" cy="-1356" rx="76" ry="76"/>
+<text text-anchor="middle" x="398.3688" y="-1352.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:replay</text>
 </g>
-<!-- N0&#45;&gt;S2 -->
-<g id="edge14" class="edge">
-<title>N0&#45;&gt;S2</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M443.4232,-1705.2759C494.8612,-1686.5285 565.831,-1657.0318 622,-1620 629.8031,-1614.8555 637.5551,-1608.9346 644.9954,-1602.7159"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="647.6445,-1605.0534 652.9345,-1595.8736 643.0746,-1599.7509 647.6445,-1605.0534"/>
+<!-- S1&#45;&gt;N3 -->
+<g id="edge5" class="edge">
+<title>S1&#45;&gt;N3</title>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M525.1622,-1647.8711C520.0317,-1596.0056 508.6442,-1525.3873 483.3688,-1468 476.1463,-1451.6015 466.077,-1435.3985 455.3873,-1420.6272"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="458.1754,-1418.5111 449.3989,-1412.576 452.5587,-1422.6887 458.1754,-1418.5111"/>
+</g>
+<!-- S2&#45;&gt;N3 -->
+<g id="edge6" class="edge">
+<title>S2&#45;&gt;N3</title>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M400.7994,-1467.8064C400.6169,-1459.4122 400.4294,-1450.7869 400.2433,-1442.2286"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="403.7426,-1442.1519 400.026,-1432.2304 396.7442,-1442.3041 403.7426,-1442.1519"/>
 </g>
 <!-- S0 -->
-<g id="node9" class="node">
+<g id="node11" class="node">
 <title>S0</title>
-<ellipse fill="none" stroke="#00ff00" cx="375" cy="-1356" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#00ff00" cx="375" cy="-1356" rx="76" ry="76"/>
-<text text-anchor="middle" x="375" y="-1352.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:active</text>
+<ellipse fill="none" stroke="#00ff00" cx="474.3688" cy="-416" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#00ff00" cx="474.3688" cy="-416" rx="76" ry="76"/>
+<text text-anchor="middle" x="474.3688" y="-412.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:active</text>
 </g>
 <!-- N1&#45;&gt;S0 -->
-<g id="edge10" class="edge">
+<g id="edge13" class="edge">
 <title>N1&#45;&gt;S0</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M375,-1467.8042C375,-1459.4826 375,-1450.9678 375,-1442.5337"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="378.5001,-1442.3042 375,-1432.3043 371.5001,-1442.3043 378.5001,-1442.3042"/>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M568.3688,-1279.6657C568.3688,-1203.6994 568.3688,-1083.7917 568.3688,-980 568.3688,-980 568.3688,-980 568.3688,-792 568.3688,-687.305 533.6439,-570.9765 506.4053,-495.8318"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="509.6832,-494.6043 502.9547,-486.4202 503.1109,-497.0139 509.6832,-494.6043"/>
 </g>
 <!-- N2&#45;&gt;S0 -->
-<g id="edge11" class="edge">
+<g id="edge14" class="edge">
 <title>N2&#45;&gt;S0</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M256.0056,-1487.5938C275.1652,-1466.4055 297.0838,-1442.1662 316.8451,-1420.3125"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="319.6258,-1422.4558 323.7368,-1412.691 314.4337,-1417.7608 319.6258,-1422.4558"/>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M617.2148,-551.6315C592.3238,-527.9977 562.7987,-499.9638 537.0702,-475.5347"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="539.2319,-472.7608 529.5701,-468.4134 534.412,-477.8371 539.2319,-472.7608"/>
 </g>
 <!-- D0 -->
 <g id="node13" class="node">
 <title>D0</title>
-<polygon fill="none" stroke="#ff0000" points="276.9505,-1034 240.9752,-1052 169.0248,-1052 133.0495,-1034 169.0248,-1016 240.9752,-1016 276.9505,-1034"/>
-<polygon fill="none" stroke="#ff0000" points="285.8886,-1034 241.9189,-1056 168.0811,-1056 124.1114,-1034 168.0811,-1012 241.9189,-1012 285.8886,-1034"/>
-<text text-anchor="middle" x="205" y="-1030.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:failed</text>
+<polygon fill="none" stroke="#ff0000" points="400.3193,-22 364.344,-40 292.3936,-40 256.4183,-22 292.3936,-4 364.344,-4 400.3193,-22"/>
+<polygon fill="none" stroke="#ff0000" points="409.2574,-22 365.2876,-44 291.4499,-44 247.4802,-22 291.4499,0 365.2876,0 409.2574,-22"/>
+<text text-anchor="middle" x="328.3688" y="-18.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:failed</text>
 </g>
 <!-- N2&#45;&gt;D0 -->
-<g id="edge16" class="edge">
+<g id="edge17" class="edge">
 <title>N2&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M205,-1467.9234C205,-1354.6806 205,-1146.5379 205,-1066.5209"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="208.5001,-1066.1037 205,-1056.1037 201.5001,-1066.1037 208.5001,-1066.1037"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M694.3918,-531.0749C723.206,-419.3057 758.0861,-205.4415 648.3688,-80 618.1861,-45.4916 497.5217,-31.4247 413.3193,-25.7515"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="413.4449,-22.2524 403.2401,-25.1004 412.9936,-29.2378 413.4449,-22.2524"/>
 </g>
 <!-- N4 -->
-<g id="node5" class="node">
+<g id="node7" class="node">
 <title>N4</title>
-<ellipse fill="none" stroke="#ffa500" cx="142" cy="-712" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="142" cy="-712" rx="76" ry="76"/>
-<text text-anchor="middle" x="142" y="-708.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:resolve</text>
+<ellipse fill="none" stroke="#ffa500" cx="211.3688" cy="-1168" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="211.3688" cy="-1168" rx="76" ry="76"/>
+<text text-anchor="middle" x="211.3688" y="-1164.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:resolve</text>
 </g>
 <!-- N3&#45;&gt;N4 -->
-<g id="edge4" class="edge">
+<g id="edge7" class="edge">
 <title>N3&#45;&gt;N4</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M115.3268,-825.9673C117.6174,-816.1801 119.9789,-806.0901 122.3056,-796.1487"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="125.7458,-796.8081 124.6168,-786.2736 118.93,-795.2128 125.7458,-796.8081"/>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M344.5315,-1301.8748C321.9439,-1279.1664 295.5653,-1252.6468 272.272,-1229.2289"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="274.6834,-1226.6902 265.1497,-1222.0686 269.7205,-1231.6268 274.6834,-1226.6902"/>
 </g>
 <!-- N5 -->
-<g id="node6" class="node">
+<g id="node8" class="node">
 <title>N5</title>
-<ellipse fill="none" stroke="#ffa500" cx="180" cy="-524" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="180" cy="-524" rx="76" ry="76"/>
-<text text-anchor="middle" x="180" y="-520.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:reconnect</text>
+<ellipse fill="none" stroke="#ffa500" cx="303.3688" cy="-980" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="303.3688" cy="-980" rx="76" ry="76"/>
+<text text-anchor="middle" x="303.3688" y="-976.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:reconnect</text>
 </g>
 <!-- N3&#45;&gt;N5 -->
-<g id="edge5" class="edge">
+<g id="edge8" class="edge">
 <title>N3&#45;&gt;N5</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M66.8389,-830.5319C46.9319,-775.6268 29.5354,-698.153 57,-636 68.0235,-611.0535 87.6518,-589.2711 108.0365,-571.7132"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="110.423,-574.2805 115.8697,-565.193 105.9448,-568.9004 110.423,-574.2805"/>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M379.7132,-1282.1631C363.857,-1219.406 341.0993,-1129.3335 324.5591,-1063.869"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="327.9058,-1062.8267 322.0628,-1053.9888 321.1191,-1064.5415 327.9058,-1062.8267"/>
 </g>
 <!-- N3&#45;&gt;D0 -->
-<g id="edge17" class="edge">
+<g id="edge18" class="edge">
 <title>N3&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M149.6142,-956.282C162.8016,-972.5015 176.1362,-989.5814 186.4752,-1003.5856"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="183.7873,-1005.8407 192.4974,-1011.8726 189.45,-1001.7255 183.7873,-1005.8407"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M429.2219,-1286.1488C463.1132,-1212.2797 512.3688,-1091.2645 512.3688,-980 512.3688,-980 512.3688,-980 512.3688,-792 512.3688,-474.3192 783.7821,-319.7602 575.3688,-80 552.9853,-54.2499 469.2016,-38.7797 405.305,-30.3735"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="405.5966,-26.8823 395.2332,-29.0845 404.708,-33.8257 405.5966,-26.8823"/>
 </g>
 <!-- D1 -->
 <g id="node14" class="node">
 <title>D1</title>
-<polygon fill="none" stroke="#000000" points="326,-18 290,-36 218,-36 182,-18 218,0 290,0 326,-18"/>
-<text text-anchor="middle" x="254" y="-14.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:damaged</text>
+<polygon fill="none" stroke="#000000" points="350.3688,-98 314.3688,-116 242.3688,-116 206.3688,-98 242.3688,-80 314.3688,-80 350.3688,-98"/>
+<text text-anchor="middle" x="278.3688" y="-94.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:damaged</text>
 </g>
 <!-- N3&#45;&gt;D1 -->
-<g id="edge25" class="edge">
+<g id="edge26" class="edge">
 <title>N3&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M58.9011,-834.7037C51.302,-819.7688 44.1186,-803.6682 39,-788 2.1684,-675.257 0,-642.6067 0,-524 0,-524 0,-524 0,-336 0,-208.8889 18.5148,-160.2479 110,-72 131.8215,-50.9507 162.6171,-37.9503 190.132,-29.9999"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="191.3646,-33.292 200.0979,-27.2937 189.5302,-26.5367 191.3646,-33.292"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M322.8204,-1347.7263C258.5111,-1336.2473 169.8464,-1308.9217 126.3688,-1244 59.1011,-1143.5544 -158.2652,-797.8876 204.3688,-152 211.1318,-139.9545 221.8244,-129.8664 233.0015,-121.7941"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="235.2451,-124.5029 241.5861,-116.0151 231.3361,-118.6961 235.2451,-124.5029"/>
 </g>
 <!-- N4&#45;&gt;N5 -->
-<g id="edge6" class="edge">
+<g id="edge9" class="edge">
 <title>N4&#45;&gt;N5</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M157.0732,-637.4274C158.9802,-627.9927 160.9424,-618.2849 162.8783,-608.7071"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="166.3322,-609.2852 164.8829,-598.79 159.471,-607.8983 166.3322,-609.2852"/>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M244.8432,-1099.5959C251.5443,-1085.9023 258.6292,-1071.4243 265.4677,-1057.4501"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="268.6675,-1058.8738 269.9194,-1048.3532 262.38,-1055.7969 268.6675,-1058.8738"/>
 </g>
 <!-- N4&#45;&gt;D0 -->
-<g id="edge18" class="edge">
+<g id="edge19" class="edge">
 <title>N4&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M170.8211,-782.5993C175.5042,-796.1615 179.8456,-810.3838 183,-824 197.3463,-885.9266 202.3514,-960.3122 204.0876,-1001.8649"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="200.5932,-1002.0802 204.4707,-1011.94 207.5881,-1001.8142 200.5932,-1002.0802"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M182.2708,-1097.5883C130.6372,-966.0773 28.3688,-673.1423 28.3688,-416 28.3688,-416 28.3688,-416 28.3688,-228 28.3688,-154.2584 42.6488,-121.8439 103.3688,-80 128.4366,-62.7251 202.1607,-45.6583 258.4514,-34.5595"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="259.2404,-37.9717 268.3878,-32.6261 257.9034,-31.1006 259.2404,-37.9717"/>
 </g>
 <!-- N4&#45;&gt;D1 -->
-<g id="edge26" class="edge">
+<g id="edge27" class="edge">
 <title>N4&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M108.3518,-643.7605C102.8165,-629.615 97.934,-614.574 95,-600 81.6674,-533.7732 87.5907,-515.148 95,-448 113.78,-277.8024 84.6917,-214.919 179,-72 186.8932,-60.0384 198.4086,-49.85 210.0577,-41.6536"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="212.1179,-44.487 218.5139,-36.0405 208.2466,-38.6549 212.1179,-44.487"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M209.2083,-1091.7553C205.5506,-945.7608 199.6177,-616.649 211.3688,-340 214.9276,-256.2179 193.1085,-228.5397 227.3688,-152 232.1284,-141.3666 239.8803,-131.5131 247.9197,-123.2133"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="250.564,-125.5245 255.3061,-116.0501 245.6908,-120.4994 250.564,-125.5245"/>
 </g>
 <!-- N6 -->
-<g id="node7" class="node">
+<g id="node9" class="node">
 <title>N6</title>
-<ellipse fill="none" stroke="#ffa500" cx="334" cy="-336" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="334" cy="-336" rx="76" ry="76"/>
-<text text-anchor="middle" x="334" y="-332.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:rejoin</text>
+<ellipse fill="none" stroke="#ffa500" cx="303.3688" cy="-792" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="303.3688" cy="-792" rx="76" ry="76"/>
+<text text-anchor="middle" x="303.3688" y="-788.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:rejoin</text>
 </g>
 <!-- N5&#45;&gt;N6 -->
-<g id="edge7" class="edge">
+<g id="edge10" class="edge">
 <title>N5&#45;&gt;N6</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M228.304,-465.0314C244.4236,-445.353 262.5011,-423.2844 279.0854,-403.0386"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="281.9735,-405.0361 285.6028,-395.0823 276.5583,-400.6003 281.9735,-405.0361"/>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M303.3688,-903.8042C303.3688,-895.4826 303.3688,-886.9678 303.3688,-878.5337"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="306.8689,-878.3042 303.3688,-868.3043 299.8689,-878.3043 306.8689,-878.3042"/>
 </g>
 <!-- N5&#45;&gt;D0 -->
-<g id="edge19" class="edge">
+<g id="edge20" class="edge">
 <title>N5&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M213.6482,-592.2395C219.1835,-606.385 224.066,-621.426 227,-636 253.9006,-769.6228 226.3843,-933.2287 212.2624,-1001.5818"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="208.7642,-1001.2075 210.1213,-1011.715 215.613,-1002.6546 208.7642,-1001.2075"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M254.0847,-921.7614C241.397,-905.0698 228.485,-886.3944 218.3688,-868 116.2346,-682.2886 66.3688,-627.9436 66.3688,-416 66.3688,-416 66.3688,-416 66.3688,-228 66.3688,-152.5844 87.2542,-122.7701 149.3688,-80 181.6933,-57.7424 223.1014,-43.6017 257.948,-34.8615"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="259.1567,-38.1706 268.0605,-32.4284 257.5192,-31.3648 259.1567,-38.1706"/>
 </g>
 <!-- N5&#45;&gt;D1 -->
-<g id="edge27" class="edge">
+<g id="edge28" class="edge">
 <title>N5&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M174.0783,-447.8601C169.6026,-355.8973 170.5717,-197.8787 216,-72 219.5236,-62.2364 225.3085,-52.624 231.3143,-44.2773"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="234.2827,-46.1569 237.5673,-36.0841 228.7181,-41.91 234.2827,-46.1569"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M355.2,-924.1401C368.1413,-907.2479 380.5465,-887.8749 388.3688,-868 419.1254,-789.8535 403.1412,-763.8747 407.3688,-680 410.7695,-612.5301 422.8899,-593.7484 407.3688,-528 403.2588,-510.59 395.5752,-508.7774 389.3688,-492 335.3278,-345.9134 357.3181,-298.4858 304.3688,-152 301.1307,-143.0417 296.8836,-133.5849 292.7633,-125.1317"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="295.8407,-123.4606 288.2256,-116.0943 289.5849,-126.6016 295.8407,-123.4606"/>
 </g>
 <!-- N7 -->
-<g id="node8" class="node">
+<g id="node10" class="node">
 <title>N7</title>
-<ellipse fill="none" stroke="#ffa500" cx="401" cy="-148" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#ffa500" cx="401" cy="-148" rx="76" ry="76"/>
-<text text-anchor="middle" x="401" y="-144.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:clientreplay</text>
+<ellipse fill="none" stroke="#ffa500" cx="322.3688" cy="-604" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#ffa500" cx="322.3688" cy="-604" rx="76" ry="76"/>
+<text text-anchor="middle" x="322.3688" y="-600.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:clientreplay</text>
 </g>
 <!-- N6&#45;&gt;N7 -->
-<g id="edge8" class="edge">
+<g id="edge11" class="edge">
 <title>N6&#45;&gt;N7</title>
-<path fill="none" stroke="#ffa500" stroke-width="2" d="M359.521,-264.389C363.5946,-252.9584 367.8354,-241.0588 371.9829,-229.4212"/>
-<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="375.3789,-230.3179 375.4391,-219.7232 368.7851,-227.9679 375.3789,-230.3179"/>
+<path fill="none" stroke="#ffa500" stroke-width="2" d="M311.0147,-716.3459C311.8988,-707.5982 312.8051,-698.6301 313.7015,-689.7605"/>
+<polygon fill="#ffa500" stroke="#ffa500" stroke-width="2" points="317.1843,-690.1066 314.7076,-679.8054 310.2197,-689.4027 317.1843,-690.1066"/>
 </g>
 <!-- N6&#45;&gt;S0 -->
-<g id="edge12" class="edge">
+<g id="edge15" class="edge">
 <title>N6&#45;&gt;S0</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M374.6915,-400.4499C416.6522,-473.2042 476,-596.5177 476,-712 476,-1034 476,-1034 476,-1034 476,-1127.7146 490.1099,-1156.3293 457,-1244 450.8791,-1260.2075 441.7293,-1276.1446 431.7345,-1290.685"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="428.7105,-1288.8981 425.7686,-1299.0763 434.4156,-1292.9542 428.7105,-1288.8981"/>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M360.4621,-741.3761C377.7051,-723.4948 395.2541,-702.2572 407.3688,-680 437.473,-624.6922 454.3874,-555.4885 463.6883,-501.8117"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="467.1928,-502.0756 465.3932,-491.6348 460.289,-500.919 467.1928,-502.0756"/>
 </g>
 <!-- N6&#45;&gt;D0 -->
-<g id="edge20" class="edge">
+<g id="edge21" class="edge">
 <title>N6&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M350.4852,-410.3817C374.2134,-536.5952 404.3269,-796.4598 298,-976 289.2477,-990.7789 275.1051,-1002.3467 260.4616,-1011.1304"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="258.3409,-1008.3063 251.323,-1016.2436 261.7588,-1014.4152 258.3409,-1008.3063"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M248.8296,-738.5591C234.4494,-721.2875 220.6505,-701.1069 212.3688,-680 114.7809,-431.287 23.5812,-299.8683 175.3688,-80 186.1466,-64.3881 227.326,-49.2787 264.2499,-38.3971"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="265.3806,-41.7138 274.022,-35.5839 263.444,-34.987 265.3806,-41.7138"/>
 </g>
 <!-- N6&#45;&gt;D1 -->
-<g id="edge28" class="edge">
+<g id="edge29" class="edge">
 <title>N6&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M315.4453,-262.2451C298.1087,-193.3322 273.1414,-94.0872 261.0685,-46.0972"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="264.3934,-44.9675 258.5594,-36.1235 257.6049,-46.6753 264.3934,-44.9675"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M259.0732,-730.0838C250.1412,-714.4524 242.0601,-697.2016 237.3688,-680 180.9547,-473.1485 245.862,-210.4698 270.032,-125.7018"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="273.41,-126.6196 272.8319,-116.0405 266.6866,-124.671 273.41,-126.6196"/>
 </g>
 <!-- N7&#45;&gt;S0 -->
-<g id="edge9" class="edge">
+<g id="edge12" class="edge">
 <title>N7&#45;&gt;S0</title>
-<path fill="none" stroke="#00ff00" stroke-width="2" d="M443.6651,-211.1162C488.3175,-283.4252 552,-407.0513 552,-524 552,-1034 552,-1034 552,-1034 552,-1132.382 532.7639,-1159.7264 482,-1244 470.3882,-1263.2768 454.9343,-1281.9188 439.391,-1298.2812"/>
-<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="436.7092,-1296.0189 432.2566,-1305.6327 441.7326,-1300.8939 436.7092,-1296.0189"/>
+<path fill="none" stroke="#00ff00" stroke-width="2" d="M370.2539,-544.7737C386.0793,-525.2002 403.7979,-503.285 420.0683,-483.1611"/>
+<polygon fill="#00ff00" stroke="#00ff00" stroke-width="2" points="422.8978,-485.2284 426.4633,-475.2515 417.4543,-480.8273 422.8978,-485.2284"/>
 </g>
 <!-- N7&#45;&gt;D0 -->
-<g id="edge21" class="edge">
+<g id="edge22" class="edge">
 <title>N7&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M414.0091,-223.1818C415.8671,-235.4615 417.6144,-248.0669 419,-260 432.5684,-376.8517 438,-406.3632 438,-524 438,-712 438,-712 438,-712 438,-837.0027 427.1077,-885.3846 341,-976 322.1123,-995.8765 295.3079,-1009.2638 270.482,-1018.1167"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="269.0809,-1014.8955 260.7233,-1021.4071 271.3175,-1021.5286 269.0809,-1014.8955"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M278.6936,-541.4308C212.0474,-437.4794 102.2193,-226.1856 197.3688,-80 211.4404,-58.3807 235.6107,-44.7591 259.3879,-36.2034"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="260.7836,-39.4276 269.1659,-32.9478 258.5723,-32.7861 260.7836,-39.4276"/>
 </g>
 <!-- N7&#45;&gt;D1 -->
-<g id="edge29" class="edge">
+<g id="edge30" class="edge">
 <title>N7&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M343.745,-97.3664C322.6326,-78.6955 299.6377,-58.3599 282.21,-42.9476"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="284.4307,-40.2392 274.6212,-36.2364 279.7935,-45.4829 284.4307,-40.2392"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M315.7815,-528.2467C305.8418,-413.94 287.4667,-202.6265 280.8359,-126.3719"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="284.2984,-125.788 279.9452,-116.1288 277.3248,-126.3945 284.2984,-125.788"/>
 </g>
 <!-- S3 -->
 <g id="node12" class="node">
 <title>S3</title>
-<ellipse fill="none" stroke="#a020f0" cx="372" cy="-1168" rx="72" ry="72"/>
-<ellipse fill="none" stroke="#a020f0" cx="372" cy="-1168" rx="76" ry="76"/>
-<text text-anchor="middle" x="372" y="-1164.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:stopping</text>
+<ellipse fill="none" stroke="#a020f0" cx="474.3688" cy="-228" rx="72" ry="72"/>
+<ellipse fill="none" stroke="#a020f0" cx="474.3688" cy="-228" rx="76" ry="76"/>
+<text text-anchor="middle" x="474.3688" y="-224.3" font-family="Times,serif" font-size="14.00" fill="#000000">up:stopping</text>
 </g>
 <!-- S0&#45;&gt;S3 -->
-<g id="edge15" class="edge">
+<g id="edge16" class="edge">
 <title>S0&#45;&gt;S3</title>
-<path fill="none" stroke="#a020f0" stroke-width="2" d="M373.7841,-1279.8042C373.6487,-1271.318 373.5101,-1262.6309 373.3729,-1254.0333"/>
-<polygon fill="#a020f0" stroke="#a020f0" stroke-width="2" points="376.872,-1253.9418 373.2127,-1243.9989 369.8728,-1254.0536 376.872,-1253.9418"/>
+<path fill="none" stroke="#a020f0" stroke-width="2" d="M474.3688,-339.8042C474.3688,-331.4826 474.3688,-322.9678 474.3688,-314.5337"/>
+<polygon fill="#a020f0" stroke="#a020f0" stroke-width="2" points="477.8689,-314.3042 474.3688,-304.3043 470.8689,-314.3043 477.8689,-314.3042"/>
 </g>
 <!-- S0&#45;&gt;D0 -->
-<g id="edge22" class="edge">
+<g id="edge23" class="edge">
 <title>S0&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M324.7658,-1298.7259C311.4118,-1281.8155 297.7347,-1262.7868 287,-1244 253.0065,-1184.5078 227.1586,-1108.1765 214.2126,-1065.8585"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="217.5124,-1064.678 211.2761,-1056.113 210.8101,-1066.6976 217.5124,-1064.678"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M527.0686,-361.1971C540.1945,-344.1784 552.4791,-324.4748 559.3688,-304 591.1196,-209.6433 620.5965,-158.5014 559.3688,-80 540.2875,-55.5355 464.1975,-39.8225 404.4932,-31.0076"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="404.7058,-27.5023 394.3101,-29.5466 403.7116,-34.4314 404.7058,-27.5023"/>
 </g>
 <!-- S0&#45;&gt;D1 -->
-<g id="edge30" class="edge">
+<g id="edge31" class="edge">
 <title>S0&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M442.7372,-1320.9248C473.4011,-1301.9277 507.694,-1275.8005 530,-1244 585.7414,-1164.5323 590,-1131.0681 590,-1034 590,-1034 590,-1034 590,-336 590,-214.8369 609.9264,-155.3634 522,-72 494.3419,-45.7772 395.9102,-31.2035 326.3899,-23.9832"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="326.6553,-20.4923 316.3544,-22.9708 325.9526,-27.4569 326.6553,-20.4923"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M424.3129,-358.5726C411.5474,-341.8108 398.7888,-322.897 389.3688,-304 358.0472,-241.1673 385.2932,-210.4284 346.3688,-152 338.776,-140.6027 328.0091,-130.4943 317.2769,-122.1953"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="319.2611,-119.31 309.1298,-116.2103 315.1168,-124.9514 319.2611,-119.31"/>
 </g>
 <!-- S3&#45;&gt;D0 -->
-<g id="edge23" class="edge">
+<g id="edge24" class="edge">
 <title>S3&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M312.6518,-1120.3793C288.6501,-1101.1204 261.7353,-1079.5241 240.8219,-1062.7433"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="242.8216,-1059.8605 232.8316,-1056.3319 238.4408,-1065.3202 242.8216,-1059.8605"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M430.366,-165.9139C403.7723,-128.3912 371.0479,-82.2184 349.9551,-52.4574"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="352.7039,-50.2828 344.0659,-44.148 346.9928,-54.3305 352.7039,-50.2828"/>
 </g>
 <!-- S3&#45;&gt;D1 -->
-<g id="edge31" class="edge">
+<g id="edge32" class="edge">
 <title>S3&#45;&gt;D1</title>
-<path fill="none" stroke="#000000" stroke-width="2" d="M422.8642,-1111.3791C463.6096,-1060.0328 514,-980.2995 514,-900 514,-900 514,-900 514,-336 514,-218.0086 564.146,-160.4035 486,-72 464.8015,-48.019 384.4146,-33.2954 324.1685,-25.4019"/>
-<polygon fill="#000000" stroke="#000000" stroke-width="2" points="324.2918,-21.8895 313.9302,-24.1001 323.4088,-28.8336 324.2918,-21.8895"/>
+<path fill="none" stroke="#000000" stroke-width="2" d="M417.8114,-176.8892C407.0411,-168.1353 395.6128,-159.467 384.3688,-152 366.1181,-139.88 344.7922,-128.5324 326.0732,-119.3982"/>
+<polygon fill="#000000" stroke="#000000" stroke-width="2" points="327.366,-116.1365 316.8361,-114.965 324.3371,-122.4473 327.366,-116.1365"/>
 </g>
 <!-- D3 -->
 <g id="node15" class="node">
 <title>D3</title>
-<polygon fill="none" stroke="#a020f0" points="448,-1034 412,-1052 340,-1052 304,-1034 340,-1016 412,-1016 448,-1034"/>
-<text text-anchor="middle" x="376" y="-1030.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:stopped</text>
+<polygon fill="none" stroke="#a020f0" points="550.3688,-98 514.3688,-116 442.3688,-116 406.3688,-98 442.3688,-80 514.3688,-80 550.3688,-98"/>
+<text text-anchor="middle" x="478.3688" y="-94.3" font-family="Times,serif" font-size="14.00" fill="#000000">down:stopped</text>
 </g>
 <!-- S3&#45;&gt;D3 -->
-<g id="edge33" class="edge">
+<g id="edge34" class="edge">
 <title>S3&#45;&gt;D3</title>
-<path fill="none" stroke="#a020f0" stroke-width="2" d="M374.2688,-1091.995C374.5847,-1081.4121 374.8915,-1071.1346 375.1569,-1062.2444"/>
-<polygon fill="#a020f0" stroke="#a020f0" stroke-width="2" points="378.6578,-1062.2611 375.4579,-1052.1611 371.661,-1062.0522 378.6578,-1062.2611"/>
+<path fill="none" stroke="#a020f0" stroke-width="2" d="M476.7072,-152.0023C476.9861,-142.9385 477.2559,-134.1686 477.4942,-126.4252"/>
+<polygon fill="#a020f0" stroke="#a020f0" stroke-width="2" points="480.9983,-126.3415 477.8076,-116.2385 474.0016,-126.1261 480.9983,-126.3415"/>
 </g>
 <!-- D0&#45;&gt;N3 -->
-<g id="edge24" class="edge">
+<g id="edge25" class="edge">
 <title>D0&#45;&gt;N3</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M182.1648,-1011.8726C171.9859,-1000.3518 159.6454,-985.5549 147.5698,-970.521"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="150.2651,-968.2872 141.2921,-962.6537 144.7935,-972.6532 150.2651,-968.2872"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M397.758,-27.8117C467.676,-35.4452 568.4799,-51.3676 593.3688,-80 801.7821,-319.7602 530.3688,-474.3192 530.3688,-792 530.3688,-980 530.3688,-980 530.3688,-980 530.3688,-1089.5002 482.6629,-1208.4443 444.4976,-1282.5994"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="441.1858,-1281.3802 439.6575,-1291.8642 447.3902,-1284.6215 441.1858,-1281.3802"/>
 </g>
 <!-- D1&#45;&gt;D0 -->
-<g id="edge32" class="edge">
+<g id="edge33" class="edge">
 <title>D1&#45;&gt;D0</title>
-<path fill="none" stroke="#ff0000" stroke-width="2" d="M253.4952,-36.2127C252.406,-76.5308 249.8638,-176.379 249,-260 248.3022,-327.552 234.976,-345.9161 249,-412 252.6347,-429.1277 261.3653,-430.8723 265,-448 279.024,-514.0839 267.0633,-532.476 265,-600 259.8865,-767.3454 293.786,-816.7869 242,-976 238.9016,-985.5258 233.8623,-995.035 228.4863,-1003.5163"/>
-<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="225.4987,-1001.687 222.8293,-1011.94 231.31,-1005.5896 225.4987,-1001.687"/>
+<path fill="none" stroke="#ff0000" stroke-width="2" d="M290.2181,-79.9891C295.5579,-71.8726 302.0166,-62.0553 308.0897,-52.8243"/>
+<polygon fill="#ff0000" stroke="#ff0000" stroke-width="2" points="311.1727,-54.5061 313.745,-44.2282 305.3248,-50.6587 311.1727,-54.5061"/>
 </g>
 </g>
 </svg>
diff -pruN 14.2.16-2/doc/cephfs/mds-states.rst 15.2.7-0ubuntu4/doc/cephfs/mds-states.rst
--- 14.2.16-2/doc/cephfs/mds-states.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mds-states.rst	2020-11-30 19:58:30.000000000 +0000
@@ -211,7 +211,7 @@ Color
 - Orange: MDS is in transient state trying to become active.
 - Red: MDS is indicating a state that causes the rank to be marked failed.
 - Purple: MDS and rank is stopping.
-- Red: MDS is indicating a state that causes the rank to be marked damaged.
+- Black: MDS is indicating a state that causes the rank to be marked damaged.
 
 Shape
 ~~~~~
diff -pruN 14.2.16-2/doc/cephfs/mount-prerequisites.rst 15.2.7-0ubuntu4/doc/cephfs/mount-prerequisites.rst
--- 14.2.16-2/doc/cephfs/mount-prerequisites.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mount-prerequisites.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,70 @@
+Mount CephFS: Prerequisites
+===========================
+
+You can use CephFS by mounting it to your local filesystem or by using
+`cephfs-shell`_. CephFS can be mounted `using kernel`_ as well as `using
+FUSE`_. Both have their own advantages. Read the following section to
+understand more about both of these ways to mount CephFS.
+
+Which CephFS Client?
+--------------------
+
+The FUSE client is the most accessible and the easiest to upgrade to the
+version of Ceph used by the storage cluster, while the kernel client will
+always give better performance.
+
+When encountering bugs or performance issues, it is often instructive to
+try using the other client, in order to find out whether the bug was
+client-specific or not (and then to let the developers know).
+
+General Pre-requisite for Mounting CephFS
+-----------------------------------------
+Before mounting CephFS, ensure that the client host (where CephFS has to be
+mounted and used) has a copy of the Ceph configuration file (i.e.
+``ceph.conf``) and a keyring of the CephX user that has permission to access
+the MDS. Both of these files must already be present on the host where the
+Ceph MON resides.
+
+#. Generate a minimal conf file for the client host and place it at a
+   standard location::
+
+    # on client host
+    mkdir -p -m 755 /etc/ceph
+    ssh {user}@{mon-host} "sudo ceph config generate-minimal-conf" | sudo tee /etc/ceph/ceph.conf
+
+   Alternatively, you may copy the conf file, but the above method generates
+   a conf with minimal details, which is usually sufficient. For more
+   information, see `Client Authentication`_ and :ref:`bootstrap-options`.
+
+#. Ensure that the conf has appropriate permissions::
+
+    chmod 644 /etc/ceph/ceph.conf
+
+#. Create a CephX user and get its secret key::
+
+    ssh {user}@{mon-host} "sudo ceph fs authorize cephfs client.foo / rw" | sudo tee /etc/ceph/ceph.client.foo.keyring
+
+   In the above command, replace ``cephfs`` with the name of your CephFS,
+   ``foo`` with the name you want for your CephX user, and ``/`` with the
+   path within your CephFS to which the client host should be allowed access;
+   ``rw`` grants both read and write permissions. Alternatively, you may copy
+   the Ceph keyring from the MON host to the client host at ``/etc/ceph``,
+   but creating a keyring specific to the client host is better. While
+   creating a CephX keyring/client, using the same client name across
+   multiple machines is perfectly fine.
+
+   .. note:: If you are prompted for a password twice while running either of
+             the above commands, run ``sudo ls`` (or any other trivial command
+             with sudo) immediately before these commands.
+
+#. Ensure that the keyring has appropriate permissions::
+
+    chmod 600 /etc/ceph/ceph.client.foo.keyring
+
+.. note:: There might be a few more prerequisites specific to kernel and FUSE
+   mounts individually; please check the respective mount documents.
+
+.. _Client Authentication: ../client-auth
+.. _cephfs-shell: ../cephfs-shell
+.. _using kernel: ../mount-using-kernel-driver
+.. _using FUSE: ../mount-using-fuse
diff -pruN 14.2.16-2/doc/cephfs/mount-using-fuse.rst 15.2.7-0ubuntu4/doc/cephfs/mount-using-fuse.rst
--- 14.2.16-2/doc/cephfs/mount-using-fuse.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mount-using-fuse.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,108 @@
+========================
+ Mount CephFS using FUSE
+========================
+
+`ceph-fuse`_ is an alternate way of mounting CephFS, although it mounts the
+file system in userspace. Therefore, the performance of FUSE can be relatively
+lower, but FUSE clients can be more manageable, especially while upgrading
+CephFS.
+
+Prerequisites
+=============
+
+Complete General Prerequisites
+------------------------------
+Go through the prerequisites required by both kernel and FUSE mounts, on the
+`Mount CephFS: Prerequisites`_ page.
+
+``fuse.conf`` option
+--------------------
+
+#. If you are mounting CephFS with FUSE as a user other than the
+   superuser/root/system admin, you need to add the option
+   ``user_allow_other`` to ``/etc/fuse.conf`` (the option goes under no
+   section in the conf), as shown in the sketch below.
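+
+   For example, a minimal sketch of ``/etc/fuse.conf`` with this option
+   enabled (your distribution may ship other options in this file)::
+
+       # allow non-root users to use the allow_other mount option
+       user_allow_other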
+
+Synopsis
+========
+In general, the command to mount CephFS via FUSE looks like this::
+
+    ceph-fuse {mountpoint} {options}
+
+Mounting CephFS
+===============
+To FUSE-mount the Ceph file system, use the ``ceph-fuse`` command::
+
+    mkdir /mnt/mycephfs
+    ceph-fuse --id foo /mnt/mycephfs
+
+The option ``--id`` passes the name of the CephX user whose keyring we intend
+to use for mounting CephFS. In the above command, it's ``foo``. You can also
+use ``-n`` instead, although ``--id`` is usually more convenient::
+
+    ceph-fuse -n client.foo /mnt/mycephfs
+
+In case the keyring is not present in one of the standard locations, you may
+pass its path explicitly::
+
+    ceph-fuse --id foo -k /path/to/keyring /mnt/mycephfs
+
+You may also pass the MON's address, although this is not mandatory::
+
+    ceph-fuse --id foo -m 192.168.0.1:6789 /mnt/mycephfs
+
+You can also mount a specific directory within CephFS instead of mounting the
+root of CephFS on your local FS::
+
+    ceph-fuse --id foo -r /path/to/dir /mnt/mycephfs
+
+If you have more than one FS on your Ceph cluster, use the option
+``--client_fs`` to mount the non-default FS::
+
+    ceph-fuse --id foo --client_fs mycephfs2 /mnt/mycephfs2
+
+You may also add a ``client_fs`` setting to your ``ceph.conf``.
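+
+A sketch of such a ``ceph.conf`` entry (the section placement and file system
+name are illustrative; adjust them to your setup)::
+
+    [client]
+            client_fs = mycephfs2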
+
+Unmounting CephFS
+=================
+
+Use ``umount`` to unmount CephFS like any other FS::
+
+    umount /mnt/mycephfs
+
+.. tip:: Ensure that you are not within the file system directories before
+   executing this command.
+
+Persistent Mounts
+=================
+
+To mount CephFS as a file system in user space, add the following to ``/etc/fstab``::
+
+       #DEVICE PATH       TYPE      OPTIONS
+       none    /mnt/mycephfs  fuse.ceph ceph.id={user-ID}[,ceph.conf={path/to/conf.conf}],_netdev,defaults  0 0
+
+For example::
+
+       none    /mnt/mycephfs  fuse.ceph ceph.id=myuser,_netdev,defaults  0 0
+       none    /mnt/mycephfs  fuse.ceph ceph.id=myuser,ceph.conf=/etc/ceph/foo.conf,_netdev,defaults  0 0
+
+Ensure you use the ID (e.g., ``myuser``, not ``client.myuser``). You can pass
+any valid ``ceph-fuse`` option to the command line this way.
+
+To mount a subdirectory of the CephFS, add the following to ``/etc/fstab``::
+
+       none    /mnt/mycephfs  fuse.ceph ceph.id=myuser,ceph.client_mountpoint=/path/to/dir,_netdev,defaults  0 0
+
+``ceph-fuse@.service`` and ``ceph-fuse.target`` systemd units are available.
+As usual, these unit files declare the default dependencies and recommended
+execution context for ``ceph-fuse``. After making the fstab entry shown above,
+run the following commands::
+
+    systemctl start ceph-fuse@/mnt/mycephfs.service
+    systemctl enable ceph-fuse.target
+    systemctl enable ceph-fuse@-mnt-mycephfs.service
+
+See :ref:`User Management <user-management>` for details on CephX user
+management and the `ceph-fuse`_ manual for more options it can take. For
+troubleshooting, see :ref:`ceph_fuse_debugging`.
+
+.. _ceph-fuse: ../../man/8/ceph-fuse/#options
+.. _Mount CephFS\: Prerequisites: ../mount-prerequisites
diff -pruN 14.2.16-2/doc/cephfs/mount-using-kernel-driver.rst 15.2.7-0ubuntu4/doc/cephfs/mount-using-kernel-driver.rst
--- 14.2.16-2/doc/cephfs/mount-using-kernel-driver.rst	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/mount-using-kernel-driver.rst	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1,131 @@
+=================================
+ Mount CephFS using Kernel Driver
+=================================
+
+The CephFS kernel driver is part of the Linux kernel. It allows mounting
+CephFS as a regular file system with native kernel performance. It is the
+client of choice for most use-cases.
+
+Prerequisites
+=============
+
+Complete General Prerequisites
+------------------------------
+Go through the prerequisites required by both kernel and FUSE mounts, on the
+`Mount CephFS: Prerequisites`_ page.
+
+Is the mount helper present?
+----------------------------
+The ``mount.ceph`` helper is installed by the Ceph packages. The helper passes
+the monitor address(es) and CephX user keyring automatically, saving the Ceph
+admin the effort to pass these details explicitly while mounting CephFS. If
+the helper is not present on the client machine, CephFS can still be mounted
+with the kernel driver, but the details must be passed explicitly to the
+``mount`` command. To check whether the helper is present on your system, do::
+
+    stat /sbin/mount.ceph
+
+Which Kernel Version?
+---------------------
+
+Because the kernel client is distributed as part of the Linux kernel (not
+as part of packaged Ceph releases), you will need to consider which kernel
+version to use on your client nodes. Older kernels are known to include buggy
+Ceph clients, and may not support features that more recent Ceph clusters
+support.
+
+Remember that the "latest" kernel in a stable Linux distribution is likely
+to be years behind the latest upstream Linux kernel where Ceph development
+takes place (including bug fixes).
+
+As a rough guide, as of Ceph 10.x (Jewel), you should be using at least a 4.x
+kernel. If you absolutely have to use an older kernel, you should use the
+FUSE client instead of the kernel client.
+
+This advice does not apply if you are using a Linux distribution that
+includes CephFS support, as in this case the distributor will be responsible
+for backporting fixes to their stable kernel: check with your vendor.
+
+Synopsis
+========
+In general, the command to mount CephFS via kernel driver looks like this::
+
+    mount -t ceph {device-string}:{path-to-mounted} {mount-point} -o {key-value-args} {other-args}
+
+Mounting CephFS
+===============
+On Ceph clusters, CephX is enabled by default. Use the ``mount`` command to
+mount CephFS with the kernel driver::
+
+    mkdir /mnt/mycephfs
+    mount -t ceph :/ /mnt/mycephfs -o name=foo
+
+The key-value argument right after option ``-o`` is the CephX credential;
+``name`` is the username of the CephX user we are using to mount CephFS. The
+default value for ``name`` is ``guest``.
+
+The kernel driver also requires the MON's address and the secret key of the
+CephX user. In the case of the above command, the ``mount.ceph`` helper
+figures out these details automatically by finding and reading the Ceph conf
+file and keyring. If you don't have these files on the host where you're
+running the mount command, you can pass these details yourself too::
+
+    mount -t ceph 192.168.0.1:6789,192.168.0.2:6789:/ /mnt/mycephfs -o name=foo,secret=AQATSKdNGBnwLhAAnNDKnH65FmVKpXZJVasUeQ==
+
+Passing a single MON address in the above command works too. A potential
+problem with the command above is that the secret key is left in your shell's
+command history. To prevent that, you can store the secret key in a file and
+pass that file by using the option ``secretfile`` instead of ``secret``::
+
+    mount -t ceph :/ /mnt/mycephfs -o name=foo,secretfile=/etc/ceph/foo.secret
+
+Ensure the permissions on the secret key file are appropriate (preferably,
+``600``).
+
+In case CephX is disabled, you can omit ``-o`` and the list of key-value
+arguments that follow it::
+
+    mount -t ceph :/ /mnt/mycephfs
+
+To mount a subtree of the CephFS root, append the path to the device string::
+
+    mount -t ceph :/subvolume/dir1/dir2 /mnt/mycephfs -o name=fs
+
+If you have more than one file system on your Ceph cluster, you can mount the
+non-default FS as follows::
+
+    mount -t ceph :/ /mnt/mycephfs2 -o name=fs,fs=mycephfs2
+
+Unmounting CephFS
+=================
+To unmount the Ceph file system, use the ``umount`` command as usual::
+
+    umount /mnt/mycephfs
+
+.. tip:: Ensure that you are not within the file system directories before
+   executing this command.
+
+Persistent Mounts
+==================
+
+To mount CephFS in your file systems table as a kernel driver, add the
+following to ``/etc/fstab``::
+
+    [{ipaddress}:{port}]:/ {mount}/{mountpoint} ceph [name=username,secret=secretkey|secretfile=/path/to/secretfile],[{mount.options}]
+
+For example::
+
+    :/     /mnt/ceph    ceph    name=admin,noatime,_netdev    0       2
+
+The default for the ``name=`` parameter is ``guest``. If the ``secret`` or
+``secretfile`` options are not specified then the mount helper will attempt to
+find a secret for the given ``name`` in one of the configured keyrings.
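+
+For example, a sketch of an fstab entry that passes the secret via
+``secretfile`` rather than relying on a keyring (the user name and path are
+illustrative)::
+
+    :/     /mnt/ceph    ceph    name=foo,secretfile=/etc/ceph/foo.secret,noatime,_netdev    0       2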
+
+See `User Management`_ for details on CephX user management and the
+mount.ceph_ manual for more options it can take. For troubleshooting, see
+:ref:`kernel_mount_debugging`.
+
+.. _fstab: ../fstab/#kernel-driver
+.. _Mount CephFS\: Prerequisites: ../mount-prerequisites
+.. _mount.ceph: ../../man/8/mount.ceph/
+.. _User Management: ../../rados/operations/user-management/
diff -pruN 14.2.16-2/doc/cephfs/multimds.rst 15.2.7-0ubuntu4/doc/cephfs/multimds.rst
--- 14.2.16-2/doc/cephfs/multimds.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/multimds.rst	2020-11-30 19:58:30.000000000 +0000
@@ -5,7 +5,7 @@ Configuring multiple active MDS daemons
 
 *Also known as: multi-mds, active-active MDS*
 
-Each CephFS filesystem is configured for a single active MDS daemon
+Each CephFS file system is configured for a single active MDS daemon
 by default.  To scale metadata performance for large scale systems, you
 may enable multiple active MDS daemons, which will share the metadata
 workload with one another.
@@ -28,8 +28,8 @@ are those with many clients, perhaps wor
 Increasing the MDS active cluster size
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Each CephFS filesystem has a *max_mds* setting, which controls how many ranks
-will be created.  The actual number of ranks in the filesystem will only be
+Each CephFS file system has a *max_mds* setting, which controls how many ranks
+will be created.  The actual number of ranks in the file system will only be
 increased if a spare daemon is available to take on the new rank. For example,
 if there is only one MDS daemon running, and max_mds is set to two, no second
 rank will be created. (Note that such a configuration is not Highly Available
@@ -135,3 +135,102 @@ directory's export pin. For example:
     setfattr -n ceph.dir.pin -v 0 a/b
     # a/b is now pinned to rank 0 and a/ and the rest of its children are still pinned to rank 1
 
+
+Setting subtree partitioning policies
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It is also possible to set up **automatic** static partitioning of subtrees via
+a set of **policies**. In CephFS, this automatic static partitioning is
+referred to as **ephemeral pinning**. Any directory (inode) which is
+ephemerally pinned will be automatically assigned to a particular rank
+according to a consistent hash of its inode number. The set of all
+ephemerally pinned directories should be uniformly distributed across all
+ranks.
+
+Ephemerally pinned directories are so named because the pin may not persist
+once the directory inode is dropped from cache. However, an MDS failover does
+not affect the ephemeral nature of the pinned directory. The MDS records what
+subtrees are ephemerally pinned in its journal so MDS failovers do not drop
+this information.
+
+A directory is either ephemerally pinned or not. Which rank it is pinned to is
+derived from its inode number and a consistent hash. This means that
+ephemerally pinned directories are somewhat evenly spread across the MDS
+cluster. The **consistent hash** also minimizes redistribution when the MDS
+cluster grows or shrinks. So, growing an MDS cluster may automatically increase
+your metadata throughput with no other administrative intervention.
+
+Presently, there are two types of ephemeral pinning:
+
+**Distributed Ephemeral Pins**: This policy indicates that **all** of a
+directory's immediate children should be ephemerally pinned. The canonical
+example would be the ``/home`` directory: we want every user's home directory
+to be spread across the entire MDS cluster. This can be set via:
+
+::
+
+    setfattr -n ceph.dir.pin.distributed -v 1 /cephfs/home
+
+
+**Random Ephemeral Pins**: This policy indicates any descendent sub-directory
+may be ephemerally pinned. This is set through the extended attribute
+``ceph.dir.pin.random`` with the value set to the percentage of directories
+that should be pinned. For example:
+
+::
+
+    setfattr -n ceph.dir.pin.random -v 0.5 /cephfs/tmp
+
+This would cause any directory loaded into cache or created under ``/tmp`` to
+be ephemerally pinned 50 percent of the time.
+
+It is recommended to only set this to small values, like ``.001`` (i.e.
+``0.1%``). Having too many subtrees may degrade performance. For this reason,
+the config ``mds_export_ephemeral_random_max`` enforces a cap on the maximum
+of this percentage (default: ``.01``). The MDS returns ``EINVAL`` when
+attempting to set a value beyond this config.
+
+Both random and distributed ephemeral pin policies are off by default in
+Octopus. The features may be enabled via the
+``mds_export_ephemeral_random`` and ``mds_export_ephemeral_distributed``
+configuration options.
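+
+For example, a sketch of enabling both policies for all MDS daemons (you may
+prefer to scope the setting to individual daemons)::
+
+    ceph config set mds mds_export_ephemeral_distributed true
+    ceph config set mds mds_export_ephemeral_random true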
+
+Ephemeral pins may override parent export pins and vice versa. What determines
+which policy is followed is the rule of the closest parent: if a closer parent
+directory has a conflicting policy, that one is used instead. For example:
+
+::
+
+    mkdir -p foo/bar1/baz foo/bar2
+    setfattr -n ceph.dir.pin -v 0 foo
+    setfattr -n ceph.dir.pin.distributed -v 1 foo/bar1
+
+The ``foo/bar1/baz`` directory will be ephemerally pinned because the
+``foo/bar1`` policy overrides the export pin on ``foo``. The ``foo/bar2``
+directory will obey the pin on ``foo`` normally.
+
+For the reverse situation:
+
+::
+
+    mkdir -p home/{patrick,john}
+    setfattr -n ceph.dir.pin.distributed -v 1 home
+    setfattr -n ceph.dir.pin -v 2 home/patrick
+
+The ``home/patrick`` directory and its children will be pinned to rank 2
+because its export pin overrides the policy on ``home``.
+
+If a directory has an export pin and an ephemeral pin policy, the export pin
+applies to the directory itself and the policy to its children. So:
+
+::
+
+    mkdir -p home/{patrick,john}
+    setfattr -n ceph.dir.pin -v 0 home
+    setfattr -n ceph.dir.pin.distributed -v 1 home
+
+The home directory inode (and all of its directory fragments) will always be
+located on rank 0. All children, including ``home/patrick`` and ``home/john``,
+will be ephemerally pinned according to the distributed policy. This may only
+matter for some obscure performance advantages. All the same, it's mentioned
+here so the override policy is clear.
diff -pruN 14.2.16-2/doc/cephfs/nfs.rst 15.2.7-0ubuntu4/doc/cephfs/nfs.rst
--- 14.2.16-2/doc/cephfs/nfs.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/nfs.rst	2020-11-30 19:58:30.000000000 +0000
@@ -8,7 +8,7 @@ CephFS namespaces can be exported over N
 Requirements
 ============
 
--  Ceph filesystem (preferably latest stable luminous or higher versions)
+-  Ceph file system (preferably latest stable luminous or higher versions)
 -  In the NFS server host machine, 'libcephfs2' (preferably latest stable
    luminous or higher), 'nfs-ganesha' and 'nfs-ganesha-ceph' packages (latest
    ganesha v2.5 stable or higher versions)
@@ -56,10 +56,10 @@ Configuration for libcephfs clients
 Required ceph.conf for libcephfs clients includes:
 
 * a [client] section with ``mon_host`` option set to let the clients connect
-  to the Ceph cluster's monitors, e.g., ::
+  to the Ceph cluster's monitors, usually generated via ``ceph config generate-minimal-conf``, e.g., ::
 
-    [client]
-            mon host = 192.168.1.7:6789, 192.168.1.8:6789, 192.168.1.9:6789
+    [global]
+            mon host = [v2:192.168.1.7:3300,v1:192.168.1.7:6789], [v2:192.168.1.8:3300,v1:192.168.1.8:6789], [v2:192.168.1.9:3300,v1:192.168.1.9:6789]
 
 Mount using NFSv4 clients
 =========================
@@ -77,5 +77,197 @@ From the command line::
 Current limitations
 ===================
 
-- Per running ganesha daemon, FSAL_CEPH can only export one Ceph filesystem
-  although multiple directories in a Ceph filesystem may be exported.
+- Per running ganesha daemon, FSAL_CEPH can only export one Ceph file system
+  although multiple directories in a Ceph file system may be exported.
+
+Exporting over NFS clusters deployed using rook
+===============================================
+
+This tutorial assumes you have a Kubernetes cluster deployed. If not, `minikube
+<https://kubernetes.io/docs/setup/learning-environment/minikube/>`_ can be used
+to set up a single-node cluster. In this tutorial, minikube is used.
+
+.. note:: The configuration in this tutorial should not be used in a real
+          production cluster. For the purpose of simplification, the security
+          aspects of Ceph are overlooked in this setup.
+
+`Rook <https://rook.io/docs/rook/master/ceph-quickstart.html>`_ Setup And Cluster Deployment
+--------------------------------------------------------------------------------------------
+
+Clone the rook repository::
+
+        git clone https://github.com/rook/rook.git
+
+Deploy the rook operator::
+
+        cd cluster/examples/kubernetes/ceph
+        kubectl create -f common.yaml
+        kubectl create -f operator.yaml
+
+.. note:: The Nautilus release or the latest Ceph image should be used.
+
+Before proceeding, check if the pods are running::
+
+        kubectl -n rook-ceph get pod
+
+
+.. note::
+        For troubleshooting on any pod use::
+
+                kubectl describe -n rook-ceph pod <pod-name>
+
+If using a minikube cluster, change the **dataDirHostPath** to **/data/rook**
+in the cluster-test.yaml file. This is to make sure data persists across
+reboots.
+
+Deploy the ceph cluster::
+
+        kubectl create -f cluster-test.yaml
+
+To interact with Ceph Daemons, let's deploy toolbox::
+
+        kubectl create -f ./toolbox.yaml
+
+Exec into the rook-ceph-tools pod::
+
+        kubectl -n rook-ceph exec -it $(kubectl -n rook-ceph get pod -l "app=rook-ceph-tools" -o jsonpath='{.items[0].metadata.name}') bash
+
+Check that you have one Ceph monitor, manager and OSD running, and that the
+cluster is healthy::
+
+        [root@minikube /]# ceph -s
+           cluster:
+                id:     3a30f44c-a9ce-4c26-9f25-cc6fd23128d0
+                health: HEALTH_OK
+
+           services:
+                mon: 1 daemons, quorum a (age 14m)
+                mgr: a(active, since 13m)
+                osd: 1 osds: 1 up (since 13m), 1 in (since 13m)
+
+           data:
+                pools:   0 pools, 0 pgs
+                objects: 0 objects, 0 B
+                usage:   5.0 GiB used, 11 GiB / 16 GiB avail
+                pgs:
+
+.. note:: A single monitor should never be used in a real production
+          deployment, as it is a single point of failure.
+
+Create a Ceph File System
+-------------------------
+Using the ceph-mgr volumes module, we will create a Ceph file system::
+
+        [root@minikube /]# ceph fs volume create myfs
+
+By default, the replicated size for a pool is 3. Since we are using only one
+OSD, this can cause errors. Let's fix this up by setting the replicated size
+to 1::
+
+        [root@minikube /]# ceph osd pool set cephfs.myfs.meta size 1
+        [root@minikube /]# ceph osd pool set cephfs.myfs.data size 1
+
+.. note:: The replicated size should never be less than 3 in a real production deployment.
+
+Check Cluster status again::
+
+        [root@minikube /]# ceph -s
+          cluster:
+            id:     3a30f44c-a9ce-4c26-9f25-cc6fd23128d0
+            health: HEALTH_OK
+
+          services:
+            mon: 1 daemons, quorum a (age 27m)
+            mgr: a(active, since 27m)
+            mds: myfs:1 {0=myfs-a=up:active} 1 up:standby-replay
+            osd: 1 osds: 1 up (since 56m), 1 in (since 56m)
+
+          data:
+            pools:   2 pools, 24 pgs
+            objects: 22 objects, 2.2 KiB
+            usage:   5.1 GiB used, 11 GiB / 16 GiB avail
+            pgs:     24 active+clean
+
+          io:
+            client:   639 B/s rd, 1 op/s rd, 0 op/s wr
+
+Create a NFS-Ganesha Server Cluster
+-----------------------------------
+Add Storage for NFS-Ganesha Servers to prevent recovery conflicts::
+
+        [root@minikube /]# ceph osd pool create nfs-ganesha 64
+        pool 'nfs-ganesha' created
+        [root@minikube /]# ceph osd pool set nfs-ganesha size 1
+        [root@minikube /]# ceph orch nfs add mynfs nfs-ganesha ganesha
+
+Here we have created an NFS-Ganesha cluster called "mynfs" in the "ganesha"
+namespace, backed by the "nfs-ganesha" OSD pool.
+
+Scale out the NFS-Ganesha cluster::
+
+        [root@minikube /]# ceph orch nfs update mynfs 2
+
+Configure NFS-Ganesha Exports
+-----------------------------
+Initially, rook creates a ClusterIP service for the dashboard. With this
+service type, only the pods in the same Kubernetes cluster can access it.
+
+Expose Ceph Dashboard port::
+
+        kubectl patch service -n rook-ceph -p '{"spec":{"type": "NodePort"}}' rook-ceph-mgr-dashboard
+        kubectl get service -n rook-ceph rook-ceph-mgr-dashboard
+        NAME                      TYPE       CLUSTER-IP       EXTERNAL-IP   PORT(S)          AGE
+        rook-ceph-mgr-dashboard   NodePort   10.108.183.148   <none>        8443:31727/TCP   117m
+
+This changes the service type to NodePort and makes the dashboard reachable
+from outside the Kubernetes cluster.
+
+Create JSON file for dashboard::
+
+        $ cat ~/export.json
+        {
+              "cluster_id": "mynfs",
+              "path": "/",
+              "fsal": {"name": "CEPH", "user_id":"admin", "fs_name": "myfs", "sec_label_xattr": null},
+              "pseudo": "/cephfs",
+              "tag": null,
+              "access_type": "RW",
+              "squash": "no_root_squash",
+              "protocols": [4],
+              "transports": ["TCP"],
+              "security_label": true,
+              "daemons": ["mynfs.a", "mynfs.b"],
+              "clients": []
+        }
+
+.. note:: Don't use this JSON file for a real production deployment, as here
+          the ganesha servers are given client-admin access rights.
+
+We need to download and run this `script
+<https://raw.githubusercontent.com/ceph/ceph/master/src/pybind/mgr/dashboard/run-backend-rook-api-request.sh>`_
+to pass the JSON file contents. The dashboard creates an NFS-Ganesha export
+file based on this JSON file::
+
+        ./run-backend-rook-api-request.sh POST /api/nfs-ganesha/export "$(cat <json-file-path>)"
+
+Expose the NFS Servers::
+
+        kubectl patch service -n rook-ceph -p '{"spec":{"type": "NodePort"}}' rook-ceph-nfs-mynfs-a
+        kubectl patch service -n rook-ceph -p '{"spec":{"type": "NodePort"}}' rook-ceph-nfs-mynfs-b
+        kubectl get services -n rook-ceph rook-ceph-nfs-mynfs-a rook-ceph-nfs-mynfs-b
+        NAME                    TYPE       CLUSTER-IP       EXTERNAL-IP   PORT(S)          AGE
+        rook-ceph-nfs-mynfs-a   NodePort   10.101.186.111   <none>        2049:31013/TCP   72m
+        rook-ceph-nfs-mynfs-b   NodePort   10.99.216.92     <none>        2049:31587/TCP   63m
+
+.. note:: Ports are chosen at random by Kubernetes from a certain range.
+          A specific port number can be set in the nodePort field of the spec.
+
+Testing access to NFS Servers
+-----------------------------
+Open a root shell on the host and mount one of the NFS servers::
+
+        mkdir -p /mnt/rook
+        mount -t nfs -o port=31013 $(minikube ip):/cephfs /mnt/rook
+
+Normal file operations can be performed on /mnt/rook if the mount is successful.
+
+.. note:: If minikube is used, then the VM host is the only client for the
+          servers. In a real Kubernetes cluster, multiple hosts can be used as
+          clients, but only when the Kubernetes cluster node IP addresses are
+          accessible to them.
diff -pruN 14.2.16-2/doc/cephfs/posix.rst 15.2.7-0ubuntu4/doc/cephfs/posix.rst
--- 14.2.16-2/doc/cephfs/posix.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/posix.rst	2020-11-30 19:58:30.000000000 +0000
@@ -88,11 +88,11 @@ fsync() and error reporting
 POSIX is somewhat vague about the state of an inode after fsync reports
 an error. In general, CephFS uses the standard error-reporting
 mechanisms in the client's kernel, and therefore follows the same
-conventions as other filesystems.
+conventions as other file systems.
 
 In modern Linux kernels (v4.17 or later), writeback errors are reported
 once to every file description that is open at the time of the error. In
-addition, unreported errors that occured before the file description was
+addition, unreported errors that occurred before the file description was
 opened will also be returned on fsync.
 
 See `PostgreSQL's summary of fsync() error reporting across operating systems
diff -pruN 14.2.16-2/doc/cephfs/scrub.rst 15.2.7-0ubuntu4/doc/cephfs/scrub.rst
--- 14.2.16-2/doc/cephfs/scrub.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/scrub.rst	2020-11-30 19:58:30.000000000 +0000
@@ -1,24 +1,24 @@
 .. _mds-scrub:
 
-=====================
-Ceph Filesystem Scrub
-=====================
+======================
+Ceph File System Scrub
+======================
 
-CephFS provides the cluster admin (operator) to check consistency of a filesystem
+CephFS allows the cluster admin (operator) to check the consistency of a file system
 via a set of scrub commands. Scrub can be classified into two parts:
 
-#. Forward Scrub: In which the scrub operation starts at the root of the filesystem
+#. Forward Scrub: In which the scrub operation starts at the root of the file system
    (or a sub directory) and looks at everything that can be touched in the hierarchy
    to ensure consistency.
 
 #. Backward Scrub: In which the scrub operation looks at every RADOS object in the
-   filesystem pools and maps it back to the filesystem hierarchy.
+   file system pools and maps it back to the file system hierarchy.
 
 This document details commands to initiate and control forward scrub (referred as
 scrub thereafter).
 
-Initiate Filesystem Scrub
-=========================
+Initiate File System Scrub
+==========================
 
 To start a scrub operation for a directory tree use the following command
 
@@ -36,7 +36,7 @@ a random string that can used to monitor
 further in this document).
 
 Custom tag can also be specified when initiating the scrub operation. Custom tags get
-persisted in the metadata object for every inode in the filesystem tree that is being
+persisted in the metadata object for every inode in the file system tree that is being
 scrubbed.
 
 ::
@@ -49,8 +49,8 @@ scrubbed.
    }
 
 
-Monitor (ongoing) Filesystem Scrubs
-===================================
+Monitor (ongoing) File System Scrubs
+====================================
 
 Status of ongoing scrubs can be monitored using in `scrub status` command. This commands
 lists out ongoing scrubs (identified by the tag) along with the path and options used to
@@ -85,8 +85,8 @@ gets displayed in `ceph status`.
 
    [...]
 
-Control (ongoing) Filesystem Scrubs
-===================================
+Control (ongoing) File System Scrubs
+====================================
 
 - Pause: Pausing ongoing scrub operations results in no new or pending inodes being
   scrubbed after in-flight RADOS ops (for the inodes that are currently being scrubbed)
diff -pruN 14.2.16-2/doc/cephfs/standby.rst 15.2.7-0ubuntu4/doc/cephfs/standby.rst
--- 14.2.16-2/doc/cephfs/standby.rst	2020-12-16 17:35:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/standby.rst	2020-11-30 19:58:30.000000000 +0000
@@ -3,14 +3,14 @@
 Terminology
 -----------
 
-A Ceph cluster may have zero or more CephFS *filesystems*.  CephFS
-filesystems have a human readable name (set in ``fs new``)
-and an integer ID.  The ID is called the filesystem cluster ID,
+A Ceph cluster may have zero or more CephFS *file systems*.  CephFS
+file systems have a human readable name (set in ``fs new``)
+and an integer ID.  The ID is called the file system cluster ID,
 or *FSCID*.
 
-Each CephFS filesystem has a number of *ranks*, one by default,
+Each CephFS file system has a number of *ranks*, one by default,
 which start at zero.  A rank may be thought of as a metadata shard.
-Controlling the number of ranks in a filesystem is described
+Controlling the number of ranks in a file system is described
 in :doc:`/cephfs/multimds`
 
 Each CephFS ceph-mds process (a *daemon*) initially starts up
@@ -26,6 +26,10 @@ A daemon has a *name* that is set static
 when the daemon is first configured.  Typical configurations
 use the hostname where the daemon runs as the daemon name.
 
+A ceph-mds daemon can be assigned to a particular file system by
+setting the ``mds_join_fs`` configuration option to the file system
+name.
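+
+For example, a sketch of assigning a daemon to a file system (the daemon and
+file system names are illustrative)::
+
+    ceph config set mds.a mds_join_fs cephfs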
+
 Each time a daemon starts up, it is also assigned a *GID*, which
 is unique to this particular process lifetime of the daemon.  The
 GID is an integer.
@@ -38,12 +42,12 @@ accept a flexible argument format that m
 or a name.
 
 Where a rank is used, this may optionally be qualified with
-a leading filesystem name or ID.  If a daemon is a standby (i.e.
+a leading file system name or ID.  If a daemon is a standby (i.e.
 it is not currently assigned a rank), then it may only be
 referred to by GID or name.
 
 For example, if we had an MDS daemon which was called 'myhost',
-had GID 5446, and was assigned rank 0 in the filesystem 'myfs'
+had GID 5446, and was assigned rank 0 in the file system 'myfs'
 which had FSCID 3, then any of the following would be suitable
 forms of the 'fail' command:
 
@@ -53,7 +57,7 @@ forms of the 'fail' command:
     ceph mds fail myhost   # Daemon name
     ceph mds fail 0        # Unqualified rank
     ceph mds fail 3:0      # FSCID and rank
-    ceph mds fail myfs:0   # Filesystem name and rank
+    ceph mds fail myfs:0   # File System name and rank
 
 Managing failover
 -----------------
@@ -101,3 +105,85 @@ standby for the rank that it is followin
 standby-replay daemon will not be used as a replacement, even if no other
 standbys are available. For this reason, it is advised that if standby-replay
 is used then every active MDS should have a standby-replay daemon.
+
+.. _mds-join-fs:
+
+Configuring MDS file system affinity
+------------------------------------
+
+You may want to have an MDS used for a particular file system. Or, perhaps you
+have larger MDSs on better hardware that should be preferred over a last-resort
+standby on lesser or over-provisioned hardware. To express this preference,
+CephFS provides a configuration option for MDS called ``mds_join_fs`` which
+enforces this `affinity`.
+
+As part of any failover, the Ceph monitors will prefer standby daemons with
+``mds_join_fs`` equal to the file system name with the failed rank.  If no
+standby exists with ``mds_join_fs`` equal to the file system name, it will
+choose a `vanilla` standby (no setting for ``mds_join_fs``) for the replacement
+or any other available standby as a last resort. Note, this does not change the
+behavior that ``standby-replay`` daemons are always selected before looking at
+other standbys.
+
+Even further, the monitors will regularly examine the CephFS file systems when
+stable to check if a standby with stronger affinity is available to replace an
+MDS with lower affinity. This process is also done for standby-replay daemons:
+if a regular standby has stronger affinity than the standby-replay MDS, it will
+replace the standby-replay MDS.
+
+For example, given this stable and healthy file system:
+
+::
+
+    $ ceph fs dump
+    dumped fsmap epoch 399
+    ...
+    Filesystem 'cephfs' (27)
+    ...
+    e399
+    max_mds 1
+    in      0
+    up      {0=20384}
+    failed
+    damaged
+    stopped
+    ...
+    [mds.a{0:20384} state up:active seq 239 addr [v2:127.0.0.1:6854/966242805,v1:127.0.0.1:6855/966242805]]
+
+    Standby daemons:
+
+    [mds.b{-1:10420} state up:standby seq 2 addr [v2:127.0.0.1:6856/2745199145,v1:127.0.0.1:6857/2745199145]]
+
+
+You may set ``mds_join_fs`` on the standby to enforce your preference: ::
+
+    $ ceph config set mds.b mds_join_fs cephfs
+
+after automatic failover: ::
+
+    $ ceph fs dump
+    dumped fsmap epoch 405
+    e405
+    ...
+    Filesystem 'cephfs' (27)
+    ...
+    max_mds 1
+    in      0
+    up      {0=10420}
+    failed
+    damaged
+    stopped
+    ...
+    [mds.b{0:10420} state up:active seq 274 join_fscid=27 addr [v2:127.0.0.1:6856/2745199145,v1:127.0.0.1:6857/2745199145]]
+
+    Standby daemons:
+
+    [mds.a{-1:10720} state up:standby seq 2 addr [v2:127.0.0.1:6854/1340357658,v1:127.0.0.1:6855/1340357658]]
+
+Note in the above example that ``mds.b`` now has ``join_fscid=27``. In this
+output, the file system name from ``mds_join_fs`` is changed to the file system
+identifier (27). If the file system is recreated with the same name, the
+standby will follow the new file system as expected.
+
+Finally, if the file system is degraded or undersized, no failover will occur
+to enforce ``mds_join_fs``.
diff -pruN 14.2.16-2/doc/cephfs/subtree-partitioning.svg 15.2.7-0ubuntu4/doc/cephfs/subtree-partitioning.svg
--- 14.2.16-2/doc/cephfs/subtree-partitioning.svg	1970-01-01 00:00:00.000000000 +0000
+++ 15.2.7-0ubuntu4/doc/cephfs/subtree-partitioning.svg	2020-11-30 19:58:30.000000000 +0000
@@ -0,0 +1 @@
+<svg version="1.1" viewBox="0.0 0.0 908.7690288713911 373.36482939632543" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><clipPath id="p.0"><path d="m0 0l908.76904 0l0 373.36484l-908.76904 0l0 -373.36484z" clip-rule="nonzero"/></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l908.76904 0l0 373.36484l-908.76904 0z" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.1)"><use xlink:href="#p.1" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.1" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.1"><path fill="#ebeded" d="m259.5493 158.67822c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m259.5493 158.67822c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.2)"><use xlink:href="#p.2" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.2" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.2"><path fill="#717d84" d="m437.25455 5.221119c-0.43972778 0 -0.8501892 0.36909866 -0.8501892 0.7646594l0 13.396389c0 0.39556122 0.41046143 0.79112244 0.8501892 0.79112244l58.752655 0c0.44024658 0 0.87994385 -0.39556122 0.87994385 -0.79112244l0 -13.396389c0 -0.39556074 -0.43969727 -0.7646594 -0.87994385 -0.7646594l-58.752655 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m437.25455 5.221119c-0.43972778 0 -0.8501892 0.36909866 -0.8501892 0.7646594l0 13.396389c0 0.39556122 0.41046143 0.79112244 0.8501892 0.79112244l58.752655 0c0.44024658 0 0.87994385 -0.39556122 0.87994385 -0.79112244l0 -13.396389c0 -0.39556074 -0.43969727 -0.7646594 -0.87994385 -0.7646594l-58.752655 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.3)"><use xlink:href="#p.3" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.3" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" 
slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.3"><path fill="#90dae3" d="m595.4631 55.41167c-0.46917725 0 -0.90875244 0.3957138 -0.90875244 0.81785583l0 14.326885c0 0.4221344 0.4395752 0.8442764 0.90875244 0.8442764l41.4339 0c0.46917725 0 0.9383545 -0.42214203 0.9383545 -0.8442764l0 -14.326885c0 -0.42214203 -0.46917725 -0.81785583 -0.9383545 -0.81785583l-41.4339 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m595.4631 55.41167c-0.46917725 0 -0.90875244 0.3957138 -0.90875244 0.81785583l0 14.326885c0 0.4221344 0.4395752 0.8442764 0.90875244 0.8442764l41.4339 0c0.46917725 0 0.9383545 -0.42214203 0.9383545 -0.8442764l0 -14.326885c0 -0.42214203 -0.46917725 -0.81785583 -0.9383545 -0.81785583l-41.4339 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m315.78055 55.438137l150.90213 -50.2082" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m315.78055 55.438137l150.90213 -50.2082" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m466.6484 5.2211194l149.58078 50.208202" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m466.6484 5.2211194l149.58078 50.208202" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.4)"><use xlink:href="#p.4" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.4" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.4"><path fill="#717d84" d="m295.0487 55.41167c-0.46920776 0 -0.9088135 0.3957138 -0.9088135 0.81785583l0 14.326885c0 0.4221344 0.4396057 0.8442764 0.9088135 0.8442764l41.433838 0c0.46920776 0 0.9384155 -0.42214203 0.9384155 -0.8442764l0 -14.326885c0 -0.42214203 -0.46920776 -0.81785583 -0.9384155 -0.81785583l-41.433838 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m295.0487 55.41167c-0.46920776 0 -0.9088135 0.3957138 -0.9088135 0.81785583l0 14.326885c0 0.4221344 0.4396057 0.8442764 0.9088135 0.8442764l41.433838 0c0.46920776 0 0.9384155 -0.42214203 0.9384155 -0.8442764l0 -14.326885c0 -0.42214203 -0.46920776 -0.81785583 -0.9384155 -0.81785583l-41.433838 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.5)"><use xlink:href="#p.5" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.5" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.5"><path fill="#717d84" d="m302.62457 101.419685c-0.29312134 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.52716064 0.5566406 
0.52716064l26.135773 0c0.2928772 0 0.585968 -0.26358032 0.585968 -0.52716064l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.585968 -0.5007782l-26.135773 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m302.62457 101.419685c-0.29312134 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.52716064 0.5566406 0.52716064l26.135773 0c0.2928772 0 0.585968 -0.26358032 0.585968 -0.52716064l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.585968 -0.5007782l-26.135773 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.6)"><use xlink:href="#p.6" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.6" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.6"><path fill="#ebeded" d="m445.97543 55.41167c-0.46939087 0 -0.90893555 0.3957138 -0.90893555 0.81785583l0 14.326885c0 0.4221344 0.43954468 0.8442764 0.90893555 0.8442764l41.286926 0c0.4694214 0 0.93844604 -0.42214203 0.93844604 -0.8442764l0 -14.326885c0 -0.42214203 -0.46902466 -0.81785583 -0.93844604 -0.81785583l-41.286926 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m445.97543 55.41167c-0.46939087 0 -0.90893555 0.3957138 -0.90893555 0.81785583l0 14.326885c0 0.4221344 0.43954468 0.8442764 0.90893555 0.8442764l41.286926 0c0.4694214 0 0.93844604 -0.42214203 0.93844604 -0.8442764l0 -14.326885c0 -0.42214203 -0.46902466 -0.81785583 -0.93844604 -0.81785583l-41.286926 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.7)"><use xlink:href="#p.7" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.7" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.7"><path fill="#717d84" d="m361.4975 101.419685c-0.2929077 0 -0.556427 0.23719788 -0.556427 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.52716064 0.556427 0.52716064l26.160553 0c0.2929077 0 0.5858154 -0.26358032 0.5858154 -0.52716064l0 -9.093201c0 -0.26358032 -0.2929077 -0.5007782 -0.5858154 -0.5007782l-26.160553 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m361.4975 101.419685c-0.2929077 0 -0.556427 0.23719788 -0.556427 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.52716064 0.556427 0.52716064l26.160553 0c0.2929077 0 0.5858154 -0.26358032 0.5858154 -0.52716064l0 -9.093201c0 -0.26358032 -0.2929077 -0.5007782 -0.5858154 -0.5007782l-26.160553 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.8)"><use xlink:href="#p.8" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.8" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" 
result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.8"><path fill="#90dae3" d="m542.25726 101.419685c-0.29309082 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.2635498 0.52716064 0.5566406 0.52716064l26.135742 0c0.2929077 0 0.58599854 -0.26358032 0.58599854 -0.52716064l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.58599854 -0.5007782l-26.135742 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m542.25726 101.419685c-0.29309082 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.2635498 0.52716064 0.5566406 0.52716064l26.135742 0c0.2929077 0 0.58599854 -0.26358032 0.58599854 -0.52716064l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.58599854 -0.5007782l-26.135742 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.9)"><use xlink:href="#p.9" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.9" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.9"><path fill="#ebeded" d="m302.62503 131.78285c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m302.62503 131.78285c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.10)"><use xlink:href="#p.10" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.10" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.10"><path fill="#ebeded" d="m602.9806 101.419685c-0.29315186 0 -0.5569458 0.23719788 -0.5569458 0.5007782l0 9.093201c0 0.26358032 0.26379395 0.52716064 0.5569458 0.52716064l26.359985 0c0.26379395 0 0.55718994 -0.26358032 0.55718994 -0.52716064l0 -9.093201c0 -0.26358032 -0.293396 -0.5007782 -0.55718994 -0.5007782l-26.359985 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m602.9806 101.419685c-0.29315186 0 -0.5569458 0.23719788 -0.5569458 0.5007782l0 
9.093201c0 0.26358032 0.26379395 0.52716064 0.5569458 0.52716064l26.359985 0c0.26379395 0 0.55718994 -0.26358032 0.55718994 -0.52716064l0 -9.093201c0 -0.26358032 -0.293396 -0.5007782 -0.55718994 -0.5007782l-26.359985 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.11)"><use xlink:href="#p.11" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.11" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.11"><path fill="#ff4b47" d="m581.3394 131.78285c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.307251 0c0.2929077 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29309082 -0.50120544 -0.58599854 -0.50120544l-26.307251 0" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m581.3394 131.78285c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.307251 0c0.2929077 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29309082 -0.50120544 -0.58599854 -0.50120544l-26.307251 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.12)"><use xlink:href="#p.12" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.12" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.12"><path fill="#ebeded" d="m245.95436 101.419685c-0.29327393 0 -0.55729675 0.23719788 -0.55729675 0.5007782l0 9.093201c0 0.26358032 0.26402283 0.52716064 0.55729675 0.52716064l26.285995 0c0.2640381 0 0.557312 -0.26358032 0.557312 -0.52716064l0 -9.093201c0 -0.26358032 -0.29327393 -0.5007782 -0.557312 -0.5007782l-26.285995 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m245.95436 101.419685c-0.29327393 0 -0.55729675 0.23719788 -0.55729675 0.5007782l0 9.093201c0 0.26358032 0.26402283 0.52716064 0.55729675 0.52716064l26.285995 0c0.2640381 0 0.557312 -0.26358032 0.557312 -0.52716064l0 -9.093201c0 -0.26358032 -0.29327393 -0.5007782 -0.557312 -0.5007782l-26.285995 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.13)"><use xlink:href="#p.13" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.13" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.13"><path fill="#717d84" d="m259.5493 
131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m259.5493 131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.14)"><use xlink:href="#p.14" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.14" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.14"><path fill="#ebeded" d="m541.8753 131.78285c-0.2929077 0 -0.5563965 0.2373352 -0.5563965 0.50120544l0 9.074539c0 0.23744202 0.26348877 0.5012207 0.5563965 0.5012207l26.160583 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160583 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m541.8753 131.78285c-0.2929077 0 -0.5563965 0.2373352 -0.5563965 0.50120544l0 9.074539c0 0.23744202 0.26348877 0.5012207 0.5563965 0.5012207l26.160583 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160583 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.15)"><use xlink:href="#p.15" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.15" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.15"><path fill="#ebeded" d="m581.3394 158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.307251 0c0.2929077 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29309082 -0.50120544 -0.58599854 -0.50120544l-26.307251 0" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m581.3394 158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.307251 0c0.2929077 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29309082 -0.50120544 -0.58599854 -0.50120544l-26.307251 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.16)"><use xlink:href="#p.16" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter 
id="shadowFilter-p.16" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.16"><path fill="#ebeded" d="m622.8298 158.67822c-0.29309082 0 -0.5566406 0.2373352 -0.5566406 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5566406 0.5012207l26.28247 0c0.29309082 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29284668 -0.50120544 -0.5859375 -0.50120544l-26.28247 0" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m622.8298 158.67822c-0.29309082 0 -0.5566406 0.2373352 -0.5566406 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5566406 0.5012207l26.28247 0c0.29309082 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29284668 -0.50120544 -0.5859375 -0.50120544l-26.28247 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m315.7218 101.44615l0.09790039 -46.03889" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m315.7218 101.44615l0.09790039 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m374.6243 101.44615l-58.82419 -46.03889" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m374.6243 101.44615l-58.82419 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m259.10968 101.44615l56.695343 -46.03889" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m259.10968 101.44615l56.695343 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m315.7218 131.80933l0.024475098 -30.39846" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m315.7218 131.80933l0.024475098 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m376.35672 131.80933l-1.7617798 -30.39846" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m376.35672 131.80933l-1.7617798 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m315.7512 101.419685l-43.1149 30.39846" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m315.7512 101.419685l-43.1149 30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m272.6461 158.70467l0.02444458 -26.890945" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m272.6461 158.70467l0.02444458 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m466.6484 55.438137l0.024475098 -50.2082" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m466.6484 55.438137l0.024475098 -50.2082" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m555.3545 101.44615l60.879578 -46.03889" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" 
stroke-linejoin="round" stroke-linecap="butt" d="m555.3545 101.44615l60.879578 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m616.1655 101.44615l0.07342529 -46.03889" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m616.1655 101.44615l0.07342529 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m679.0027 101.975586l-62.812683 -46.590385" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m679.0027 101.975586l-62.812683 -46.590385" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m554.9728 131.80933l0.4159546 -30.39846" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m554.9728 131.80933l0.4159546 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m594.5543 131.80933l-39.224304 -30.39846" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m594.5543 131.80933l-39.224304 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m636.0151 158.70467l-41.49994 -26.890945" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m636.0151 158.70467l-41.49994 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m594.5249 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m594.5249 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m510.6638 131.80933l44.729828 -30.39846" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m510.6638 131.80933l44.729828 -30.39846" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.17)"><use xlink:href="#p.17" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.17" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.17"><path fill="#ebeded" d="m407.98 131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.5572815 -0.2637787 0.5572815 -0.5012207l0 -9.074539c0 -0.26387024 -0.29330444 -0.50120544 -0.5572815 -0.50120544l-26.164001 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m407.98 131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.5572815 -0.2637787 0.5572815 -0.5012207l0 -9.074539c0 -0.26387024 -0.29330444 -0.50120544 -0.5572815 -0.50120544l-26.164001 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m421.10617 131.80933l-46.516113 -30.39846" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" 
stroke-linejoin="round" stroke-linecap="butt" d="m421.10617 131.80933l-46.516113 -30.39846" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.18)"><use xlink:href="#p.18" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.18" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.18"><path fill="#717d84" d="m363.2306 131.78285c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m363.2306 131.78285c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.19)"><use xlink:href="#p.19" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.19" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.19"><path fill="#f9afaa" d="m192.04312 101.419685c-0.29301453 0 -0.5567932 0.23719788 -0.5567932 0.5007782l0 9.093201c0 0.26358032 0.2637787 0.52716064 0.5567932 0.52716064l26.233093 0c0.2932434 0 0.58625793 -0.26358032 0.58625793 -0.52716064l0 -9.093201c0 -0.26358032 -0.29301453 -0.5007782 -0.58625793 -0.5007782l-26.233093 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m192.04312 101.419685c-0.29301453 0 -0.5567932 0.23719788 -0.5567932 0.5007782l0 9.093201c0 0.26358032 0.2637787 0.52716064 0.5567932 0.52716064l26.233093 0c0.2932434 0 0.58625793 -0.26358032 0.58625793 -0.52716064l0 -9.093201c0 -0.26358032 -0.29301453 -0.5007782 -0.58625793 -0.5007782l-26.233093 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.20)"><use xlink:href="#p.20" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.20" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.20"><path fill="#f9afaa" d="m184.55559 55.41167c-0.46939087 0 -0.9092102 0.3957138 -0.9092102 0.81785583l0 14.326885c0 0.4221344 0.43981934 0.8442764 0.9092102 0.8442764l41.384506 
0c0.46903992 0 0.9384308 -0.42214203 0.9384308 -0.8442764l0 -14.326885c0 -0.42214203 -0.46939087 -0.81785583 -0.9384308 -0.81785583l-41.384506 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m184.55559 55.41167c-0.46939087 0 -0.9092102 0.3957138 -0.9092102 0.81785583l0 14.326885c0 0.4221344 0.43981934 0.8442764 0.9092102 0.8442764l41.384506 0c0.46903992 0 0.9384308 -0.42214203 0.9384308 -0.8442764l0 -14.326885c0 -0.42214203 -0.46939087 -0.81785583 -0.9384308 -0.81785583l-41.384506 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.21)"><use xlink:href="#p.21" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.21" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.21"><path fill="#ebeded" d="m736.84656 55.41167c-0.46899414 0 -0.90875244 0.3957138 -0.90875244 0.81785583l0 14.326885c0 0.4221344 0.4397583 0.8442764 0.90875244 0.8442764l41.483215 0c0.46899414 0 0.9379883 -0.42214203 0.9379883 -0.8442764l0 -14.326885c0 -0.42214203 -0.46899414 -0.81785583 -0.9379883 -0.81785583l-41.483215 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m736.84656 55.41167c-0.46899414 0 -0.90875244 0.3957138 -0.90875244 0.81785583l0 14.326885c0 0.4221344 0.4397583 0.8442764 0.90875244 0.8442764l41.483215 0c0.46899414 0 0.9379883 -0.42214203 0.9379883 -0.8442764l0 -14.326885c0 -0.42214203 -0.46899414 -0.81785583 -0.9379883 -0.81785583l-41.483215 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.22)"><use xlink:href="#p.22" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.22" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.22"><path fill="#90dae3" d="m665.8761 101.94912c-0.29315186 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.26348877 0.5271683 0.5566406 0.5271683l26.135742 0c0.2929077 0 0.58599854 -0.26358795 0.58599854 -0.5271683l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.58599854 -0.5007782l-26.135742 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m665.8761 101.94912c-0.29315186 0 -0.5566406 0.23719788 -0.5566406 0.5007782l0 9.093201c0 0.26358032 0.26348877 0.5271683 0.5566406 0.5271683l26.135742 0c0.2929077 0 0.58599854 -0.26358795 0.58599854 -0.5271683l0 -9.093201c0 -0.26358032 -0.29309082 -0.5007782 -0.58599854 -0.5007782l-26.135742 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.23)"><use xlink:href="#p.23" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.23" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" 
result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.23"><path fill="#ebeded" d="m192.04282 131.78285c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331833 0c0.2929077 0 0.5858307 -0.2637787 0.5858307 -0.5012207l0 -9.074539c0 -0.26387024 -0.29292297 -0.50120544 -0.5858307 -0.50120544l-26.331833 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m192.04282 131.78285c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331833 0c0.2929077 0 0.5858307 -0.2637787 0.5858307 -0.5012207l0 -9.074539c0 -0.26387024 -0.29292297 -0.50120544 -0.5858307 -0.50120544l-26.331833 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.24)"><use xlink:href="#p.24" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.24" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.24"><path fill="#ebeded" d="m149.17256 158.67822c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160568 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160568 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m149.17256 158.67822c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160568 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160568 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.25)"><use xlink:href="#p.25" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.25" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.25"><path fill="#ebeded" d="m665.8765 131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m665.8765 131.78285c-0.29309082 0 -0.5570679 0.2373352 -0.5570679 0.50120544l0 9.074539c0 0.23744202 
0.26397705 0.5012207 0.5570679 0.5012207l26.164001 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.164001 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.26)"><use xlink:href="#p.26" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.26" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.26"><path fill="#ebeded" d="m707.5716 131.78285c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.184875 0c0.29296875 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.5859375 -0.50120544l-26.184875 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m707.5716 131.78285c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.184875 0c0.29296875 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.5859375 -0.50120544l-26.184875 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.27)"><use xlink:href="#p.27" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.27" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.27"><path fill="#ebeded" d="m750.88226 158.67822c-0.2929077 0 -0.55651855 0.2373352 -0.55651855 0.50120544l0 9.074539c0 0.23744202 0.26361084 0.5012207 0.55651855 0.5012207l26.331848 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.331848 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m750.88226 158.67822c-0.2929077 0 -0.55651855 0.2373352 -0.55651855 0.50120544l0 9.074539c0 0.23744202 0.26361084 0.5012207 0.55651855 0.5012207l26.331848 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.331848 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.28)"><use xlink:href="#p.28" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.28" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.28"><path fill="#ebeded" d="m106.53792 158.67822c-0.29310608 0 -0.55708313 0.2373352 
-0.55708313 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.55708313 0.5012207l26.163986 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.163986 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m106.53792 158.67822c-0.29310608 0 -0.55708313 0.2373352 -0.55708313 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.55708313 0.5012207l26.163986 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.163986 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.29)"><use xlink:href="#p.29" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.29" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.29"><path fill="#ebeded" d="m191.01506 158.67822c-0.2928772 0 -0.55644226 0.2373352 -0.55644226 0.50120544l0 9.074539c0 0.23744202 0.26356506 0.5012207 0.55644226 0.5012207l26.307266 0c0.2928772 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29312134 -0.50120544 -0.58599854 -0.50120544l-26.307266 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m191.01506 158.67822c-0.2928772 0 -0.55644226 0.2373352 -0.55644226 0.50120544l0 9.074539c0 0.23744202 0.26356506 0.5012207 0.55644226 0.5012207l26.307266 0c0.2928772 0 0.58599854 -0.2637787 0.58599854 -0.5012207l0 -9.074539c0 -0.26387024 -0.29312134 -0.50120544 -0.58599854 -0.50120544l-26.307266 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.30)"><use xlink:href="#p.30" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.30" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.30"><path fill="#0433ff" d="m303.35846 158.67822c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160553 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160553 0" fill-rule="evenodd"/><path stroke="#021ca1" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m303.35846 158.67822c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160553 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160553 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.31)"><use xlink:href="#p.31" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.31" 
filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.31"><path fill="#ebeded" d="m363.2306 158.67822c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m363.2306 158.67822c-0.29312134 0 -0.5570984 0.2373352 -0.5570984 0.50120544l0 9.074539c0 0.23744202 0.26397705 0.5012207 0.5570984 0.5012207l26.16397 0c0.26397705 0 0.557312 -0.2637787 0.557312 -0.5012207l0 -9.074539c0 -0.26387024 -0.29333496 -0.50120544 -0.557312 -0.50120544l-26.16397 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.32)"><use xlink:href="#p.32" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.32" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.32"><path fill="#ebeded" d="m409.41852 158.67822c-0.2932129 0 -0.5567932 0.2373352 -0.5567932 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.5567932 0.5012207l26.380066 0c0.29296875 0 0.5861511 -0.2637787 0.5861511 -0.5012207l0 -9.074539c0 -0.26387024 -0.29318237 -0.50120544 -0.5861511 -0.50120544l-26.380066 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m409.41852 158.67822c-0.2932129 0 -0.5567932 0.2373352 -0.5567932 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.5567932 0.5012207l26.380066 0c0.29296875 0 0.5861511 -0.2637787 0.5861511 -0.5012207l0 -9.074539c0 -0.26387024 -0.29318237 -0.50120544 -0.5861511 -0.50120544l-26.380066 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.33)"><use xlink:href="#p.33" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.33" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.33"><path fill="#ebeded" d="m707.5716 158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.184875 0c0.29296875 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.5859375 -0.50120544l-26.184875 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m707.5716 
158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.184875 0c0.29296875 0 0.5859375 -0.2637787 0.5859375 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.5859375 -0.50120544l-26.184875 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.34)"><use xlink:href="#p.34" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.34" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.34"><path fill="#ebeded" d="m497.4783 158.67822c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331818 0c0.2929077 0 0.58587646 -0.2637787 0.58587646 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.58587646 -0.50120544l-26.331818 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m497.4783 158.67822c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331818 0c0.2929077 0 0.58587646 -0.2637787 0.58587646 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.58587646 -0.50120544l-26.331818 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.35)"><use xlink:href="#p.35" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.35" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.35"><path fill="#f9afaa" d="m149.17256 131.78285c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160568 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160568 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m149.17256 131.78285c-0.2929077 0 -0.556427 0.2373352 -0.556427 0.50120544l0 9.074539c0 0.23744202 0.2635193 0.5012207 0.556427 0.5012207l26.160568 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160568 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.36)"><use xlink:href="#p.36" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.36" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g 
id="p.36"><path fill="#90dae3" d="m497.4783 131.78285c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331818 0c0.2929077 0 0.58587646 -0.2637787 0.58587646 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.58587646 -0.50120544l-26.331818 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m497.4783 131.78285c-0.2929077 0 -0.55648804 0.2373352 -0.55648804 0.50120544l0 9.074539c0 0.23744202 0.26358032 0.5012207 0.55648804 0.5012207l26.331818 0c0.2929077 0 0.58587646 -0.2637787 0.58587646 -0.5012207l0 -9.074539c0 -0.26387024 -0.29296875 -0.50120544 -0.58587646 -0.50120544l-26.331818 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.37)"><use xlink:href="#p.37" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.37" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.37"><path fill="#90dae3" d="m750.88226 131.78285c-0.2929077 0 -0.55651855 0.2373352 -0.55651855 0.50120544l0 9.074539c0 0.23744202 0.26361084 0.5012207 0.55651855 0.5012207l26.331848 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.331848 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m750.88226 131.78285c-0.2929077 0 -0.55651855 0.2373352 -0.55651855 0.50120544l0 9.074539c0 0.23744202 0.26361084 0.5012207 0.55651855 0.5012207l26.331848 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.331848 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.38)"><use xlink:href="#p.38" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.38" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.38"><path fill="#ebeded" d="m459.74677 158.67822c-0.29284668 0 -0.5565796 0.2373352 -0.5565796 0.50120544l0 9.074539c0 0.23744202 0.2637329 0.5012207 0.5565796 0.5012207l26.111206 0c0.2930603 0 0.5861206 -0.2637787 0.5861206 -0.5012207l0 -9.074539c0 -0.26387024 -0.2930603 -0.50120544 -0.5861206 -0.50120544l-26.111206 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m459.74677 158.67822c-0.29284668 0 -0.5565796 0.2373352 -0.5565796 0.50120544l0 9.074539c0 0.23744202 0.2637329 0.5012207 0.5565796 0.5012207l26.111206 0c0.2930603 0 0.5861206 -0.2637787 0.5861206 -0.5012207l0 -9.074539c0 -0.26387024 -0.2930603 -0.50120544 -0.5861206 -0.50120544l-26.111206 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.39)"><use xlink:href="#p.39" transform="matrix(1.0 0.0 
0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.39" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.39"><path fill="#ebeded" d="m536.7074 158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.160583 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160583 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m536.7074 158.67822c-0.2929077 0 -0.5564575 0.2373352 -0.5564575 0.50120544l0 9.074539c0 0.23744202 0.2635498 0.5012207 0.5564575 0.5012207l26.160583 0c0.2929077 0 0.5858154 -0.2637787 0.5858154 -0.5012207l0 -9.074539c0 -0.26387024 -0.2929077 -0.50120544 -0.5858154 -0.50120544l-26.160583 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m205.25764 55.438137l261.40546 -50.2082" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m205.25764 55.438137l261.40546 -50.2082" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m757.63727 55.438137l-290.98883 -50.2082" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m757.63727 55.438137l-290.98883 -50.2082" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m205.16956 101.44615l0.12234497 -46.03889" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m205.16956 101.44615l0.12234497 -46.03889" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m205.25764 131.80933l-0.09786987 -30.39846" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m205.25764 131.80933l-0.09786987 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m162.26999 101.975586l43.017014 -46.590385" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m162.26999 101.975586l43.017014 -46.590385" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m162.26999 131.80933l42.919144 -30.39846" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m162.26999 131.80933l42.919144 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m678.9733 131.80933l0.024475098 -29.869026" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m678.9733 131.80933l0.024475098 -29.869026" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m720.69836 131.80933l43.6532 -30.39846" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m720.69836 131.80933l43.6532 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m764.06775 131.80933l0.29364014 -30.39846" 
fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m764.06775 131.80933l0.29364014 -30.39846" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m119.634705 158.70467l42.67444 -26.890945" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m119.634705 158.70467l42.67444 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m204.22993 158.70467l-41.96483 -26.890945" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m204.22993 158.70467l-41.96483 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m316.48526 158.70467l-43.84897 -26.890945" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m316.48526 158.70467l-43.84897 -26.890945" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.40)"><use xlink:href="#p.40" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.40" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.40"><path fill="#ebeded" d="m149.17256 101.94912c-0.2929077 0 -0.556427 0.23719788 -0.556427 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.5271683 0.556427 0.5271683l26.160568 0c0.2929077 0 0.5858154 -0.26358795 0.5858154 -0.5271683l0 -9.093201c0 -0.26358032 -0.2929077 -0.5007782 -0.5858154 -0.5007782l-26.160568 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m149.17256 101.94912c-0.2929077 0 -0.556427 0.23719788 -0.556427 0.5007782l0 9.093201c0 0.26358032 0.2635193 0.5271683 0.556427 0.5271683l26.160568 0c0.2929077 0 0.5858154 -0.26358795 0.5858154 -0.5271683l0 -9.093201c0 -0.26358032 -0.2929077 -0.5007782 -0.5858154 -0.5007782l-26.160568 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m422.66238 158.70467l-46.320343 -26.890945" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m422.66238 158.70467l-46.320343 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m549.8342 158.70467l-39.175354 -26.890945" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m549.8342 158.70467l-39.175354 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m472.81467 158.70467l37.87845 -26.890945" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m472.81467 158.70467l37.87845 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m720.69836 158.70467l43.40851 -26.890945" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m720.69836 158.70467l43.40851 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m804.6476 158.70467l-40.57007 -26.890945" 
fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m804.6476 158.70467l-40.57007 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m764.06775 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m764.06775 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m376.32736 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m376.32736 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m510.6638 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m510.6638 158.70467l0.024475098 -26.890945" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.41)"><use xlink:href="#p.41" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.41" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.41"><path fill="#90dae3" d="m751.14703 101.419685c-0.29315186 0 -0.5569458 0.23719788 -0.5569458 0.5007782l0 9.093201c0 0.26358032 0.26379395 0.52716064 0.5569458 0.52716064l26.360046 0c0.26379395 0 0.5571289 -0.26358032 0.5571289 -0.52716064l0 -9.093201c0 -0.26358032 -0.29333496 -0.5007782 -0.5571289 -0.5007782l-26.360046 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m751.14703 101.419685c-0.29315186 0 -0.5569458 0.23719788 -0.5569458 0.5007782l0 9.093201c0 0.26358032 0.26379395 0.52716064 0.5569458 0.52716064l26.360046 0c0.26379395 0 0.5571289 -0.26358032 0.5571289 -0.52716064l0 -9.093201c0 -0.26358032 -0.29333496 -0.5007782 -0.5571289 -0.5007782l-26.360046 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m764.3614 101.44615l-148.16156 -46.03889" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="4.005249343832021" stroke-linejoin="round" stroke-linecap="butt" d="m764.3614 101.44615l-148.16156 -46.03889" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.42)"><use xlink:href="#p.42" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.42" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.42"><path fill="#d7dada" d="m276.6083 192.50917c-1.1743164 0 -2.3179016 1.0310669 -2.3179016 2.0888672l0 36.888687c0 1.0578003 1.1435852 2.115265 2.3179016 2.115265l107.05585 0c1.1734009 0 2.3477173 -1.0574646 2.3477173 -2.115265l0 -36.888687c0 -1.0578003 -1.1743164 -2.0888672 -2.3477173 -2.0888672l-107.05585 0" 
fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m276.6083 192.50917c-1.1743164 0 -2.3179016 1.0310669 -2.3179016 2.0888672l0 36.888687c0 1.0578003 1.1435852 2.115265 2.3179016 2.115265l107.05585 0c1.1734009 0 2.3477173 -1.0574646 2.3477173 -2.115265l0 -36.888687c0 -1.0578003 -1.1743164 -2.0888672 -2.3477173 -2.0888672l-107.05585 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.43)"><use xlink:href="#p.43" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.43" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.43"><path fill="#717d84" d="m287.06165 197.2741c-0.9097595 0 -1.8187866 0.8192291 -1.8187866 1.6384583l0 28.965668c0 0.8192291 0.9090271 1.6648407 1.8187866 1.6648407l85.71365 0c0.9097595 0 1.8485718 -0.8456116 1.8485718 -1.6648407l0 -28.965668c0 -0.8192291 -0.93881226 -1.6384583 -1.8485718 -1.6384583l-85.71365 0" fill-rule="evenodd"/><path stroke="#65bbc6" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m287.06165 197.2741c-0.9097595 0 -1.8187866 0.8192291 -1.8187866 1.6384583l0 28.965668c0 0.8192291 0.9090271 1.6648407 1.8187866 1.6648407l85.71365 0c0.9097595 0 1.8485718 -0.8456116 1.8485718 -1.6648407l0 -28.965668c0 -0.8192291 -0.93881226 -1.6384583 -1.8485718 -1.6384583l-85.71365 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m305.03363 202.62143l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m305.03363 202.62143l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m305.03363 206.5922l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m305.03363 206.5922l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m305.03363 210.53648l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m305.03363 210.53648l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m305.03363 214.50725l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m305.03363 214.50725l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m305.03363 218.45154l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m305.03363 218.45154l66.60541 0" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.44)"><use xlink:href="#p.44" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.44" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" 
slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.44"><path fill="#d7dada" d="m139.9815 192.50917c-1.1738129 0 -2.3178253 1.0310669 -2.3178253 2.0888672l0 36.888687c0 1.0578003 1.1440125 2.115265 2.3178253 2.115265l107.00708 0c1.1738129 0 2.3476105 -1.0574646 2.3476105 -2.115265l0 -36.888687c0 -1.0578003 -1.1737976 -2.0888672 -2.3476105 -2.0888672l-107.00708 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m139.9815 192.50917c-1.1738129 0 -2.3178253 1.0310669 -2.3178253 2.0888672l0 36.888687c0 1.0578003 1.1440125 2.115265 2.3178253 2.115265l107.00708 0c1.1738129 0 2.3476105 -1.0574646 2.3476105 -2.115265l0 -36.888687c0 -1.0578003 -1.1737976 -2.0888672 -2.3476105 -2.0888672l-107.00708 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.45)"><use xlink:href="#p.45" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.45" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.45"><path fill="#717d84" d="m150.37743 197.2741c-0.9100189 0 -1.8200226 0.8192291 -1.8200226 1.6384583l0 28.965668c0 0.8192291 0.91000366 1.6648407 1.8200226 1.6648407l85.838806 0c0.91000366 0 1.8200073 -0.8456116 1.8200073 -1.6648407l0 -28.965668c0 -0.8192291 -0.91000366 -1.6384583 -1.8200073 -1.6384583l-85.838806 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m150.37743 197.2741c-0.9100189 0 -1.8200226 0.8192291 -1.8200226 1.6384583l0 28.965668c0 0.8192291 0.91000366 1.6648407 1.8200226 1.6648407l85.838806 0c0.91000366 0 1.8200073 -0.8456116 1.8200073 -1.6648407l0 -28.965668c0 -0.8192291 -0.91000366 -1.6384583 -1.8200073 -1.6384583l-85.838806 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m168.37753 202.62143l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m168.37753 202.62143l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m168.37753 206.5922l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m168.37753 206.5922l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m168.37753 210.53648l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m168.37753 210.53648l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m168.37753 214.50725l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m168.37753 214.50725l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m168.37753 218.45154l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m168.37753 218.45154l66.60541 0" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.46)"><use xlink:href="#p.46" transform="matrix(1.0 0.0 0.0 1.0 0.0 
2.418897573716371)"/></g><defs><filter id="shadowFilter-p.46" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.46"><path fill="#d7dada" d="m413.99884 192.72095c-1.174469 0 -2.3182373 1.0310669 -2.3182373 2.0888672l0 36.888687c0 1.0578003 1.1437683 2.115265 2.3182373 2.115265l106.98181 0c1.173523 0 2.3480225 -1.0574646 2.3480225 -2.115265l0 -36.888687c0 -1.0578003 -1.1744995 -2.0888672 -2.3480225 -2.0888672l-106.98181 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m413.99884 192.72095c-1.174469 0 -2.3182373 1.0310669 -2.3182373 2.0888672l0 36.888687c0 1.0578003 1.1437683 2.115265 2.3182373 2.115265l106.98181 0c1.173523 0 2.3480225 -1.0574646 2.3480225 -2.115265l0 -36.888687c0 -1.0578003 -1.1744995 -2.0888672 -2.3480225 -2.0888672l-106.98181 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.47)"><use xlink:href="#p.47" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.47" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.47"><path fill="#717d84" d="m424.36423 197.45943c-0.91000366 0 -1.8192749 0.8189697 -1.8192749 1.6382294l0 28.988144c0 0.8192444 0.90927124 1.6646271 1.8192749 1.6646271l85.810455 0c0.91000366 0 1.8491211 -0.8453827 1.8491211 -1.6646271l0 -28.988144c0 -0.81925964 -0.93911743 -1.6382294 -1.8491211 -1.6382294l-85.810455 0" fill-rule="evenodd"/><path stroke="#f57469" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m424.36423 197.45943c-0.91000366 0 -1.8192749 0.8189697 -1.8192749 1.6382294l0 28.988144c0 0.8192444 0.90927124 1.6646271 1.8192749 1.6646271l85.810455 0c0.91000366 0 1.8491211 -0.8453827 1.8491211 -1.6646271l0 -28.988144c0 -0.81925964 -0.93911743 -1.6382294 -1.8491211 -1.6382294l-85.810455 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m442.36508 202.83318l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m442.36508 202.83318l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m442.36508 206.7775l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m442.36508 206.7775l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m442.36508 210.74826l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m442.36508 210.74826l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m442.36508 214.69255l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m442.36508 
214.69255l66.60541 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m442.36508 218.66333l66.60541 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m442.36508 218.66333l66.60541 0" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.48)"><use xlink:href="#p.48" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.48" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.48"><path fill="#d7dada" d="m551.53564 192.72095c-1.1734619 0 -2.3180542 1.0310669 -2.3180542 2.0888672l0 36.888687c0 1.0578003 1.1445923 2.115265 2.3180542 2.115265l107.15332 0c1.1744385 0 2.3479004 -1.0574646 2.3479004 -2.115265l0 -36.888687c0 -1.0578003 -1.1734619 -2.0888672 -2.3479004 -2.0888672l-107.15332 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m551.53564 192.72095c-1.1734619 0 -2.3180542 1.0310669 -2.3180542 2.0888672l0 36.888687c0 1.0578003 1.1445923 2.115265 2.3180542 2.115265l107.15332 0c1.1744385 0 2.3479004 -1.0574646 2.3479004 -2.115265l0 -36.888687c0 -1.0578003 -1.1734619 -2.0888672 -2.3479004 -2.0888672l-107.15332 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.49)"><use xlink:href="#p.49" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.49" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.49"><path fill="#717d84" d="m561.9303 197.45943c-0.90948486 0 -1.8189697 0.8189697 -1.8189697 1.6382294l0 28.988144c0 0.8192444 0.90948486 1.6646271 1.8189697 1.6646271l85.909546 0c0.90948486 0 1.8481445 -0.8453827 1.8481445 -1.6646271l0 -28.988144c0 -0.81925964 -0.93865967 -1.6382294 -1.8481445 -1.6382294l-85.909546 0" fill-rule="evenodd"/><path stroke="#e93317" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m561.9303 197.45943c-0.90948486 0 -1.8189697 0.8189697 -1.8189697 1.6382294l0 28.988144c0 0.8192444 0.90948486 1.6646271 1.8189697 1.6646271l85.909546 0c0.90948486 0 1.8481445 -0.8453827 1.8481445 -1.6646271l0 -28.988144c0 -0.81925964 -0.93865967 -1.6382294 -1.8481445 -1.6382294l-85.909546 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m579.9902 202.83318l66.65436 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m579.9902 202.83318l66.65436 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m579.9902 206.7775l66.65436 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m579.9902 206.7775l66.65436 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m579.9902 210.74826l66.65436 0" 
fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m579.9902 210.74826l66.65436 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m579.9902 214.69255l66.65436 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m579.9902 214.69255l66.65436 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m579.9902 218.66333l66.65436 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m579.9902 218.66333l66.65436 0" fill-rule="evenodd"/><g filter="url(#shadowFilter-p.50)"><use xlink:href="#p.50" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.50" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.50"><path fill="#d7dada" d="m686.0774 192.72095c-1.1738281 0 -2.317871 1.0310669 -2.317871 2.0888672l0 36.888687c0 1.0578003 1.144043 2.115265 2.317871 2.115265l107.00708 0c1.1737671 0 2.3475952 -1.0574646 2.3475952 -2.115265l0 -36.888687c0 -1.0578003 -1.1738281 -2.0888672 -2.3475952 -2.0888672l-107.00708 0" fill-rule="evenodd"/><path stroke="#47545c" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m686.0774 192.72095c-1.1738281 0 -2.317871 1.0310669 -2.317871 2.0888672l0 36.888687c0 1.0578003 1.144043 2.115265 2.317871 2.115265l107.00708 0c1.1737671 0 2.3475952 -1.0574646 2.3475952 -2.115265l0 -36.888687c0 -1.0578003 -1.1738281 -2.0888672 -2.3475952 -2.0888672l-107.00708 0" fill-rule="evenodd"/></g><g filter="url(#shadowFilter-p.51)"><use xlink:href="#p.51" transform="matrix(1.0 0.0 0.0 1.0 0.0 2.418897573716371)"/></g><defs><filter id="shadowFilter-p.51" filterUnits="userSpaceOnUse"><feGaussianBlur in="SourceAlpha" stdDeviation="0.0" result="blur"/><feComponentTransfer in="blur" color-interpolation-filters="sRGB"><feFuncR type="linear" slope="0" intercept="0.0"/><feFuncG type="linear" slope="0" intercept="0.0"/><feFuncB type="linear" slope="0" intercept="0.0"/><feFuncA type="linear" slope="0.349" intercept="0"/></feComponentTransfer></filter></defs><g id="p.51"><path fill="#717d84" d="m696.53125 197.45943c-0.9100342 0 -1.8192749 0.8189697 -1.8192749 1.6382294l0 28.988144c0 0.8192444 0.9092407 1.6646271 1.8192749 1.6646271l85.810486 0c0.90997314 0 1.84906 -0.8453827 1.84906 -1.6646271l0 -28.988144c0 -0.81925964 -0.9390869 -1.6382294 -1.84906 -1.6382294l-85.810486 0" fill-rule="evenodd"/><path stroke="#021ca1" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m696.53125 197.45943c-0.9100342 0 -1.8192749 0.8189697 -1.8192749 1.6382294l0 28.988144c0 0.8192444 0.9092407 1.6646271 1.8192749 1.6646271l85.810486 0c0.90997314 0 1.84906 -0.8453827 1.84906 -1.6646271l0 -28.988144c0 -0.81925964 -0.9390869 -1.6382294 -1.84906 -1.6382294l-85.810486 0" fill-rule="evenodd"/></g><path fill="#000000" fill-opacity="0.0" d="m714.50275 202.83318l66.55646 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m714.50275 
202.83318l66.55646 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m714.50275 206.7775l66.55646 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m714.50275 206.7775l66.55646 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m714.50275 210.74826l66.55646 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m714.50275 210.74826l66.55646 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m714.50275 214.69255l66.55646 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m714.50275 214.69255l66.55646 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m714.50275 218.66333l66.55646 0" fill-rule="evenodd"/><path stroke="#ebeded" stroke-width="2.6824146981627295" stroke-linejoin="round" stroke-linecap="butt" d="m714.50275 218.66333l66.55646 0" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m158.46217 234.2839l70.10451 0l0 32.91327l-70.10451 0z" fill-rule="evenodd"/><path fill="#000000" d="m168.8528 261.2039l0 -13.35936l2.65625 0l3.15625 9.45311q0.4375 1.328125 0.640625 1.984375q0.234375 -0.734375 0.703125 -2.140625l3.203125 -9.29686l2.375 0l0 13.35936l-1.703125 0l0 -11.17186l-3.875 11.17186l-1.59375 0l-3.859375 -11.374985l0 11.374985l-1.703125 0zm15.587677 0l0 -13.35936l4.609375 0q1.546875 0 2.375 0.203125q1.140625 0.25 1.953125 0.953125q1.0625 0.890625 1.578125 2.28125q0.53125 1.390625 0.53125 3.171875q0 1.515625 -0.359375 2.7031097q-0.359375 1.171875 -0.921875 1.9375q-0.546875 0.765625 -1.203125 1.21875q-0.65625 0.4375 -1.59375 0.671875q-0.9375 0.21875 -2.140625 0.21875l-4.828125 0zm1.765625 -1.578125l2.859375 0q1.3125 0 2.0625 -0.234375q0.75 -0.25 1.203125 -0.703125q0.625 -0.625 0.96875 -1.6875q0.359375 -1.0624847 0.359375 -2.5781097q0 -2.09375 -0.6875 -3.21875q-0.6875 -1.125 -1.671875 -1.5q-0.703125 -0.28125 -2.28125 -0.28125l-2.8125 0l0 10.20311zm11.113571 -2.71875l1.65625 -0.140625q0.125 1.0 0.546875 1.640625q0.4375 0.640625 1.34375 1.046875q0.921875 0.390625 2.0625 0.390625q1.0 0 1.78125 -0.296875q0.78125 -0.296875 1.15625 -0.8125q0.375 -0.53125 0.375 -1.15625q0 -0.625 -0.375 -1.09375q-0.359375 -0.46875 -1.1875 -0.79685974q-0.546875 -0.203125 -2.390625 -0.640625q-1.828125 -0.453125 -2.5625 -0.84375q-0.96875 -0.5 -1.4375 -1.234375q-0.46875 -0.75 -0.46875 -1.671875q0 -1.0 0.578125 -1.875q0.578125 -0.890625 1.671875 -1.34375q1.109375 -0.453125 2.453125 -0.453125q1.484375 0 2.609375 0.484375q1.140625 0.46875 1.75 1.40625q0.609375 0.921875 0.65625 2.09375l-1.6875 0.125q-0.140625 -1.265625 -0.9375 -1.90625q-0.78125 -0.65625 -2.3125 -0.65625q-1.609375 0 -2.34375 0.59375q-0.734375 0.59375 -0.734375 1.421875q0 0.71875 0.53125 1.171875q0.5 0.46875 2.65625 0.96875q2.15625 0.484375 2.953125 0.84375q1.171875 0.53125 1.71875 1.359375q0.5625 0.82810974 0.5625 1.9062347q0 1.0625 -0.609375 2.015625q-0.609375 0.9375 -1.75 1.46875q-1.140625 0.515625 -2.578125 0.515625q-1.8125 0 -3.046875 -0.53125q-1.21875 -0.53125 -1.921875 -1.59375q-0.6875 -1.0625 -0.71875 -2.40625z" fill-rule="nonzero"/><path fill="#000000" d="m168.24342 276.61014q0 -2.359375 0.484375 -3.796875q0.484375 -1.453125 1.4375 -2.234375q0.96875 -0.78125 2.421875 -0.78125q1.078125 0 1.890625 0.4375q0.8125 0.421875 1.328125 1.25q0.53125 0.8125 0.828125 1.984375q0.3125 1.15625 0.3125 3.140625q0 2.359375 -0.484375 3.8125q-0.484375 1.4375 
-1.453125 2.234375q-0.953125 0.78125 -2.421875 0.78125q-1.921875 0 -3.03125 -1.390625q-1.3125 -1.671875 -1.3125 -5.4375zm1.671875 0q0 3.296875 0.765625 4.390625q0.78125 1.078125 1.90625 1.078125q1.140625 0 1.90625 -1.09375q0.765625 -1.09375 0.765625 -4.375q0 -3.296875 -0.765625 -4.375q-0.765625 -1.078125 -1.921875 -1.078125q-1.125 0 -1.796875 0.953125q-0.859375 1.21875 -0.859375 4.5z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m295.11298 234.2839l70.10452 0l0 32.91327l-70.10452 0z" fill-rule="evenodd"/><path fill="#000000" d="m305.5036 261.2039l0 -13.35936l2.65625 0l3.15625 9.45311q0.4375 1.328125 0.640625 1.984375q0.234375 -0.734375 0.703125 -2.140625l3.203125 -9.29686l2.375 0l0 13.35936l-1.703125 0l0 -11.17186l-3.875 11.17186l-1.59375 0l-3.859375 -11.374985l0 11.374985l-1.703125 0zm15.587677 0l0 -13.35936l4.609375 0q1.546875 0 2.375 0.203125q1.140625 0.25 1.953125 0.953125q1.0625 0.890625 1.578125 2.28125q0.53125 1.390625 0.53125 3.171875q0 1.515625 -0.359375 2.7031097q-0.359375 1.171875 -0.921875 1.9375q-0.546875 0.765625 -1.203125 1.21875q-0.65625 0.4375 -1.59375 0.671875q-0.9375 0.21875 -2.140625 0.21875l-4.828125 0zm1.765625 -1.578125l2.859375 0q1.3125 0 2.0625 -0.234375q0.75 -0.25 1.203125 -0.703125q0.625 -0.625 0.96875 -1.6875q0.359375 -1.0624847 0.359375 -2.5781097q0 -2.09375 -0.6875 -3.21875q-0.6875 -1.125 -1.671875 -1.5q-0.703125 -0.28125 -2.28125 -0.28125l-2.8125 0l0 10.20311zm11.113556 -2.71875l1.65625 -0.140625q0.125 1.0 0.546875 1.640625q0.4375 0.640625 1.34375 1.046875q0.921875 0.390625 2.0625 0.390625q1.0 0 1.78125 -0.296875q0.78125 -0.296875 1.15625 -0.8125q0.375 -0.53125 0.375 -1.15625q0 -0.625 -0.375 -1.09375q-0.359375 -0.46875 -1.1875 -0.79685974q-0.546875 -0.203125 -2.390625 -0.640625q-1.828125 -0.453125 -2.5625 -0.84375q-0.96875 -0.5 -1.4375 -1.234375q-0.46875 -0.75 -0.46875 -1.671875q0 -1.0 0.578125 -1.875q0.578125 -0.890625 1.671875 -1.34375q1.109375 -0.453125 2.453125 -0.453125q1.484375 0 2.609375 0.484375q1.140625 0.46875 1.75 1.40625q0.609375 0.921875 0.65625 2.09375l-1.6875 0.125q-0.140625 -1.265625 -0.9375 -1.90625q-0.78125 -0.65625 -2.3125 -0.65625q-1.609375 0 -2.34375 0.59375q-0.734375 0.59375 -0.734375 1.421875q0 0.71875 0.53125 1.171875q0.5 0.46875 2.65625 0.96875q2.15625 0.484375 2.953125 0.84375q1.171875 0.53125 1.71875 1.359375q0.5625 0.82810974 0.5625 1.9062347q0 1.0625 -0.609375 2.015625q-0.609375 0.9375 -1.75 1.46875q-1.140625 0.515625 -2.578125 0.515625q-1.8125 0 -3.046875 -0.53125q-1.21875 -0.53125 -1.921875 -1.59375q-0.6875 -1.0625 -0.71875 -2.40625z" fill-rule="nonzero"/><path fill="#000000" d="m311.0661 283.2039l-1.640625 0l0 -10.453125q-0.59375 0.5625 -1.5625 1.140625q-0.953125 0.5625 -1.71875 0.84375l0 -1.59375q1.375 -0.640625 2.40625 -1.5625q1.03125 -0.921875 1.453125 -1.78125l1.0625 0l0 13.40625z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m437.7791 234.2839l70.10452 0l0 32.91327l-70.10452 0z" fill-rule="evenodd"/><path fill="#000000" d="m448.16974 261.2039l0 -13.35936l2.65625 0l3.15625 9.45311q0.4375 1.328125 0.640625 1.984375q0.234375 -0.734375 0.703125 -2.140625l3.203125 -9.29686l2.375 0l0 13.35936l-1.703125 0l0 -11.17186l-3.875 11.17186l-1.59375 0l-3.859375 -11.374985l0 11.374985l-1.703125 0zm15.587677 0l0 -13.35936l4.609375 0q1.546875 0 2.375 0.203125q1.140625 0.25 1.953125 0.953125q1.0625 0.890625 1.578125 2.28125q0.53125 1.390625 0.53125 3.171875q0 1.515625 -0.359375 2.7031097q-0.359375 1.171875 -0.921875 1.9375q-0.546875 0.765625 -1.203125 1.21875q-0.65625 0.4375 -1.59375 0.671875q-0.9375 
0.21875 -2.140625 0.21875l-4.828125 0zm1.765625 -1.578125l2.859375 0q1.3125 0 2.0625 -0.234375q0.75 -0.25 1.203125 -0.703125q0.625 -0.625 0.96875 -1.6875q0.359375 -1.0624847 0.359375 -2.5781097q0 -2.09375 -0.6875 -3.21875q-0.6875 -1.125 -1.671875 -1.5q-0.703125 -0.28125 -2.28125 -0.28125l-2.8125 0l0 10.20311zm11.113586 -2.71875l1.65625 -0.140625q0.125 1.0 0.546875 1.640625q0.4375 0.640625 1.34375 1.046875q0.921875 0.390625 2.0625 0.390625q1.0 0 1.78125 -0.296875q0.78125 -0.296875 1.15625 -0.8125q0.375 -0.53125 0.375 -1.15625q0 -0.625 -0.375 -1.09375q-0.359375 -0.46875 -1.1875 -0.79685974q-0.546875 -0.203125 -2.390625 -0.640625q-1.828125 -0.453125 -2.5625 -0.84375q-0.96875 -0.5 -1.4375 -1.234375q-0.46875 -0.75 -0.46875 -1.671875q0 -1.0 0.578125 -1.875q0.578125 -0.890625 1.671875 -1.34375q1.109375 -0.453125 2.453125 -0.453125q1.484375 0 2.609375 0.484375q1.140625 0.46875 1.75 1.40625q0.609375 0.921875 0.65625 2.09375l-1.6875 0.125q-0.140625 -1.265625 -0.9375 -1.90625q-0.78125 -0.65625 -2.3125 -0.65625q-1.609375 0 -2.34375 0.59375q-0.734375 0.59375 -0.734375 1.421875q0 0.71875 0.53125 1.171875q0.5 0.46875 2.65625 0.96875q2.15625 0.484375 2.953125 0.84375q1.171875 0.53125 1.71875 1.359375q0.5625 0.82810974 0.5625 1.9062347q0 1.0625 -0.609375 2.015625q-0.609375 0.9375 -1.75 1.46875q-1.140625 0.515625 -2.578125 0.515625q-1.8125 0 -3.046875 -0.53125q-1.21875 -0.53125 -1.921875 -1.59375q-0.6875 -1.0625 -0.71875 -2.40625z" fill-rule="nonzero"/><path fill="#000000" d="m456.16974 281.62576l0 1.578125l-8.828125 0q-0.015625 -0.59375 0.1875 -1.140625q0.34375 -0.90625 1.078125 -1.78125q0.75 -0.875 2.15625 -2.015625q2.171875 -1.78125 2.9375 -2.828125q0.765625 -1.046875 0.765625 -1.96875q0 -0.984375 -0.703125 -1.640625q-0.6875 -0.671875 -1.8125 -0.671875q-1.1875 0 -1.90625 0.71875q-0.703125 0.703125 -0.703125 1.953125l-1.6875 -0.171875q0.171875 -1.890625 1.296875 -2.875q1.140625 -0.984375 3.03125 -0.984375q1.921875 0 3.046875 1.0625q1.125 1.0625 1.125 2.640625q0 0.796875 -0.328125 1.578125q-0.328125 0.78125 -1.09375 1.640625q-0.75 0.84375 -2.53125 2.34375q-1.46875 1.234375 -1.890625 1.6875q-0.421875 0.4375 -0.6875 0.875l6.546875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m570.0907 234.2839l70.10455 0l0 32.91327l-70.10455 0z" fill-rule="evenodd"/><path fill="#000000" d="m580.4813 261.2039l0 -13.35936l2.65625 0l3.15625 9.45311q0.4375 1.328125 0.640625 1.984375q0.234375 -0.734375 0.703125 -2.140625l3.203125 -9.29686l2.375 0l0 13.35936l-1.703125 0l0 -11.17186l-3.875 11.17186l-1.59375 0l-3.859375 -11.374985l0 11.374985l-1.703125 0zm15.5877075 0l0 -13.35936l4.609375 0q1.546875 0 2.375 0.203125q1.140625 0.25 1.953125 0.953125q1.0625 0.890625 1.578125 2.28125q0.53125 1.390625 0.53125 3.171875q0 1.515625 -0.359375 2.7031097q-0.359375 1.171875 -0.921875 1.9375q-0.546875 0.765625 -1.203125 1.21875q-0.65625 0.4375 -1.59375 0.671875q-0.9375 0.21875 -2.140625 0.21875l-4.828125 0zm1.765625 -1.578125l2.859375 0q1.3125 0 2.0625 -0.234375q0.75 -0.25 1.203125 -0.703125q0.625 -0.625 0.96875 -1.6875q0.359375 -1.0624847 0.359375 -2.5781097q0 -2.09375 -0.6875 -3.21875q-0.6875 -1.125 -1.671875 -1.5q-0.703125 -0.28125 -2.28125 -0.28125l-2.8125 0l0 10.20311zm11.113525 -2.71875l1.65625 -0.140625q0.125 1.0 0.546875 1.640625q0.4375 0.640625 1.34375 1.046875q0.921875 0.390625 2.0625 0.390625q1.0 0 1.78125 -0.296875q0.78125 -0.296875 1.15625 -0.8125q0.375 -0.53125 0.375 -1.15625q0 -0.625 -0.375 -1.09375q-0.359375 -0.46875 -1.1875 -0.79685974q-0.546875 -0.203125 -2.390625 -0.640625q-1.828125 -0.453125 -2.5625 
-0.84375q-0.96875 -0.5 -1.4375 -1.234375q-0.46875 -0.75 -0.46875 -1.671875q0 -1.0 0.578125 -1.875q0.578125 -0.890625 1.671875 -1.34375q1.109375 -0.453125 2.453125 -0.453125q1.484375 0 2.609375 0.484375q1.140625 0.46875 1.75 1.40625q0.609375 0.921875 0.65625 2.09375l-1.6875 0.125q-0.140625 -1.265625 -0.9375 -1.90625q-0.78125 -0.65625 -2.3125 -0.65625q-1.609375 0 -2.34375 0.59375q-0.734375 0.59375 -0.734375 1.421875q0 0.71875 0.53125 1.171875q0.5 0.46875 2.65625 0.96875q2.15625 0.484375 2.953125 0.84375q1.171875 0.53125 1.71875 1.359375q0.5625 0.82810974 0.5625 1.9062347q0 1.0625 -0.609375 2.015625q-0.609375 0.9375 -1.75 1.46875q-1.140625 0.515625 -2.578125 0.515625q-1.8125 0 -3.046875 -0.53125q-1.21875 -0.53125 -1.921875 -1.59375q-0.6875 -1.0625 -0.71875 -2.40625z" fill-rule="nonzero"/><path fill="#000000" d="m579.87195 279.67264l1.640625 -0.21875q0.28125 1.40625 0.953125 2.015625q0.6875 0.609375 1.65625 0.609375q1.15625 0 1.953125 -0.796875q0.796875 -0.796875 0.796875 -1.984375q0 -1.125 -0.734375 -1.859375q-0.734375 -0.734375 -1.875 -0.734375q-0.46875 0 -1.15625 0.171875l0.1875 -1.4375q0.15625 0.015625 0.265625 0.015625q1.046875 0 1.875 -0.546875q0.84375 -0.546875 0.84375 -1.671875q0 -0.90625 -0.609375 -1.5q-0.609375 -0.59375 -1.578125 -0.59375q-0.953125 0 -1.59375 0.609375q-0.640625 0.59375 -0.8125 1.796875l-1.640625 -0.296875q0.296875 -1.640625 1.359375 -2.546875q1.0625 -0.90625 2.65625 -0.90625q1.09375 0 2.0 0.46875q0.921875 0.46875 1.40625 1.28125q0.5 0.8125 0.5 1.71875q0 0.859375 -0.46875 1.578125q-0.46875 0.703125 -1.375 1.125q1.1875 0.28125 1.84375 1.140625q0.65625 0.859375 0.65625 2.15625q0 1.734375 -1.28125 2.953125q-1.265625 1.21875 -3.21875 1.21875q-1.765625 0 -2.921875 -1.046875q-1.15625 -1.046875 -1.328125 -2.71875z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m704.5576 234.2839l70.10449 0l0 32.91327l-70.10449 0z" fill-rule="evenodd"/><path fill="#000000" d="m714.94824 261.2039l0 -13.35936l2.65625 0l3.15625 9.45311q0.4375 1.328125 0.640625 1.984375q0.234375 -0.734375 0.703125 -2.140625l3.203125 -9.29686l2.375 0l0 13.35936l-1.703125 0l0 -11.17186l-3.875 11.17186l-1.59375 0l-3.859375 -11.374985l0 11.374985l-1.703125 0zm15.5876465 0l0 -13.35936l4.609375 0q1.546875 0 2.375 0.203125q1.140625 0.25 1.953125 0.953125q1.0625 0.890625 1.578125 2.28125q0.53125 1.390625 0.53125 3.171875q0 1.515625 -0.359375 2.7031097q-0.359375 1.171875 -0.921875 1.9375q-0.546875 0.765625 -1.203125 1.21875q-0.65625 0.4375 -1.59375 0.671875q-0.9375 0.21875 -2.140625 0.21875l-4.828125 0zm1.765625 -1.578125l2.859375 0q1.3125 0 2.0625 -0.234375q0.75 -0.25 1.203125 -0.703125q0.625 -0.625 0.96875 -1.6875q0.359375 -1.0624847 0.359375 -2.5781097q0 -2.09375 -0.6875 -3.21875q-0.6875 -1.125 -1.671875 -1.5q-0.703125 -0.28125 -2.28125 -0.28125l-2.8125 0l0 10.20311zm11.113586 -2.71875l1.65625 -0.140625q0.125 1.0 0.546875 1.640625q0.4375 0.640625 1.34375 1.046875q0.921875 0.390625 2.0625 0.390625q1.0 0 1.78125 -0.296875q0.78125 -0.296875 1.15625 -0.8125q0.375 -0.53125 0.375 -1.15625q0 -0.625 -0.375 -1.09375q-0.359375 -0.46875 -1.1875 -0.79685974q-0.546875 -0.203125 -2.390625 -0.640625q-1.828125 -0.453125 -2.5625 -0.84375q-0.96875 -0.5 -1.4375 -1.234375q-0.46875 -0.75 -0.46875 -1.671875q0 -1.0 0.578125 -1.875q0.578125 -0.890625 1.671875 -1.34375q1.109375 -0.453125 2.453125 -0.453125q1.484375 0 2.609375 0.484375q1.140625 0.46875 1.75 1.40625q0.609375 0.921875 0.65625 2.09375l-1.6875 0.125q-0.140625 -1.265625 -0.9375 -1.90625q-0.78125 -0.65625 -2.3125 -0.65625q-1.609375 0 -2.34375 0.59375q-0.734375 
0.59375 -0.734375 1.421875q0 0.71875 0.53125 1.171875q0.5 0.46875 2.65625 0.96875q2.15625 0.484375 2.953125 0.84375q1.171875 0.53125 1.71875 1.359375q0.5625 0.82810974 0.5625 1.9062347q0 1.0625 -0.609375 2.015625q-0.609375 0.9375 -1.75 1.46875q-1.140625 0.515625 -2.578125 0.515625q-1.8125 0 -3.046875 -0.53125q-1.21875 -0.53125 -1.921875 -1.59375q-0.6875 -1.0625 -0.71875 -2.40625z" fill-rule="nonzero"/><path fill="#000000" d="m719.58887 283.2039l0 -3.203125l-5.796875 0l0 -1.5l6.09375 -8.65625l1.34375 0l0 8.65625l1.796875 0l0 1.5l-1.796875 0l0 3.203125l-1.640625 0zm0 -4.703125l0 -6.015625l-4.1875 6.015625l4.1875 0z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m27.929134 282.41995l877.6693 0l0 27.937012l-877.6693 0z" fill-rule="evenodd"/><path fill="#000000" d="m38.429134 306.77994l0 -11.453125l1.515625 0l0 11.453125l-1.515625 0zm4.0078125 0l0 -8.296875l1.265625 0l0 1.171875q0.90625 -1.359375 2.640625 -1.359375q0.75 0 1.375 0.265625q0.625 0.265625 0.9375 0.703125q0.3125 0.4375 0.4375 1.046875q0.078125 0.390625 0.078125 1.359375l0 5.109375l-1.40625 0l0 -5.046875q0 -0.859375 -0.171875 -1.28125q-0.15625 -0.4375 -0.578125 -0.6875q-0.40625 -0.25 -0.96875 -0.25q-0.90625 0 -1.5625 0.578125q-0.640625 0.5625 -0.640625 2.15625l0 4.53125l-1.40625 0zm16.40625 -1.265625l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3828125 1.265625l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.4843712 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.2968712 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm8.898434 -9.84375l0 -1.609375l1.40625 0l0 1.609375l-1.40625 0zm0 9.84375l0 -8.296875l1.40625 0l0 8.296875l-1.40625 0zm2.9921875 -2.484375l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm13.3359375 2.484375l0 -7.203125l-1.234375 0l0 -1.09375l1.234375 0l0 -0.890625q0 -0.828125 0.15625 -1.234375q0.203125 -0.546875 0.703125 -0.890625q0.515625 -0.34375 1.4375 -0.34375q0.59375 0 1.3125 0.140625l-0.203125 1.234375q-0.4375 -0.078125 -0.828125 -0.078125q-0.640625 0 -0.90625 
0.28125q-0.265625 0.265625 -0.265625 1.015625l0 0.765625l1.609375 0l0 1.09375l-1.609375 0l0 7.203125l-1.40625 0zm4.1171875 -9.84375l0 -1.609375l1.40625 0l0 1.609375l-1.40625 0zm0 9.84375l0 -8.296875l1.40625 0l0 8.296875l-1.40625 0zm3.5234375 0l0 -11.453125l1.40625 0l0 11.453125l-1.40625 0zm9.2578125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.71875 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm8.5 5.6875l-0.15625 -1.328125q0.453125 0.125 0.796875 0.125q0.46875 0 0.75 -0.15625q0.28125 -0.15625 0.46875 -0.4375q0.125 -0.203125 0.421875 -1.046875q0.046875 -0.109375 0.125 -0.34375l-3.140625 -8.3125l1.515625 0l1.71875 4.796875q0.34375 0.921875 0.609375 1.921875q0.234375 -0.96875 0.578125 -1.890625l1.765625 -4.828125l1.40625 0l-3.15625 8.4375q-0.5 1.375 -0.78125 1.890625q-0.375 0.6875 -0.859375 1.015625q-0.484375 0.328125 -1.15625 0.328125q-0.40625 0 -0.90625 -0.171875zm7.5 -5.6875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.5468826 0.4375 1.5000076 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.5468826 -0.390625 -2.1562576 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.5781326 -0.171875 1.2187576 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.9687576 0 -1.3906326 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.2500076 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.6250076 0 -2.4687576 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm11.625008 1.21875l0.203125 1.25q-0.59375 0.125 -1.0625 
0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm7.0546875 -1.40625l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.8359375 4.953125l0 -8.296875l1.25 0l0 1.15625q0.390625 -0.609375 1.03125 -0.96875q0.65625 -0.375 1.484375 -0.375q0.921875 0 1.515625 0.390625q0.59375 0.375 0.828125 1.0625q0.984375 -1.453125 2.5625 -1.453125q1.234375 0 1.890625 0.6875q0.671875 0.671875 0.671875 2.09375l0 5.703125l-1.390625 0l0 -5.234375q0 -0.84375 -0.140625 -1.203125q-0.140625 -0.375 -0.5 -0.59375q-0.359375 -0.234375 -0.84375 -0.234375q-0.875 0 -1.453125 0.578125q-0.578125 0.578125 -0.578125 1.859375l0 4.828125l-1.40625 0l0 -5.390625q0 -0.9375 -0.34375 -1.40625q-0.34375 -0.46875 -1.125 -0.46875q-0.59375 0 -1.09375 0.3125q-0.5 0.3125 -0.734375 0.921875q-0.21875 0.59375 -0.21875 1.71875l0 4.3125l-1.40625 0zm17.773438 0l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm8.8984375 -9.84375l0 -1.609375l1.40625 0l0 1.609375l-1.40625 0zm0 9.84375l0 -8.296875l1.40625 0l0 8.296875l-1.40625 0zm9.2265625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.8203125 4.953125l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm10.75 -1.03125q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 
0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.5859375 4.171875l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm10.75 -3.046875l1.390625 0.1875q-0.234375 1.421875 -1.171875 2.234375q-0.921875 0.8125 -2.28125 0.8125q-1.703125 0 -2.75 -1.109375q-1.03125 -1.125 -1.03125 -3.203125q0 -1.34375 0.4375 -2.34375q0.453125 -1.015625 1.359375 -1.515625q0.921875 -0.5 1.984375 -0.5q1.359375 0 2.21875 0.6875q0.859375 0.671875 1.09375 1.9375l-1.359375 0.203125q-0.203125 -0.828125 -0.703125 -1.25q-0.484375 -0.421875 -1.1875 -0.421875q-1.0625 0 -1.734375 0.765625q-0.65625 0.75 -0.65625 2.40625q0 1.671875 0.640625 2.4375q0.640625 0.75 1.671875 0.75q0.828125 0 1.375 -0.5q0.5625 -0.515625 0.703125 -1.578125zm2.59375 3.046875l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm8.8359375 3.203125l-0.15625 -1.328125q0.453125 0.125 0.796875 0.125q0.46875 0 0.75 -0.15625q0.28125 -0.15625 0.46875 -0.4375q0.125 -0.203125 0.421875 -1.046875q0.046875 -0.109375 0.125 -0.34375l-3.140625 -8.3125l1.515625 0l1.71875 4.796875q0.34375 0.921875 0.609375 1.921875q0.234375 -0.96875 0.578125 -1.890625l1.765625 -4.828125l1.40625 0l-3.15625 8.4375q-0.5 1.375 -0.78125 1.890625q-0.375 0.6875 -0.859375 1.015625q-0.484375 0.328125 -1.15625 0.328125q-0.40625 0 -0.90625 -0.171875zm15.5703125 -4.46875l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3671875 1.265625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 
0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.5078125 2.28125l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm8.1953125 4.953125l0 -1.609375l1.609375 0l0 1.609375q0 0.890625 -0.3125 1.421875q-0.3125 0.546875 -1.0 0.84375l-0.390625 -0.59375q0.453125 -0.203125 0.65625 -0.578125q0.21875 -0.375 0.234375 -1.09375l-0.796875 0zm11.59375 -1.265625l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3828125 1.265625l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm14.5703125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.71875 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 
0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm8.5625 2.484375l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm14.3046875 -1.03125q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm8.9765625 4.171875l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm13.6328125 1.46875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.2109375 4.953125l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm12.40625 4.140625l0 -8.296875l1.265625 0l0 1.171875q0.90625 -1.359375 
2.640625 -1.359375q0.75 0 1.375 0.265625q0.625 0.265625 0.9375 0.703125q0.3125 0.4375 0.4375 1.046875q0.078125 0.390625 0.078125 1.359375l0 5.109375l-1.40625 0l0 -5.046875q0 -0.859375 -0.171875 -1.28125q-0.15625 -0.4375 -0.578125 -0.6875q-0.40625 -0.25 -0.96875 -0.25q-0.90625 0 -1.5625 0.578125q-0.640625 0.5625 -0.640625 2.15625l0 4.53125l-1.40625 0zm8.3671875 -4.15625q0 -2.296875 1.28125 -3.40625q1.078125 -0.921875 2.609375 -0.921875q1.71875 0 2.796875 1.125q1.09375 1.109375 1.09375 3.09375q0 1.59375 -0.484375 2.515625q-0.484375 0.921875 -1.40625 1.4375q-0.90625 0.5 -2.0 0.5q-1.734375 0 -2.8125 -1.109375q-1.078125 -1.125 -1.078125 -3.234375zm1.453125 0q0 1.59375 0.6875 2.390625q0.703125 0.796875 1.75 0.796875q1.046875 0 1.734375 -0.796875q0.703125 -0.796875 0.703125 -2.4375q0 -1.53125 -0.703125 -2.328125q-0.6875 -0.796875 -1.734375 -0.796875q-1.046875 0 -1.75 0.796875q-0.6875 0.78125 -0.6875 2.375zm13.3515625 4.15625l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm13.6328125 1.46875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.2734375 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm12.9921875 2.484375l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 
0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.8359375 8.140625l0 -11.484375l1.28125 0l0 1.078125q0.453125 -0.640625 1.015625 -0.953125q0.578125 -0.3125 1.390625 -0.3125q1.0625 0 1.875 0.546875q0.8125 0.546875 1.21875 1.546875q0.421875 0.984375 0.421875 2.171875q0 1.28125 -0.46875 2.296875q-0.453125 1.015625 -1.328125 1.5625q-0.859375 0.546875 -1.828125 0.546875q-0.703125 0 -1.265625 -0.296875q-0.546875 -0.296875 -0.90625 -0.75l0 4.046875l-1.40625 0zm1.265625 -7.296875q0 1.609375 0.640625 2.375q0.65625 0.765625 1.578125 0.765625q0.9375 0 1.609375 -0.796875q0.671875 -0.796875 0.671875 -2.453125q0 -1.59375 -0.65625 -2.375q-0.65625 -0.796875 -1.5625 -0.796875q-0.890625 0 -1.59375 0.84375q-0.6875 0.84375 -0.6875 2.4375zm7.6171875 4.109375l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.2734375 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.234375 -0.1875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 
0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm7.5859375 4.21875l0 -11.453125l1.40625 0l0 11.453125l-1.40625 0zm9.0234375 0l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm9.1328125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.71875 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.0 2.484375l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm4.7578125 0l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 
0.796875q-0.671875 0.796875 -0.671875 2.3125zm10.6796875 2.953125l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3671875 1.265625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.5078125 2.28125l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.578125 4.953125l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm7.5546875 7.421875l-0.15625 -1.328125q0.453125 0.125 0.796875 0.125q0.46875 0 0.75 -0.15625q0.28125 -0.15625 0.46875 -0.4375q0.125 -0.203125 0.421875 -1.046875q0.046875 -0.109375 0.125 -0.34375l-3.140625 -8.3125l1.515625 0l1.71875 4.796875q0.34375 0.921875 0.609375 1.921875q0.234375 -0.96875 0.578125 -1.890625l1.765625 -4.828125l1.40625 0l-3.15625 8.4375q-0.5 1.375 -0.78125 1.890625q-0.375 0.6875 -0.859375 1.015625q-0.484375 0.328125 -1.15625 0.328125q-0.40625 0 -0.90625 -0.171875zm12.6328125 -3.203125l0 -11.453125l2.28125 0l2.71875 8.109375q0.375 1.125 0.546875 1.6875q0.1875 -0.625 0.609375 -1.828125l2.734375 -7.96875l2.046875 0l0 11.453125l-1.46875 0l0 -9.59375l-3.328125 9.59375l-1.359375 0l-3.3125 -9.75l0 9.75l-1.46875 0zm13.375 0l0 -11.453125l3.953125 0q1.328125 0 2.03125 0.15625q0.984375 0.234375 1.6875 0.828125q0.90625 0.765625 1.34375 1.953125q0.453125 1.1875 0.453125 2.71875q0 1.3125 -0.3125 2.328125q-0.296875 1.0 -0.78125 
1.65625q-0.46875 0.65625 -1.03125 1.046875q-0.5625 0.375 -1.375 0.578125q-0.796875 0.1875 -1.828125 0.1875l-4.140625 0zm1.515625 -1.359375l2.453125 0q1.125 0 1.765625 -0.203125q0.65625 -0.21875 1.03125 -0.59375q0.546875 -0.546875 0.84375 -1.453125q0.296875 -0.90625 0.296875 -2.203125q0 -1.796875 -0.59375 -2.765625q-0.578125 -0.96875 -1.421875 -1.296875q-0.609375 -0.234375 -1.96875 -0.234375l-2.40625 0l0 8.75zm9.5234375 -2.328125l1.4375 -0.125q0.09375 0.859375 0.46875 1.421875q0.375 0.546875 1.15625 0.890625q0.78125 0.328125 1.75 0.328125q0.875 0 1.53125 -0.25q0.671875 -0.265625 0.984375 -0.703125q0.328125 -0.453125 0.328125 -0.984375q0 -0.546875 -0.3125 -0.9375q-0.3125 -0.40625 -1.03125 -0.6875q-0.453125 -0.171875 -2.03125 -0.546875q-1.578125 -0.390625 -2.21875 -0.71875q-0.8125 -0.4375 -1.21875 -1.0625q-0.40625 -0.640625 -0.40625 -1.4375q0 -0.859375 0.484375 -1.609375q0.5 -0.765625 1.4375 -1.15625q0.953125 -0.390625 2.109375 -0.390625q1.28125 0 2.25 0.421875q0.96875 0.40625 1.484375 1.203125q0.53125 0.796875 0.578125 1.796875l-1.453125 0.109375q-0.125 -1.078125 -0.796875 -1.625q-0.671875 -0.5625 -2.0 -0.5625q-1.375 0 -2.0 0.5q-0.625 0.5 -0.625 1.21875q0 0.609375 0.4375 1.015625q0.4375 0.390625 2.28125 0.8125q1.859375 0.421875 2.546875 0.734375q1.0 0.453125 1.46875 1.171875q0.484375 0.703125 0.484375 1.625q0 0.90625 -0.53125 1.71875q-0.515625 0.8125 -1.5 1.265625q-0.984375 0.453125 -2.203125 0.453125q-1.5625 0 -2.609375 -0.453125q-1.046875 -0.46875 -1.65625 -1.375q-0.59375 -0.90625 -0.625 -2.0625zm20.367188 3.6875l-1.40625 0l0 -8.96875q-0.515625 0.484375 -1.34375 0.96875q-0.8125 0.484375 -1.46875 0.734375l0 -1.359375q1.171875 -0.5625 2.046875 -1.34375q0.890625 -0.796875 1.265625 -1.53125l0.90625 0l0 11.5zm4.3515625 0l0 -1.609375l1.609375 0l0 1.609375q0 0.890625 -0.3125 1.421875q-0.3125 0.546875 -1.0 0.84375l-0.390625 -0.59375q0.453125 -0.203125 0.65625 -0.578125q0.21875 -0.375 0.234375 -1.09375l-0.796875 0z" fill-rule="nonzero"/><path fill="#000000" d="m41.054134 343.5143l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3828125 1.265625l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm14.5703125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.749996 0.796875q0 -2.296875 1.28125 -3.40625q1.078125 -0.921875 2.609375 -0.921875q1.71875 0 2.796875 1.125q1.09375 1.109375 
1.09375 3.09375q0 1.59375 -0.484375 2.515625q-0.484375 0.921875 -1.40625 1.4375q-0.90625 0.5 -2.0 0.5q-1.734375 0 -2.8125 -1.109375q-1.078125 -1.125 -1.078125 -3.234375zm1.453125 0q0 1.59375 0.6875 2.390625q0.703125 0.796875 1.75 0.796875q1.046875 0 1.734375 -0.796875q0.703125 -0.796875 0.703125 -2.4375q0 -1.53125 -0.703125 -2.328125q-0.6875 -0.796875 -1.734375 -0.796875q-1.046875 0 -1.75 0.796875q-0.6875 0.78125 -0.6875 2.375zm7.9609375 4.15625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm10.75 -1.03125q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.6015625 4.171875l0 -8.296875l1.265625 0l0 1.171875q0.90625 -1.359375 2.640625 -1.359375q0.75 0 1.375 0.265625q0.625 0.265625 0.9375 0.703125q0.3125 0.4375 0.4375 1.046875q0.078125 0.390625 0.078125 1.359375l0 5.109375l-1.40625 0l0 -5.046875q0 -0.859375 -0.171875 -1.28125q-0.15625 -0.4375 -0.578125 -0.6875q-0.40625 -0.25 -0.96875 -0.25q-0.90625 0 -1.5625 0.578125q-0.640625 0.5625 -0.640625 2.15625l0 4.53125l-1.40625 0zm8.6328125 0.6875l1.375 0.203125q0.078125 0.640625 0.46875 0.921875q0.53125 0.390625 1.4375 0.390625q0.96875 0 1.5 -0.390625q0.53125 -0.390625 0.71875 -1.09375q0.109375 -0.421875 0.109375 -1.8125q-0.921875 1.09375 -2.296875 1.09375q-1.71875 0 -2.65625 -1.234375q-0.9375 -1.234375 -0.9375 -2.96875q0 -1.1875 0.421875 -2.1875q0.4375 -1.0 1.25 -1.546875q0.828125 -0.546875 1.921875 -0.546875q1.46875 0 2.421875 1.1875l0 -1.0l1.296875 0l0 7.171875q0 1.9375 -0.390625 2.75q-0.390625 0.8125 -1.25 1.28125q-0.859375 0.46875 -2.109375 0.46875q-1.484375 0 -2.40625 -0.671875q-0.90625 -0.671875 -0.875 -2.015625zm1.171875 -4.984375q0 1.625 0.640625 2.375q0.65625 0.75 1.625 0.75q0.96875 0 1.625 -0.734375q0.65625 -0.75 0.65625 -2.34375q0 -1.53125 -0.671875 -2.296875q-0.671875 -0.78125 -1.625 -0.78125q-0.9375 0 -1.59375 0.765625q-0.65625 0.765625 -0.65625 2.265625zm13.6640625 1.625l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 
0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.71875 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.000008 2.484375l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.6250076 -0.28125 -0.9375076 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.4062576 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm4.7578125 0l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm10.6796875 2.953125l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3671875 1.265625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 
-1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.5078125 2.28125l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.578125 4.953125l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm7.5546875 7.421875l-0.15625 -1.328125q0.453125 0.125 0.796875 0.125q0.46875 0 0.75 -0.15625q0.28125 -0.15625 0.46875 -0.4375q0.125 -0.203125 0.421875 -1.046875q0.046875 -0.109375 0.125 -0.34375l-3.140625 -8.3125l1.515625 0l1.71875 4.796875q0.34375 0.921875 0.609375 1.921875q0.234375 -0.96875 0.578125 -1.890625l1.765625 -4.828125l1.40625 0l-3.15625 8.4375q-0.5 1.375 -0.78125 1.890625q-0.375 0.6875 -0.859375 1.015625q-0.484375 0.328125 -1.15625 0.328125q-0.40625 0 -0.90625 -0.171875zm12.6328125 -3.203125l0 -11.453125l2.28125 0l2.71875 8.109375q0.375 1.125 0.546875 1.6875q0.1875 -0.625 0.609375 -1.828125l2.734375 -7.96875l2.046875 0l0 11.453125l-1.46875 0l0 -9.59375l-3.328125 9.59375l-1.359375 0l-3.3125 -9.75l0 9.75l-1.46875 0zm13.375 0l0 -11.453125l3.953125 0q1.328125 0 2.03125 0.15625q0.984375 0.234375 1.6875 0.828125q0.90625 0.765625 1.34375 1.953125q0.453125 1.1875 0.453125 2.71875q0 1.3125 -0.3125 2.328125q-0.296875 1.0 -0.78125 1.65625q-0.46875 0.65625 -1.03125 1.046875q-0.5625 0.375 -1.375 0.578125q-0.796875 0.1875 -1.828125 0.1875l-4.140625 0zm1.515625 -1.359375l2.453125 0q1.125 0 1.765625 -0.203125q0.65625 -0.21875 1.03125 -0.59375q0.546875 -0.546875 0.84375 -1.453125q0.296875 -0.90625 0.296875 -2.203125q0 -1.796875 -0.59375 -2.765625q-0.578125 -0.96875 -1.421875 -1.296875q-0.609375 -0.234375 -1.96875 -0.234375l-2.40625 0l0 8.75zm9.5234375 -2.328125l1.4375 -0.125q0.09375 0.859375 0.46875 1.421875q0.375 0.546875 1.15625 0.890625q0.78125 0.328125 1.75 0.328125q0.875 0 1.53125 -0.25q0.671875 -0.265625 0.984375 -0.703125q0.328125 -0.453125 0.328125 -0.984375q0 -0.546875 -0.3125 -0.9375q-0.3125 -0.40625 -1.03125 -0.6875q-0.453125 -0.171875 -2.03125 -0.546875q-1.578125 -0.390625 -2.21875 -0.71875q-0.8125 -0.4375 -1.21875 -1.0625q-0.40625 -0.640625 -0.40625 -1.4375q0 -0.859375 0.484375 -1.609375q0.5 -0.765625 1.4375 -1.15625q0.953125 -0.390625 2.109375 -0.390625q1.28125 0 2.25 0.421875q0.96875 0.40625 1.484375 1.203125q0.53125 0.796875 0.578125 1.796875l-1.453125 0.109375q-0.125 -1.078125 -0.796875 -1.625q-0.671875 -0.5625 -2.0 -0.5625q-1.375 0 -2.0 0.5q-0.625 0.5 -0.625 1.21875q0 0.609375 0.4375 1.015625q0.4375 0.390625 2.28125 0.8125q1.859375 0.421875 2.546875 0.734375q1.0 
0.453125 1.46875 1.171875q0.484375 0.703125 0.484375 1.625q0 0.90625 -0.53125 1.71875q-0.515625 0.8125 -1.5 1.265625q-0.984375 0.453125 -2.203125 0.453125q-1.5625 0 -2.609375 -0.453125q-1.046875 -0.46875 -1.65625 -1.375q-0.59375 -0.90625 -0.625 -2.0625zm22.460938 2.328125l0 1.359375l-7.578125 0q-0.015625 -0.515625 0.171875 -0.984375q0.28125 -0.765625 0.921875 -1.515625q0.640625 -0.75 1.84375 -1.734375q1.859375 -1.53125 2.515625 -2.421875q0.65625 -0.90625 0.65625 -1.703125q0 -0.828125 -0.59375 -1.40625q-0.59375 -0.578125 -1.5625 -0.578125q-1.015625 0 -1.625 0.609375q-0.609375 0.609375 -0.609375 1.6875l-1.453125 -0.140625q0.15625 -1.625 1.125 -2.46875q0.96875 -0.84375 2.59375 -0.84375q1.65625 0 2.609375 0.921875q0.96875 0.90625 0.96875 2.25q0 0.6875 -0.28125 1.359375q-0.28125 0.65625 -0.9375 1.390625q-0.65625 0.734375 -2.171875 2.015625q-1.265625 1.0625 -1.625 1.453125q-0.359375 0.375 -0.59375 0.75l5.625 0zm2.2578125 1.359375l0 -1.609375l1.609375 0l0 1.609375q0 0.890625 -0.3125 1.421875q-0.3125 0.546875 -1.0 0.84375l-0.390625 -0.59375q0.453125 -0.203125 0.65625 -0.578125q0.21875 -0.375 0.234375 -1.09375l-0.796875 0zm11.59375 -1.265625l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3828125 1.265625l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm14.5703125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm12.265625 4.953125l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.2109375 4.953125l0 -1.046875q-0.78125 
1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm11.84375 1.65625l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.0 2.484375l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm4.7578125 0l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm10.6796875 2.953125l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3671875 1.265625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 
-1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.5078125 2.28125l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.578125 4.953125l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm7.5546875 7.421875l-0.15625 -1.328125q0.453125 0.125 0.796875 0.125q0.46875 0 0.75 -0.15625q0.28125 -0.15625 0.46875 -0.4375q0.125 -0.203125 0.421875 -1.046875q0.046875 -0.109375 0.125 -0.34375l-3.140625 -8.3125l1.515625 0l1.71875 4.796875q0.34375 0.921875 0.609375 1.921875q0.234375 -0.96875 0.578125 -1.890625l1.765625 -4.828125l1.40625 0l-3.15625 8.4375q-0.5 1.375 -0.78125 1.890625q-0.375 0.6875 -0.859375 1.015625q-0.484375 0.328125 -1.15625 0.328125q-0.40625 0 -0.90625 -0.171875zm12.6328125 -3.203125l0 -11.453125l2.28125 0l2.71875 8.109375q0.375 1.125 0.546875 1.6875q0.1875 -0.625 0.609375 -1.828125l2.734375 -7.96875l2.046875 0l0 11.453125l-1.46875 0l0 -9.59375l-3.328125 9.59375l-1.359375 0l-3.3125 -9.75l0 9.75l-1.46875 0zm13.375 0l0 -11.453125l3.953125 0q1.328125 0 2.03125 0.15625q0.984375 0.234375 1.6875 0.828125q0.90625 0.765625 1.34375 1.953125q0.453125 1.1875 0.453125 2.71875q0 1.3125 -0.3125 2.328125q-0.296875 1.0 -0.78125 1.65625q-0.46875 0.65625 -1.03125 1.046875q-0.5625 0.375 -1.375 0.578125q-0.796875 0.1875 -1.828125 0.1875l-4.140625 0zm1.515625 -1.359375l2.453125 0q1.125 0 1.765625 -0.203125q0.65625 -0.21875 1.03125 -0.59375q0.546875 -0.546875 0.84375 -1.453125q0.296875 -0.90625 0.296875 -2.203125q0 -1.796875 -0.59375 -2.765625q-0.578125 -0.96875 -1.421875 -1.296875q-0.609375 -0.234375 -1.96875 -0.234375l-2.40625 0l0 8.75zm9.5234375 -2.328125l1.4375 -0.125q0.09375 0.859375 0.46875 1.421875q0.375 0.546875 1.15625 0.890625q0.78125 0.328125 1.75 0.328125q0.875 0 1.53125 -0.25q0.671875 -0.265625 0.984375 -0.703125q0.328125 -0.453125 0.328125 -0.984375q0 -0.546875 -0.3125 -0.9375q-0.3125 -0.40625 -1.03125 -0.6875q-0.453125 -0.171875 -2.03125 -0.546875q-1.578125 -0.390625 -2.21875 -0.71875q-0.8125 -0.4375 -1.21875 -1.0625q-0.40625 -0.640625 -0.40625 -1.4375q0 -0.859375 0.484375 -1.609375q0.5 -0.765625 1.4375 -1.15625q0.953125 -0.390625 2.109375 
-0.390625q1.28125 0 2.25 0.421875q0.96875 0.40625 1.484375 1.203125q0.53125 0.796875 0.578125 1.796875l-1.453125 0.109375q-0.125 -1.078125 -0.796875 -1.625q-0.671875 -0.5625 -2.0 -0.5625q-1.375 0 -2.0 0.5q-0.625 0.5 -0.625 1.21875q0 0.609375 0.4375 1.015625q0.4375 0.390625 2.28125 0.8125q1.859375 0.421875 2.546875 0.734375q1.0 0.453125 1.46875 1.171875q0.484375 0.703125 0.484375 1.625q0 0.90625 -0.53125 1.71875q-0.515625 0.8125 -1.5 1.265625q-0.984375 0.453125 -2.203125 0.453125q-1.5625 0 -2.609375 -0.453125q-1.046875 -0.46875 -1.65625 -1.375q-0.59375 -0.90625 -0.625 -2.0625zm15.0703125 0.65625l1.40625 -0.1875q0.25 1.203125 0.828125 1.734375q0.578125 0.515625 1.421875 0.515625q0.984375 0 1.671875 -0.6875q0.6875 -0.6875 0.6875 -1.703125q0 -0.96875 -0.640625 -1.59375q-0.625 -0.625 -1.609375 -0.625q-0.390625 0 -0.984375 0.15625l0.15625 -1.234375q0.140625 0.015625 0.21875 0.015625q0.90625 0 1.625 -0.46875q0.71875 -0.46875 0.71875 -1.453125q0 -0.765625 -0.53125 -1.265625q-0.515625 -0.515625 -1.34375 -0.515625q-0.828125 0 -1.375 0.515625q-0.546875 0.515625 -0.703125 1.546875l-1.40625 -0.25q0.265625 -1.421875 1.171875 -2.1875q0.921875 -0.78125 2.28125 -0.78125q0.9375 0 1.71875 0.40625q0.796875 0.390625 1.203125 1.09375q0.421875 0.6875 0.421875 1.46875q0 0.75 -0.40625 1.359375q-0.390625 0.609375 -1.171875 0.96875q1.015625 0.234375 1.578125 0.96875q0.5625 0.734375 0.5625 1.84375q0 1.5 -1.09375 2.546875q-1.09375 1.046875 -2.765625 1.046875q-1.5 0 -2.5 -0.890625q-1.0 -0.90625 -1.140625 -2.34375zm19.140625 2.0q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.6015625 4.171875l0 -8.296875l1.265625 0l0 1.171875q0.90625 -1.359375 2.640625 -1.359375q0.75 0 1.375 0.265625q0.625 0.265625 0.9375 0.703125q0.3125 0.4375 0.4375 1.046875q0.078125 0.390625 0.078125 1.359375l0 5.109375l-1.40625 0l0 -5.046875q0 -0.859375 -0.171875 -1.28125q-0.15625 -0.4375 -0.578125 -0.6875q-0.40625 -0.25 -0.96875 -0.25q-0.90625 0 -1.5625 0.578125q-0.640625 0.5625 -0.640625 2.15625l0 4.53125l-1.40625 0zm14.2734375 0l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 
-2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm15.46875 2.875l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3828125 1.265625l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm14.5703125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm17.65625 4.953125l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm13.3671875 3.109375q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.5859375 4.171875l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 
0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm5.34375 0l0 -11.453125l1.40625 0l0 6.53125l3.328125 -3.375l1.828125 0l-3.171875 3.078125l3.484375 5.21875l-1.734375 0l-2.734375 -4.25l-1.0 0.953125l0 3.296875l-1.40625 0zm13.7421875 0l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm7.5859375 4.21875l0 -11.453125l1.40625 0l0 11.453125l-1.40625 0zm9.0234375 0l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm9.1328125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.71875 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.0 2.484375l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm4.7578125 0l-1.3125 0l0 -11.453125l1.40625 0l0 4.078125q0.890625 -1.109375 
2.28125 -1.109375q0.765625 0 1.4375 0.3125q0.6875 0.296875 1.125 0.859375q0.453125 0.5625 0.703125 1.359375q0.25 0.78125 0.25 1.671875q0 2.140625 -1.0625 3.3125q-1.046875 1.15625 -2.53125 1.15625q-1.46875 0 -2.296875 -1.234375l0 1.046875zm-0.015625 -4.21875q0 1.5 0.40625 2.15625q0.65625 1.09375 1.796875 1.09375q0.921875 0 1.59375 -0.796875q0.671875 -0.8125 0.671875 -2.390625q0 -1.625 -0.65625 -2.390625q-0.640625 -0.78125 -1.546875 -0.78125q-0.921875 0 -1.59375 0.796875q-0.671875 0.796875 -0.671875 2.3125zm10.6796875 2.953125l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm1.3671875 1.265625l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.5078125 2.28125l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm10.5234375 8.328125q-1.171875 -1.46875 -1.984375 -3.4375q-0.796875 -1.984375 -0.796875 -4.09375q0 -1.859375 0.609375 -3.5625q0.703125 -1.96875 2.171875 -3.9375l1.0 0q-0.9375 1.625 -1.25 2.328125q-0.46875 1.078125 -0.75 2.25q-0.328125 1.453125 -0.328125 2.9375q0 3.75 2.328125 7.515625l-1.0 0zm8.6171875 -3.375l-2.546875 -8.296875l1.453125 0l1.328125 4.78125l0.484375 1.78125q0.03125 -0.125 0.4375 -1.703125l1.3125 -4.859375l1.453125 0l1.234375 4.8125l0.421875 1.578125l0.46875 -1.59375l1.421875 -4.796875l1.375 0l-2.59375 8.296875l-1.46875 0l-1.3125 -4.96875l-0.328125 -1.421875l-1.671875 6.390625l-1.46875 0zm10.0234375 0l0 -11.453125l1.40625 0l0 4.109375q0.984375 -1.140625 2.484375 -1.140625q0.921875 0 1.59375 0.359375q0.6875 0.359375 0.96875 1.0q0.296875 0.640625 0.296875 1.859375l0 5.265625l-1.40625 0l0 -5.265625q0 -1.046875 -0.453125 -1.53125q-0.453125 -0.484375 -1.296875 -0.484375q-0.625 0 -1.171875 0.328125q-0.546875 0.328125 -0.78125 0.890625q-0.234375 0.546875 -0.234375 1.515625l0 4.546875l-1.40625 0zm8.8984375 -9.84375l0 -1.609375l1.40625 0l0 1.609375l-1.40625 
[... SVG path coordinate data (vector outlines of text glyphs in an embedded documentation diagram); no human-readable content, not reproduced here ...]