Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package benji for openSUSE:Factory checked 
in at 2022-09-26 18:48:23
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/benji (Old)
 and      /work/SRC/openSUSE:Factory/.benji.new.2275 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "benji"

Mon Sep 26 18:48:23 2022 rev:16 rq:1006021 version:0.17.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/benji/benji.changes      2022-04-21 
15:48:34.364310727 +0200
+++ /work/SRC/openSUSE:Factory/.benji.new.2275/benji.changes    2022-09-26 
18:48:24.820078044 +0200
@@ -1,0 +2,12 @@
+Mon Sep 26 08:07:18 UTC 2022 - Michael Vetter <[email protected]>
+
+- Update to 0.17.0:
+  * Support fsfreeze feature on Kubernetes 1.24+ (requires the updated Helm 
chart)
+  * Support for storage classes in S3
+  * Small change to get Benji to work with MySQL/MariaDB
+  * k8s-tools: Switch logging to structlog and JSON output
+  * k8s-tools: Pass through JSON logging from benji unaltered
+  * k8s-tools: Create PVC with default storage class if nothing is specified
+  * k8s-tools: Put an upper bound on the time we wait for PV creation
+
+-------------------------------------------------------------------

Old:
----
  v0.16.1.tar.gz

New:
----
  v0.17.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ benji.spec ++++++
--- /var/tmp/diff_new_pack.CjQOsd/_old  2022-09-26 18:48:25.336079019 +0200
+++ /var/tmp/diff_new_pack.CjQOsd/_new  2022-09-26 18:48:25.340079027 +0200
@@ -17,7 +17,7 @@
 
 
 Name:           benji
-Version:        0.16.1
+Version:        0.17.0
 Release:        0
 Summary:        Deduplicating block based backup software
 License:        LGPL-3.0-only

++++++ v0.16.1.tar.gz -> v0.17.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/CHANGES.md new/benji-0.17.0/CHANGES.md
--- old/benji-0.16.1/CHANGES.md 2022-04-18 14:31:49.000000000 +0200
+++ new/benji-0.17.0/CHANGES.md 2022-09-24 19:42:41.000000000 +0200
@@ -1,3 +1,13 @@
+## 0.17.0, 24.09.2022
+
+* Support fsfreeze feature on Kubernetes 1.24+ (requires the updated Helm 
chart)
+* Support for storage classes in S3
+* Small change to get Benji to work with MySQL/MariaDB
+* k8s-tools: Switch logging to structlog and JSON output
+* k8s-tools: Pass through JSON logging from benji unaltered
+* k8s-tools: Create PVC with default storage class if nothing is specified
+* k8s-tools: Put an upper bound on the time we wait for PV creation
+
 ## 0.16.1, 18.04.2022
 
 * Fix typo in PersistentVolume key name (#140)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/charts/benji-k8s/values.yaml 
new/benji-0.17.0/charts/benji-k8s/values.yaml
--- old/benji-0.16.1/charts/benji-k8s/values.yaml       2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/charts/benji-k8s/values.yaml       2022-09-24 
19:42:41.000000000 +0200
@@ -122,7 +122,7 @@
         type: DirectoryOrCreate
     - name: rbd-mounts-2
       hostPath:
-        path: /var/lib/kubelet/plugins/kubernetes.io/csi/pv/
+        path: /var/lib/kubelet/plugins/kubernetes.io/csi/
         type: DirectoryOrCreate
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/docs/source/configuration.rst 
new/benji-0.17.0/docs/source/configuration.rst
--- old/benji-0.16.1/docs/source/configuration.rst      2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/docs/source/configuration.rst      2022-09-24 
19:42:41.000000000 +0200
@@ -686,6 +686,22 @@
 
 Sets the region of the bucket.
 
+* name: **storageClass**
+* type: string
+
+Sets the storage class.  Certain storage classes like ``GLACIER`` or
+``DEEP_ARCHIVE`` and certain configurations of ``INTELLIGENT_TIERING`` are
+not suitable for Benji as they require an explicit restore operation and do
+not support immediate retrieval.  Third party implementations of S3 might
+only support a subset of the storage classes supported by AWS, they might
+define their own storage classes or they might ignore any configured storage
+class altogether.
+
+.. ATTENTION:: Benji uses the ``boto3`` library which (according to the
+    Ceph documentation) only supports storage class names supported by
+    AWS S3, even though other storage class names could be defined and
+    supported by a third party implementation.
+
 * name: **useSsl**
 * type: bool
 * default: from ``boto3`` library, ignored if **endpointUrl** is specified
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/kubernetes.py 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/kubernetes.py
--- 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/kubernetes.py   
    2022-04-18 14:31:49.000000000 +0200
+++ 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/kubernetes.py   
    2022-09-24 19:42:41.000000000 +0200
@@ -1,4 +1,5 @@
 import datetime
+import hashlib
 import json
 import logging
 import re
@@ -18,7 +19,10 @@
 SERVICE_NAMESPACE_FILENAME = 
'/var/run/secrets/kubernetes.io/serviceaccount/namespace'
 
 NODE_RBD_MOUNT_PATH_FORMAT = 
'/var/lib/kubelet/plugins/kubernetes.io/rbd/mounts/rbd-image-{image}'
-NODE_CSI_MOUNT_PATH_FORMAT = 
'/var/lib/kubelet/plugins/kubernetes.io/csi/pv/{pv}/globalmount/{volume_handle}'
+# For Kubernetes < 1.24
+NODE_CSI_MOUNT_PATH_FORMAT_LT_1_24 = 
'/var/lib/kubelet/plugins/kubernetes.io/csi/pv/{pv}/globalmount/{volume_handle}'
+# For Kubernetes >= 1.24
+NODE_CSI_MOUNT_PATH_FORMAT_GE_1_24 = 
'/var/lib/kubelet/plugins/kubernetes.io/csi/pv/{csi_driver}/{volume_handle_hash}/globalmount/{volume_handle}'
 
 logger = logging.getLogger()
 
@@ -35,6 +39,12 @@
             raise RuntimeError('No Kubernetes configuration found.')
 
 
+# Returns the Kubernetes server version as an integer (major * 1000 + minor) 
for easy comparison
+def server_version() -> int:
+    version_info = kubernetes.client.VersionApi().get_code()
+    return int(version_info.major) * 1000 + int(version_info.minor)
+
+
 def service_account_namespace() -> str:
     with open(SERVICE_NAMESPACE_FILENAME, 'r') as f:
         namespace = f.read()
@@ -180,28 +190,34 @@
     return core_v1_api.create_namespaced_event(namespace=pvc_namespace, 
body=event)
 
 
-def create_pvc(pvc_name: str, pvc_namespace: int, pvc_size: str,
-               pvc_storage_class: str) -> 
kubernetes.client.models.v1_persistent_volume_claim.V1PersistentVolumeClaim:
+def create_pvc(
+        *,
+        name: str,
+        namespace: int,
+        size: str,
+        storage_class: str = None) -> 
kubernetes.client.models.v1_persistent_volume_claim.V1PersistentVolumeClaim:
     pvc = {
         'kind': 'PersistentVolumeClaim',
         'apiVersion': 'v1',
         'metadata': {
-            'namespace': pvc_namespace,
-            'name': pvc_name,
+            'namespace': namespace,
+            'name': name,
         },
         'spec': {
-            'storageClassName': pvc_storage_class,
             'accessModes': ['ReadWriteOnce'],
             'resources': {
                 'requests': {
-                    'storage': pvc_size
+                    'storage': size
                 }
             }
         }
     }
 
+    if storage_class is not None:
+        pvc['spec']['storageClassName'] = storage_class
+
     core_v1_api = kubernetes.client.CoreV1Api()
-    return 
core_v1_api.create_namespaced_persistent_volume_claim(namespace=pvc_namespace, 
body=pvc)
+    return 
core_v1_api.create_namespaced_persistent_volume_claim(namespace=namespace, 
body=pvc)
 
 
 # Mark this as private because the class should only be instantiated by 
determine_rbd_info_from_pv.
@@ -231,12 +247,23 @@
     # name.
     elif keys_exist(pv.spec, ['csi.driver', 'csi.volume_handle', 
'csi.volume_attributes.pool', 'csi.volume_attributes.imageName']) \
         and (key_get(pv.spec, 'csi.driver') == 'rbd.csi.ceph.com' or 
key_get(pv.spec, 'csi.driver').endswith('.rbd.csi.ceph.com')):
+
+        kubernetes_1_24 = 1 * 1000 + 24
+        if server_version() < kubernetes_1_24:
+            mount_point = 
NODE_CSI_MOUNT_PATH_FORMAT_LT_1_24.format(pv=pv.metadata.name,
+                                                                    
volume_handle=key_get(pv.spec, 'csi.volume_handle'))
+        else:
+            volume_handle = key_get(pv.spec, 'csi.volume_handle')
+            volume_handle_hash = 
hashlib.sha256(volume_handle.encode()).hexdigest()
+            mount_point = 
NODE_CSI_MOUNT_PATH_FORMAT_GE_1_24.format(pv=pv.metadata.name,
+                                                                    
csi_driver=key_get(pv.spec, 'csi.driver'),
+                                                                    
volume_handle_hash=volume_handle_hash,
+                                                                    
volume_handle=volume_handle)
+
         rbd_info = _RBDInfo(pool=key_get(pv.spec, 
'csi.volume_attributes.pool'),
                             namespace=key_get(pv.spec, 
'csi.volume_attributes.radosNamespace', ''),
                             image=key_get(pv.spec, 
'csi.volume_attributes.imageName'),
-                            
mount_point=NODE_CSI_MOUNT_PATH_FORMAT.format(pv=pv.metadata.name,
-                                                                          
volume_handle=key_get(
-                                                                              
pv.spec, 'csi.volume_handle')))
+                            mount_point=mount_point)
 
     return rbd_info
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/backup_pvc.py
 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/backup_pvc.py
--- 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/backup_pvc.py
       2022-04-18 14:31:49.000000000 +0200
+++ 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/backup_pvc.py
       2022-09-24 19:42:41.000000000 +0200
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
 import argparse
-import logging
 import random
 import string
 import sys
@@ -13,8 +12,8 @@
 import benji.helpers.ceph as ceph
 import benji.helpers.prometheus as prometheus
 import benji.helpers.settings as settings
-import benji.helpers.utils as utils
 import benji.k8s_tools.kubernetes
+from benji.helpers.utils import setup_logging, logger
 
 FSFREEZE_TIMEOUT = 15
 FSFREEZE_UNFREEZE_TRIES = (0, 1, 1, 1, 15, 30)
@@ -22,8 +21,7 @@
 FSFREEZE_POD_LABEL_SELECTOR = 'benji-backup.me/component=fsfreeze'
 FSFREEZE_CONTAINER_NAME = 'fsfreeze'
 
-utils.setup_logging()
-logger = logging.getLogger()
+setup_logging()
 
 
 def _random_string(length: int, characters: str = string.ascii_lowercase + 
string.digits) -> str:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/command.py
 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/command.py
--- 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/command.py
  2022-04-18 14:31:49.000000000 +0200
+++ 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/command.py
  2022-09-24 19:42:41.000000000 +0200
@@ -1,13 +1,12 @@
 #!/usr/bin/env python3
 import sys
 import time
-from typing import Optional
 
 import benji.helpers.prometheus as prometheus
 import benji.helpers.settings as settings
-import benji.helpers.utils as utils
+from benji.helpers.utils import setup_logging, subprocess_run
 
-utils.setup_logging()
+setup_logging()
 
 
 def main():
@@ -16,7 +15,7 @@
 
     prometheus.command_start_time.labels(command=command).set(start_time)
     try:
-        utils.subprocess_run(['benji', '--log-level', 
settings.benji_log_level] + sys.argv[1:])
+        print(subprocess_run(['benji', '--machine-output', '--log-level', 
settings.benji_log_level] + sys.argv[1:]))
     except Exception as exception:
         prometheus.command_status_failed.labels(command=command).set(1)
         completion_time = time.time()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/restore_pvc.py
 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/restore_pvc.py
--- 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/restore_pvc.py
      2022-04-18 14:31:49.000000000 +0200
+++ 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/restore_pvc.py
      2022-09-24 19:42:41.000000000 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 import argparse
-import logging
+import os
 import sys
 import time
 
@@ -8,11 +8,13 @@
 from kubernetes.client.rest import ApiException
 
 import benji.helpers.settings as settings
-import benji.helpers.utils as utils
 import benji.k8s_tools.kubernetes
+from benji.helpers.utils import setup_logging, logger, subprocess_run
 
-utils.setup_logging()
-logger = logging.getLogger()
+PVC_CREATION_MAX_POLLS = 15
+PVC_CREATION_POLL_INTERVAL = 2  # seconds
+
+setup_logging()
 
 
 def main():
@@ -27,7 +29,7 @@
     parser.add_argument('--pvc-storage-class',
                         metavar='pvc_storage_class',
                         dest='pvc_storage_class',
-                        default='rbd',
+                        default=None,
                         help='PVC storage class (only takes effect if the PVC 
does not exist yet)')
     parser.add_argument('--restore-url-template',
                         metavar='restore_url_template',
@@ -44,7 +46,7 @@
 
     logger.info(f'Restoring version {args.version_uid} to PVC 
{args.pvc_namespace}/{args.pvc_name}.')
 
-    benji_ls = utils.subprocess_run(
+    benji_ls = subprocess_run(
         ['benji', '--machine-output', '--log-level', settings.benji_log_level, 
'ls', f'uid == "{args.version_uid}"'],
         decode_json=True)
     assert isinstance(benji_ls, dict)
@@ -68,8 +70,10 @@
             raise RuntimeError(f'Unexpected Kubernetes API exception: 
{str(exception)}')
 
     if pvc is None:
-        pvc = benji.k8s_tools.kubernetes.create_pvc(args.pvc_name, 
args.pvc_namespace, version_size,
-                                                    args.pvc_storage_class)
+        pvc = benji.k8s_tools.kubernetes.create_pvc(name=args.pvc_name,
+                                                    
namespace=args.pvc_namespace,
+                                                    size=version_size,
+                                                    
storage_class=args.pvc_storage_class)
     else:
         if not args.force:
             raise RuntimeError('PVC already exists. Will not overwrite it 
unless forced.')
@@ -81,26 +85,36 @@
         elif pvc_size > version_size:
             logger.warning(f'Existing PVC is {pvc_size - version_size} bytes 
bigger than version {args.version_uid}.')
 
-    while True:
+    polls = 0
+    while polls < PVC_CREATION_MAX_POLLS:
         pvc = 
core_v1_api.read_namespaced_persistent_volume_claim(args.pvc_name, 
args.pvc_namespace)
         if pvc.status.phase == 'Bound':
             break
-        logger.info('Waiting for persistent volume creation.')
-        time.sleep(1)
+        time.sleep(PVC_CREATION_POLL_INTERVAL)
+        polls += 1
+        logger.info('Waiting for persistent volume creation... %d/%d', polls, 
PVC_CREATION_MAX_POLLS)
+    if pvc.status.phase == 'Bound':
+        logger.info('Persistent volume creation completed.')
+    else:
+        logger.error('Persistent volume creation did not complete after %d 
seconds.',
+                     PVC_CREATION_MAX_POLLS * PVC_CREATION_POLL_INTERVAL)
+        sys.exit(os.EX_CANTCREAT)
 
     pv = core_v1_api.read_persistent_volume(pvc.spec.volume_name)
     rbd_info = benji.k8s_tools.kubernetes.determine_rbd_info_from_pv(pv)
     if rbd_info is None:
         raise RuntimeError(f'Unable to determine RBD information for 
{pv.metadata.name}')
 
-    utils.subprocess_run([
-        'benji',
-        '--log-level',
-        settings.benji_log_level,
-        'restore',
-        '--sparse',
-        '--force',
-        args.version_uid,
-        args.restore_url_template.format(pool=rbd_info.pool, 
namespace=rbd_info.namespace, image=rbd_info.image),
-    ])
+    print(
+        subprocess_run([
+            'benji',
+            '--machine-output',
+            '--log-level',
+            settings.benji_log_level,
+            'restore',
+            '--sparse',
+            '--force',
+            args.version_uid,
+            args.restore_url_template.format(pool=rbd_info.pool, 
namespace=rbd_info.namespace, image=rbd_info.image),
+        ]))
     sys.exit(0)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/versions_status.py
 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/versions_status.py
--- 
old/benji-0.16.1/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/versions_status.py
  2022-04-18 14:31:49.000000000 +0200
+++ 
new/benji-0.17.0/images/benji-k8s/k8s-tools/src/benji/k8s_tools/scripts/versions_status.py
  2022-09-24 19:42:41.000000000 +0200
@@ -3,13 +3,13 @@
 
 import benji.helpers.prometheus as prometheus
 import benji.helpers.settings as settings
-import benji.helpers.utils as utils
+from benji.helpers.utils import setup_logging, subprocess_run
 
-utils.setup_logging()
+setup_logging()
 
 
 def main():
-    incomplete_versions = utils.subprocess_run([
+    incomplete_versions = subprocess_run([
         'benji',
         '--machine-output',
         '--log-level',
@@ -17,9 +17,9 @@
         'ls',
         'status == "incomplete" and date < "1 day ago"',
     ],
-                                               decode_json=True)
+                                         decode_json=True)
 
-    invalid_versions = utils.subprocess_run([
+    invalid_versions = subprocess_run([
         'benji',
         '--machine-output',
         '--log-level',
@@ -27,7 +27,7 @@
         'ls',
         'status == "invalid"',
     ],
-                                            decode_json=True)
+                                      decode_json=True)
 
     
prometheus.older_incomplete_versions.set(len(incomplete_versions['versions']))
     prometheus.invalid_versions.set(len(invalid_versions['versions']))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/_static_version.py 
new/benji-0.17.0/src/benji/_static_version.py
--- old/benji-0.16.1/src/benji/_static_version.py       2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/_static_version.py       2022-09-24 
19:42:41.000000000 +0200
@@ -8,5 +8,5 @@
 version = "__use_git__"
 
 # These values are only set if the distribution was created with 'git archive'
-refnames = "tag: v0.16.1"
-git_hash = "c70d54e2"
+refnames = "HEAD -> master, tag: v0.17.0"
+git_hash = "8cbbf8c6"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/benji.py 
new/benji-0.17.0/src/benji/benji.py
--- old/benji-0.16.1/src/benji/benji.py 2022-04-18 14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/benji.py 2022-09-24 19:42:41.000000000 +0200
@@ -1095,7 +1095,6 @@
                 with StringIO() as metadata_export:
                     Database.export([version.uid], metadata_export)
                     storage = StorageFactory.get_by_name(version.storage.name)
-                    logger.debug(metadata_export.getvalue())
                     storage.write_version(version.uid, 
metadata_export.getvalue(), overwrite=overwrite)
                 logger.info('Backed up metadata of version 
{}.'.format(version.uid))
         finally:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/config.py 
new/benji-0.17.0/src/benji/config.py
--- old/benji-0.16.1/src/benji/config.py        2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/config.py        2022-09-24 19:42:41.000000000 
+0200
@@ -40,7 +40,7 @@
     _PARENTS_KEY = 'parents'
     _YAML_SUFFIX = '.yaml'
 
-    _SCHEMA_VERSIONS = [semantic_version.Version('1', partial=True)]
+    _SCHEMA_VERSIONS = [semantic_version.Version(major=1, minor=0, patch=0)]
 
     _schema_registry: Dict[str, Dict] = {}
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/database.py 
new/benji-0.17.0/src/benji/database.py
--- old/benji-0.16.1/src/benji/database.py      2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/database.py      2022-09-24 19:42:41.000000000 
+0200
@@ -334,13 +334,16 @@
     bytes_sparse = sqlalchemy.Column(sqlalchemy.BigInteger)
     duration = sqlalchemy.Column(sqlalchemy.BigInteger)
 
-    # See 
https://docs.sqlalchemy.org/en/13/orm/collections.html#passive-deletes
+    # Eagerly load labels so that the attribute can be accessed even when 
there is no associated session anymore.
+    # See 
https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#what-kind-of-loading.
+    # See 
https://docs.sqlalchemy.org/en/13/orm/collections.html#passive-deletes for the 
cascade delete behaviour.
     labels = sqlalchemy.orm.relationship('Label',
                                          backref='version',
                                          order_by='asc(Label.name)',
                                          passive_deletes=True,
                                          cascade='all, delete-orphan',
-                                         
collection_class=attribute_mapped_collection('name'))
+                                         
collection_class=attribute_mapped_collection('name'),
+                                         lazy='selectin')
 
     @sqlalchemy.orm.reconstructor
     def __init__(self, *args, **kwargs):
@@ -786,7 +789,7 @@
         sqlalchemy.Index(None, 'uid_left', 'uid_right'),
         # Maybe using a hash index on PostgreSQL might be beneficial in the 
future
         # Index(None, 'checksum', postgresql_using='hash'),
-        sqlalchemy.Index(None, 'checksum'),
+        sqlalchemy.Index(None, 'checksum', 
mysql_length=MAXIMUM_CHECKSUM_LENGTH),
     )
 
     def deref(self) -> DereferencedBlock:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/formatrenderer.py 
new/benji-0.17.0/src/benji/formatrenderer.py
--- old/benji-0.16.1/src/benji/formatrenderer.py        2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/formatrenderer.py        1970-01-01 
01:00:00.000000000 +0100
@@ -1,77 +0,0 @@
-import string
-from datetime import datetime
-from io import StringIO
-
-import colorama
-
-
-class FormatRenderer:
-
-    def __init__(self, fmt: str, colors: bool = True, force_colors: bool = 
False):
-        if colors is True:
-            if force_colors:
-                colorama.deinit()
-                colorama.init(strip=False)
-            else:
-                colorama.init()
-
-            self._level_to_color = {
-                "critical": colorama.Fore.RED,
-                "exception": colorama.Fore.RED,
-                "error": colorama.Fore.RED,
-                "warn": colorama.Fore.YELLOW,
-                "warning": colorama.Fore.YELLOW,
-                "info": colorama.Fore.GREEN,
-                "debug": colorama.Fore.WHITE,
-                "notset": colorama.Back.RED,
-            }
-
-            self._reset = colorama.Style.RESET_ALL
-        else:
-            self._level_to_color = {
-                "critical": '',
-                "exception": '',
-                "error": '',
-                "warn": '',
-                "warning": '',
-                "info": '',
-                "debug": '',
-                "notset": '',
-            }
-
-            self._reset = ''
-
-        self._vformat = string.Formatter().vformat
-        self._fmt = fmt
-
-    def __call__(self, _, __, event_dict):
-        message = StringIO()
-
-        event_dict['log_color_reset'] = self._reset
-
-        if 'level' in event_dict:
-            level = event_dict['level']
-            if level in self._level_to_color:
-                event_dict['log_color'] = self._level_to_color[level]
-            else:
-                event_dict['log_color'] = ''
-            event_dict['level_uc'] = level.upper()
-        else:
-            event_dict['log_color'] = ''
-
-        if 'timestamp' in event_dict:
-            event_dict['timestamp_local_ctime'] = 
datetime.fromtimestamp(event_dict['timestamp']).ctime()
-
-        message.write(self._vformat(self._fmt, [], event_dict))
-
-        stack = event_dict.pop("stack", None)
-        exception = event_dict.pop("exception", None)
-
-        if stack is not None:
-            message.write("\n" + stack)
-        if exception is not None:
-            message.write("\n" + exception)
-
-        message.write(self._reset)
-
-        return message.getvalue()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/helpers/ceph.py 
new/benji-0.17.0/src/benji/helpers/ceph.py
--- old/benji-0.16.1/src/benji/helpers/ceph.py  2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/helpers/ceph.py  2022-09-24 19:42:41.000000000 
+0200
@@ -1,4 +1,3 @@
-import logging
 from datetime import datetime
 from tempfile import NamedTemporaryFile
 from typing import Dict, Any, Optional
@@ -6,14 +5,13 @@
 from blinker import signal
 
 from benji.helpers.settings import benji_log_level
+from benji.helpers.utils import logger
 from benji.helpers.utils import subprocess_run
 
 SIGNAL_SENDER = 'ceph'
 RBD_SNAP_CREATE_TIMEOUT = 30
 RBD_SNAP_NAME_PREFIX = 'b-'
 
-logger = logging.getLogger()
-
 signal_snapshot_create_pre = signal('snapshot_create_pre')
 signal_snapshot_create_post_success = signal('snapshot_create_post_success')
 signal_snapshot_create_post_error = signal('snapshot_create_post_error')
@@ -47,7 +45,7 @@
                                     context=context)
     snapshot_path = _rbd_image_path(pool=pool, namespace=namespace, 
image=image, snapshot=snapshot)
     try:
-        subprocess_run(['rbd', 'snap', 'create', snapshot_path], 
timeout=RBD_SNAP_CREATE_TIMEOUT)
+        subprocess_run(['rbd', 'snap', 'create', '--no-progress', 
snapshot_path], timeout=RBD_SNAP_CREATE_TIMEOUT)
     except Exception as exception:
         signal_snapshot_create_post_error.send(SIGNAL_SENDER,
                                                volume=volume,
@@ -133,7 +131,7 @@
     snapshot_create(volume=volume, pool=pool, namespace=namespace, 
image=image, snapshot=snapshot, context=context)
     stdout = subprocess_run(
         ['rbd', 'diff', '--whole-object', '--format=json', '--from-snap', 
last_snapshot, snapshot_path])
-    subprocess_run(['rbd', 'snap', 'rm', last_snapshot_path])
+    subprocess_run(['rbd', 'snap', 'rm', '--no-progress', last_snapshot_path])
 
     with NamedTemporaryFile(mode='w+', encoding='utf-8') as rbd_hints:
         assert isinstance(stdout, str)
@@ -218,7 +216,7 @@
             for snapshot in benjis_snapshots[:-1]:
                 snapshot_path = _rbd_image_path(pool=pool, 
namespace=namespace, image=image, snapshot=snapshot)
                 logger.info(f'Deleting older RBD snapshot {snapshot_path}.')
-                subprocess_run(['rbd', 'snap', 'rm', snapshot_path])
+                subprocess_run(['rbd', 'snap', 'rm', '--no-progress', 
snapshot_path])
 
             last_snapshot = benjis_snapshots[-1]
             last_snapshot_path = _rbd_image_path(pool=pool, 
namespace=namespace, image=image, snapshot=last_snapshot)
@@ -248,7 +246,7 @@
                                              context=context)
             else:
                 logger.info(f'Existing RBD snapshot {last_snapshot_path} not 
found in Benji, deleting it and reverting to initial backup.')
-                subprocess_run(['rbd', 'snap', 'rm', last_snapshot_path])
+                subprocess_run(['rbd', 'snap', 'rm', '--no-progress', 
last_snapshot_path])
                 result = backup_initial(volume=volume,
                                         pool=pool,
                                         namespace=namespace,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/helpers/prometheus.py 
new/benji-0.17.0/src/benji/helpers/prometheus.py
--- old/benji-0.16.1/src/benji/helpers/prometheus.py    2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/helpers/prometheus.py    2022-09-24 
19:42:41.000000000 +0200
@@ -1,12 +1,11 @@
-import logging
 import urllib.error
+from typing import Dict
 
 from prometheus_client import CollectorRegistry, Gauge, pushadd_to_gateway, 
generate_latest
-from typing import Dict
 
 from benji.helpers.settings import prom_push_gateway, benji_instance
+from benji.helpers.utils import logger
 
-logger = logging.getLogger()
 command_registry = CollectorRegistry()
 backup_registry = CollectorRegistry()
 version_status_registry = CollectorRegistry()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/helpers/utils.py 
new/benji-0.17.0/src/benji/helpers/utils.py
--- old/benji-0.16.1/src/benji/helpers/utils.py 2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/helpers/utils.py 2022-09-24 19:42:41.000000000 
+0200
@@ -1,32 +1,116 @@
 import json
 import logging
-import re
+import os
 import subprocess
+import sys
+import threading
 from json import JSONDecodeError
 from typing import Dict, List, Union, Any, Sequence
 
+import structlog
+from structlog._frames import _find_first_app_frame_and_name
+
 from benji.helpers.settings import benji_log_level
 
-logger = logging.getLogger()
+logger = structlog.get_logger()
 
 
 def setup_logging() -> None:
-    # Don't raise exceptions occurring during logging
-    logging.raiseExceptions = False
-    logger.addHandler(logging.StreamHandler())
-    logger.setLevel(benji_log_level)
 
+    def sl_processor_add_source_context(_, __, event_dict: Dict) -> Dict:
+        frame, name = _find_first_app_frame_and_name([__name__, 'logging'])
+        event_dict['file'] = frame.f_code.co_filename
+        event_dict['line'] = frame.f_lineno
+        event_dict['function'] = frame.f_code.co_name
+        return event_dict
+
+    def sl_processor_add_process_context(_, __, event_dict: Dict) -> Dict:
+        event_dict['process'] = os.getpid()
+        event_dict['thread_name'] = threading.current_thread().name
+        event_dict['thread_id'] = threading.get_ident()
+        return event_dict
+
+    sl_processor_timestamper = structlog.processors.TimeStamper(utc=True)
+
+    sl_foreign_pre_chain = [
+        structlog.stdlib.add_log_level,
+        sl_processor_timestamper,
+        sl_processor_add_source_context,
+        sl_processor_add_process_context,
+    ]
+
+    sl_processors = [
+        structlog.stdlib.add_log_level,
+        structlog.stdlib.PositionalArgumentsFormatter(),
+        sl_processor_timestamper,
+        sl_processor_add_source_context,
+        sl_processor_add_process_context,
+        structlog.processors.StackInfoRenderer(),
+        structlog.processors.format_exc_info,
+        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+    ]
+
+    structlog.configure(
+        processors=sl_processors,
+        context_class=dict,
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        wrapper_class=structlog.stdlib.BoundLogger,
+        cache_logger_on_first_use=True,
+    )
+
+    formatter = 
structlog.stdlib.ProcessorFormatter(foreign_pre_chain=sl_foreign_pre_chain,
+                                                    processors=[
+                                                        
structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+                                                        
structlog.processors.JSONRenderer(),
+                                                    ])
+
+    # StreamHandler() will log to sys.stderr by default.
+    handler = logging.StreamHandler()
+    handler.setFormatter(formatter)
+    root_logger = logging.getLogger()
+    root_logger.addHandler(handler)
+
+    try:
+        benji_log_level_int = int(benji_log_level)
+    except ValueError:
+        try:
+            benji_log_level_int = 
int(logging.getLevelName(benji_log_level.upper()))
+        except ValueError:
+            logger.warning('Unknown logging level %s, falling back to INFO.', 
benji_log_level)
+            benji_log_level_int = logging.INFO
+    root_logger.setLevel(logging.getLevelName(benji_log_level_int))
+
+    # Source: 
https://stackoverflow.com/questions/6234405/logging-uncaught-exceptions-in-python/16993115#16993115
+    def _handle_exception(exc_type, exc_value, exc_traceback):
+        if issubclass(exc_type, KeyboardInterrupt):
+            sys.__excepthook__(exc_type, exc_value, exc_traceback)
+            return
+
+        logger.error("Uncaught exception", exc_info=(exc_type, exc_value, 
exc_traceback))
+
+    sys.excepthook = _handle_exception
+
+
+def log_jsonl(line_json: Any, default_level: int = logging.INFO) -> None:
+    try:
+        level = line_json['level'].upper()
+    except (NameError, TypeError):
+        level = default_level
+    else:
+        try:
+            level = int(logging.getLevelName(level))
+        except ValueError:
+            level = default_level
 
-def _one_line_stderr(stderr: str):
-    stderr = re.sub(r'\n(?!$)', ' | ', stderr)
-    stderr = re.sub(r'\s+', ' ', stderr)
-    return stderr
+    if logger.isEnabledFor(level):
+        print(json.dumps(line_json, sort_keys=True), file=sys.stderr)
 
 
 def subprocess_run(args: List[str],
                    input: str = None,
                    timeout: int = None,
-                   decode_json: bool = False) -> Union[Dict, List, str]:
+                   decode_json: bool = False,
+                   jsonl_passthru: bool = True) -> Union[Dict, List, str]:
     logger.debug('Running process: {}'.format(' '.join(args)))
     try:
 
@@ -38,14 +122,21 @@
                                 errors='ignore',
                                 timeout=timeout)
     except subprocess.TimeoutExpired as exception:
-        stderr = _one_line_stderr(exception.stderr)
-        raise RuntimeError(f'{args[0]} invocation failed due to timeout with 
output: ' + stderr) from None
+        raise RuntimeError(f'{args[0]} invocation failed due to timeout.') from None
     except Exception as exception:
         raise RuntimeError(f'{args[0]} invocation failed with a 
{type(exception).__name__} exception: {str(exception)}') from None
 
     if result.stderr != '':
-        for line in result.stderr.splitlines():
-            logger.info(line)
+        for line in result.stderr.splitlines(keepends=False):
+            if jsonl_passthru:
+                try:
+                    line_json = json.loads(line)
+                except JSONDecodeError:
+                    logger.info(line)
+                else:
+                    log_jsonl(line_json)
+            else:
+                logger.info(line)
 
     if result.returncode == 0:
         logger.debug('Process finished successfully.')
@@ -53,16 +144,17 @@
             try:
                 stdout_json = json.loads(result.stdout)
             except JSONDecodeError:
-                raise RuntimeError(f'{args[0]} invocation was successful but 
did not return valid JSON. Output on stderr was: 
{_one_line_stderr(result.stderr)}.')
+                raise RuntimeError(f'{args[0]} invocation was successful but 
did not return valid JSON.')
 
             if stdout_json is None or not isinstance(stdout_json, (dict, 
list)):
-                raise RuntimeError(f'{args[0]} invocation was successful but 
did return null or empty JSON dictonary. Output on stderr was: 
{_one_line_stderr(result.stderr)}.')
+                raise RuntimeError(f'{args[0]} invocation was successful but 
did return null or neither a JSON list nor '
+                                   'a dictionary.')
 
             return stdout_json
         else:
             return result.stdout
     else:
-        raise RuntimeError(f'{args[0]} invocation failed with return code 
{result.returncode} and output: {_one_line_stderr(result.stderr)}')
+        raise RuntimeError(f'{args[0]} invocation failed with return code 
{result.returncode}.')
 
 
 # A copy of this function is in benji.utils.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/logging.py 
new/benji-0.17.0/src/benji/logging.py
--- old/benji-0.16.1/src/benji/logging.py       2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/logging.py       2022-09-24 19:42:41.000000000 
+0200
@@ -3,20 +3,95 @@
 import logging
 import logging.config
 import os
+import string
 import sys
 import threading
 import warnings
-from typing import Dict
+from datetime import datetime
+from io import StringIO
+from typing import Dict, Union
 
+import colorama
 import structlog
 from structlog._frames import _find_first_app_frame_and_name
 
 from benji.exception import UsageError
-from benji.formatrenderer import FormatRenderer
 
 logger = structlog.get_logger()
 
 
+class _FormatRenderer:
+
+    def __init__(self, fmt: str, colors: bool = True, force_colors: bool = 
False):
+        if colors is True:
+            if force_colors:
+                colorama.deinit()
+                colorama.init(strip=False)
+            else:
+                colorama.init()
+
+            self._level_to_color = {
+                "critical": colorama.Fore.RED,
+                "exception": colorama.Fore.RED,
+                "error": colorama.Fore.RED,
+                "warn": colorama.Fore.YELLOW,
+                "warning": colorama.Fore.YELLOW,
+                "info": colorama.Fore.GREEN,
+                "debug": colorama.Fore.WHITE,
+                "notset": colorama.Back.RED,
+            }
+
+            self._reset = colorama.Style.RESET_ALL
+        else:
+            self._level_to_color = {
+                "critical": '',
+                "exception": '',
+                "error": '',
+                "warn": '',
+                "warning": '',
+                "info": '',
+                "debug": '',
+                "notset": '',
+            }
+
+            self._reset = ''
+
+        self._vformat = string.Formatter().vformat
+        self._fmt = fmt
+
+    def __call__(self, _, __, event_dict):
+        message = StringIO()
+
+        event_dict['log_color_reset'] = self._reset
+
+        if 'level' in event_dict:
+            level = event_dict['level']
+            if level in self._level_to_color:
+                event_dict['log_color'] = self._level_to_color[level]
+            else:
+                event_dict['log_color'] = ''
+            event_dict['level_uc'] = level.upper()
+        else:
+            event_dict['log_color'] = ''
+
+        if 'timestamp' in event_dict:
+            event_dict['timestamp_local_ctime'] = 
datetime.fromtimestamp(event_dict['timestamp']).ctime()
+
+        message.write(self._vformat(self._fmt, [], event_dict))
+
+        stack = event_dict.pop("stack", None)
+        exception = event_dict.pop("exception", None)
+
+        if stack is not None:
+            message.write("\n" + stack)
+        if exception is not None:
+            message.write("\n" + exception)
+
+        message.write(self._reset)
+
+        return message.getvalue()
+
+
 def _sl_processor_add_source_context(_, __, event_dict: Dict) -> Dict:
     frame, name = _find_first_app_frame_and_name([__name__, 'logging'])
     event_dict['file'] = frame.f_code.co_filename
@@ -53,11 +128,19 @@
 ]
 
 
-def init_logging(*,
-                 logfile: str = None,
-                 console_level: str = 'INFO',
-                 console_formatter: str = 'json',
-                 logfile_formatter: str = 'legacy') -> None:
+def setup_logging(*,
+                  logfile: str = None,
+                  console_level: Union[str, int] = 'INFO',
+                  console_formatter: str = 'json',
+                  logfile_formatter: str = 'legacy') -> None:
+    try:
+        console_level_int = int(console_level)
+    except ValueError:
+        try:
+            console_level_int = 
int(logging.getLevelName(console_level.upper()))
+        except ValueError:
+            logger.warning('Unknown logging level %s, falling back to INFO.', 
console_level)
+            console_level_int = logging.INFO
 
     logging_config: Dict = {
         "version": 1,
@@ -65,19 +148,19 @@
         "formatters": {
             "console-plain": {
                 "()": structlog.stdlib.ProcessorFormatter,
-                "processor": FormatRenderer(colors=False, 
fmt='{log_color}{level_uc:>8s}: {event:s}'),
+                "processor": _FormatRenderer(colors=False, 
fmt='{log_color}{level_uc:>8s}: {event:s}'),
                 "foreign_pre_chain": _sl_foreign_pre_chain,
             },
             "console-colored": {
                 "()": structlog.stdlib.ProcessorFormatter,
-                "processor": FormatRenderer(colors=True, 
fmt='{log_color}{level_uc:>8s}: {event:s}'),
+                "processor": _FormatRenderer(colors=True, 
fmt='{log_color}{level_uc:>8s}: {event:s}'),
                 "foreign_pre_chain": _sl_foreign_pre_chain,
             },
             "legacy": {
                 "()":
                     structlog.stdlib.ProcessorFormatter,
                 "processor":
-                    FormatRenderer(
+                    _FormatRenderer(
                         colors=False,
                         fmt='{timestamp_local_ctime} 
{process:d}/{thread_name:s} {file:s}:{line:d} {level_uc:s} {event:s}'),
                 "foreign_pre_chain":
@@ -119,11 +202,11 @@
         raise UsageError('Event formatter {} is 
unknown.'.format(logfile_formatter))
 
     logging_config['handlers']['console']['formatter'] = console_formatter
-    logging_config['handlers']['console']['level'] = console_level
+    logging_config['handlers']['console']['level'] = console_level_int
 
     if logfile is not None:
         logging_config['handlers']['file']['filename'] = logfile
-        logging_config['handlers']['file']['level'] = 
min(logging.getLevelName(console_level), logging.INFO)
+        logging_config['handlers']['file']['level'] = min(console_level_int, 
logging.INFO)
         logging_config['handlers']['file']['formatter'] = logfile_formatter
     else:
         del (logging_config['handlers']['file'])
@@ -152,7 +235,7 @@
     cache_logger_on_first_use=True,
 )
 
-init_logging()
+setup_logging()
 
 # silence alembic
 logging.getLogger('alembic').setLevel(logging.WARN)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/nbdserver.py 
new/benji-0.17.0/src/benji/nbdserver.py
--- old/benji-0.16.1/src/benji/nbdserver.py     2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/nbdserver.py     2022-09-24 19:42:41.000000000 
+0200
@@ -93,6 +93,18 @@
     NBD_CMD_BLOCK_STATUS = 7  # Not implemented
     NBD_CMD_RESIZE = 8  # Not implemented (experimental resize extension)
 
+    NBD_CMD_MAP = {
+        NBD_CMD_READ: "read",
+        NBD_CMD_WRITE: "write",
+        NBD_CMD_DISC: "disconnect",
+        NBD_CMD_FLUSH: "flush",
+        NBD_CMD_TRIM: "trim",
+        NBD_CMD_CACHE: "cache",
+        NBD_CMD_WRITE_ZEROES: "write-zeroes",
+        NBD_CMD_BLOCK_STATUS: "block-status",
+        NBD_CMD_RESIZE: "resize",
+    }
+
     NBD_CMD_FLAG_FUA = (1 << 0) << NBD_CMD_FLAGS_SHIFT  # Not implemented
     NBD_CMD_FLAG_NO_HOLE = (1 << 1) << NBD_CMD_FLAGS_SHIFT  # Not implemented 
(only relevant to NBD_CMD_WRITE_ZEROES)
     NBD_CMD_FLAG_DF = (1 << 2) << NBD_CMD_FLAGS_SHIFT  # Not implemented
@@ -151,16 +163,17 @@
             asyncio.set_event_loop(asyncio.new_event_loop())
         self.loop = asyncio.get_event_loop()
 
-    @asyncio.coroutine
-    def nbd_response(self, writer: StreamWriter, handle: int, error: int = 0,
-                     data: bytes = None) -> Generator[Any, None, None]:
+    async def nbd_response(self,
+                           writer: StreamWriter,
+                           handle: int,
+                           error: int = 0,
+                           data: bytes = None) -> Generator[Any, None, None]:
         writer.write(struct.pack('>LLQ', self.NBD_REPLY_MAGIC, error, handle))
         if data:
             writer.write(data)
-        yield from writer.drain()
+        await writer.drain()
 
-    @asyncio.coroutine
-    def handler(self, reader: StreamReader, writer: StreamWriter) -> 
Generator[Any, None, None]:
+    async def handler(self, reader: StreamReader, writer: StreamWriter) -> 
Generator[Any, None, None]:
         data: Optional[bytes]
         try:
             host, port = writer.get_extra_info("peername")
@@ -170,9 +183,9 @@
 
             # Initial handshake
             writer.write(struct.pack(">QQH", self.INIT_PASSWD, 
self.CLISERV_MAGIC, self.NBD_HANDSHAKE_FLAGS))
-            yield from writer.drain()
+            await writer.drain()
 
-            data = yield from reader.readexactly(4)
+            data = await reader.readexactly(4)
             try:
                 client_flags = struct.unpack(">L", data)[0]
             except struct.error:
@@ -194,7 +207,7 @@
 
             # Negotiation phase
             while True:
-                header = yield from reader.readexactly(16)
+                header = await reader.readexactly(16)
                 try:
                     (magic, opt, length) = struct.unpack(">QLL", header)
                 except struct.error:
@@ -204,7 +217,7 @@
                     raise IOError("Negotiation failed: Bad magic number: %s." 
% magic)
 
                 if length:
-                    data = yield from reader.readexactly(length)
+                    data = await reader.readexactly(length)
                     if len(data) != length:
                         raise IOError("Negotiation failed: %s bytes expected." 
% length)
                 else:
@@ -222,7 +235,7 @@
                             raise IOError("Negotiation failed: Unknown export 
name.")
 
                         writer.write(struct.pack(">QLLL", 
self.NBD_OPT_REPLY_MAGIC, opt, self.NBD_REP_ERR_UNSUP, 0))
-                        yield from writer.drain()
+                        await writer.drain()
                         continue
 
                     self.log.info("[%s:%s] Negotiated export: %s." % (host, 
port, version_uid))
@@ -246,7 +259,7 @@
                     writer.write(struct.pack('>QH', size, export_flags))
                     if not no_zeros:
                         writer.write(b"\x00" * 124)
-                    yield from writer.drain()
+                    await writer.drain()
 
                     # Transition to transmission phase
                     break
@@ -260,14 +273,14 @@
                                         len(list_version_encoded) + 4))
                         writer.write(struct.pack(">L", 
len(list_version_encoded)))
                         writer.write(list_version_encoded)
-                        yield from writer.drain()
+                        await writer.drain()
 
                     writer.write(struct.pack(">QLLL", 
self.NBD_OPT_REPLY_MAGIC, opt, self.NBD_REP_ACK, 0))
-                    yield from writer.drain()
+                    await writer.drain()
 
                 elif opt == self.NBD_OPT_ABORT:
                     writer.write(struct.pack(">QLLL", 
self.NBD_OPT_REPLY_MAGIC, opt, self.NBD_REP_ACK, 0))
-                    yield from writer.drain()
+                    await writer.drain()
 
                     raise _NbdServerAbortedNegotiationError()
                 else:
@@ -280,11 +293,11 @@
                         raise IOError("Unsupported option: %s." % (opt))
 
                     writer.write(struct.pack(">QLLL", 
self.NBD_OPT_REPLY_MAGIC, opt, self.NBD_REP_ERR_UNSUP, 0))
-                    yield from writer.drain()
+                    await writer.drain()
 
             # Transmission phase
             while True:
-                header = yield from reader.readexactly(28)
+                header = await reader.readexactly(28)
                 try:
                     (magic, cmd, handle, offset, length) = 
struct.unpack(">LLQQL", header)
                 except struct.error:
@@ -296,12 +309,11 @@
                 cmd_flags = cmd & self.NBD_CMD_MASK_FLAGS
                 cmd = cmd & self.NBD_CMD_MASK_COMMAND
 
-                self.log.debug("[%s:%s]: cmd=%s, cmd_flags=%s, handle=%s, 
offset=%s, len=%s" %
-                               (host, port, cmd, cmd_flags, handle, offset, 
length))
+                self.log.debug(f"[{host}:{port}]: 
cmd={self.NBD_CMD_MAP.get(cmd + 10, 'unknown')}({cmd}), cmd_flags={cmd_flags}, 
handle={handle}, offset={offset}, length={length}")
 
                 # We don't support any command flags
                 if cmd_flags != 0:
-                    yield from self.nbd_response(writer, handle, 
error=self.EINVAL)
+                    await self.nbd_response(writer, handle, error=self.EINVAL)
                     continue
 
                 if cmd == self.NBD_CMD_DISC:
@@ -309,12 +321,12 @@
                     break
 
                 elif cmd == self.NBD_CMD_WRITE:
-                    data = yield from reader.readexactly(length)
+                    data = await reader.readexactly(length)
                     if len(data) != length:
                         raise IOError("%s bytes expected, disconnecting." % 
length)
 
                     if self.read_only:
-                        yield from self.nbd_response(writer, handle, 
error=self.EPERM)
+                        await self.nbd_response(writer, handle, 
error=self.EPERM)
                         continue
 
                     if not cow_version:
@@ -324,25 +336,25 @@
                     except Exception as exception:
                         self.log.error("[%s:%s] NBD_CMD_WRITE: %s\n%s." %
                                        (host, port, exception, 
traceback.format_exc()))
-                        yield from self.nbd_response(writer, handle, 
error=self.EIO)
+                        await self.nbd_response(writer, handle, error=self.EIO)
                         continue
 
-                    yield from self.nbd_response(writer, handle)
+                    await self.nbd_response(writer, handle)
 
                 elif cmd == self.NBD_CMD_READ:
                     try:
                         data = self.store.read(version, cow_version, offset, 
length)
                     except Exception as exception:
                         self.log.error("[%s:%s] NBD_CMD_READ: %s\n%s." % 
(host, port, exception, traceback.format_exc()))
-                        yield from self.nbd_response(writer, handle, 
error=self.EIO)
+                        await self.nbd_response(writer, handle, error=self.EIO)
                         continue
 
-                    yield from self.nbd_response(writer, handle, data=data)
+                    await self.nbd_response(writer, handle, data=data)
 
                 elif cmd == self.NBD_CMD_FLUSH:
                     # Return success right away when we're read only or when 
we haven't written anything yet.
                     if self.read_only or not cow_version:
-                        yield from self.nbd_response(writer, handle)
+                        await self.nbd_response(writer, handle)
                         continue
 
                     try:
@@ -350,14 +362,14 @@
                     except Exception as exception:
                         self.log.error("[%s:%s] NBD_CMD_FLUSH: %s\n%s." %
                                        (host, port, exception, 
traceback.format_exc()))
-                        yield from self.nbd_response(writer, handle, 
error=self.EIO)
+                        await self.nbd_response(writer, handle, error=self.EIO)
                         continue
 
-                    yield from self.nbd_response(writer, handle)
+                    await self.nbd_response(writer, handle)
 
                 else:
                     self.log.warning("[%s:%s] Unknown cmd %s, ignoring." % 
(host, port, cmd))
-                    yield from self.nbd_response(writer, handle, 
error=self.EINVAL)
+                    await self.nbd_response(writer, handle, error=self.EINVAL)
                     continue
 
         except _NbdServerAbortedNegotiationError:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/benji-0.16.1/src/benji/schemas/v1/benji.storage.s3.yaml 
new/benji-0.17.0/src/benji/schemas/v1/benji.storage.s3.yaml
--- old/benji-0.16.1/src/benji/schemas/v1/benji.storage.s3.yaml 2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/schemas/v1/benji.storage.s3.yaml 2022-09-24 
19:42:41.000000000 +0200
@@ -47,6 +47,9 @@
       type: string
       empty: False
       required: True
+    storageClass:
+      type: string
+      empty: False
     disableEncodingType:
       type: boolean
       empty: False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/scripts/benji.py 
new/benji-0.17.0/src/benji/scripts/benji.py
--- old/benji-0.16.1/src/benji/scripts/benji.py 2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/scripts/benji.py 2022-09-24 19:42:41.000000000 
+0200
@@ -294,7 +294,7 @@
         sys.exit(os.EX_OK)
 
     from benji.config import Config
-    from benji.logging import logger, init_logging
+    from benji.logging import logger, setup_logging
     if args.config_file is not None and args.config_file != '':
         try:
             cfg = open(args.config_file, 'r', encoding='utf-8').read()
@@ -311,9 +311,9 @@
     elif args.no_color:
         console_formatter = 'console-plain'
 
-    init_logging(logfile=config.get('logFile', types=(str, type(None))),
-                 console_level=args.log_level,
-                 console_formatter=console_formatter)
+    setup_logging(logfile=config.get('logFile', types=(str, type(None))),
+                  console_level=args.log_level,
+                  console_formatter=console_formatter)
 
     IOFactory.initialize(config)
     StorageFactory.initialize(config)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/storage/s3.py 
new/benji-0.17.0/src/benji/storage/s3.py
--- old/benji-0.16.1/src/benji/storage/s3.py    2022-04-18 14:31:49.000000000 
+0200
+++ new/benji-0.17.0/src/benji/storage/s3.py    2022-09-24 19:42:41.000000000 
+0200
@@ -39,6 +39,7 @@
         max_attempts = Config.get_from_dict(module_configuration, 
'maxAttempts', types=int)
 
         self._bucket_name = Config.get_from_dict(module_configuration, 
'bucketName', types=str)
+        self._storage_class = Config.get_from_dict(module_configuration, 
'storageClass', None, types=str)
         self._disable_encoding_type = 
Config.get_from_dict(module_configuration, 'disableEncodingType', types=bool)
 
         self._resource_config = {
@@ -90,7 +91,10 @@
     def _write_object(self, key: str, data: bytes) -> None:
         self._init_connection()
         object = self._local.bucket.Object(key)
-        object.put(Body=data)
+        if self._storage_class is not None:
+            object.put(Body=data, StorageClass=self._storage_class)
+        else:
+            object.put(Body=data)
 
     def _read_object(self, key: str) -> bytes:
         self._init_connection()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/tests/test_nbd.py 
new/benji-0.17.0/src/benji/tests/test_nbd.py
--- old/benji-0.16.1/src/benji/tests/test_nbd.py        2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/tests/test_nbd.py        2022-09-24 
19:42:41.000000000 +0200
@@ -8,7 +8,6 @@
 from unittest import TestCase
 
 from benji.benji import BenjiStore
-from benji.database import VersionUid
 from benji.logging import logger
 from benji.nbdserver import NbdServer
 from benji.tests.testcase import BenjiTestCaseBase
@@ -79,8 +78,7 @@
         self.nbd_server.serve_forever()
         self.nbd_client_thread.join()
 
-        self.assertEqual({self.version_uid[0], VersionUid(2)},
-                         {version.uid for version in 
benji_obj.find_versions_with_filter()})
+        self.assertEqual({self.version_uid[0]}, {version.uid for version in 
benji_obj.find_versions_with_filter()})
 
         benji_obj.close()
 
@@ -102,13 +100,13 @@
     def nbd_client(self, version_uid):
         self.subprocess_run(args=['sudo', 'nbd-client', '127.0.0.1', '-p',
                                   str(self.SERVER_PORT), '-l'],
-                            success_regexp='^Negotiation: 
..\n{}\n$'.format(version_uid[0]))
+                            success_regexp=r'^Negotiation: 
..\n{}\n$'.format(version_uid[0]))
 
         version_uid, size = version_uid
         self.subprocess_run(
             args=['sudo', 'nbd-client', '-N', version_uid, '127.0.0.1', '-p',
                   str(self.SERVER_PORT), self.NBD_DEVICE],
-            success_regexp='^Negotiation: ..size = \d+MB\nbs=1024, sz=\d+ 
bytes\n$|^Negotiation: ..size = \d+MB|Connected /dev/nbd\d+$')
+            success_regexp=r'^Negotiation: ..size = \d+MB\nbs=1024, sz=\d+ 
bytes\n$|^Negotiation: ..size = \d+MB|Connected /dev/nbd\d+$')
 
         count = 0
         nbd_data = bytearray()
@@ -141,7 +139,7 @@
         os.close(f)
 
         self.subprocess_run(args=['sudo', 'nbd-client', '-d', self.NBD_DEVICE],
-                            success_regexp='^disconnect, sock, done\n$')
+                            success_regexp=r'^disconnect, sock, done\n$')
 
         # Signal NBD server to stop
         self.nbd_server.stop()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/tests/testcase.py 
new/benji-0.17.0/src/benji/tests/testcase.py
--- old/benji-0.16.1/src/benji/tests/testcase.py        2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/tests/testcase.py        2022-09-24 
19:42:41.000000000 +0200
@@ -9,7 +9,7 @@
 from benji.config import Config
 from benji.database import Database
 from benji.io.factory import IOFactory
-from benji.logging import init_logging
+from benji.logging import setup_logging
 from benji.storage.factory import StorageFactory
 
 
@@ -45,8 +45,8 @@
 
     def setUp(self):
         self.testpath = _TestPath()
-        init_logging(console_level=logging.WARN if 
os.environ.get('UNITTEST_QUIET', False) else logging.DEBUG,
-                     console_formatter='console-plain')
+        setup_logging(console_level=logging.WARN if 
os.environ.get('UNITTEST_QUIET', False) else logging.DEBUG,
+                      console_formatter='console-plain')
         self.config = 
Config(ad_hoc_config=self.CONFIG.format(testpath=self.testpath.path))
 
     def tearDown(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benji-0.16.1/src/benji/transform/aes_256_gcm_ecc.py 
new/benji-0.17.0/src/benji/transform/aes_256_gcm_ecc.py
--- old/benji-0.16.1/src/benji/transform/aes_256_gcm_ecc.py     2022-04-18 
14:31:49.000000000 +0200
+++ new/benji-0.17.0/src/benji/transform/aes_256_gcm_ecc.py     2022-09-24 
19:42:41.000000000 +0200
@@ -17,9 +17,6 @@
 
         ecc_key = self._unpack_envelope_key(base64.b64decode(ecc_key_der))
 
-        if ecc_key.curve != ecc_curve:
-            raise ValueError(f'Key eccKey does not match the eccCurve setting 
(found: {ecc_key.curve}, expected: {ecc_curve}).')
-
         self._ecc_key = ecc_key
         self._ecc_curve = ecc_key.curve
 

Reply via email to