[elbe-devel] [PATCH 32/37] flake8: Target all quote warnings (Q00x)

Benedikt Spranger b.spranger at linutronix.de
Wed Feb 7 15:28:59 CET 2024


Consolidate quote usage in E.L.B.E.: prefer single quotes throughout,
resolving all flake8 quote warnings (Q00x). One membership test in the
`elbe` entry script is also rewritten from `not x in y` to `x not in y`
(flake8 E713). No functional change.

Signed-off-by: Benedikt Spranger <b.spranger at linutronix.de>
---
 elbe                                    |  14 +-
 elbepack/aptpkgutils.py                 |  32 +-
 elbepack/aptprogress.py                 |  20 +-
 elbepack/archivedir.py                  |  34 +--
 elbepack/asyncworker.py                 | 234 +++++++--------
 elbepack/cdroms.py                      |  56 ++--
 elbepack/changelogxml.py                |   2 +-
 elbepack/commands/add.py                |  10 +-
 elbepack/commands/adjustpkgs.py         |  36 +--
 elbepack/commands/bootup-check.py       |  16 +-
 elbepack/commands/buildchroot.py        |  80 ++---
 elbepack/commands/buildsdk.py           |  18 +-
 elbepack/commands/buildsysroot.py       |  18 +-
 elbepack/commands/check-build.py        | 176 +++++------
 elbepack/commands/check_updates.py      |  54 ++--
 elbepack/commands/chg_archive.py        |  22 +-
 elbepack/commands/chroot.py             |  40 +--
 elbepack/commands/control.py            | 160 +++++-----
 elbepack/commands/daemon.py             |  28 +-
 elbepack/commands/db.py                 |   4 +-
 elbepack/commands/diff.py               |  52 ++--
 elbepack/commands/fetch_initvm_pkgs.py  |  64 ++--
 elbepack/commands/gen_update.py         |  66 ++---
 elbepack/commands/genlicence.py         |  30 +-
 elbepack/commands/get_archive.py        |  18 +-
 elbepack/commands/hdimg.py              |  48 +--
 elbepack/commands/init.py               | 194 ++++++------
 elbepack/commands/initvm.py             |  74 ++---
 elbepack/commands/mkcdrom.py            |  72 ++---
 elbepack/commands/parselicence.py       |  50 ++--
 elbepack/commands/pbuilder.py           |  72 ++---
 elbepack/commands/pin_versions.py       |  24 +-
 elbepack/commands/pkgdiff.py            |  18 +-
 elbepack/commands/preprocess.py         |  22 +-
 elbepack/commands/prjrepo.py            |  78 ++---
 elbepack/commands/remove_sign.py        |   8 +-
 elbepack/commands/repodir.py            |  12 +-
 elbepack/commands/setsel.py             |  10 +-
 elbepack/commands/show.py               |  52 ++--
 elbepack/commands/sign.py               |   6 +-
 elbepack/commands/test.py               |  58 ++--
 elbepack/commands/toolchainextract.py   |  34 +--
 elbepack/commands/updated.py            |  44 +--
 elbepack/commands/validate.py           |  12 +-
 elbepack/commands/xsdtoasciidoc.py      |  16 +-
 elbepack/config.py                      |  16 +-
 elbepack/daemons/soap/__init__.py       |   2 +-
 elbepack/daemons/soap/authentication.py |   4 +-
 elbepack/daemons/soap/esoap.py          |  54 ++--
 elbepack/daemons/soap/faults.py         |  40 +--
 elbepack/db.py                          | 378 ++++++++++++------------
 elbepack/dbaction.py                    | 106 +++----
 elbepack/debinstaller.py                |  60 ++--
 elbepack/debpkg.py                      |  10 +-
 elbepack/directories.py                 |  14 +-
 elbepack/dump.py                        | 156 +++++-----
 elbepack/efilesystem.py                 | 184 ++++++------
 elbepack/egpg.py                        |  56 ++--
 elbepack/elbeproject.py                 | 364 +++++++++++------------
 elbepack/elbexml.py                     | 188 ++++++------
 elbepack/filesystem.py                  |  40 +--
 elbepack/finetuning.py                  | 168 +++++------
 elbepack/fstab.py                       |  72 ++---
 elbepack/hashes.py                      |   4 +-
 elbepack/hdimg.py                       | 248 ++++++++--------
 elbepack/initvmaction.py                | 220 +++++++-------
 elbepack/isooptions.py                  |  30 +-
 elbepack/junit.py                       |  12 +-
 elbepack/licencexml.py                  |   8 +-
 elbepack/log.py                         |  34 +--
 elbepack/pbuilder.py                    |  60 ++--
 elbepack/pbuilderaction.py              | 164 +++++-----
 elbepack/pkgarchive.py                  |  38 +--
 elbepack/pkgutils.py                    |  48 +--
 elbepack/projectmanager.py              |  46 +--
 elbepack/repodir.py                     |  30 +-
 elbepack/repomanager.py                 | 180 +++++------
 elbepack/rfs.py                         | 168 +++++------
 elbepack/rpcaptcache.py                 |  60 ++--
 elbepack/shellhelper.py                 |  18 +-
 elbepack/soapclient.py                  | 152 +++++-----
 elbepack/templates.py                   |  34 +--
 elbepack/tests/notest_pylint.py         |  30 +-
 elbepack/tests/test_doctest.py          |   4 +-
 elbepack/tests/test_flake8.py           |   6 +-
 elbepack/tests/test_preproc.py          |  16 +-
 elbepack/tests/test_version.py          |   2 +-
 elbepack/tests/test_xml.py              |  36 +--
 elbepack/toolchain.py                   |  50 ++--
 elbepack/treeutils.py                   |  20 +-
 elbepack/updated.py                     | 188 ++++++------
 elbepack/updated_monitors.py            |  10 +-
 elbepack/updatepkg.py                   |  38 +--
 elbepack/validate.py                    |  54 ++--
 elbepack/version.py                     |   2 +-
 elbepack/virtapt.py                     |  96 +++---
 elbepack/xmldefaults.py                 | 286 +++++++++---------
 elbepack/xmlpreprocess.py               | 140 ++++-----
 elbepack/ziparchives.py                 |   2 +-
 99 files changed, 3317 insertions(+), 3317 deletions(-)

diff --git a/elbe b/elbe
index 1d9d4dae..1e8f2d50 100755
--- a/elbe
+++ b/elbe
@@ -19,11 +19,11 @@ from elbepack.directories import init_directories, get_cmdlist
 
 
 def usage():
-    print("elbe v%s" % elbe_version)
+    print('elbe v%s' % elbe_version)
     print("need a subcommand: e.g. \'elbe initvm\'. \n\
     Available subcommands are: \n")
     for i in get_cmdlist():
-        print("        * %s" % i)
+        print('        * %s' % i)
 
 # First initialise the directories module
 # so that it knows, where the current elbe
@@ -36,18 +36,18 @@ if len(sys.argv) < 2:
     usage()
     sys.exit(20)
 
-if sys.argv[1] == "--version":
-    print("elbe v%s" % (elbe_version))
+if sys.argv[1] == '--version':
+    print('elbe v%s' % (elbe_version))
     sys.exit(0)
 
 cmd_list = get_cmdlist()
 
-if not sys.argv[1] in cmd_list:
-    print("Unknown subcommand !\n")
+if sys.argv[1] not in cmd_list:
+    print('Unknown subcommand !\n')
     usage()
     sys.exit(20)
 
-modname = "elbepack.commands." + sys.argv[1]
+modname = 'elbepack.commands.' + sys.argv[1]
 
 mod = __import__(modname)
 cmdmod = sys.modules[modname]
diff --git a/elbepack/aptpkgutils.py b/elbepack/aptpkgutils.py
index c04d4ae2..6b58597d 100644
--- a/elbepack/aptpkgutils.py
+++ b/elbepack/aptpkgutils.py
@@ -19,12 +19,12 @@ INSTALLED = 4
 NOTINSTALLED = 5
 
 statestring = {
-    MARKED_INSTALL: "MARKED_INSTALL",
-    MARKED_UPGRADE: "MARKED_UPGRADE",
-    MARKED_DELETE: "MARKED_DELETE",
-    UPGRADABLE: "UPGRADABLE",
-    INSTALLED: "INSTALLED",
-    NOTINSTALLED: "NOT INSTALLED"
+    MARKED_INSTALL: 'MARKED_INSTALL',
+    MARKED_UPGRADE: 'MARKED_UPGRADE',
+    MARKED_DELETE: 'MARKED_DELETE',
+    UPGRADABLE: 'UPGRADABLE',
+    INSTALLED: 'INSTALLED',
+    NOTINSTALLED: 'NOT INSTALLED'
 }
 
 
@@ -32,18 +32,18 @@ def apt_pkg_md5(pkg):
     hashes = pkg._records.hashes
     for i in range(len(hashes)):
         h = str(hashes[i])
-        if h.startswith("MD5"):
+        if h.startswith('MD5'):
             return h.split(':')[1]
-    return ""
+    return ''
 
 
 def apt_pkg_sha256(pkg):
     hashes = pkg._records.hashes
     for i in range(len(hashes)):
         h = str(hashes[i])
-        if h.startswith("SHA256"):
+        if h.startswith('SHA256'):
             return h.split(':')[1]
-    return ""
+    return ''
 
 
 def getdeps(pkg):
@@ -88,7 +88,7 @@ def pkgstate(pkg):
 def pkgorigin(pkg):
     if pkg.installed:
         o = pkg.installed.origins[0]
-        origin = f"{o.site} {o.archive} {o.component}"
+        origin = f'{o.site} {o.archive} {o.component}'
     else:
         origin = None
 
@@ -132,7 +132,7 @@ def fetch_binary(version, destdir='', progress=None):
     acq = apt_pkg.Acquire(progress or apt.progress.text.AcquireProgress())
     acqfile = apt_pkg.AcquireFile(acq,
                                   version.uri,
-                                  "SHA256:" + version._records.sha256_hash,
+                                  'SHA256:' + version._records.sha256_hash,
                                   version.size,
                                   base,
                                   destfile=destfile)
@@ -140,8 +140,8 @@ def fetch_binary(version, destdir='', progress=None):
 
     if acqfile.status != acqfile.STAT_DONE:
         raise FetchError(
-            f"The item {acqfile.destfile} could not be fetched: "
-            f"{acqfile.error_text}")
+            f'The item {acqfile.destfile} could not be fetched: '
+            f'{acqfile.error_text}')
 
     return os.path.abspath(destfile)
 
@@ -170,8 +170,8 @@ class PackageBase:
         self.architecture = architecture
 
     def __repr__(self):
-        return (f"<APTPackage {self.name}-{self.installed_version} state: "
-                f"{statestring[self.state]}>")
+        return (f'<APTPackage {self.name}-{self.installed_version} state: '
+                f'{statestring[self.state]}>')
 
     def __eq__(self, other):
         vereq = (self.installed_version == other.installed_version)
diff --git a/elbepack/aptprogress.py b/elbepack/aptprogress.py
index 743c8982..0a9fdc54 100644
--- a/elbepack/aptprogress.py
+++ b/elbepack/aptprogress.py
@@ -23,7 +23,7 @@ class ElbeInstallProgress (InstallProgress):
             #
             self.percent = 100
 
-        line = str(self.percent) + "% " + line
+        line = str(self.percent) + '% ' + line
         line.replace('\f', '')
         if self.cb:
             self.cb(line)
@@ -31,19 +31,19 @@ class ElbeInstallProgress (InstallProgress):
             print(line)
 
     def processing(self, pkg, stage):
-        self.write("processing: " + pkg + " - " + stage)
+        self.write('processing: ' + pkg + ' - ' + stage)
 
     def dpkg_status_change(self, pkg, status):
-        self.write(pkg + " - " + status)
+        self.write(pkg + ' - ' + status)
 
     def status_change(self, pkg, percent, status):
-        self.write(pkg + " - " + status + " " + str(percent) + "%")
+        self.write(pkg + ' - ' + status + ' ' + str(percent) + '%')
 
     def run(self, obj):
         try:
             obj.do_install(self.fileno)
         except AttributeError:
-            print("installing .deb files is not supported by elbe progress")
+            print('installing .deb files is not supported by elbe progress')
             raise SystemError
         return 0
 
@@ -58,7 +58,7 @@ class ElbeInstallProgress (InstallProgress):
         return retval
 
     def finishUpdate(self):
-        self.write("update finished")
+        self.write('update finished')
 
 
 class ElbeAcquireProgress (AcquireProgress):
@@ -78,21 +78,21 @@ class ElbeAcquireProgress (AcquireProgress):
     def ims_hit(self, item):
         line = 'Hit ' + item.description
         if item.owner.filesize:
-            line += f" [{size_to_str(item.owner.filesize)}B]"
+            line += f' [{size_to_str(item.owner.filesize)}B]'
         self.write(line)
 
     def fail(self, item):
         if item.owner.status == item.owner.STAT_DONE:
-            self.write("Ign " + item.description)
+            self.write('Ign ' + item.description)
 
     def fetch(self, item):
         if item.owner.complete:
             return
         item.owner.id = self._id
         self._id += 1
-        line = "Get:" + str(item.owner.id) + " " + item.description
+        line = 'Get:' + str(item.owner.id) + ' ' + item.description
         if item.owner.filesize:
-            line += (f" [{size_to_str(item.owner.filesize)}B]")
+            line += (f' [{size_to_str(item.owner.filesize)}B]')
 
         self.write(line)
 
diff --git a/elbepack/archivedir.py b/elbepack/archivedir.py
index 67338822..e5220852 100644
--- a/elbepack/archivedir.py
+++ b/elbepack/archivedir.py
@@ -23,7 +23,7 @@ class ArchivedirError(Exception):
 
 
 def enbase(fname, compress=True):
-    with open(fname, "rb") as infile:
+    with open(fname, 'rb') as infile:
         s = infile.read()
         if compress:
             s = bz2.compress(s)
@@ -51,7 +51,7 @@ def chg_archive(xml, path, keep):
         archive = path
         compress = False
 
-    arch = xml.ensure_child("archive")
+    arch = xml.ensure_child('archive')
     arch.set_text(enbase(archive, compress))
 
     if os.path.isdir(path):
@@ -69,21 +69,21 @@ def archive_tmpfile(arch_elem):
 
 def prepare_path(url):
     url = urlparse(url)
-    path = url.geturl().replace(f"{url.scheme}://", "", 1)
-    return re.sub(r'/$', "", path)
+    path = url.geturl().replace(f'{url.scheme}://', '', 1)
+    return re.sub(r'/$', '', path)
 
 
 def get_and_append_local(url, tararchive, keep):
     if urlparse(url).netloc:
-        msg = f"Reject suspicious file:// URI \"{url}\". "
-        msg += "Please use an absolute URI (file:///a/b/c) or a "
-        msg += "relative URI (a/b/c) instead."
+        msg = f'Reject suspicious file:// URI \"{url}\". '
+        msg += 'Please use an absolute URI (file:///a/b/c) or a '
+        msg += 'relative URI (a/b/c) instead.'
         raise ArchivedirError(msg)
     collect(tararchive, prepare_path(url), keep)
 
 
 def get_and_append_unknown(url, _archive):
-    msg = f"unhandled scheme \"{urlparse(url).scheme}://\""
+    msg = f'unhandled scheme \"{urlparse(url).scheme}://\"'
     raise NotImplementedError(msg)
 
 
@@ -103,7 +103,7 @@ def _combinearchivedir(xml, xpath, use_volume):
 
         try:
             archiveurl = urljoin(archivedir.et.base, archivedir.et.text)
-            keep = archivedir.bool_attr("keep-attributes")
+            keep = archivedir.bool_attr('keep-attributes')
             parent = archivedir.get_parent()
 
             if use_volume:
@@ -113,31 +113,31 @@ def _combinearchivedir(xml, xpath, use_volume):
                 arch = parent.node(f"archive[@volume='{volume_attr}']")
 
                 if arch is None:
-                    arch = parent.append("archive")
-                    arch.et.set("volume", volume_attr)
+                    arch = parent.append('archive')
+                    arch.et.set('volume', volume_attr)
 
             else:
-                arch = parent.ensure_child("archive")
+                arch = parent.ensure_child('archive')
                 fname_suffix = ''
 
             get_and_append = get_and_append_method(archiveurl)
 
-            archname = tmp.fname(f"archive{fname_suffix}.tar.bz2")
+            archname = tmp.fname(f'archive{fname_suffix}.tar.bz2')
             get_and_append(archiveurl, archname, keep)
             arch.set_text(enbase(archname, True))
 
             parent.remove_child(archivedir)
         except (CalledProcessError, OSError):
-            msg = "Failure while processing \"" + archivedir.text + "\":\n"
+            msg = 'Failure while processing \"' + archivedir.text + '\":\n'
             msg += str(sys.exc_info()[1])
             raise ArchivedirError(msg)
 
 
 def combinearchivedir(xml):
-    if xml.find("//archivedir") is None:
+    if xml.find('//archivedir') is None:
         return xml
 
-    _combinearchivedir(xml, "archivedir", False)
-    _combinearchivedir(xml, "src-cdrom/archivedir", True)
+    _combinearchivedir(xml, 'archivedir', False)
+    _combinearchivedir(xml, 'src-cdrom/archivedir', True)
 
     return xml
diff --git a/elbepack/asyncworker.py b/elbepack/asyncworker.py
index 5bbb4e6a..a978eb52 100644
--- a/elbepack/asyncworker.py
+++ b/elbepack/asyncworker.py
@@ -21,9 +21,9 @@ from elbepack.log import elbe_logging, read_maxlevel, reset_level
 
 class AsyncWorkerJob:
 
-    build_done = "build_done"
-    build_failed = "build_failed"
-    has_changes = "has_changes"
+    build_done = 'build_done'
+    build_failed = 'build_failed'
+    has_changes = 'has_changes'
 
     def __init__(self, project):
         self.project = project
@@ -42,24 +42,24 @@ class BuildSysrootJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for building sysroot")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for building sysroot')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Build sysroot started")
+            logging.info('Build sysroot started')
             self.project.build_sysroot()
             db.update_project_files(self.project)
         except Exception:
-            logging.exception("Build sysroot failed")
+            logging.exception('Build sysroot failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Build finished with Error")
+                logging.info('Build finished with Error')
             else:
-                logging.info("Build finished successfully")
+                logging.info('Build finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -72,23 +72,23 @@ class BuildSDKJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for building SDK")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for building SDK')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Build SDK started")
+            logging.info('Build SDK started')
             self.project.build_sdk()
         except Exception:
-            logging.exception("Build SDK Failed")
+            logging.exception('Build SDK Failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Build finished with Error")
+                logging.info('Build finished with Error')
             else:
-                logging.info("Build finished successfully")
+                logging.info('Build finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -103,23 +103,23 @@ class BuildCDROMsJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for building CDROMs")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for building CDROMs')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Build CDROMs started")
+            logging.info('Build CDROMs started')
             self.project.build_cdroms(self.build_bin, self.build_src)
         except Exception:
-            logging.exception("Build CDROMs failed")
+            logging.exception('Build CDROMs failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Build finished with Error")
+                logging.info('Build finished with Error')
             else:
-                logging.info("Build finished successfully")
+                logging.info('Build finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -132,23 +132,23 @@ class BuildChrootTarJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for building croot tar")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for building croot tar')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Build chroot tarball started")
+            logging.info('Build chroot tarball started')
             self.project.build_chroottarball()
         except Exception:
-            logging.exception("Build chrroot tarball failed")
+            logging.exception('Build chrroot tarball failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Build finished with Error")
+                logging.info('Build finished with Error')
             else:
-                logging.info("Build finished successfully")
+                logging.info('Build finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -164,39 +164,39 @@ class BuildJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for build")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for build')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
 
         success = self.build_failed
         try:
-            logging.info("Build started")
+            logging.info('Build started')
             self.project.build(skip_pkglist=False,
                                build_bin=self.build_bin,
                                build_sources=self.build_src,
                                skip_pbuild=self.skip_pbuilder)
         except (DebootstrapException, AptCacheCommitError, AptCacheUpdateError) as e:
             if isinstance(e, DebootstrapException):
-                err = "Debootstrap failed to install the base rootfilesystem."
+                err = 'Debootstrap failed to install the base rootfilesystem.'
             elif isinstance(e, AptCacheCommitError):
-                err = "Failed to commit the AptCache changes."
+                err = 'Failed to commit the AptCache changes.'
             elif isinstance(e, AptCacheUpdateError):
-                err = "Failed to build the Apt Cache."
+                err = 'Failed to build the Apt Cache.'
 
-            logging.exception("%s\n"
-                              "Probable cause might be:\n"
-                              "  - Problems with internet connection\n"
-                              "  - Broken mirrors\n", err)
+            logging.exception('%s\n'
+                              'Probable cause might be:\n'
+                              '  - Problems with internet connection\n'
+                              '  - Broken mirrors\n', err)
         except Exception:
-            logging.exception("Build failed")
+            logging.exception('Build failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Build finished with Error")
+                logging.info('Build finished with Error')
             else:
-                logging.info("Build finished successfully")
+                logging.info('Build finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -204,7 +204,7 @@ class BuildJob(AsyncWorkerJob):
 
 
 class PdebuildJob(AsyncWorkerJob):
-    def __init__(self, project, cpuset=-1, profile="", cross=False):
+    def __init__(self, project, cpuset=-1, profile='', cross=False):
         AsyncWorkerJob.__init__(self, project)
         self.cpuset = cpuset
         self.profile = profile
@@ -212,23 +212,23 @@ class PdebuildJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project for pdebuild")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project for pdebuild')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Pdebuild started")
+            logging.info('Pdebuild started')
             self.project.pdebuild(self.cpuset, self.profile, self.cross)
         except Exception:
-            logging.exception("Pdebuild failed")
+            logging.exception('Pdebuild failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Pdeb finished with Error")
+                logging.info('Pdeb finished with Error')
             else:
-                logging.info("Pdeb finished successfully")
+                logging.info('Pdeb finished successfully')
                 success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -244,21 +244,21 @@ class CreatePbuilderJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project to have the pbuilder built")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project to have the pbuilder built')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Building pbuilder started")
+            logging.info('Building pbuilder started')
             self.project.create_pbuilder(self.cross, self.noccache,
                                          self.ccachesize)
         except Exception:
-            logging.exception("Pbuilder failed")
+            logging.exception('Pbuilder failed')
         else:
-            logging.info("Pbuilder finished successfully")
+            logging.info('Pbuilder finished successfully')
             success = self.build_done
         finally:
             db.update_project_files(self.project)
@@ -271,24 +271,24 @@ class UpdatePbuilderJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["empty_project", "needs_build", "has_changes",
-                     "build_done", "build_failed"])
-        logging.info("Enqueueing project to update the pbuilder")
+                    ['empty_project', 'needs_build', 'has_changes',
+                     'build_done', 'build_failed'])
+        logging.info('Enqueueing project to update the pbuilder')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_done
         try:
-            logging.info("Updating pbuilder started")
+            logging.info('Updating pbuilder started')
             self.project.update_pbuilder()
         except Exception:
             db.update_project_files(self.project)
-            logging.exception("update Pbuilder failed")
+            logging.exception('update Pbuilder failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Updating Pbuilder finished with Error")
+                logging.info('Updating Pbuilder finished with Error')
             else:
-                logging.info("Updating Pbuilder finished successfully")
+                logging.info('Updating Pbuilder finished successfully')
                 success = self.build_done
         finally:
             db.reset_busy(self.project.builddir, success)
@@ -300,23 +300,23 @@ class APTUpdateJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         db.set_busy(self.project.builddir,
-                    ["build_done", "has_changes"])
-        logging.info("Enqueueing project for APT cache update")
+                    ['build_done', 'has_changes'])
+        logging.info('Enqueueing project for APT cache update')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("APT cache update started")
+            logging.info('APT cache update started')
             with self.project.buildenv:
                 self.project.get_rpcaptcache().update()
         except Exception:
-            logging.exception("APT cache update failed")
+            logging.exception('APT cache update failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("APT cache update finished with Error")
+                logging.info('APT cache update finished with Error')
             else:
-                logging.info("APT cache update finished successfully")
+                logging.info('APT cache update finished successfully')
                 success = self.has_changes
         finally:
             db.reset_busy(self.project.builddir, success)
@@ -327,25 +327,25 @@ class APTUpdUpgrJob(AsyncWorkerJob):
         AsyncWorkerJob.__init__(self, project)
 
     def enqueue(self, queue, db):
-        db.set_busy(self.project.builddir, ["build_done", "has_changes"])
-        logging.info("Enqueueing project for APT update & upgrade")
+        db.set_busy(self.project.builddir, ['build_done', 'has_changes'])
+        logging.info('Enqueueing project for APT update & upgrade')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("APT update started")
+            logging.info('APT update started')
             with self.project.buildenv:
                 self.project.get_rpcaptcache().update()
-            logging.info("APT update finished, upgrade started")
+            logging.info('APT update finished, upgrade started')
             self.project.get_rpcaptcache().upgrade()
         except Exception:
-            logging.exception("APT update & upgrade failed")
+            logging.exception('APT update & upgrade failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("APT upgrade finished with Error")
+                logging.info('APT upgrade finished with Error')
             else:
-                logging.info("APT upgrade finished")
+                logging.info('APT upgrade finished')
                 success = self.has_changes
         finally:
             db.reset_busy(self.project.builddir, success)
@@ -357,9 +357,9 @@ class APTCommitJob(AsyncWorkerJob):
 
     def enqueue(self, queue, db):
         old_status = db.set_busy(self.project.builddir,
-                                 ["build_done", "has_changes"])
+                                 ['build_done', 'has_changes'])
         if self.project.get_rpcaptcache().get_changes():
-            logging.info("Enqueueing project for package changes")
+            logging.info('Enqueueing project for package changes')
             AsyncWorkerJob.enqueue(self, queue, db)
         else:
             db.reset_busy(self.project.builddir, old_status)
@@ -367,7 +367,7 @@ class APTCommitJob(AsyncWorkerJob):
     def execute(self, db):
         success = self.build_failed
         try:
-            logging.info("Applying package changes")
+            logging.info('Applying package changes')
             with self.project.buildenv:
                 # Commit changes, update full package list and write
                 # out new source.xml
@@ -377,15 +377,15 @@ class APTCommitJob(AsyncWorkerJob):
                               self.project.get_rpcaptcache())
 
             sourcexmlpath = path.join(self.project.builddir,
-                                      "source.xml")
+                                      'source.xml')
             self.project.xml.xml.write(sourcexmlpath)
         except Exception:
-            logging.exception("Applying package changes failed")
+            logging.exception('Applying package changes failed')
         else:
             if read_maxlevel(self.project.builddir) >= logging.ERROR:
-                logging.info("Package changes applied with Error")
+                logging.info('Package changes applied with Error')
             else:
-                logging.info("Package changes applied successfully")
+                logging.info('Package changes applied successfully')
                 success = self.has_changes
         finally:
             db.reset_busy(self.project.builddir, success)
@@ -394,40 +394,40 @@ class APTCommitJob(AsyncWorkerJob):
 class GenUpdateJob(AsyncWorkerJob):
     def __init__(self, project, base_version):
         AsyncWorkerJob.__init__(self, project)
-        self.name = project.xml.text("/project/name")
+        self.name = project.xml.text('/project/name')
         self.base_version = base_version
-        self.current_version = project.xml.text("/project/version")
+        self.current_version = project.xml.text('/project/version')
         self.old_status = None
         self.base_version_xml = None
 
     def enqueue(self, queue, db):
         self.old_status = db.set_busy(self.project.builddir,
-                                      ["build_done", "has_changes"])
+                                      ['build_done', 'has_changes'])
         self.base_version_xml = db.get_version_xml(self.project.builddir,
                                                    self.base_version)
 
-        logging.info("Enqueueing project for generating update package")
+        logging.info('Enqueueing project for generating update package')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
         upd_filename = self._gen_upd_filename()
         upd_pathname = path.join(self.project.builddir, upd_filename)
 
-        logging.info("Generating update package")
+        logging.info('Generating update package')
 
         try:
             gen_update_pkg(self.project, self.base_version_xml, upd_pathname)
-            logging.info("Update package generated successfully")
+            logging.info('Update package generated successfully')
         except Exception:
-            logging.exception("Generating update package failed")
+            logging.exception('Generating update package failed')
         finally:
             # Update generation does not change the project, so we always
             # keep the old status
             db.add_project_file(
                 self.project.builddir, upd_filename,
-                "application/octet-stream",
-                f"Update package from {self.base_version} to "
-                f"{self.current_version}")
+                'application/octet-stream',
+                f'Update package from {self.base_version} to '
+                f'{self.current_version}')
             db.reset_busy(self.project.builddir, self.old_status)
 
     def _gen_upd_filename(self):
@@ -442,13 +442,13 @@ class SaveVersionJob(AsyncWorkerJob):
     def __init__(self, project, description):
         AsyncWorkerJob.__init__(self, project)
         self.description = description
-        self.name = self.project.xml.text("project/name")
-        self.version = self.project.xml.text("project/version")
+        self.name = self.project.xml.text('project/name')
+        self.version = self.project.xml.text('project/version')
         self.old_status = None
 
     def enqueue(self, queue, db):
         self.old_status = db.set_busy(self.project.builddir,
-                                      ["build_done", "has_changes"])
+                                      ['build_done', 'has_changes'])
 
         # Create the database entry now. This has the advantage that the
         # user will see an error message immediately, if he tries to use
@@ -461,26 +461,26 @@ class SaveVersionJob(AsyncWorkerJob):
             raise
 
         if self.project.savesh_file:
-            logging.info("save version script:")
+            logging.info('save version script:')
             do((f'{self.project.savesh_file} "{self.project.builddir} '
                 f'{self.project.xml.text("project/version")} '
                 f'{self.project.xml.text("project/name")}"'
                 ), allow_fail=True)
 
-        logging.info("Enqueueing project to save package archive")
+        logging.info('Enqueueing project to save package archive')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
-        logging.info("Generating package archive")
+        logging.info('Generating package archive')
         repodir = get_versioned_filename(self.name, self.version,
-                                         ".pkgarchive")
+                                         '.pkgarchive')
         try:
             gen_binpkg_archive(self.project, repodir)
         except Exception:
-            logging.exception("Saving version failed")
+            logging.exception('Saving version failed')
             db.del_version(self.project.builddir, self.version, force=True)
         else:
-            logging.info("Version saved successfully")
+            logging.info('Version saved successfully')
         finally:
             db.reset_busy(self.project.builddir, self.old_status)
 
@@ -489,18 +489,18 @@ class CheckoutVersionJob(AsyncWorkerJob):
     def __init__(self, project, version):
         AsyncWorkerJob.__init__(self, project)
         self.version = version
-        self.name = self.project.xml.text("project/name")
+        self.name = self.project.xml.text('project/name')
 
     def enqueue(self, queue, db):
         old_status = db.set_busy(self.project.builddir,
-                                 ["build_done", "has_changes", "build_failed"])
+                                 ['build_done', 'has_changes', 'build_failed'])
 
         # If old status was build_failed, just restore the source.xml of the
         # given version and restore the status, indicating that we need a
         # complete rebuild
-        if old_status == "build_failed":
-            logging.warning("Previous project status indicated a failed build\n"
-                            "Just checking out the XML file.")
+        if old_status == 'build_failed':
+            logging.warning('Previous project status indicated a failed build\n'
+                            'Just checking out the XML file.')
 
             try:
                 db.checkout_version_xml(self.project.builddir, self.version)
@@ -519,19 +519,19 @@ class CheckoutVersionJob(AsyncWorkerJob):
             self.project.set_xml(None)
             raise
 
-        logging.info("Enqueueing project for package archive checkout")
+        logging.info('Enqueueing project for package archive checkout')
         AsyncWorkerJob.enqueue(self, queue, db)
 
     def execute(self, db):
-        logging.info("Checking out package archive")
+        logging.info('Checking out package archive')
         repodir = get_versioned_filename(self.name, self.version,
-                                         ".pkgarchive")
+                                         '.pkgarchive')
         success = self.build_failed
         try:
             checkout_binpkg_archive(self.project, repodir)
-            logging.info("Package archive checked out successfully")
+            logging.info('Package archive checked out successfully')
         except Exception:
-            logging.exception("Checking out package archive failed")
+            logging.exception('Checking out package archive failed')
         else:
             success = self.has_changes
         finally:
@@ -549,7 +549,7 @@ def savecwd():
 
 class AsyncWorker(Thread):
     def __init__(self, db):
-        Thread.__init__(self, name="AsyncWorker")
+        Thread.__init__(self, name='AsyncWorker')
         self.db = db
         self.queue = Queue()
         self.start()
@@ -568,7 +568,7 @@ class AsyncWorker(Thread):
             job = self.queue.get()
             if job is not None:
                 with savecwd():
-                    with elbe_logging({"projects": job.project.builddir}):
+                    with elbe_logging({'projects': job.project.builddir}):
                         job.execute(self.db)
             else:
                 loop = False
diff --git a/elbepack/cdroms.py b/elbepack/cdroms.py
index 43a97d32..15397dfb 100644
--- a/elbepack/cdroms.py
+++ b/elbepack/cdroms.py
@@ -25,7 +25,7 @@ CDROM_SIZE = 640 * 1000 * 1000
 def add_source_pkg(repo, component, cache, pkg, version, forbid):
     if pkg in forbid:
         return
-    pkg_id = f"{pkg}-{version}"
+    pkg_id = f'{pkg}-{version}'
     try:
         dsc = cache.download_source(pkg,
                                     version,
@@ -57,10 +57,10 @@ def mk_source_cdrom(components, codename,
 
     for component in components.keys():
         rfs, cache, pkg_lst = components[component]
-        logging.info("Adding %s component", component)
-        rfs.mkdir_p("/var/cache/elbe/sources")
+        logging.info('Adding %s component', component)
+        rfs.mkdir_p('/var/cache/elbe/sources')
         repo = CdromSrcRepo(codename, init_codename,
-                            os.path.join(target, f"srcrepo-{component}"),
+                            os.path.join(target, f'srcrepo-{component}'),
                             cdrom_size, mirror)
         repos[component] = repo
         for pkg, version in pkg_lst:
@@ -82,7 +82,7 @@ def mk_source_cdrom(components, codename,
         if not dsc_real.endswith('.dsc'):
             continue
 
-        repos["main"].include_init_dsc(dsc_real, "initvm")
+        repos['main'].include_init_dsc(dsc_real, 'initvm')
 
     for repo in repos.values():
         repo.finalize()
@@ -98,20 +98,20 @@ def mk_source_cdrom(components, codename,
                 if volume_attr == 'all':
                     volume_list = repo.volume_indexes
                 else:
-                    volume_list = [int(v) for v in volume_attr.split(",")]
+                    volume_list = [int(v) for v in volume_attr.split(',')]
                 for volume_number in volume_list:
-                    with archive_tmpfile(arch_vol.text(".")) as fp:
+                    with archive_tmpfile(arch_vol.text('.')) as fp:
                         if volume_number in repo.volume_indexes:
                             do(
                                 f'tar xvfj "{fp.name}" -h -C '
                                 f'"{repo.get_volume_fs(volume_number).path}"')
                         else:
                             logging.warning("The src-cdrom archive's volume value "
-                                            "is not contained in the actual volumes")
+                                            'is not contained in the actual volumes')
     else:
-        options = ""
+        options = ''
 
-    return [(repo.buildiso(os.path.join(target, f"src-cdrom-{component}.iso"),
+    return [(repo.buildiso(os.path.join(target, f'src-cdrom-{component}.iso'),
             options=options)) for component, repo in repos.items()]
 
 
@@ -121,11 +121,11 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
     rfs.mkdir_p('/var/cache/elbe/binaries/main')
 
     if xml is not None:
-        mirror = xml.get_primary_mirror(rfs.fname("cdrom"))
+        mirror = xml.get_primary_mirror(rfs.fname('cdrom'))
     else:
         mirror = 'http://ftp.de.debian.org/debian'
 
-    repo_path = os.path.join(target, "binrepo")
+    repo_path = os.path.join(target, 'binrepo')
     target_repo_path = os.path.join(repo_path, 'targetrepo')
 
     # initvm repo has been built upon initvm creation
@@ -137,10 +137,10 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
         # When /var/cache/elbe/initvm-bin-repo has not been created
         # (because the initvm install was an old version or somthing,
         #  log an error, and continue with an empty directory.
-        logging.exception("/var/cache/elbe/initvm-bin-repo does not exist\n"
-                          "The generated CDROM will not contain initvm pkgs\n"
-                          "This happened because the initvm was probably\n"
-                          "generated with --skip-build-bin")
+        logging.exception('/var/cache/elbe/initvm-bin-repo does not exist\n'
+                          'The generated CDROM will not contain initvm pkgs\n'
+                          'This happened because the initvm was probably\n'
+                          'generated with --skip-build-bin')
 
         do(f'mkdir -p "{repo_path}"')
 
@@ -151,9 +151,9 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
 
     if xml is not None:
         cache = get_rpcaptcache(rfs, arch)
-        for p in xml.node("debootstrappkgs"):
+        for p in xml.node('debootstrappkgs'):
             pkg = XMLPackage(p, arch)
-            pkg_id = f"{pkg.name}-{pkg.installed_version}"
+            pkg_id = f'{pkg.name}-{pkg.installed_version}'
             try:
                 deb = cache.download_binary(pkg.name,
                                             '/var/cache/elbe/binaries/main',
@@ -169,7 +169,7 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
     cache = get_rpcaptcache(rfs, arch)
     pkglist = cache.get_installed_pkgs()
     for pkg in pkglist:
-        pkg_id = f"{pkg.name}-{pkg.installed_version}"
+        pkg_id = f'{pkg.name}-{pkg.installed_version}'
         try:
             deb = cache.download_binary(pkg.name,
                                         '/var/cache/elbe/binaries/added',
@@ -189,13 +189,13 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
     # Mark the binary repo with the necessary Files
     # to make the installer accept this as a CDRom
     repo_fs = Filesystem(repo_path)
-    repo_fs.mkdir_p(".disk")
-    repo_fs.write_file(".disk/base_installable", 0o644, "main\n")
-    repo_fs.write_file(".disk/base_components", 0o644, "main\n")
-    repo_fs.write_file(".disk/cd_type", 0o644, "not_complete\n")
-    repo_fs.write_file(".disk/info", 0o644, "elbe inst cdrom - full cd\n")
-    repo_fs.symlink(".", "debian", allow_exists=True)
-    repo_fs.write_file("md5sum.txt", 0o644, "")
+    repo_fs.mkdir_p('.disk')
+    repo_fs.write_file('.disk/base_installable', 0o644, 'main\n')
+    repo_fs.write_file('.disk/base_components', 0o644, 'main\n')
+    repo_fs.write_file('.disk/cd_type', 0o644, 'not_complete\n')
+    repo_fs.write_file('.disk/info', 0o644, 'elbe inst cdrom - full cd\n')
+    repo_fs.symlink('.', 'debian', allow_exists=True)
+    repo_fs.write_file('md5sum.txt', 0o644, '')
 
     # write source xml onto cdrom
     xml.xml.write(repo_fs.fname('source.xml'))
@@ -207,6 +207,6 @@ def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target):
              repo_fs.fname('vmlinuz'))
 
     target_repo_fs = Filesystem(target_repo_path)
-    target_repo_fs.write_file(".aptignr", 0o644, "")
+    target_repo_fs.write_file('.aptignr', 0o644, '')
 
-    return repo.buildiso(os.path.join(target, "bin-cdrom.iso"))
+    return repo.buildiso(os.path.join(target, 'bin-cdrom.iso'))
diff --git a/elbepack/changelogxml.py b/elbepack/changelogxml.py
index fc615b97..59d9e5be 100644
--- a/elbepack/changelogxml.py
+++ b/elbepack/changelogxml.py
@@ -18,4 +18,4 @@ class changelogs_xml:
         xmlpkg.et.text = changelog_text
 
     def write(self, fname):
-        self.outxml.write(fname, encoding="utf-8")
+        self.outxml.write(fname, encoding='utf-8')
diff --git a/elbepack/commands/add.py b/elbepack/commands/add.py
index 7bbe9b66..844820df 100644
--- a/elbepack/commands/add.py
+++ b/elbepack/commands/add.py
@@ -12,11 +12,11 @@ from elbepack.elbexml import ElbeXML, ValidationError
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog add [options] <xmlfile> <pkg1> [pkgN]")
+        usage='usage: %prog add [options] <xmlfile> <pkg1> [pkgN]')
     (_, args) = oparser.parse_args(argv)
 
     if len(args) < 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(86)
 
@@ -26,20 +26,20 @@ def run_command(argv):
     try:
         xml = ElbeXML(xmlfile)
     except ValidationError as E:
-        print(f"Error while reading xml file {xmlfile}: {E}")
+        print(f'Error while reading xml file {xmlfile}: {E}')
         sys.exit(87)
 
     for pkg in pkg_lst:
         try:
             xml.add_target_package(pkg)
         except ValueError as E:
-            print(f"Error while adding package {pkg} to {xmlfile}: {E}")
+            print(f'Error while adding package {pkg} to {xmlfile}: {E}')
             sys.exit(88)
 
     try:
         xml.xml.write(xmlfile)
         sys.exit(0)
     except PermissionError as E:
-        print(f"Unable to truncate file {xmlfile}: {E}")
+        print(f'Unable to truncate file {xmlfile}: {E}')
 
     sys.exit(89)
diff --git a/elbepack/commands/adjustpkgs.py b/elbepack/commands/adjustpkgs.py
index 50fc8faa..76c1337a 100644
--- a/elbepack/commands/adjustpkgs.py
+++ b/elbepack/commands/adjustpkgs.py
@@ -29,10 +29,10 @@ def set_pkgs(pkglist):
             if p.essential or \
                p.is_auto_installed or \
                p.name in pkglist or \
-               p.installed.priority == "important" or \
-               p.installed.priority == "required":
+               p.installed.priority == 'important' or \
+               p.installed.priority == 'required':
                 continue
-            logging.info("MARK REMOVE %s", p.name)
+            logging.info('MARK REMOVE %s', p.name)
             p.mark_delete(auto_fix=False, purge=True)
 
         for name in pkglist:
@@ -45,7 +45,7 @@ def set_pkgs(pkglist):
             cp = cache[name]
 
             cp.mark_install()
-            logging.info("MARK INSTALL %s", cp.name)
+            logging.info('MARK INSTALL %s', cp.name)
 
         cache.commit(apt.progress.base.AcquireProgress(),
                      apt.progress.base.InstallProgress())
@@ -58,7 +58,7 @@ def set_pkgs(pkglist):
                 continue
             if p.is_auto_removable:
                 p.mark_delete(purge=True)
-                logging.info("MARKED AS AUTOREMOVE %s", p.name)
+                logging.info('MARKED AS AUTOREMOVE %s', p.name)
 
     cache.commit(apt.progress.base.AcquireProgress(),
                  apt.progress.base.InstallProgress())
@@ -67,16 +67,16 @@ def set_pkgs(pkglist):
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog adjustpkgs [options] <xmlfile>")
+    oparser = OptionParser(usage='usage: %prog adjustpkgs [options] <xmlfile>')
 
-    oparser.add_option("-o", "--output", dest="output",
-                       help="name of logfile")
-    oparser.add_option("-n", "--name", dest="name",
-                       help="name of the project (included in the report)")
+    oparser.add_option('-o', '--output', dest='output',
+                       help='name of logfile')
+    oparser.add_option('-n', '--name', dest='name',
+                       help='name of the project (included in the report)')
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(62)
 
@@ -84,7 +84,7 @@ def run_command(argv):
         return 0
 
     xml = etree(args[0])
-    xml_pkglist = xml.node("/target/pkg-list")
+    xml_pkglist = xml.node('/target/pkg-list')
     xml_pkgs = [p.et.text for p in xml_pkglist]
 
     # TODO: install buildimage packages after target image generation
@@ -92,15 +92,15 @@ def run_command(argv):
     #         we need to introduce additional arguments for this
     #       in default copy mode chroot to the target and remove elbe-daemon
     #         and its dependencies (if it is not in  target/pkg-list.
-    buildenv_pkgs = ["python3-elbe-buildenv"]
-    if xml.has("./project/buildimage/pkg-list"):
+    buildenv_pkgs = ['python3-elbe-buildenv']
+    if xml.has('./project/buildimage/pkg-list'):
         buildenv_pkgs.extend([p.et.text for p in xml.node(
-            "project/buildimage/pkg-list")])
+            'project/buildimage/pkg-list')])
 
-    with elbe_logging({"files": opt.output}):
-        logging.info("ELBE Report for Project %s", opt.name)
+    with elbe_logging({'files': opt.output}):
+        logging.info('ELBE Report for Project %s', opt.name)
         return set_pkgs(xml_pkgs + buildenv_pkgs)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     run_command(sys.argv[1:])
diff --git a/elbepack/commands/bootup-check.py b/elbepack/commands/bootup-check.py
index be9cd4a8..dcc6126c 100644
--- a/elbepack/commands/bootup-check.py
+++ b/elbepack/commands/bootup-check.py
@@ -19,7 +19,7 @@ def is_installed(ip, cache):
     try:
         p = cache[ip.et.text]
     except KeyError:
-        print(f"{ip.et.text} is not in local apt")
+        print(f'{ip.et.text} is not in local apt')
         return False
     if p.current_state == apt_pkg.CURSTATE_INSTALLED:
         return True
@@ -28,7 +28,7 @@ def is_installed(ip, cache):
 
 def bootup_check(xml):
 
-    fpl = xml.node("fullpkgs")
+    fpl = xml.node('fullpkgs')
 
     apt_pkg.init()
     cache = apt_pkg.Cache()
@@ -37,30 +37,30 @@ def bootup_check(xml):
     for p in hl_cache:
         if p.is_installed:
             if not is_in_fpl(p, fpl):
-                print(f"{p.name} installed by user")
+                print(f'{p.name} installed by user')
 
     for ip in fpl:
         if not is_installed(ip, cache):
-            print(f"{ip.et.text} removed by user")
+            print(f'{ip.et.text} removed by user')
 
 
 def bootup_info():
-    with open("/etc/elbe_version", 'r') as ev:
+    with open('/etc/elbe_version', 'r') as ev:
         print(ev.read())
 
 
 def run_command(_argv):
     try:
-        xml = etree("/etc/elbe_base.xml")
+        xml = etree('/etc/elbe_base.xml')
     except IOError:
-        print("/etc/elbe_base.xml removed by user")
+        print('/etc/elbe_base.xml removed by user')
         return -1
 
     bootup_check(xml)
     try:
         bootup_info()
     except IOError:
-        print("/etc/elbe_version removed by user")
+        print('/etc/elbe_version removed by user')
         return -1
 
     return 0
diff --git a/elbepack/commands/buildchroot.py b/elbepack/commands/buildchroot.py
index e7ad440e..28b8419f 100644
--- a/elbepack/commands/buildchroot.py
+++ b/elbepack/commands/buildchroot.py
@@ -19,75 +19,75 @@ from elbepack.log import elbe_logging
 
 def run_command(argv):
     oparser = OptionParser(
-        usage="usage: %prog buildchroot [options] <xmlfile>")
+        usage='usage: %prog buildchroot [options] <xmlfile>')
 
-    oparser.add_option("-t", "--target", dest="target",
-                       help="directoryname of target")
+    oparser.add_option('-t', '--target', dest='target',
+                       help='directoryname of target')
 
-    oparser.add_option("-o", "--output", dest="output",
-                       help="name of logfile")
+    oparser.add_option('-o', '--output', dest='output',
+                       help='name of logfile')
 
-    oparser.add_option("-n", "--name", dest="name",
-                       help="name of the project (included in the report)")
+    oparser.add_option('-n', '--name', dest='name',
+                       help='name of the project (included in the report)')
 
-    oparser.add_option("--skip-pbuild", action="store_true",
-                       dest="skip_pbuild", default=False,
-                       help="skip building packages from <pbuilder> list")
+    oparser.add_option('--skip-pbuild', action='store_true',
+                       dest='skip_pbuild', default=False,
+                       help='skip building packages from <pbuilder> list')
 
     oparser.add_option(
-        "--build-bin",
-        action="store_true",
-        dest="build_bin",
+        '--build-bin',
+        action='store_true',
+        dest='build_bin',
         default=False,
-        help="Build Binary Repository CDROM, for exact Reproduction")
+        help='Build Binary Repository CDROM, for exact Reproduction')
 
-    oparser.add_option("--build-sources", action="store_true",
-                       dest="build_sources", default=False,
-                       help="Build Source CD")
+    oparser.add_option('--build-sources', action='store_true',
+                       dest='build_sources', default=False,
+                       help='Build Source CD')
 
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
 
     oparser.add_option(
-        "--cdrom-size",
-        action="store",
-        dest="cdrom_size",
+        '--cdrom-size',
+        action='store',
+        dest='cdrom_size',
         default=CDROM_SIZE,
-        help="Source ISO CD size in bytes")
+        help='Source ISO CD size in bytes')
 
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
-    oparser.add_option("--skip-pkglist", action="store_true",
-                       dest="skip_pkglist", default=False,
-                       help="ignore changes of the package list")
+    oparser.add_option('--skip-pkglist', action='store_true',
+                       dest='skip_pkglist', default=False,
+                       help='ignore changes of the package list')
 
-    oparser.add_option("--skip-cdrom", action="store_true",
-                       dest="skip_cdrom", default=False,
-                       help="(now obsolete) Skip cdrom iso generation")
+    oparser.add_option('--skip-cdrom', action='store_true',
+                       dest='skip_cdrom', default=False,
+                       help='(now obsolete) Skip cdrom iso generation')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(26)
 
     if not opt.target:
-        print("No target specified")
+        print('No target specified')
         sys.exit(27)
 
     if opt.skip_cdrom:
-        print("WARNING: Skip CDROMS is now the default, "
-              "use --build-bin to build binary CDROM")
+        print('WARNING: Skip CDROMS is now the default, '
+              'use --build-bin to build binary CDROM')
 
-    with elbe_logging({"files": opt.output}):
+    with elbe_logging({'files': opt.output}):
         try:
             project = ElbeProject(opt.target, args[0], opt.name,
                                   opt.buildtype, opt.skip_validation)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(28)
 
         try:
@@ -98,12 +98,12 @@ def run_command(argv):
                 opt.skip_pkglist,
                 opt.skip_pbuild)
         except CommandError as ce:
-            logging.error("Command in project build failed: %s", ce.cmd)
+            logging.error('Command in project build failed: %s', ce.cmd)
             sys.exit(29)
 
         try:
             db = ElbeDB()
             db.save_project(project)
         except OperationalError:
-            logging.exception("Failed to save project in database")
+            logging.exception('Failed to save project in database')
             sys.exit(30)
diff --git a/elbepack/commands/buildsdk.py b/elbepack/commands/buildsdk.py
index d9174c1b..65bc26ac 100644
--- a/elbepack/commands/buildsdk.py
+++ b/elbepack/commands/buildsdk.py
@@ -13,26 +13,26 @@ from elbepack.log import elbe_logging
 
 def run_command(argv):
     oparser = OptionParser(
-        usage="usage: %prog buildsdk [options] <builddir>")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+        usage='usage: %prog buildsdk [options] <builddir>')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(39)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
         try:
             project = ElbeProject(args[0], override_buildtype=opt.buildtype,
                                   skip_validate=opt.skip_validation)
         except ValidationError:
-            logging.exception("xml validation failed.  Bailing out")
+            logging.exception('xml validation failed.  Bailing out')
             sys.exit(40)
 
         project.build_sdk()
diff --git a/elbepack/commands/buildsysroot.py b/elbepack/commands/buildsysroot.py
index 410b0f2f..cb3e4a88 100644
--- a/elbepack/commands/buildsysroot.py
+++ b/elbepack/commands/buildsysroot.py
@@ -13,26 +13,26 @@ from elbepack.log import elbe_logging
 
 def run_command(argv):
     oparser = OptionParser(
-        usage="usage: %prog buildsysroot [options] <builddir>")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+        usage='usage: %prog buildsysroot [options] <builddir>')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(99)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
         try:
             project = ElbeProject(args[0], override_buildtype=opt.buildtype,
                                   skip_validate=opt.skip_validation)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(100)
 
         project.build_sysroot()
diff --git a/elbepack/commands/check-build.py b/elbepack/commands/check-build.py
index 7ec82df5..239189a2 100644
--- a/elbepack/commands/check-build.py
+++ b/elbepack/commands/check-build.py
@@ -18,12 +18,12 @@ from elbepack.treeutils import etree
 from elbepack.shellhelper import get_command_out, command_out, do, CommandError
 from elbepack.filesystem import TmpdirFilesystem
 
-DEVNULL = open(os.devnull, "w")
+DEVNULL = open(os.devnull, 'w')
 
 
 def run_command(argv):
 
-    oparser = optparse.OptionParser(usage="usage: %prog check-build <cmd> <build-dir>")
+    oparser = optparse.OptionParser(usage='usage: %prog check-build <cmd> <build-dir>')
 
     (_, args) = oparser.parse_args(argv)
 
@@ -31,34 +31,34 @@ def run_command(argv):
         oparser.print_help()
         os.sys.exit(63)
 
-    if args[0] == "all":
+    if args[0] == 'all':
         tests = [CheckBase.tests[tag] for tag in CheckBase.tests]
     elif args[0] in CheckBase.tests:
         tests = [CheckBase.tests[args[0]]]
     else:
-        print(f"Invalid check test {args[0]}")
-        print("Valid tests are:\n\tall")
+        print(f'Invalid check test {args[0]}')
+        print('Valid tests are:\n\tall')
         for tag in CheckBase.tests:
-            print(f"\t{tag}")
+            print(f'\t{tag}')
         os.sys.exit(64)
 
     total_cnt = 0
     fail_cnt = 0
 
-    with elbe_logging({"streams": None}):
+    with elbe_logging({'streams': None}):
 
         for test in tests:
 
-            logging.info("Starting test %s (%s)", test.__name__, test.__doc__)
+            logging.info('Starting test %s (%s)', test.__name__, test.__doc__)
             os.chdir(args[1])
             ret = test()()
 
             total_cnt += 1
             if ret:
                 fail_cnt += 1
-                logging.error("FAILED test %s (%s)", test.__name__, test.__doc__)
+                logging.error('FAILED test %s (%s)', test.__name__, test.__doc__)
 
-        logging.info("Passed %d tests ouf of %d",
+        logging.info('Passed %d tests ouf of %d',
                      total_cnt - fail_cnt, total_cnt)
 
     os.sys.exit(fail_cnt)
@@ -94,14 +94,14 @@ class CheckBase:
         return _register
 
     def run(self):
-        raise Exception("Check run method not implemented")
+        raise Exception('Check run method not implemented')
         return 0
 
     def fail(self, reason):
         raise CheckException(reason)
 
 
- at CheckBase.register("cdrom")
+ at CheckBase.register('cdrom')
 class CheckCdroms(CheckBase):
 
     """Check for cdroms integrity"""
@@ -110,30 +110,30 @@ class CheckCdroms(CheckBase):
         try:
             do(f'7z x -o"{tgt}" "{cdrom}"')
         except CommandError as E:
-            self.fail(f"Failed to extract cdrom {cdrom}:\n{E}")
+            self.fail(f'Failed to extract cdrom {cdrom}:\n{E}')
 
     def dpkg_get_infos(self, path, fmt):
         """Get dpkg infos for .deb and .dsc file formats"""
         try:
-            if path.endswith(".deb"):
+            if path.endswith('.deb'):
                 cmd = f'dpkg -f "{path}" {" ".join(fmt)}'
-            elif path.endswith(".dsc"):
+            elif path.endswith('.dsc'):
                 cmd = f'grep -E "^({"|".join(fmt)}):" {path}'
-            return get_command_out(cmd).decode("utf-8")
+            return get_command_out(cmd).decode('utf-8')
         except CommandError as E:
             self.fail(
                 f"Failed to get debian infos ({'|'.join(fmt)}) "
-                f"for {path}:\n{E}")
+                f'for {path}:\n{E}')
 
     @staticmethod
     def cmp_version(v1, v2):
         return command_out(
-            f"dpkg --compare-versions {v1} eq {v2}", output=DEVNULL)[0]
+            f'dpkg --compare-versions {v1} eq {v2}', output=DEVNULL)[0]
 
     def do_src(self, sources, src_total):
         """Check for sources in src-cdrom*"""
 
-        iso_it = glob.iglob("src-cdrom*")
+        iso_it = glob.iglob('src-cdrom*')
         src_cnt = 0
 
         # For every src-cdrom*, extract it to a temporary directory
@@ -146,18 +146,18 @@ class CheckCdroms(CheckBase):
                         continue
 
                     infos = self.dpkg_get_infos(realpath,
-                                                ["Source", "Version"])
+                                                ['Source', 'Version'])
                     src_name = None
                     src_version = None
 
                     for info in infos.split('\n'):
 
-                        if info.startswith("Source:"):
+                        if info.startswith('Source:'):
                             src_name = info.split('Source:')[1].strip(' ')
 
                         # Same as for the binary version.  The
                         # PGP's signature contains a version field
-                        elif info.startswith("Version:"):
+                        elif info.startswith('Version:'):
                             if not src_version:
                                 src_version = info.split('Version:')[1].strip(' ')
 
@@ -170,7 +170,7 @@ class CheckCdroms(CheckBase):
                             # Found a matching version; prune it
                             if self.cmp_version(version, src_version) == 0:
 
-                                logging.info("Validating source %s_%s",
+                                logging.info('Validating source %s_%s',
                                              src_name, version)
 
                                 sources[src_name].remove(version)
@@ -189,20 +189,20 @@ class CheckCdroms(CheckBase):
                         # a warning
                         if not match:
                             logging.warning("Can't find matching version for source %s_%s.\n"
-                                            "It might have already been validated",
+                                            'It might have already been validated',
                                             src_name, src_version)
                     else:
-                        logging.error("Extra source %s_%s found",
+                        logging.error('Extra source %s_%s found',
                                       src_name, src_version)
                         self.ret = 1
 
         # List missing sources
         for src_name in sources:
             for src_version in sources[src_name]:
-                logging.error("Missing source %s_%s",
+                logging.error('Missing source %s_%s',
                               src_name, src_version)
 
-        logging.info("Succesfully validated %d source packages out of %d",
+        logging.info('Succesfully validated %d source packages out of %d',
                      src_cnt, src_total)
 
         if src_cnt != src_total:
@@ -231,7 +231,7 @@ class CheckCdroms(CheckBase):
 
         # Every build has a source.xml where the list of binaries
         # installed can be found
-        xml = etree("source.xml")
+        xml = etree('source.xml')
 
         # Initial statistics fo the build
         bin_cnt = 0
@@ -243,23 +243,23 @@ class CheckCdroms(CheckBase):
 
         # Create a dictionnary of the form {"bin-name": [versions ..]}
         # from the source.xml.  We do this by iterating over all <pkg>
-        for tag in xml.all("./*/pkg"):
+        for tag in xml.all('./*/pkg'):
 
             bin_pkg = tag.et.text
 
             # Package already in the dictionnary? Add its version.
             # Otherwise, add a new entry into the dictionnary
             if bin_pkg in binaries:
-                binaries[bin_pkg].append(tag.et.attrib["version"])
+                binaries[bin_pkg].append(tag.et.attrib['version'])
             else:
-                binaries[bin_pkg] = [tag.et.attrib["version"]]
+                binaries[bin_pkg] = [tag.et.attrib['version']]
 
             bin_total += 1
 
         # For every bin-cdrom, create a temporary directory where to
         # extract it and find all *.deb files
         #
-        for cdrom in glob.glob("bin-cdrom*"):
+        for cdrom in glob.glob('bin-cdrom*'):
             with TmpdirFilesystem() as tmp:
                 self.extract_cdrom(tmp.path, cdrom)
                 for _, realpath in tmp.walk_files():
@@ -267,10 +267,10 @@ class CheckCdroms(CheckBase):
                         continue
 
                     # Extract informations from .deb
-                    infos = self.dpkg_get_infos(realpath, ["Package",
-                                                           "Source",
-                                                           "Version",
-                                                           "Built-Using"])
+                    infos = self.dpkg_get_infos(realpath, ['Package',
+                                                           'Source',
+                                                           'Version',
+                                                           'Built-Using'])
                     src_name = None
                     src_version = None
                     bin_name = None
@@ -279,7 +279,7 @@ class CheckCdroms(CheckBase):
                     for line in infos.split('\n'):
 
                         # Package: <PACKAGE>
-                        if line.startswith("Package:"):
+                        if line.startswith('Package:'):
                             bin_name = line.split('Package:')[1].strip(' \t')
 
                         # Version: <VERSION>
@@ -289,7 +289,7 @@ class CheckCdroms(CheckBase):
                         # the PGP signature will put a 'Version' field.
                         # Thus, let's check if we already have found a
                         # binary version and don't overwrite it
-                        elif line.startswith("Version:"):
+                        elif line.startswith('Version:'):
                             if not bin_version:
                                 bin_version = line.split('Version:')[1].strip(' ')
 
@@ -297,24 +297,24 @@ class CheckCdroms(CheckBase):
                         #
                         # This field is optional.  If it is not present, the
                         # source package default to the bin package
-                        elif line.startswith("Source:"):
+                        elif line.startswith('Source:'):
                             src_infos = line.split('Source:')[1].strip(' ').split(' ')
                             src_name = src_infos[0]
                             if len(src_infos) > 1:
-                                src_version = src_infos[1].strip("()")
+                                src_version = src_infos[1].strip('()')
 
                         # Built-Using: <SRC (=VERSION)>...
                         #
                         # Sources list in the built-using field are
                         # seperated by a comma
-                        elif line.startswith("Built-Using:"):
+                        elif line.startswith('Built-Using:'):
 
-                            built_using = line.split("Built-Using:")[1].strip(' ').split(',')
+                            built_using = line.split('Built-Using:')[1].strip(' ').split(',')
 
                             for src in built_using:
 
                                 name, version = src.strip(' ').split(' ', 1)
-                                version = version.strip("(= )")
+                                version = version.strip('(= )')
 
                                 # TODO - This is not component aware!
                                 if name in sources:
@@ -351,20 +351,20 @@ class CheckCdroms(CheckBase):
                     bin_cnt += 1
                     try:
                         binaries[bin_name].remove(bin_version)
-                        logging.info("Validating binary %s_%s",
+                        logging.info('Validating binary %s_%s',
                                      bin_name, bin_version)
-                        logging.info("Adding source %s_%s", src_name, src_version)
+                        logging.info('Adding source %s_%s', src_name, src_version)
                     except KeyError:
-                        logging.error("Foreign binary found %s_%s",
+                        logging.error('Foreign binary found %s_%s',
                                       bin_name, bin_version)
                         self.ret = 1
 
         # List all missing binaries
         for bin_name in binaries:
             for bin_version in binaries[bin_name]:
-                logging.error("Missing binary %s_%s", bin_name, bin_version)
+                logging.error('Missing binary %s_%s', bin_name, bin_version)
 
-        logging.info("Succesfully validated %d binary packages out of %d",
+        logging.info('Succesfully validated %d binary packages out of %d',
                      bin_cnt, bin_total)
 
         if bin_cnt != bin_total:
@@ -378,7 +378,7 @@ class CheckCdroms(CheckBase):
         return self.ret
 
 
- at CheckBase.register("img")
+ at CheckBase.register('img')
 class CheckImage(CheckBase):
 
     """Check if image can boot"""
@@ -387,49 +387,49 @@ class CheckImage(CheckBase):
     def open_tgz(path):
         tmp = tempfile.NamedTemporaryFile(prefix='elbe')
         command_out(
-            f"tar --to-stdout --extract --gunzip --file {path}", output=tmp)
+            f'tar --to-stdout --extract --gunzip --file {path}', output=tmp)
         return tmp
 
     def open_img(self, path):
-        if path.endswith(".tar.gz"):
+        if path.endswith('.tar.gz'):
             return self.open_tgz(path)
         return open(path)
 
     def run(self):
 
-        self.xml = etree("source.xml")
+        self.xml = etree('source.xml')
 
         fail_cnt = 0
         total_cnt = 0
 
         # For all image
-        for tag in self.xml.all(".//check-image-list/check"):
+        for tag in self.xml.all('.//check-image-list/check'):
             fail_cnt += self.do_img(tag)
             total_cnt += 1
 
-        logging.info("Succesfully validate %d images out of %d",
+        logging.info('Succesfully validate %d images out of %d',
                      total_cnt - fail_cnt, total_cnt)
 
         return fail_cnt
 
     def do_img(self, tag):
 
-        img_name = tag.text("./img")
-        qemu = tag.text("./interpreter")
+        img_name = tag.text('./img')
+        qemu = tag.text('./interpreter')
 
         with self.open_img(img_name) as img:
 
             # ELBE_IMG always points to the opened image
-            os.environ["ELBE_IMG"] = img.name
+            os.environ['ELBE_IMG'] = img.name
 
             opts = os.path.expandvars(tag
-                                      .text("./interpreter-opts")
+                                      .text('./interpreter-opts')
                                       .strip(' \t\n'))
 
-            for candidate, action in [("login",  self.do_login),
-                                      ("serial", self.do_serial)]:
+            for candidate, action in [('login',  self.do_login),
+                                      ('serial', self.do_serial)]:
 
-                element = tag.et.find(os.path.join("./action", candidate))
+                element = tag.et.find(os.path.join('./action', candidate))
 
                 if element is not None:
                     return action(element, img_name, qemu, opts)
@@ -439,25 +439,25 @@ class CheckImage(CheckBase):
 
     def do_login(self, _element, img_name, qemu, opts):
 
-        passwd = "root"
-        if self.xml.find(".//action/login").text:
-            passwd = self.xml.find(".//action/login").text
+        passwd = 'root'
+        if self.xml.find('.//action/login').text:
+            passwd = self.xml.find('.//action/login').text
 
         comm = [
-            ("expect", ".*[Ll]ogin:.*"),
-            ("sendline", "root"),
-            ("expect", "[Pp]assword:.*"),
-            ("sendline", passwd),
-            ("expect", ".*#"),
+            ('expect', '.*[Ll]ogin:.*'),
+            ('sendline', 'root'),
+            ('expect', '[Pp]assword:.*'),
+            ('sendline', passwd),
+            ('expect', '.*#'),
 
             # This assume systemd is on the system.  We might want to change
             # this to a more generic way
-            ("sendline", "shutdown --poweroff now bye"),
+            ('sendline', 'shutdown --poweroff now bye'),
 
-            ("expect", "bye"),
+            ('expect', 'bye'),
 
             # 30 seconds timeout for EOF; This will fail if systemd goes haywire
-            ("EOF", ""),
+            ('EOF', ''),
         ]
 
         return self.do_comm(img_name, qemu, opts, comm)
@@ -470,14 +470,14 @@ class CheckImage(CheckBase):
 
     def do_comm(self, img_name, qemu, opts, comm):
 
-        child = pexpect.spawn(qemu + " " + opts)
+        child = pexpect.spawn(qemu + ' ' + opts)
         transcript = []
         ret = 0
 
         try:
             for action, text in comm:
 
-                if action == "expect":
+                if action == 'expect':
 
                     # Try to expect something from the guest If there's a
                     # timeout; the test fails Otherwise; Add to the transcript
@@ -493,14 +493,14 @@ class CheckImage(CheckBase):
                         transcript.append(child.before.decode('utf-8'))
                         transcript.append(child.after.decode('utf-8'))
 
-                elif action == "sendline":
+                elif action == 'sendline':
                     child.sendline(text)
 
                 # We're expecting the serial line to be closed by the guest.  If
                 # there's a timeout, it means that the guest has not closed the
                 # line and the test has failed.  In every case the test ends
                 # here.
-                elif action == "EOF":
+                elif action == 'EOF':
                     try:
                         child.expect(pexpect.EOF)
                     except pexpect.exceptions.TIMEOUT:
@@ -514,21 +514,21 @@ class CheckImage(CheckBase):
 
         # Woops. The guest has die and we didn't expect that!
         except pexpect.exceptions.EOF as E:
-            logging.error("Communication was interrupted unexpectedly %s", E)
+            logging.error('Communication was interrupted unexpectedly %s', E)
             ret = 1
 
         child.close()
 
-        logging.info("Transcript for image %s:\n"
-                     "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
-                     "%s\n"
-                     "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
+        logging.info('Transcript for image %s:\n'
+                     '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
+                     '%s\n'
+                     '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
                      img_name, ''.join(transcript))
 
         return ret or child.exitstatus
 
 
- at CheckBase.register("sdk")
+ at CheckBase.register('sdk')
 class CheckSDK(CheckBase):
     """Check if SDK is working"""
 
@@ -596,21 +596,21 @@ exit 1
             os.chmod(sdk, 0o744)
 
             # Extract here with 'yes' to all answers
-            do(f"./{sdk} -y -d .")
+            do(f'./{sdk} -y -d .')
 
             # Get environment file
-            env = tmp.glob("environment-setup*")[0]
+            env = tmp.glob('environment-setup*')[0]
 
             # NOTE!  This script requires binfmt to be installed.
-            do("/bin/sh", stdin=self.script, env_add={"ELBE_SDK_ENV": env})
+            do('/bin/sh', stdin=self.script, env_add={'ELBE_SDK_ENV': env})
 
     def run(self):
-        for sdk in glob.glob("setup-elbe-sdk*"):
+        for sdk in glob.glob('setup-elbe-sdk*'):
             self.do_sdk(sdk)
 
 
- at CheckBase.register("rebuild")
+ at CheckBase.register('rebuild')
 class CheckRebuild(CheckBase):
 
     def run(self):
-        do(f"{sys.executable} {elbe_exe} initvm submit --skip-build-source bin-cdrom.iso")
+        do(f'{sys.executable} {elbe_exe} initvm submit --skip-build-source bin-cdrom.iso')
diff --git a/elbepack/commands/check_updates.py b/elbepack/commands/check_updates.py
index 14db8f76..2b0adb3f 100644
--- a/elbepack/commands/check_updates.py
+++ b/elbepack/commands/check_updates.py
@@ -38,42 +38,42 @@ def build_changelog_xml(v, opt, update_packages):
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog check_updates [options] <source-xmlfile>")
+        usage='usage: %prog check_updates [options] <source-xmlfile>')
     oparser.add_option(
-        "-s",
-        "--script",
-        dest="script",
-        help="filename of script to run when an update is required")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+        '-s',
+        '--script',
+        dest='script',
+        help='filename of script to run when an update is required')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
     oparser.add_option(
-        "-c",
-        "--changelogs",
-        dest="changelogs",
-        help="filename of changelog xml file")
+        '-c',
+        '--changelogs',
+        dest='changelogs',
+        help='filename of changelog xml file')
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(51)
 
     if not opt.skip_validation:
         validation = validate_xml(args[0])
         if validation:
-            print("xml validation failed. Bailing out")
+            print('xml validation failed. Bailing out')
             for i in validation:
                 print(i)
             sys.exit(52)
 
-    print(f"checking {args[0]}")
+    print(f'checking {args[0]}')
 
     xml = ElbeXML(args[0])
 
-    fullp = xml.node("fullpkgs")
+    fullp = xml.node('fullpkgs')
 
-    arch = xml.text("project/buildimage/arch", key="arch")
+    arch = xml.text('project/buildimage/arch', key='arch')
 
     v = virtapt.VirtApt(xml)
 
@@ -81,7 +81,7 @@ def run_command(argv):
         pname = p.et.text
         pauto = p.et.get('auto')
 
-        if pauto != "true":
+        if pauto != 'true':
             v.mark_install(pname)
 
     errors = 0
@@ -97,11 +97,11 @@ def run_command(argv):
         if not v.has_pkg(xp.name):
             if not xp.is_auto_installed:
                 print(
-                    f"{xp.name} does not exist in cache but is specified in "
-                    "pkg-list")
+                    f'{xp.name} does not exist in cache but is specified in '
+                    'pkg-list')
                 errors += 1
             else:
-                print(f"{xp.name} is no more required")
+                print(f'{xp.name} is no more required')
                 required_updates += 1
 
             continue
@@ -109,7 +109,7 @@ def run_command(argv):
         if v.marked_install(xp.name):
             cver = v.get_candidate_ver(xp.name)
             if xp.installed_version != cver:
-                print(f"{xp.name}: {xp.installed_version} != {cver}")
+                print(f'{xp.name}: {xp.installed_version} != {cver}')
                 required_updates += 1
 
                 if opt.changelogs:
@@ -120,16 +120,16 @@ def run_command(argv):
     sys.stdout.flush()
     sys.stderr.flush()
     if errors > 0:
-        print(f"{errors} Errors occured, xml files needs fixing")
+        print(f'{errors} Errors occured, xml files needs fixing')
         if opt.script:
-            system(f"{opt.script} ERRORS {args[0]}", allow_fail=True)
+            system(f'{opt.script} ERRORS {args[0]}', allow_fail=True)
     elif required_updates > 0:
-        print(f"{required_updates} updates required")
+        print(f'{required_updates} updates required')
 
         if opt.changelogs:
             build_changelog_xml(v, opt, update_packages)
 
         if opt.script:
-            system(f"{opt.script} UPDATE {args[0]}", allow_fail=True)
+            system(f'{opt.script} UPDATE {args[0]}', allow_fail=True)
     else:
-        print("No Updates available")
+        print('No Updates available')
diff --git a/elbepack/commands/chg_archive.py b/elbepack/commands/chg_archive.py
index a279bcce..eb1057c6 100644
--- a/elbepack/commands/chg_archive.py
+++ b/elbepack/commands/chg_archive.py
@@ -13,37 +13,37 @@ from elbepack.treeutils import etree
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog chg_archive [options] <xmlfile> "
-              "[<archive>|<directory>]")
+        usage='usage: %prog chg_archive [options] <xmlfile> '
+              '[<archive>|<directory>]')
     oparser.add_option(
-        "--keep-attributes",
-        action="store_true",
-        help="keep file owners and groups, if not specified all files will "
-             "belong to root:root",
-        dest="keep_attributes",
+        '--keep-attributes',
+        action='store_true',
+        help='keep file owners and groups, if not specified all files will '
+             'belong to root:root',
+        dest='keep_attributes',
         default=False)
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(42)
 
     try:
         xml = etree(args[0])
     except BaseException:
-        print("Error reading xml file!")
+        print('Error reading xml file!')
         sys.exit(43)
 
     try:
         xml = chg_archive(xml, args[1], opt.keep_attributes)
     except BaseException:
-        print("Error reading archive")
+        print('Error reading archive')
         sys.exit(44)
 
     try:
         xml.write(args[0])
     except BaseException:
-        print("Unable to write new xml file")
+        print('Unable to write new xml file')
         sys.exit(45)
diff --git a/elbepack/commands/chroot.py b/elbepack/commands/chroot.py
index 5c52d521..eacea6b5 100644
--- a/elbepack/commands/chroot.py
+++ b/elbepack/commands/chroot.py
@@ -15,56 +15,56 @@ from elbepack.log import elbe_logging
 
 def run_command(argv):
     oparser = OptionParser(
-        usage="usage: %prog chroot [options] <builddir> [cmd]")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--target", action="store_true", dest="target",
-                       help="chroot into target instead of buildenv",
+        usage='usage: %prog chroot [options] <builddir> [cmd]')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--target', action='store_true', dest='target',
+                       help='chroot into target instead of buildenv',
                        default=False)
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(72)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
         try:
             project = ElbeProject(args[0],
                                   override_buildtype=opt.buildtype,
                                   skip_validate=opt.skip_validation,
                                   url_validation=ValidationMode.NO_CHECK)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(73)
 
-        os.environ["LANG"] = "C"
-        os.environ["LANGUAGE"] = "C"
-        os.environ["LC_ALL"] = "C"
+        os.environ['LANG'] = 'C'
+        os.environ['LANGUAGE'] = 'C'
+        os.environ['LC_ALL'] = 'C'
         # TODO: howto set env in chroot?
-        os.environ["PS1"] = project.xml.text('project/name') + r': \w\$'
+        os.environ['PS1'] = project.xml.text('project/name') + r': \w\$'
 
-        cmd = "/bin/bash"
+        cmd = '/bin/bash'
 
         if len(args) > 1:
-            cmd = ""
+            cmd = ''
             cmd2 = args[1:]
             for c in cmd2:
-                cmd += (c + " ")
+                cmd += (c + ' ')
 
         if opt.target:
             try:
                 with project.targetfs:
-                    system(f"/usr/sbin/chroot {project.targetpath} {cmd}")
+                    system(f'/usr/sbin/chroot {project.targetpath} {cmd}')
             except CommandError as e:
                 print(repr(e))
         else:
             try:
                 with project.buildenv:
-                    system(f"/usr/sbin/chroot {project.chrootpath} {cmd}")
+                    system(f'/usr/sbin/chroot {project.chrootpath} {cmd}')
             except CommandError as e:
                 print(repr(e))
diff --git a/elbepack/commands/control.py b/elbepack/commands/control.py
index b869da98..fc84e649 100644
--- a/elbepack/commands/control.py
+++ b/elbepack/commands/control.py
@@ -19,100 +19,100 @@ from elbepack.elbexml import ValidationMode
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: elbe control [options] <command>")
+    oparser = OptionParser(usage='usage: elbe control [options] <command>')
 
-    oparser.add_option("--host", dest="host", default=cfg['soaphost'],
-                       help="Ip or hostname of elbe-daemon.")
+    oparser.add_option('--host', dest='host', default=cfg['soaphost'],
+                       help='Ip or hostname of elbe-daemon.')
 
-    oparser.add_option("--port", dest="port", default=cfg['soapport'],
-                       help="Port of soap itf on elbe-daemon.")
+    oparser.add_option('--port', dest='port', default=cfg['soapport'],
+                       help='Port of soap itf on elbe-daemon.')
 
-    oparser.add_option("--pass", dest="passwd", default=cfg['elbepass'],
-                       help="Password (default is foo).")
+    oparser.add_option('--pass', dest='passwd', default=cfg['elbepass'],
+                       help='Password (default is foo).')
 
-    oparser.add_option("--user", dest="user", default=cfg['elbeuser'],
-                       help="Username (default is root).")
+    oparser.add_option('--user', dest='user', default=cfg['elbeuser'],
+                       help='Username (default is root).')
 
     oparser.add_option(
-        "--retries",
-        dest="retries",
-        default="10",
-        help="How many times to retry the connection to the server before "
-             "giving up (default is 10 times, yielding 10 seconds).")
+        '--retries',
+        dest='retries',
+        default='10',
+        help='How many times to retry the connection to the server before '
+             'giving up (default is 10 times, yielding 10 seconds).')
 
     oparser.add_option(
-        "--build-bin",
-        action="store_true",
-        dest="build_bin",
+        '--build-bin',
+        action='store_true',
+        dest='build_bin',
         default=False,
-        help="Build binary repository CDROM, for exact reproduction.")
+        help='Build binary repository CDROM, for exact reproduction.')
 
-    oparser.add_option("--build-sources", action="store_true",
-                       dest="build_sources", default=False,
-                       help="Build source CDROM")
+    oparser.add_option('--build-sources', action='store_true',
+                       dest='build_sources', default=False,
+                       help='Build source CDROM')
 
     oparser.add_option(
-        "--skip-pbuilder",
-        action="store_true",
-        dest="skip_pbuilder",
+        '--skip-pbuilder',
+        action='store_true',
+        dest='skip_pbuilder',
         default=False,
         help="skip pbuilder section of XML (don't build packages)")
 
-    oparser.add_option("--output",
-                       dest="output", default=None,
-                       help="Output files to <directory>")
+    oparser.add_option('--output',
+                       dest='output', default=None,
+                       help='Output files to <directory>')
 
-    oparser.add_option("--matches", dest="matches", default=False,
-                       help="Select files based on wildcard expression.")
+    oparser.add_option('--matches', dest='matches', default=False,
+                       help='Select files based on wildcard expression.')
 
-    oparser.add_option("--pbuilder-only", action="store_true",
-                       dest="pbuilder_only", default=False,
-                       help="Only list/download pbuilder Files")
+    oparser.add_option('--pbuilder-only', action='store_true',
+                       dest='pbuilder_only', default=False,
+                       help='Only list/download pbuilder Files')
 
-    oparser.add_option("--cpuset", default=-1, type="int",
-                       help="Limit cpuset of pbuilder commands (bitmask)"
-                            "(defaults to -1 for all CPUs)")
+    oparser.add_option('--cpuset', default=-1, type='int',
+                       help='Limit cpuset of pbuilder commands (bitmask)'
+                            '(defaults to -1 for all CPUs)')
 
-    oparser.add_option("--profile", dest="profile", default="",
-                       help="Make pbuilder commands build the specified profile")
+    oparser.add_option('--profile', dest='profile', default='',
+                       help='Make pbuilder commands build the specified profile')
 
-    oparser.add_option("--cross", dest="cross", default=False,
-                       action="store_true",
-                       help="Creates an environment for crossbuilding if "
-                            "combined with create. Combined with build it"
-                            " will use this environment.")
+    oparser.add_option('--cross', dest='cross', default=False,
+                       action='store_true',
+                       help='Creates an environment for crossbuilding if '
+                            'combined with create. Combined with build it'
+                            ' will use this environment.')
 
-    oparser.add_option("--no-ccache", dest="noccache", default=False,
-                       action="store_true",
+    oparser.add_option('--no-ccache', dest='noccache', default=False,
+                       action='store_true',
                        help="Deactivates the compiler cache 'ccache'")
 
-    oparser.add_option("--ccache-size", dest="ccachesize", default="10G",
-                       action="store", type="string",
-                       help="set a limit for the compiler cache size "
-                            "(should be a number followed by an optional "
-                            "suffix: k, M, G, T. Use 0 for no limit.)")
+    oparser.add_option('--ccache-size', dest='ccachesize', default='10G',
+                       action='store', type='string',
+                       help='set a limit for the compiler cache size '
+                            '(should be a number followed by an optional '
+                            'suffix: k, M, G, T. Use 0 for no limit.)')
 
     devel = OptionGroup(
         oparser,
-        "options for elbe developers",
+        'options for elbe developers',
         "Caution: Don't use these options in a productive environment")
-    devel.add_option("--skip-urlcheck", action="store_true",
-                     dest="url_validation", default=ValidationMode.CHECK_ALL,
-                     help="Skip URL Check inside initvm")
+    devel.add_option('--skip-urlcheck', action='store_true',
+                     dest='url_validation', default=ValidationMode.CHECK_ALL,
+                     help='Skip URL Check inside initvm')
 
-    devel.add_option("--debug", action="store_true",
-                     dest="debug", default=False,
-                     help="Enable debug mode.")
+    devel.add_option('--debug', action='store_true',
+                     dest='debug', default=False,
+                     help='Enable debug mode.')
 
-    devel.add_option("--ignore-version-diff", action="store_true",
-                     dest="ignore_version", default=False,
-                     help="allow different elbe version on host and initvm")
+    devel.add_option('--ignore-version-diff', action='store_true',
+                     dest='ignore_version', default=False,
+                     help='allow different elbe version on host and initvm')
     oparser.add_option_group(devel)
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("elbe control - no subcommand given", file=sys.stderr)
+        print('elbe control - no subcommand given', file=sys.stderr)
         ClientAction.print_actions()
         return
 
@@ -127,28 +127,28 @@ def run_command(argv):
                 opt.retries))
     except URLError:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Check, whether the initvm is actually running.", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Check, whether the initvm is actually running.', file=sys.stderr)
         print("try 'elbe initvm start'", file=sys.stderr)
         sys.exit(13)
     except socket.error:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
+        print('', file=sys.stderr)
         print(
-            "Check, whether the Soap Server is running inside the initvm",
+            'Check, whether the Soap Server is running inside the initvm',
             file=sys.stderr)
         print("try 'elbe initvm attach'", file=sys.stderr)
         sys.exit(14)
     except BadStatusLine:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Check, whether the initvm is actually running.", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Check, whether the initvm is actually running.', file=sys.stderr)
         print("try 'elbe initvm start'", file=sys.stderr)
         sys.exit(15)
 
@@ -156,36 +156,36 @@ def run_command(argv):
         v_server = control.service.get_version()
         if v_server != elbe_version:
             print(
-                f"elbe v{v_server} is used in initvm, this is not compatible "
-                f"with elbe v{elbe_version} that is used on this machine. "
-                "Please install same versions of elbe in initvm and on your "
-                "machine.",
+                f'elbe v{v_server} is used in initvm, this is not compatible '
+                f'with elbe v{elbe_version} that is used on this machine. '
+                'Please install same versions of elbe in initvm and on your '
+                'machine.',
                 file=sys.stderr)
             print(
-                f"To install elbe v{elbe_version} into the initvm use "
+                f'To install elbe v{elbe_version} into the initvm use '
                 "'elbe control --ignore-version-diff install_elbe_version'")
 
             if not opt.ignore_version:
                 sys.exit(16)
     except AttributeError:
-        print("the elbe installation inside the initvm doesn't provide a \
-get_version interface. Please create a new initvm or upgrade \
-elbe inside the existing initvm.", file=sys.stderr)
+        print("the elbe installation inside the initvm doesn't provide a "
+              'get_version interface. Please create a new initvm or upgrade '
+              'elbe inside the existing initvm.', file=sys.stderr)
         if not opt.ignore_version:
             sys.exit(24)
 
     try:
         action = ClientAction(args[0])
     except KeyError:
-        print("elbe control - unknown subcommand", file=sys.stderr)
+        print('elbe control - unknown subcommand', file=sys.stderr)
         ClientAction.print_actions()
         sys.exit(25)
 
     try:
         action.execute(control, opt, args[1:])
     except WebFault as e:
-        print("Server returned error:", file=sys.stderr)
-        print("", file=sys.stderr)
+        print('Server returned error:', file=sys.stderr)
+        print('', file=sys.stderr)
         if hasattr(e.fault, 'faultstring'):
             print(e.fault.faultstring, file=sys.stderr)
         else:
diff --git a/elbepack/commands/daemon.py b/elbepack/commands/daemon.py
index a32b7ebe..9c767ca5 100644
--- a/elbepack/commands/daemon.py
+++ b/elbepack/commands/daemon.py
@@ -19,17 +19,17 @@ def run_command(argv):
     daemons = get_daemonlist()
 
     if not daemons:
-        print("no elbe daemons installed")
+        print('no elbe daemons installed')
 
-    oparser = OptionParser(usage="usage: %prog")
-    oparser.add_option("--host", dest="host", default='0.0.0.0',
-                       help="interface to host daemon")
-    oparser.add_option("--port", dest="port", default=7587,
-                       help="port to host daemon")
+    oparser = OptionParser(usage='usage: %prog')
+    oparser.add_option('--host', dest='host', default='0.0.0.0',
+                       help='interface to host daemon')
+    oparser.add_option('--port', dest='port', default=7587,
+                       help='port to host daemon')
 
     for d in daemons:
-        oparser.add_option("--" + str(d), dest=str(d), default=False,
-                           action="store_true", help="enable " + str(d))
+        oparser.add_option('--' + str(d), dest=str(d), default=False,
+                           action='store_true', help='enable ' + str(d))
 
     (opt, _) = oparser.parse_args(argv)
 
@@ -40,19 +40,19 @@ def run_command(argv):
             if str(o) == str(d):
                 if getattr(opt, o):
                     active = True
-                    print(f"enable {d}")
-                    module = "elbepack.daemons." + str(d)
+                    print(f'enable {d}')
+                    module = 'elbepack.daemons.' + str(d)
                     _ = __import__(module)
                     cmdmod = sys.modules[module]
                     cherrypy.tree.graft(
                         cmdmod.get_app(
                             cherrypy.engine),
-                        "/" + str(d))
+                        '/' + str(d))
     if not active:
-        print("no daemon activated, use")
+        print('no daemon activated, use')
         for d in daemons:
-            print(f"   --{d}")
-        print("to activate at least one daemon")
+            print(f'   --{d}')
+        print('to activate at least one daemon')
         return
 
     cherrypy.server.unsubscribe()
diff --git a/elbepack/commands/db.py b/elbepack/commands/db.py
index 93f4567e..0a9694a1 100644
--- a/elbepack/commands/db.py
+++ b/elbepack/commands/db.py
@@ -8,14 +8,14 @@ from elbepack.dbaction import DbAction
 def run_command(argv):
 
     if not argv:
-        print("elbe db - no action given")
+        print('elbe db - no action given')
         DbAction.print_actions()
         return
 
     try:
         DbAction(argv[0]).execute(argv[1:])
     except KeyError:
-        print("elbe db - unknown action given")
+        print('elbe db - unknown action given')
         DbAction.print_actions()
         return
 
diff --git a/elbepack/commands/diff.py b/elbepack/commands/diff.py
index 394beaf6..52c15f43 100644
--- a/elbepack/commands/diff.py
+++ b/elbepack/commands/diff.py
@@ -13,21 +13,21 @@ def walk_generated(gen_path, fix_path, exclude):
 
     file_to_rm = []
     file_differ = []
-    gen_path = gen_path.rstrip("/")
-    fix_path = fix_path.rstrip("/")
+    gen_path = gen_path.rstrip('/')
+    fix_path = fix_path.rstrip('/')
 
     for root, _, files in os.walk(gen_path):
         if root == gen_path:
-            infs_root = "/"
+            infs_root = '/'
         else:
-            infs_root = root.replace(gen_path, "")
+            infs_root = root.replace(gen_path, '')
 
         if True in [infs_root.startswith(x) for x in exclude]:
             continue
 
         if not files:
             if not os.path.exists(fix_path + infs_root):
-                print(f"empty directory {infs_root} only exists in gen image")
+                print(f'empty directory {infs_root} only exists in gen image')
                 file_to_rm.append(infs_root)
         else:
             for f in files:
@@ -41,22 +41,22 @@ def walk_generated(gen_path, fix_path, exclude):
                             if not filecmp.cmp(
                                     gen_fname, fix_fname, shallow=False):
                                 print(
-                                    f"files {gen_fname} and {fix_fname} differ")
+                                    f'files {gen_fname} and {fix_fname} differ')
                                 file_differ.append(os.path.join(infs_root, f))
                         else:
                             if not (os.readlink(gen_fname) ==
                                     os.readlink(fix_fname)):
                                 print(
-                                    f"symlinks {gen_fname} and "
-                                    f"{fix_fname} differ")
+                                    f'symlinks {gen_fname} and '
+                                    f'{fix_fname} differ')
                                 file_differ.append(os.path.join(infs_root, f))
 
                 elif not os.path.exists(gen_fname) and \
                         os.path.exists(fix_fname):
-                    print(f"file {fix_fname} only exists in fixed image")
+                    print(f'file {fix_fname} only exists in fixed image')
                 elif os.path.exists(gen_fname) and not \
                         os.path.exists(fix_fname):
-                    print(f"file {gen_fname} only exists in gen image")
+                    print(f'file {gen_fname} only exists in gen image')
                     file_to_rm.append(os.path.join(infs_root, f))
 
     return file_differ, file_to_rm
@@ -66,29 +66,29 @@ def walk_fixed(gen_path, fix_path, exclude):
 
     file_only = []
     dir_to_create = []
-    gen_path = gen_path.rstrip("/")
-    fix_path = fix_path.rstrip("/")
+    gen_path = gen_path.rstrip('/')
+    fix_path = fix_path.rstrip('/')
 
     for root, _, files in os.walk(fix_path):
         if root == fix_path:
-            infs_root = "/"
+            infs_root = '/'
         else:
-            infs_root = root.replace(fix_path, "")
+            infs_root = root.replace(fix_path, '')
 
         if True in [infs_root.startswith(x) for x in exclude]:
             continue
 
         if not files:
             if not os.path.exists(gen_path + infs_root):
-                print(f"empty directory {infs_root} only exists in fix image")
-                dir_to_create.append(infs_root.lstrip("/"))
+                print(f'empty directory {infs_root} only exists in fix image')
+                dir_to_create.append(infs_root.lstrip('/'))
         else:
             for f in files:
                 gen_fname = os.path.join(gen_path + infs_root, f)
                 fix_fname = os.path.join(fix_path + infs_root, f)
 
                 if not os.path.exists(gen_fname) and os.path.exists(fix_fname):
-                    print(f"file {fix_fname} only exists in fixed image")
+                    print(f'file {fix_fname} only exists in fixed image')
                     file_only.append(os.path.join(infs_root, f))
 
     return file_only, dir_to_create
@@ -96,13 +96,13 @@ def walk_fixed(gen_path, fix_path, exclude):
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: %prog diff [options] <dir1> <dir2>")
-    oparser.add_option("--exclude", action="append", dest="exclude",
-                       help="Paths to exclude")
+    oparser = OptionParser(usage='usage: %prog diff [options] <dir1> <dir2>')
+    oparser.add_option('--exclude', action='append', dest='exclude',
+                       help='Paths to exclude')
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(106)
 
@@ -115,16 +115,16 @@ def run_command(argv):
     differ, rm = walk_generated(gen_rfs, fix_rfs, opt.exclude)
     only, mkdir = walk_fixed(gen_rfs, fix_rfs, opt.exclude)
 
-    print("suggesting:")
+    print('suggesting:')
     print()
 
     for f in rm:
-        print(f"<rm>{f}</rm>")
+        print(f'<rm>{f}</rm>')
 
     for d in mkdir:
-        print(f"<mkdir>{d}</mkdir>")
+        print(f'<mkdir>{d}</mkdir>')
 
-    print("")
+    print('')
 
     for f in differ + only:
-        print(f"tar rf archive.tar -C {fix_rfs} {f}")
+        print(f'tar rf archive.tar -C {fix_rfs} {f}')
diff --git a/elbepack/commands/fetch_initvm_pkgs.py b/elbepack/commands/fetch_initvm_pkgs.py
index 9eb3faf3..38519d84 100644
--- a/elbepack/commands/fetch_initvm_pkgs.py
+++ b/elbepack/commands/fetch_initvm_pkgs.py
@@ -27,46 +27,46 @@ def run_command(argv):
     # files/directories or just globaly.
 
     oparser = OptionParser(
-        usage="usage: %prog fetch_initvm_pkgs [options] <xmlfile>")
+        usage='usage: %prog fetch_initvm_pkgs [options] <xmlfile>')
 
-    oparser.add_option("-b", "--binrepo", dest="binrepo",
-                       default="/var/cache/elbe/initvm-bin-repo",
-                       help="directory where the bin repo should reside")
+    oparser.add_option('-b', '--binrepo', dest='binrepo',
+                       default='/var/cache/elbe/initvm-bin-repo',
+                       help='directory where the bin repo should reside')
 
-    oparser.add_option("-s", "--srcrepo", dest="srcrepo",
-                       default="/var/cache/elbe/initvm-src-repo",
-                       help="directory where the src repo should reside")
+    oparser.add_option('-s', '--srcrepo', dest='srcrepo',
+                       default='/var/cache/elbe/initvm-src-repo',
+                       help='directory where the src repo should reside')
 
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
-    oparser.add_option("--cdrom-mount-path", dest="cdrom_path",
-                       help="path where cdrom is mounted")
+    oparser.add_option('--cdrom-mount-path', dest='cdrom_path',
+                       help='path where cdrom is mounted')
 
-    oparser.add_option("--cdrom-device", dest="cdrom_device",
-                       help="cdrom device, in case it has to be mounted")
+    oparser.add_option('--cdrom-device', dest='cdrom_device',
+                       help='cdrom device, in case it has to be mounted')
 
-    oparser.add_option("--apt-archive", dest="archive",
-                       default="/var/cache/elbe/binaries/main",
-                       help="path where binary packages are downloaded to.")
+    oparser.add_option('--apt-archive', dest='archive',
+                       default='/var/cache/elbe/binaries/main',
+                       help='path where binary packages are downloaded to.')
 
-    oparser.add_option("--src-archive", dest="srcarchive",
-                       default="/var/cache/elbe/sources",
-                       help="path where src packages are downloaded to.")
+    oparser.add_option('--src-archive', dest='srcarchive',
+                       default='/var/cache/elbe/sources',
+                       help='path where src packages are downloaded to.')
 
-    oparser.add_option("--skip-build-sources", action="store_false",
-                       dest="build_sources", default=True,
-                       help="Skip downloading Source Packages")
+    oparser.add_option('--skip-build-sources', action='store_false',
+                       dest='build_sources', default=True,
+                       help='Skip downloading Source Packages')
 
-    oparser.add_option("--skip-build-bin", action="store_false",
-                       dest="build_bin", default=True,
-                       help="Skip downloading binary packages")
+    oparser.add_option('--skip-build-bin', action='store_false',
+                       dest='build_bin', default=True,
+                       help='Skip downloading binary packages')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(46)
 
@@ -74,10 +74,10 @@ def run_command(argv):
         xml = ElbeXML(args[0], skip_validate=opt.skip_validation)
     except ValidationError as e:
         print(str(e))
-        print("xml validation failed. Bailing out")
+        print('xml validation failed. Bailing out')
         sys.exit(47)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
 
         if opt.cdrom_path:
             if opt.cdrom_device:
@@ -86,7 +86,7 @@ def run_command(argv):
             # a cdrom build is identified by the cdrom option
             # the xml file that is copied into the initvm
             # by the initrd does not have the cdrom tags setup.
-            mirror = f"file://{opt.cdrom_path}"
+            mirror = f'file://{opt.cdrom_path}'
         else:
             mirror = xml.get_initvm_primary_mirror(opt.cdrom_path)
 
@@ -103,7 +103,7 @@ def run_command(argv):
             cache = Cache()
             cache.open()
             for pkg in pkglist:
-                pkg_id = f"{pkg.name}-{pkg.installed_version}"
+                pkg_id = f'{pkg.name}-{pkg.installed_version}'
                 retry = 1
                 while retry < 3:
                     try:
@@ -149,7 +149,7 @@ def run_command(argv):
 
         if opt.build_sources:
             for pkg in pkglist:
-                pkg_id = f"{pkg.name}-{pkg.installed_version}"
+                pkg_id = f'{pkg.name}-{pkg.installed_version}'
                 retry = 1
                 while retry < 3:
                     try:
diff --git a/elbepack/commands/gen_update.py b/elbepack/commands/gen_update.py
index a22e07f9..9c5a69d2 100644
--- a/elbepack/commands/gen_update.py
+++ b/elbepack/commands/gen_update.py
@@ -15,35 +15,35 @@ from elbepack.log import elbe_logging
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: %prog gen_update [options] [xmlfile]")
-    oparser.add_option("-t", "--target", dest="target",
-                       help="directoryname of target")
-    oparser.add_option("-o", "--output", dest="output",
-                       help="filename of the update package")
-    oparser.add_option("-n", "--name", dest="name",
-                       help="name of the project (included in the report)")
+    oparser = OptionParser(usage='usage: %prog gen_update [options] [xmlfile]')
+    oparser.add_option('-t', '--target', dest='target',
+                       help='directoryname of target')
+    oparser.add_option('-o', '--output', dest='output',
+                       help='filename of the update package')
+    oparser.add_option('-n', '--name', dest='name',
+                       help='name of the project (included in the report)')
     oparser.add_option(
-        "-p",
-        "--pre-sh",
-        dest="presh_file",
-        help="script that is executed before the update will be applied")
+        '-p',
+        '--pre-sh',
+        dest='presh_file',
+        help='script that is executed before the update will be applied')
     oparser.add_option(
-        "-P",
-        "--post-sh",
-        dest="postsh_file",
-        help="script that is executed after the update was applied")
-    oparser.add_option("-c", "--cfg-dir", dest="cfg_dir",
-                       help="files that are copied to target")
-    oparser.add_option("-x", "--cmd-dir", dest="cmd_dir",
-                       help="scripts that are executed on the target")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
-    oparser.add_option("--debug", action="store_true", dest="debug",
+        '-P',
+        '--post-sh',
+        dest='postsh_file',
+        help='script that is executed after the update was applied')
+    oparser.add_option('-c', '--cfg-dir', dest='cfg_dir',
+                       help='files that are copied to target')
+    oparser.add_option('-x', '--cmd-dir', dest='cmd_dir',
+                       help='scripts that are executed on the target')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
+    oparser.add_option('--debug', action='store_true', dest='debug',
                        default=False,
-                       help="Enable various features to debug the build")
+                       help='Enable various features to debug the build')
 
     (opt, args) = oparser.parse_args(argv)
 
@@ -53,11 +53,11 @@ def run_command(argv):
             sys.exit(31)
 
     if len(args) == 1 and not opt.target:
-        print("No target specified")
+        print('No target specified')
         sys.exit(32)
 
     if not opt.output:
-        print("No output file specified")
+        print('No output file specified')
         sys.exit(33)
 
     if opt.buildtype:
@@ -65,13 +65,13 @@ def run_command(argv):
     else:
         buildtype = None
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
         try:
             project = ElbeProject(opt.target, name=opt.name,
                                   override_buildtype=buildtype,
                                   skip_validate=opt.skip_validation)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(34)
 
     if opt.presh_file:
@@ -90,15 +90,15 @@ def run_command(argv):
     if len(args) >= 1:
         update_xml = args[0]
 
-    with elbe_logging({"projects": project.builddir}):
+    with elbe_logging({'projects': project.builddir}):
         try:
             gen_update_pkg(project, update_xml, opt.output, buildtype,
                            opt.skip_validation, opt.debug,
                            cfg_dir=opt.cfg_dir, cmd_dir=opt.cmd_dir)
 
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(37)
         except MissingData:
-            logging.exception("Missing Data")
+            logging.exception('Missing Data')
             sys.exit(38)
diff --git a/elbepack/commands/genlicence.py b/elbepack/commands/genlicence.py
index 53c96d08..1d980f91 100644
--- a/elbepack/commands/genlicence.py
+++ b/elbepack/commands/genlicence.py
@@ -13,38 +13,38 @@ from elbepack.log import elbe_logging
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog genlicence [options] <project>")
-    oparser.add_option("--output", dest="output",
-                       help="outputfilename")
-    oparser.add_option("--xml", dest="xml", default=None,
-                       help="xml outputfilename")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+    oparser = OptionParser(usage='usage: %prog genlicence [options] <project>')
+    oparser.add_option('--output', dest='output',
+                       help='outputfilename')
+    oparser.add_option('--xml', dest='xml', default=None,
+                       help='xml outputfilename')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(70)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
         try:
             project = ElbeProject(args[0],
                                   override_buildtype=opt.buildtype,
                                   skip_validate=opt.skip_validation,
                                   url_validation=ValidationMode.NO_CHECK)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(71)
 
         if opt.output:
-            f = io.open(opt.output, "w+", encoding='utf-8')
+            f = io.open(opt.output, 'w+', encoding='utf-8')
         else:
-            f = io.open('licence.txt', "w+", encoding='utf-8')
+            f = io.open('licence.txt', 'w+', encoding='utf-8')
 
         pkglist = project.get_rpcaptcache().get_installed_pkgs()
         pkgnames = [p.name for p in pkglist]
diff --git a/elbepack/commands/get_archive.py b/elbepack/commands/get_archive.py
index ca6cf50c..696de4a1 100644
--- a/elbepack/commands/get_archive.py
+++ b/elbepack/commands/get_archive.py
@@ -12,7 +12,7 @@ from elbepack.treeutils import etree
 
 
 def unbase(s, fname):
-    outfile = open(fname, "w+b")
+    outfile = open(fname, 'w+b')
     outfile.write(standard_b64decode(s))
     outfile.close()
 
@@ -20,30 +20,30 @@ def unbase(s, fname):
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog get_archive <xmlfile> <archive>")
+        usage='usage: %prog get_archive <xmlfile> <archive>')
     (_, args) = oparser.parse_args(argv)
 
     if len(args) != 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(101)
 
     if os.path.exists(args[1]):
-        print("archive already exists, bailing out")
+        print('archive already exists, bailing out')
         sys.exit(102)
 
     try:
         xml = etree(args[0])
     except BaseException:
-        print("Error reading xml file!")
+        print('Error reading xml file!')
         sys.exit(103)
 
-    if xml.has("archive") and not xml.text("archive") is None:
+    if xml.has('archive') and not xml.text('archive') is None:
         try:
-            unbase(xml.text("archive"), args[1])
+            unbase(xml.text('archive'), args[1])
         except BaseException:
-            print("Error writing archive")
+            print('Error writing archive')
             sys.exit(104)
     else:
-        print("no archive in this xml file.")
+        print('no archive in this xml file.')
         sys.exit(105)
diff --git a/elbepack/commands/hdimg.py b/elbepack/commands/hdimg.py
index dffd7a96..22accd6e 100644
--- a/elbepack/commands/hdimg.py
+++ b/elbepack/commands/hdimg.py
@@ -15,41 +15,41 @@ from elbepack.log import elbe_logging
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog hdimg --target <dir> --output <out> <xmlfile>")
-    oparser.add_option("--target", dest="target",
-                       help="target directory",
-                       metavar="FILE")
-    oparser.add_option("-o", "--output", dest="output",
-                       help="name of logfile")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--skip-grub", action="store_true",
-                       dest="skip_grub", default=False,
-                       help="Skip grub install")
+        usage='usage: %prog hdimg --target <dir> --output <out> <xmlfile>')
+    oparser.add_option('--target', dest='target',
+                       help='target directory',
+                       metavar='FILE')
+    oparser.add_option('-o', '--output', dest='output',
+                       help='name of logfile')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--skip-grub', action='store_true',
+                       dest='skip_grub', default=False,
+                       help='Skip grub install')
     oparser.add_option(
-        "--grub-version",
-        type="int",
-        dest="grub_version",
+        '--grub-version',
+        type='int',
+        dest='grub_version',
         default=202,
-        help="use specific grub version (possible values are 0, 97, and 202)")
+        help='use specific grub version (possible values are 0, 97, and 202)')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(65)
 
     if not opt.target:
-        print("No directory specified!")
+        print('No directory specified!')
         oparser.print_help()
         sys.exit(66)
 
     if not opt.output:
-        print("No Log output")
+        print('No Log output')
         oparser.print_help()
         sys.exit(67)
 
@@ -57,18 +57,18 @@ def run_command(argv):
         opt.grub_version = 0
 
     if opt.grub_version not in [0, 97, 202]:
-        print("invalid grub version")
+        print('invalid grub version')
         oparser.print_help()
         sys.exit(68)
 
-    with elbe_logging({"files": opt.output}):
+    with elbe_logging({'files': opt.output}):
         try:
             project = ElbeProject(opt.target,
                                   override_buildtype=opt.buildtype,
                                   xmlpath=args[0],
                                   skip_validate=opt.skip_validation)
         except ValidationError:
-            logging.exception("XML validation failed.  Bailing out")
+            logging.exception('XML validation failed.  Bailing out')
             sys.exit(69)
 
         project.targetfs.part_target(opt.target, opt.grub_version)
diff --git a/elbepack/commands/init.py b/elbepack/commands/init.py
index ab71f453..5086c2b6 100644
--- a/elbepack/commands/init.py
+++ b/elbepack/commands/init.py
@@ -24,170 +24,170 @@ from elbepack.filesystem import Filesystem
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: %prog init [options] <filename>")
+    oparser = OptionParser(usage='usage: %prog init [options] <filename>')
 
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
-    oparser.add_option("--directory", dest="directory", default="./build",
-                       help="Working directory (default is build)",
-                       metavar="FILE")
+    oparser.add_option('--directory', dest='directory', default='./build',
+                       help='Working directory (default is build)',
+                       metavar='FILE')
 
     oparser.add_option(
-        "--cdrom",
-        dest="cdrom",
-        help="Use FILE as cdrom iso, and use that to build the initvm",
-        metavar="FILE")
+        '--cdrom',
+        dest='cdrom',
+        help='Use FILE as cdrom iso, and use that to build the initvm',
+        metavar='FILE')
 
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
 
     oparser.add_option(
-        "--debug",
-        dest="debug",
-        action="store_true",
+        '--debug',
+        dest='debug',
+        action='store_true',
         default=False,
-        help="start qemu in graphical mode to enable console switch")
+        help='start qemu in graphical mode to enable console switch')
 
     oparser.add_option(
-        "--devel",
-        dest="devel",
-        action="store_true",
+        '--devel',
+        dest='devel',
+        action='store_true',
         default=False,
-        help="use devel mode, and install current builddir inside initvm")
+        help='use devel mode, and install current builddir inside initvm')
 
     oparser.add_option(
-        "--nesting",
-        dest="nesting",
-        action="store_true",
+        '--nesting',
+        dest='nesting',
+        action='store_true',
         default=False,
-        help="allow initvm to support nested kvm. "
-             "This makes /proc/cpuinfo inside initvm differ per host.")
+        help='allow initvm to support nested kvm. '
+             'This makes /proc/cpuinfo inside initvm differ per host.')
 
     oparser.add_option(
-        "--skip-build-bin",
-        action="store_false",
-        dest="build_bin",
+        '--skip-build-bin',
+        action='store_false',
+        dest='build_bin',
         default=True,
-        help="Skip building Binary Repository CDROM, for exact Reproduction")
+        help='Skip building Binary Repository CDROM, for exact Reproduction')
 
     oparser.add_option(
-        "--skip-build-sources",
-        action="store_false",
-        dest="build_sources",
+        '--skip-build-sources',
+        action='store_false',
+        dest='build_sources',
         default=True,
-        help="Skip building Source CDROM")
+        help='Skip building Source CDROM')
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("no filename specified")
+        print('no filename specified')
         oparser.print_help()
         sys.exit(78)
     elif len(args) > 1:
-        print("too many filenames specified")
+        print('too many filenames specified')
         oparser.print_help()
         sys.exit(79)
 
-    with elbe_logging({"files": None}):
+    with elbe_logging({'files': None}):
         if opt.devel:
-            if not os.path.isdir(os.path.join(elbe_dir, "elbepack")):
-                logging.error("Devel Mode only valid, "
-                              "when running from elbe checkout")
+            if not os.path.isdir(os.path.join(elbe_dir, 'elbepack')):
+                logging.error('Devel Mode only valid, '
+                              'when running from elbe checkout')
                 sys.exit(80)
 
         if not opt.skip_validation:
             validation = validate_xml(args[0])
             if validation:
-                logging.error("xml validation failed. Bailing out")
+                logging.error('xml validation failed. Bailing out')
                 for i in validation:
                     logging.error(i)
                 sys.exit(81)
 
         xml = etree(args[0])
 
-        if not xml.has("initvm"):
-            logging.error("fatal error: "
+        if not xml.has('initvm'):
+            logging.error('fatal error: '
                           "xml missing mandatory section 'initvm'")
             sys.exit(82)
 
         if opt.buildtype:
             buildtype = opt.buildtype
-        elif xml.has("initvm/buildtype"):
-            buildtype = xml.text("/initvm/buildtype")
+        elif xml.has('initvm/buildtype'):
+            buildtype = xml.text('/initvm/buildtype')
         else:
-            buildtype = "nodefaults"
+            buildtype = 'nodefaults'
 
         defs = ElbeDefaults(buildtype)
 
-        http_proxy = xml.text("/initvm/mirror/primary_proxy", default="")
-        http_proxy = http_proxy.strip().replace("LOCALMACHINE", "localhost")
+        http_proxy = xml.text('/initvm/mirror/primary_proxy', default='')
+        http_proxy = http_proxy.strip().replace('LOCALMACHINE', 'localhost')
 
         if opt.cdrom:
-            mirror = xml.node("initvm/mirror")
+            mirror = xml.node('initvm/mirror')
             mirror.clear()
-            cdrom = mirror.ensure_child("cdrom")
+            cdrom = mirror.ensure_child('cdrom')
             cdrom.set_text(os.path.abspath(opt.cdrom))
 
         try:
             os.makedirs(opt.directory)
         except OSError as e:
-            logging.error("unable to create project directory: %s (%s)",
+            logging.error('unable to create project directory: %s (%s)',
                           opt.directory,
                           e.strerror)
             sys.exit(83)
 
-        out_path = os.path.join(opt.directory, ".elbe-in")
+        out_path = os.path.join(opt.directory, '.elbe-in')
         try:
             os.makedirs(out_path)
         except OSError as e:
-            logging.error("unable to create subdirectory: %s (%s)",
+            logging.error('unable to create subdirectory: %s (%s)',
                           out_path,
                           e.strerror)
             sys.exit(84)
 
         initvm_http_proxy = http_proxy.replace('http://localhost:',
                                                'http://10.0.2.2:')
-        d = {"elbe_version": elbe_version,
-             "defs": defs,
-             "opt": opt,
-             "xml": xml,
-             "prj": xml.node("/initvm"),
-             "http_proxy": initvm_http_proxy,
-             "pkgs": xml.node("/initvm/pkg-list") or [],
-             "preseed": get_initvm_preseed(xml),
-             "cfg": cfg}
-
-        if http_proxy != "":
-            os.putenv("http_proxy", http_proxy)
-            os.putenv("https_proxy", http_proxy)
-            os.putenv("no_proxy", "localhost,127.0.0.1")
+        d = {'elbe_version': elbe_version,
+             'defs': defs,
+             'opt': opt,
+             'xml': xml,
+             'prj': xml.node('/initvm'),
+             'http_proxy': initvm_http_proxy,
+             'pkgs': xml.node('/initvm/pkg-list') or [],
+             'preseed': get_initvm_preseed(xml),
+             'cfg': cfg}
+
+        if http_proxy != '':
+            os.putenv('http_proxy', http_proxy)
+            os.putenv('https_proxy', http_proxy)
+            os.putenv('no_proxy', 'localhost,127.0.0.1')
 
         try:
-            copy_kinitrd(xml.node("/initvm"), out_path)
+            copy_kinitrd(xml.node('/initvm'), out_path)
         except NoKinitrdException as e:
             msg = str(e)
-            logging.error("Failure to download kernel/initrd debian Package:")
-            logging.error("")
+            logging.error('Failure to download kernel/initrd debian Package:')
+            logging.error('')
             logging.error(msg)
-            logging.error("")
-            logging.error("Check Mirror configuration")
+            logging.error('')
+            logging.error('Check Mirror configuration')
             if 'SHA256SUMS' in msg:
-                logging.error("If you use debmirror please read "
-                              "https://github.com/Linutronix/elbe/issues/188 "
-                              "on how to work around the issue")
+                logging.error('If you use debmirror please read '
+                              'https://github.com/Linutronix/elbe/issues/188 '
+                              'on how to work around the issue')
             sys.exit(85)
 
         templates = os.listdir(init_template_dir)
 
-        make_executable = ["init-elbe.sh.mako",
-                           "preseed.cfg.mako"]
+        make_executable = ['init-elbe.sh.mako',
+                           'preseed.cfg.mako']
 
         for t in templates:
-            o = t.replace(".mako", "")
+            o = t.replace('.mako', '')
 
-            if t in ("Makefile.mako", "libvirt.xml.mako"):
+            if t in ('Makefile.mako', 'libvirt.xml.mako'):
                 write_template(
                     os.path.join(
                         opt.directory, o), os.path.join(
@@ -202,25 +202,25 @@ def run_command(argv):
                 os.chmod(os.path.join(out_path, o), 0o755)
 
         shutil.copyfile(args[0],
-                        os.path.join(out_path, "source.xml"))
+                        os.path.join(out_path, 'source.xml'))
 
         keys = []
-        for key in xml.all(".//initvm/mirror/url-list/url/raw-key"):
+        for key in xml.all('.//initvm/mirror/url-list/url/raw-key'):
             keys.append(key.et.text)
 
         if opt.cdrom:
             keys.append(system_out(f'7z x -so "{opt.cdrom}" repo.pub'))
 
-        import_keyring = os.path.join(out_path, "elbe-keyring")
+        import_keyring = os.path.join(out_path, 'elbe-keyring')
 
         do(f'gpg --no-options \
                  --no-default-keyring \
                  --keyring {import_keyring} --import',
-           stdin="".join(keys).encode('ascii'),
+           stdin=''.join(keys).encode('ascii'),
            allow_fail=True,
            env_add={'GNUPGHOME': out_path})
 
-        export_keyring = import_keyring + ".gpg"
+        export_keyring = import_keyring + '.gpg'
 
         do(f'gpg --no-options \
                 --no-default-keyring \
@@ -236,33 +236,33 @@ def run_command(argv):
                 opts.append(
                     f'--exclude "{os.path.relpath(out_path, start=elbe_dir)}"')
 
-            opts.append("--exclude-vcs")
-            opts.append("--exclude-vcs-ignores")
+            opts.append('--exclude-vcs')
+            opts.append('--exclude-vcs-ignores')
             opts.append("--exclude='elbe-build*'")
             opts.append("--exclude='docs/*'")
-            tar_fname = os.path.join(out_path, "elbe-devel.tar.bz2")
+            tar_fname = os.path.join(out_path, 'elbe-devel.tar.bz2')
             system(f'tar cfj "{tar_fname}" {" ".join(opts)} -C "{elbe_dir}" .')
 
-        to_cpy = [("apt.conf", "etc/apt"),
-                  ("init-elbe.sh", ""),
-                  ("source.xml", ""),
-                  ("initrd-cdrom.gz", ""),
-                  ("vmlinuz", ""),
-                  ("preseed.cfg", "")]
+        to_cpy = [('apt.conf', 'etc/apt'),
+                  ('init-elbe.sh', ''),
+                  ('source.xml', ''),
+                  ('initrd-cdrom.gz', ''),
+                  ('vmlinuz', ''),
+                  ('preseed.cfg', '')]
 
         elbe_in = Filesystem(out_path)
 
         if opt.devel:
-            to_cpy.append(("elbe-devel.tar.bz2", ""))
+            to_cpy.append(('elbe-devel.tar.bz2', ''))
 
         # Convert relative rfs path to absolute in the system
-        to_cpy = [(elbe_in.fname(src), elbe_in.fname(os.path.join("initrd-tree", dst)))
+        to_cpy = [(elbe_in.fname(src), elbe_in.fname(os.path.join('initrd-tree', dst)))
                   for src, dst
                   in to_cpy]
 
         # These are already absolute path!
-        keyrings = elbe_in.fname(os.path.join("initrd-tree", "usr/share/keyrings"))
-        for gpg in elbe_in.glob("*.gpg"):
+        keyrings = elbe_in.fname(os.path.join('initrd-tree', 'usr/share/keyrings'))
+        for gpg in elbe_in.glob('*.gpg'):
             to_cpy.append((gpg, keyrings))
 
         for src, dst in to_cpy:
diff --git a/elbepack/commands/initvm.py b/elbepack/commands/initvm.py
index 5321c1c2..14a0830f 100644
--- a/elbepack/commands/initvm.py
+++ b/elbepack/commands/initvm.py
@@ -11,61 +11,61 @@ from elbepack.xmlpreprocess import PreprocessWrapper
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: elbe initvm [options] <command>")
+    oparser = OptionParser(usage='usage: elbe initvm [options] <command>')
 
     oparser.add_option(
-        "--directory",
-        dest="directory",
+        '--directory',
+        dest='directory',
         default=None,
-        help="directory, where the initvm resides, default is ./initvm")
+        help='directory, where the initvm resides, default is ./initvm')
 
-    oparser.add_option("--cdrom", dest="cdrom", default=None,
-                       help="iso image of Binary cdrom")
+    oparser.add_option('--cdrom', dest='cdrom', default=None,
+                       help='iso image of Binary cdrom')
 
     oparser.add_option(
-        "--devel",
-        action="store_true",
-        dest="devel",
+        '--devel',
+        action='store_true',
+        dest='devel',
         default=False,
-        help="Install elbe Version from the current working into initvm")
+        help='Install elbe Version from the current working into initvm')
 
-    oparser.add_option("--skip-download", action="store_true",
-                       dest="skip_download", default=False,
-                       help="Skip downloading generated Files")
+    oparser.add_option('--skip-download', action='store_true',
+                       dest='skip_download', default=False,
+                       help='Skip downloading generated Files')
 
-    oparser.add_option("--output", dest="outdir", default=None,
-                       help="directory where to save downloaded Files")
+    oparser.add_option('--output', dest='outdir', default=None,
+                       help='directory where to save downloaded Files')
 
     oparser.add_option(
-        "--skip-build-bin",
-        action="store_false",
-        dest="build_bin",
+        '--skip-build-bin',
+        action='store_false',
+        dest='build_bin',
         default=True,
-        help="Skip building Binary Repository CDROM, for exact Reproduction")
+        help='Skip building Binary Repository CDROM, for exact Reproduction')
 
-    oparser.add_option("--skip-build-sources", action="store_false",
-                       dest="build_sources", default=True,
-                       help="Skip building Source CDROM")
+    oparser.add_option('--skip-build-sources', action='store_false',
+                       dest='build_sources', default=True,
+                       help='Skip building Source CDROM')
 
-    oparser.add_option("--keep-files", action="store_true",
-                       dest="keep_files", default=False,
+    oparser.add_option('--keep-files', action='store_true',
+                       dest='keep_files', default=False,
                        help="don't delete elbe project files in initvm")
 
-    oparser.add_option("--writeproject", dest="writeproject", default=None,
-                       help="write project name to file")
+    oparser.add_option('--writeproject', dest='writeproject', default=None,
+                       help='write project name to file')
 
     oparser.add_option(
-        "--nesting",
-        dest="nesting",
-        action="store_true",
+        '--nesting',
+        dest='nesting',
+        action='store_true',
         default=False,
-        help="allow initvm to support nested kvm. "
-             "This makes /proc/cpuinfo inside initvm differ per host.")
+        help='allow initvm to support nested kvm. '
+             'This makes /proc/cpuinfo inside initvm differ per host.')
 
     oparser.add_option(
-        "--build-sdk",
-        dest="build_sdk",
-        action="store_true",
+        '--build-sdk',
+        dest='build_sdk',
+        action='store_true',
         default=False,
         help="Also make 'initvm submit' build an SDK.")
 
@@ -74,7 +74,7 @@ def run_command(argv):
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("elbe initvm - no subcommand given", file=sys.stderr)
+        print('elbe initvm - no subcommand given', file=sys.stderr)
         InitVMAction.print_actions()
         sys.exit(48)
 
@@ -86,13 +86,13 @@ def run_command(argv):
     try:
         action = InitVMAction(args[0])
     except KeyError:
-        print("elbe initvm - unknown subcommand", file=sys.stderr)
+        print('elbe initvm - unknown subcommand', file=sys.stderr)
         InitVMAction.print_actions()
         sys.exit(49)
 
     try:
         action.execute(directory, opt, args[1:])
     except InitVMError as e:
-        print("InitVM Exception", file=sys.stderr)
+        print('InitVM Exception', file=sys.stderr)
         print(e, file=sys.stderr)
         sys.exit(50)
diff --git a/elbepack/commands/mkcdrom.py b/elbepack/commands/mkcdrom.py
index 464e8644..11fef1dd 100644
--- a/elbepack/commands/mkcdrom.py
+++ b/elbepack/commands/mkcdrom.py
@@ -21,44 +21,44 @@ def run_command(argv):
 
     # pylint disable=too-many-statements
 
-    oparser = OptionParser(usage="usage: %prog mkcdrom [options] <builddir>")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
-    oparser.add_option("--buildtype", dest="buildtype",
-                       help="Override the buildtype")
-    oparser.add_option("--arch", dest="arch",
-                       help="Override the architecture")
-    oparser.add_option("--codename", dest="codename",
-                       help="Override the codename")
-    oparser.add_option("--init_codename", dest="init_codename",
-                       help="Override the initvm codename")
-    oparser.add_option("--rfs-only", action="store_true",
-                       dest="rfs_only", default=False,
-                       help="builddir points to RFS")
-    oparser.add_option("--log", dest="log",
-                       help="Log to filename")
-    oparser.add_option("--binary", action="store_true",
-                       dest="binary", default=False,
-                       help="build binary cdrom")
-    oparser.add_option("--source", action="store_true",
-                       dest="source", default=False,
-                       help="build source cdrom")
+    oparser = OptionParser(usage='usage: %prog mkcdrom [options] <builddir>')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
+    oparser.add_option('--buildtype', dest='buildtype',
+                       help='Override the buildtype')
+    oparser.add_option('--arch', dest='arch',
+                       help='Override the architecture')
+    oparser.add_option('--codename', dest='codename',
+                       help='Override the codename')
+    oparser.add_option('--init_codename', dest='init_codename',
+                       help='Override the initvm codename')
+    oparser.add_option('--rfs-only', action='store_true',
+                       dest='rfs_only', default=False,
+                       help='builddir points to RFS')
+    oparser.add_option('--log', dest='log',
+                       help='Log to filename')
+    oparser.add_option('--binary', action='store_true',
+                       dest='binary', default=False,
+                       help='build binary cdrom')
+    oparser.add_option('--source', action='store_true',
+                       dest='source', default=False,
+                       help='build source cdrom')
     oparser.add_option(
-        "--cdrom-size",
-        action="store",
-        dest="cdrom_size",
+        '--cdrom-size',
+        action='store',
+        dest='cdrom_size',
         default=CDROM_SIZE,
-        help="Source ISO CD size in bytes")
+        help='Source ISO CD size in bytes')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments", file=sys.stderr)
+        print('wrong number of arguments', file=sys.stderr)
         oparser.print_help()
         sys.exit(74)
 
-    with elbe_logging({"files": opt.log}):
+    with elbe_logging({'files': opt.log}):
 
         if not opt.rfs_only:
             try:
@@ -66,14 +66,14 @@ def run_command(argv):
                                       override_buildtype=opt.buildtype,
                                       skip_validate=opt.skip_validation)
             except ValidationError:
-                logging.exception("XML validation failed.  Bailing out")
+                logging.exception('XML validation failed.  Bailing out')
                 sys.exit(75)
 
             builddir = project.builddir
             rfs = project.buildenv.rfs
             xml = project.xml
-            arch = xml.text("project/arch", key="arch")
-            codename = xml.text("project/suite")
+            arch = xml.text('project/arch', key='arch')
+            codename = xml.text('project/suite')
             init_codename = xml.get_initvm_codename()
         else:
             builddir = os.path.abspath(os.path.curdir)
@@ -87,7 +87,7 @@ def run_command(argv):
         if opt.source:
             with rfs:
                 cache = get_rpcaptcache(rfs, arch)
-                components = {"main": (rfs,
+                components = {'main': (rfs,
                                        cache,
                                        cache.get_corresponding_source_packages())}
                 generated_files += mk_source_cdrom(components, codename,
@@ -103,6 +103,6 @@ def run_command(argv):
                                                    xml,
                                                    builddir)
 
-        logging.info("Image Build finished.")
-        logging.info("Files generated:\n%s",
-                     "\n".join([str(f) for f in generated_files]))
+        logging.info('Image Build finished.')
+        logging.info('Files generated:\n%s',
+                     '\n'.join([str(f) for f in generated_files]))
diff --git a/elbepack/commands/parselicence.py b/elbepack/commands/parselicence.py
index a608610d..c2f273e5 100644
--- a/elbepack/commands/parselicence.py
+++ b/elbepack/commands/parselicence.py
@@ -66,9 +66,9 @@ class license_dep5_to_spdx (dict):
                 mapped_lic = self.map_one_license(
                     pkgname, with_split[0], errors)
                 if mapped_lic is None:
-                    mapped_lic = f"UNKNOWN_MAPPING({with_split[0]})"
+                    mapped_lic = f'UNKNOWN_MAPPING({with_split[0]})'
                 if len(with_split) == 2:
-                    ands.append(f"{mapped_lic} WITH {with_split[1]}")
+                    ands.append(f'{mapped_lic} WITH {with_split[1]}')
                 else:
                     ands.append(mapped_lic)
             ors.append(' AND '.join(ands))
@@ -99,7 +99,7 @@ def scan_nomos(license_text):
 
     expected_start = f'File {os.path.basename(f.name)} contains license(s) '
     if not nomos_out.startswith(expected_start):
-        raise Exception("nomos output error")
+        raise Exception('nomos output error')
 
     licenses = nomos_out[len(expected_start):].strip()
 
@@ -123,32 +123,32 @@ def license_string(pkg):
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog parselicence [options] <licencefile>")
-    oparser.add_option("--output", dest="output",
-                       help="outputfilename")
-    oparser.add_option("--mapping", dest="mapping",
-                       help="mapping filename")
+        usage='usage: %prog parselicence [options] <licencefile>')
+    oparser.add_option('--output', dest='output',
+                       help='outputfilename')
+    oparser.add_option('--mapping', dest='mapping',
+                       help='mapping filename')
     oparser.add_option(
-        "--use-nomos",
-        action="store_true",
-        dest="use_nomos",
+        '--use-nomos',
+        action='store_true',
+        dest='use_nomos',
         default=False,
-        help="Use the external nomos tool on the copyright text, "
-             "and record the ouput in out xml")
+        help='Use the external nomos tool on the copyright text, '
+             'and record the ouput in out xml')
     oparser.add_option(
-        "--errors-only",
-        action="store_true",
-        dest="only_errors",
+        '--errors-only',
+        action='store_true',
+        dest='only_errors',
         default=False,
-        help="Only Output Packages with errors, "
-             "needing a fix in the mapping file")
-    oparser.add_option("--tvout", dest="tagvalue",
-                       help="tag value output filename")
+        help='Only Output Packages with errors, '
+             'needing a fix in the mapping file')
+    oparser.add_option('--tvout', dest='tagvalue',
+                       help='tag value output filename')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("wrong number of arguments")
+        print('wrong number of arguments')
         oparser.print_help()
         sys.exit(53)
 
@@ -160,7 +160,7 @@ def run_command(argv):
     err_pkg = 0
 
     if not opt.mapping:
-        print("A mapping file is required")
+        print('A mapping file is required')
         oparser.print_help()
         sys.exit(54)
 
@@ -236,7 +236,7 @@ def run_command(argv):
             tree.root.remove_child(pkg)
 
     if opt.tagvalue is not None:
-        with io.open(opt.tagvalue, "wt", encoding='utf-8') as fp:
+        with io.open(opt.tagvalue, 'wt', encoding='utf-8') as fp:
             fp.write('SPDXVersion: SPDX-1.2\n')
             fp.write('DataLicense: CC0-1.0\n')
             fp.write('\n')
@@ -267,5 +267,5 @@ def run_command(argv):
     if opt.output is not None:
         tree.write(opt.output)
 
-    print("statistics:")
-    print(f"num:{num_pkg} mr:{mr} hr:{hr} err_pkg:{err_pkg}")
+    print('statistics:')
+    print(f'num:{num_pkg} mr:{mr} hr:{hr} err_pkg:{err_pkg}')
diff --git a/elbepack/commands/pbuilder.py b/elbepack/commands/pbuilder.py
index e308de4a..8a8198f2 100644
--- a/elbepack/commands/pbuilder.py
+++ b/elbepack/commands/pbuilder.py
@@ -10,73 +10,73 @@ from elbepack.xmlpreprocess import PreprocessWrapper
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: elbe pbuilder [options] <command>")
+    oparser = OptionParser(usage='usage: elbe pbuilder [options] <command>')
 
-    oparser.add_option("--project", dest="project", default=None,
-                       help="project directory on the initvm")
+    oparser.add_option('--project', dest='project', default=None,
+                       help='project directory on the initvm')
 
-    oparser.add_option("--xmlfile", dest="xmlfile", default=None,
-                       help="xmlfile to use")
+    oparser.add_option('--xmlfile', dest='xmlfile', default=None,
+                       help='xmlfile to use')
 
-    oparser.add_option("--writeproject", dest="writeproject", default=None,
-                       help="write project name to file")
+    oparser.add_option('--writeproject', dest='writeproject', default=None,
+                       help='write project name to file')
 
-    oparser.add_option("--skip-download", action="store_true",
-                       dest="skip_download", default=False,
-                       help="Skip downloading generated Files")
+    oparser.add_option('--skip-download', action='store_true',
+                       dest='skip_download', default=False,
+                       help='Skip downloading generated Files')
 
     oparser.add_option(
-        "--origfile",
-        dest="origfile",
+        '--origfile',
+        dest='origfile',
         default=[],
-        action="append",
-        help="upload orig file")
+        action='append',
+        help='upload orig file')
 
-    oparser.add_option("--output", dest="outdir", default=None,
-                       help="directory where to save downloaded Files")
+    oparser.add_option('--output', dest='outdir', default=None,
+                       help='directory where to save downloaded Files')
 
-    oparser.add_option("--cpuset", default=-1, type="int",
-                       help="Limit cpuset of pbuilder commands (bitmask) "
-                            "(defaults to -1 for all CPUs)")
+    oparser.add_option('--cpuset', default=-1, type='int',
+                       help='Limit cpuset of pbuilder commands (bitmask) '
+                            '(defaults to -1 for all CPUs)')
 
-    oparser.add_option("--profile", dest="profile", default="",
-                       help="profile that shall be built")
+    oparser.add_option('--profile', dest='profile', default='',
+                       help='profile that shall be built')
 
-    oparser.add_option("--cross", dest="cross", default=False,
-                       action="store_true",
-                       help="Creates an environment for crossbuilding if "
-                            "combined with create. Combined with build it"
-                            " will use this environment.")
+    oparser.add_option('--cross', dest='cross', default=False,
+                       action='store_true',
+                       help='Creates an environment for crossbuilding if '
+                            'combined with create. Combined with build it'
+                            ' will use this environment.')
 
-    oparser.add_option("--no-ccache", dest="noccache", default=False,
-                       action="store_true",
+    oparser.add_option('--no-ccache', dest='noccache', default=False,
+                       action='store_true',
                        help="Deactivates the compiler cache 'ccache'")
 
-    oparser.add_option("--ccache-size", dest="ccachesize", default="10G",
-                       action="store", type="string",
-                       help="set a limit for the compiler cache size "
-                            "(should be a number followed by an optional "
-                            "suffix: k, M, G, T. Use 0 for no limit.)")
+    oparser.add_option('--ccache-size', dest='ccachesize', default='10G',
+                       action='store', type='string',
+                       help='set a limit for the compiler cache size '
+                            '(should be a number followed by an optional '
+                            'suffix: k, M, G, T. Use 0 for no limit.)')
 
     PreprocessWrapper.add_options(oparser)
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("elbe pbuilder - no subcommand given", file=sys.stderr)
+        print('elbe pbuilder - no subcommand given', file=sys.stderr)
         PBuilderAction.print_actions()
         return
 
     try:
         action = PBuilderAction(args[0])
     except KeyError:
-        print("elbe pbuilder - unknown subcommand", file=sys.stderr)
+        print('elbe pbuilder - unknown subcommand', file=sys.stderr)
         PBuilderAction.print_actions()
         sys.exit(92)
 
     try:
         action.execute(opt, args[1:])
     except PBuilderError as e:
-        print("PBuilder Exception", file=sys.stderr)
+        print('PBuilder Exception', file=sys.stderr)
         print(e, file=sys.stderr)
         sys.exit(93)
diff --git a/elbepack/commands/pin_versions.py b/elbepack/commands/pin_versions.py
index 7b022ce5..94dcb87b 100644
--- a/elbepack/commands/pin_versions.py
+++ b/elbepack/commands/pin_versions.py
@@ -13,22 +13,22 @@ from elbepack.validate import validate_xml
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog pin_versions [options] <xmlfile>")
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+        usage='usage: %prog pin_versions [options] <xmlfile>')
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(94)
 
     if not opt.skip_validation:
         validation = validate_xml(args[0])
         if validation:
-            print("xml validation failed. Bailing out")
+            print('xml validation failed. Bailing out')
             for i in validation:
                 print(i)
             sys.exit(95)
@@ -36,17 +36,17 @@ def run_command(argv):
     try:
         xml = etree(args[0])
     except BaseException:
-        print("Error reading xml file!")
+        print('Error reading xml file!')
         sys.exit(96)
 
-    if not xml.has("fullpkgs"):
-        print("xml file does not have fullpkgs node")
+    if not xml.has('fullpkgs'):
+        print('xml file does not have fullpkgs node')
         sys.exit(97)
 
-    plist = xml.ensure_child("/target/pkg-list")
+    plist = xml.ensure_child('/target/pkg-list')
     plist.clear()
 
-    fullp = xml.node("fullpkgs")
+    fullp = xml.node('fullpkgs')
 
     for p in fullp:
         pname = p.et.text
@@ -60,5 +60,5 @@ def run_command(argv):
     try:
         xml.write(args[0])
     except BaseException:
-        print("Unable to write new xml file")
+        print('Unable to write new xml file')
         sys.exit(98)
diff --git a/elbepack/commands/pkgdiff.py b/elbepack/commands/pkgdiff.py
index 735c5cab..1ce469a5 100644
--- a/elbepack/commands/pkgdiff.py
+++ b/elbepack/commands/pkgdiff.py
@@ -16,17 +16,17 @@ from elbepack.elbexml import ElbeXML, ValidationMode
 def run_command(argv):
 
     oparser = OptionParser(
-        usage="usage: %prog pkgdiff [options] <rfs1> <rfs2>")
+        usage='usage: %prog pkgdiff [options] <rfs1> <rfs2>')
     oparser.add_option(
-        "--noauto",
-        action="store_true",
-        dest="noauto",
+        '--noauto',
+        action='store_true',
+        dest='noauto',
         default=False,
-        help="Dont compare automatically installed Packages")
+        help='Dont compare automatically installed Packages')
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(41)
 
@@ -78,12 +78,12 @@ def run_command(argv):
 
     for p in fix_pkgs:
         if p not in gen_pkgs:
-            print(f"+<pkg>{p}</pkg>")
+            print(f'+<pkg>{p}</pkg>')
 
     for p in gen_pkgs:
         if p not in fix_pkgs.keys():
-            print(f"-<pkg>{p}</pkg>")
+            print(f'-<pkg>{p}</pkg>')
 
     for p in fix_pkgs:
         if p in gen_pkgs.keys() and fix_pkgs[p] != gen_pkgs[p]:
-            print(f"{p}: Version mismatch {fix_pkgs[p]} != {gen_pkgs[p]}")
+            print(f'{p}: Version mismatch {fix_pkgs[p]} != {gen_pkgs[p]}')
diff --git a/elbepack/commands/preprocess.py b/elbepack/commands/preprocess.py
index 61b5b184..9cf1af4e 100644
--- a/elbepack/commands/preprocess.py
+++ b/elbepack/commands/preprocess.py
@@ -9,29 +9,29 @@ from elbepack.xmlpreprocess import XMLPreprocessError, xmlpreprocess
 
 
 def add_pass_through_options(oparser):
-    oparser.add_option("-v", "--variants", dest="variant",
+    oparser.add_option('-v', '--variants', dest='variant',
                        default=None,
-                       help="enable only tags with empty or given variant")
+                       help='enable only tags with empty or given variant')
 
-    oparser.add_option("-p", "--proxy", dest="proxy",
+    oparser.add_option('-p', '--proxy', dest='proxy',
                        default=None,
-                       help="add proxy to mirrors")
+                       help='add proxy to mirrors')
 
-    oparser.add_option("-z", "--gzip", dest="gzip", type="int",
+    oparser.add_option('-z', '--gzip', dest='gzip', type='int',
                        default=9,
-                       help="gzip compression level 1-9 (0: no compression)")
+                       help='gzip compression level 1-9 (0: no compression)')
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog preprocess [options] <xmlfile>")
-    oparser.add_option("-o", "--output", dest="output",
-                       default="preprocess.xml",
-                       help="preprocessed output file", metavar="<xmlfile>")
+    oparser = OptionParser(usage='usage: %prog preprocess [options] <xmlfile>')
+    oparser.add_option('-o', '--output', dest='output',
+                       default='preprocess.xml',
+                       help='preprocessed output file', metavar='<xmlfile>')
     add_pass_through_options(oparser)
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments", file=sys.stderr)
+        print('Wrong number of arguments', file=sys.stderr)
         oparser.print_help()
         sys.exit(112)
 
diff --git a/elbepack/commands/prjrepo.py b/elbepack/commands/prjrepo.py
index e1290f1a..32fffd09 100644
--- a/elbepack/commands/prjrepo.py
+++ b/elbepack/commands/prjrepo.py
@@ -18,44 +18,44 @@ from elbepack.config import cfg
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: elbe prjrepo [options] <command>")
+    oparser = OptionParser(usage='usage: elbe prjrepo [options] <command>')
 
-    oparser.add_option("--host", dest="host", default=cfg['soaphost'],
-                       help="Ip or hostname of elbe-daemon.")
+    oparser.add_option('--host', dest='host', default=cfg['soaphost'],
+                       help='Ip or hostname of elbe-daemon.')
 
-    oparser.add_option("--port", dest="port", default=cfg['soapport'],
-                       help="Port of soap itf on elbe-daemon.")
+    oparser.add_option('--port', dest='port', default=cfg['soapport'],
+                       help='Port of soap itf on elbe-daemon.')
 
-    oparser.add_option("--pass", dest="passwd", default=cfg['elbepass'],
-                       help="Password (default is foo).")
+    oparser.add_option('--pass', dest='passwd', default=cfg['elbepass'],
+                       help='Password (default is foo).')
 
-    oparser.add_option("--user", dest="user", default=cfg['elbeuser'],
-                       help="Username (default is root).")
+    oparser.add_option('--user', dest='user', default=cfg['elbeuser'],
+                       help='Username (default is root).')
 
     oparser.add_option(
-        "--retries",
-        dest="retries",
-        default="10",
-        help="How many times to retry the connection to the server before\
-                giving up (default is 10 times, yielding 10 seconds).")
+        '--retries',
+        dest='retries',
+        default='10',
+        help='How many times to retry the connection to the server before\
+                giving up (default is 10 times, yielding 10 seconds).')
 
     devel = OptionGroup(
         oparser,
-        "options for elbe developers",
+        'options for elbe developers',
         "Caution: Don't use these options in a productive environment")
-    devel.add_option("--debug", action="store_true",
-                     dest="debug", default=False,
-                     help="Enable debug mode.")
+    devel.add_option('--debug', action='store_true',
+                     dest='debug', default=False,
+                     help='Enable debug mode.')
 
-    devel.add_option("--ignore-version-diff", action="store_true",
-                     dest="ignore_version", default=False,
-                     help="allow different elbe version on host and initvm")
+    devel.add_option('--ignore-version-diff', action='store_true',
+                     dest='ignore_version', default=False,
+                     help='allow different elbe version on host and initvm')
     oparser.add_option_group(devel)
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("elbe prjrepo - no subcommand given", file=sys.stderr)
+        print('elbe prjrepo - no subcommand given', file=sys.stderr)
         RepoAction.print_actions()
         return
 
@@ -71,28 +71,28 @@ def run_command(argv):
                 opt.retries))
     except URLError:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Check, wether the initvm is actually running.", file=sys.stderr)
-        print("try `elbe initvm start`", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Check, wether the initvm is actually running.', file=sys.stderr)
+        print('try `elbe initvm start`', file=sys.stderr)
         sys.exit(10)
     except socket.error:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
+        print('', file=sys.stderr)
         print(
-            "Check, wether the Soap Server is running inside the initvm",
+            'Check, wether the Soap Server is running inside the initvm',
             file=sys.stderr)
         print("try 'elbe initvm attach'", file=sys.stderr)
         sys.exit(11)
     except BadStatusLine:
         print(
-            f"Failed to connect to Soap server {opt.host}:{opt.port}\n",
+            f'Failed to connect to Soap server {opt.host}:{opt.port}\n',
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Check, wether the initvm is actually running.", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Check, wether the initvm is actually running.', file=sys.stderr)
         print(
             "try 'elbe initvm --directory /path/to/initvm start'",
             file=sys.stderr)
@@ -103,10 +103,10 @@ def run_command(argv):
         v_server = control.service.get_version()
         if v_server != elbe_version:
             print(
-                f"elbe v{v_server} is used in initvm, this is not compatible "
-                f"with elbe v{elbe_version} that is used on this machine. "
-                "Please install same versions of elbe in initvm and on your "
-                "machine.",
+                f'elbe v{v_server} is used in initvm, this is not compatible '
+                f'with elbe v{elbe_version} that is used on this machine. '
+                'Please install same versions of elbe in initvm and on your '
+                'machine.',
                 file=sys.stderr)
 
             if not opt.ignore_version:
@@ -122,7 +122,7 @@ elbe inside the existing initvm.", file=sys.stderr)
     try:
         action = RepoAction(args[0])
     except KeyError:
-        print("elbe prjrepo - unknown subcommand", file=sys.stderr)
+        print('elbe prjrepo - unknown subcommand', file=sys.stderr)
         RepoAction.print_actions()
         sys.exit(22)
 
@@ -130,8 +130,8 @@ elbe inside the existing initvm.", file=sys.stderr)
     try:
         action.execute(control, opt, args[1:])
     except WebFault as e:
-        print("Server returned an error:", file=sys.stderr)
-        print("", file=sys.stderr)
+        print('Server returned an error:', file=sys.stderr)
+        print('', file=sys.stderr)
         if hasattr(e.fault, 'faultstring'):
             print(e.fault.faultstring, file=sys.stderr)
         else:
diff --git a/elbepack/commands/remove_sign.py b/elbepack/commands/remove_sign.py
index a9bb955e..382e2c22 100644
--- a/elbepack/commands/remove_sign.py
+++ b/elbepack/commands/remove_sign.py
@@ -7,12 +7,12 @@ from elbepack.egpg import unsign_file
 
 def run_command(argv):
     if len(argv) != 1:
-        print("Wrong number of arguments.")
-        print("Please pass the name of the file to unsign.")
+        print('Wrong number of arguments.')
+        print('Please pass the name of the file to unsign.')
         return
 
     fname = unsign_file(argv[0])
     if fname:
-        print(f"unsigned file: {fname}")
+        print(f'unsigned file: {fname}')
     else:
-        print("removing signature failed")
+        print('removing signature failed')
diff --git a/elbepack/commands/repodir.py b/elbepack/commands/repodir.py
index 8c896b47..41a0d39c 100644
--- a/elbepack/commands/repodir.py
+++ b/elbepack/commands/repodir.py
@@ -10,20 +10,20 @@ from elbepack.repodir import RepodirError, Repodir
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog repodir [options] <xmlfile>")
-    oparser.add_option("-o", "--output", dest="output",
-                       default="repodir.xml",
-                       help="preprocessed output file", metavar="<xmlfile>")
+    oparser = OptionParser(usage='usage: %prog repodir [options] <xmlfile>')
+    oparser.add_option('-o', '--output', dest='output',
+                       default='repodir.xml',
+                       help='preprocessed output file', metavar='<xmlfile>')
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments", file=sys.stderr)
+        print('Wrong number of arguments', file=sys.stderr)
         oparser.print_help()
         sys.exit(55)
 
     xml_input = args[0]
     if not os.path.isfile(xml_input):
-        print(f"{xml_input} does not exist", file=sys.stderr)
+        print(f'{xml_input} does not exist', file=sys.stderr)
         sys.exit(56)
 
     if os.path.exists(opt.output):
diff --git a/elbepack/commands/setsel.py b/elbepack/commands/setsel.py
index 0c8e94b0..1bfeec38 100644
--- a/elbepack/commands/setsel.py
+++ b/elbepack/commands/setsel.py
@@ -10,7 +10,7 @@ from elbepack.treeutils import etree
 
 
 def parse_selections(fname):
-    fp = open(fname, "r")
+    fp = open(fname, 'r')
 
     sels = []
 
@@ -22,7 +22,7 @@ def parse_selections(fname):
 
         sp = lic.split()
 
-        print(f"{sp[0]} {sp[1]}")
+        print(f'{sp[0]} {sp[1]}')
 
         if sp[1] == 'install':
             sels.append(sp[0])
@@ -33,17 +33,17 @@ def parse_selections(fname):
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: %prog setsel <xmlfile> <pkglist.txt>")
+    oparser = OptionParser(usage='usage: %prog setsel <xmlfile> <pkglist.txt>')
     (_, args) = oparser.parse_args(argv)
 
     if len(args) != 2:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(23)
 
     xml = etree(args[0])
 
-    pkg_list = xml.node("/pkg-list")
+    pkg_list = xml.node('/pkg-list')
 
     pkg_list.clear()
 
diff --git a/elbepack/commands/show.py b/elbepack/commands/show.py
index ebb66c7a..4ac9add9 100644
--- a/elbepack/commands/show.py
+++ b/elbepack/commands/show.py
@@ -12,25 +12,25 @@ from elbepack.validate import validate_xml
 
 def run_command(argv):
 
-    oparser = OptionParser(usage="usage: %prog show [options] <filename>")
+    oparser = OptionParser(usage='usage: %prog show [options] <filename>')
 
-    oparser.add_option("--verbose", action="store_true", dest="verbose",
+    oparser.add_option('--verbose', action='store_true', dest='verbose',
                        default=False,
-                       help="show detailed project informations")
+                       help='show detailed project informations')
 
-    oparser.add_option("--skip-validation", action="store_true",
-                       dest="skip_validation", default=False,
-                       help="Skip xml schema validation")
+    oparser.add_option('--skip-validation', action='store_true',
+                       dest='skip_validation', default=False,
+                       help='Skip xml schema validation')
 
     (opt, args) = oparser.parse_args(argv)
 
     if not args:
-        print("No Filename specified")
+        print('No Filename specified')
         oparser.print_help()
         sys.exit(107)
 
     if len(args) > 1:
-        print("too many filenames specified")
+        print('too many filenames specified')
         oparser.print_help()
         sys.exit(108)
 
@@ -38,42 +38,42 @@ def run_command(argv):
         if not opt.skip_validation:
             validation = validate_xml(args[0])
             if validation:
-                print("xml validation failed. Bailing out")
+                print('xml validation failed. Bailing out')
                 for i in validation:
                     print(i)
                 sys.exit(109)
 
         xml = etree(args[0])
     except BaseException:
-        print("Unable to open xml File. Bailing out")
+        print('Unable to open xml File. Bailing out')
         sys.exit(110)
 
-    if not xml.has("./project"):
-        print("no project description available")
+    if not xml.has('./project'):
+        print('no project description available')
         sys.exit(111)
 
-    print(f"== {args[0]} ==")
+    print(f'== {args[0]} ==')
     print(f"Debian suite: {xml.text('./project/suite')}")
-    for s in xml.text("./project/description").splitlines():
-        print(f"{s.strip()}")
+    for s in xml.text('./project/description').splitlines():
+        print(f'{s.strip()}')
     if opt.verbose:
-        if xml.has("./target/passwd"):
+        if xml.has('./target/passwd'):
             print(f"root password: {xml.text('./target/passwd')}")
         print(
-            "primary_mirror: "
+            'primary_mirror: '
             f"{xml.text('./project/mirror/primary_proto')}://"
             f"{xml.text('./project/mirror/primary_host')}"
             f"{xml.text('./project/mirror/primary_path')}")
-        if xml.has("./project/mirror/url-list"):
-            print("additional mirrors:")
-            for url in xml.node("./project/mirror/url-list"):
-                if url.has("binary"):
+        if xml.has('./project/mirror/url-list'):
+            print('additional mirrors:')
+            for url in xml.node('./project/mirror/url-list'):
+                if url.has('binary'):
                     print(f"    deb {url.text('binary').strip()}")
-                if url.has("source"):
+                if url.has('source'):
                     print(f"    deb-src {url.text('source').strip()}")
-        if xml.has("./target/pkg-list"):
-            print("packages:")
-            for pkg in xml.node("./target/pkg-list"):
-                print(f"    {pkg.et.text}")
+        if xml.has('./target/pkg-list'):
+            print('packages:')
+            for pkg in xml.node('./target/pkg-list'):
+                print(f'    {pkg.et.text}')
         print(f"skip package validation: {xml.has('./project/noauth')}")
         print(f"archive embedded?        {xml.has('./archive')}")
diff --git a/elbepack/commands/sign.py b/elbepack/commands/sign.py
index 29c0318b..d735b3cd 100644
--- a/elbepack/commands/sign.py
+++ b/elbepack/commands/sign.py
@@ -7,8 +7,8 @@ from elbepack.egpg import sign_file
 
 def run_command(argv):
     if len(argv) != 2:
-        print("Wrong number of arguments.")
-        print("Please pass the name of the file to sign "
-              "and a valid gnupg fingerprint.")
+        print('Wrong number of arguments.')
+        print('Please pass the name of the file to sign '
+              'and a valid gnupg fingerprint.')
         return
     sign_file(argv[0], argv[1])
diff --git a/elbepack/commands/test.py b/elbepack/commands/test.py
index 0f9a8b77..909ac92c 100644
--- a/elbepack/commands/test.py
+++ b/elbepack/commands/test.py
@@ -33,10 +33,10 @@ class ElbeTestException(Exception):
         self.out = out
 
     def __repr__(self):
-        return f"ElbeTestException: \"{self.cmd}\" returns {self.ret}"
+        return f'ElbeTestException: "{self.cmd}" returns {self.ret}'
 
     def __str__(self):
-        return f"ElbeTestException: \"{self.cmd}\" returns {self.ret}"
+        return f'ElbeTestException: "{self.cmd}" returns {self.ret}'
 
 
 def system(cmd, allow_fail=False):
@@ -58,7 +58,7 @@ class ElbeTestCase(unittest.TestCase):
     def __str__(self):
         name = super(ElbeTestCase, self).__str__()
         if self.param:
-            return f"{name} : param={self.param}"
+            return f'{name} : param={self.param}'
         return name
 
     def parameterize(self, param):
@@ -77,7 +77,7 @@ class ElbeTestSuite:
             if isinstance(test, ElbeTestSuite):
                 continue
 
-            if not hasattr(test, "params"):
+            if not hasattr(test, 'params'):
                 self.tests.append(test)
                 continue
 
@@ -149,7 +149,7 @@ class ElbeTestResult(unittest.TestResult):
 
     def get_xml(self):
         with warnings.catch_warnings():
-            warnings.simplefilter("ignore")
+            warnings.simplefilter('ignore')
             results = self.buffer.getvalue()
 
         return results
@@ -158,33 +158,33 @@ class ElbeTestResult(unittest.TestResult):
 def run_command(argv):
 
     this_dir = os.path.dirname(os.path.realpath(__file__))
-    top_dir = os.path.join(this_dir, "..", "..")
+    top_dir = os.path.join(this_dir, '..', '..')
 
-    oparser = optparse.OptionParser(usage="usage: %prog [options]")
+    oparser = optparse.OptionParser(usage='usage: %prog [options]')
 
-    oparser.add_option("-f", "--filter", dest="filter",
-                       metavar="REGEX", type="string", default=".*",
-                       help="Run specific test according to a filter rule")
+    oparser.add_option('-f', '--filter', dest='filter',
+                       metavar='REGEX', type='string', default='.*',
+                       help='Run specific test according to a filter rule')
 
-    oparser.add_option("-l", "--level", dest="level",
-                       type="string", default="BASE",
-                       help="Set test level threshold")
+    oparser.add_option('-l', '--level', dest='level',
+                       type='string', default='BASE',
+                       help='Set test level threshold')
 
-    oparser.add_option("-i", "--invert", dest="invert_re",
-                       action="store_true", default=False,
-                       help="Invert the matching of --filter")
+    oparser.add_option('-i', '--invert', dest='invert_re',
+                       action='store_true', default=False,
+                       help='Invert the matching of --filter')
 
-    oparser.add_option("-d", "--dry-run", dest="dry_run",
-                       action="store_true", default=False,
-                       help="List tests that would have been executed and exit")
+    oparser.add_option('-d', '--dry-run', dest='dry_run',
+                       action='store_true', default=False,
+                       help='List tests that would have been executed and exit')
 
-    oparser.add_option("-p", "--parallel", dest="parallel",
-                       type="string", default="0,1",
-                       help="Run every thest where test_ID % N == node_ID")
+    oparser.add_option('-p', '--parallel', dest='parallel',
+                       type='string', default='0,1',
+                       help='Run every thest where test_ID % N == node_ID')
 
-    oparser.add_option("-o", "--output", dest="output",
-                       type="string", default=None,
-                       help="Write XML output to file")
+    oparser.add_option('-o', '--output', dest='output',
+                       type='string', default=None,
+                       help='Write XML output to file')
 
     (opt, _) = oparser.parse_args(argv)
 
@@ -208,8 +208,8 @@ def run_command(argv):
     # Dry run? Just exit gently
     if opt.dry_run:
         suite.ls()
-        print("======================================================================\n"
-              "This was a dry run. No tests were executed")
+        print('======================================================================\n'
+              'This was a dry run. No tests were executed')
         os.sys.exit(0)
 
     result = ElbeTestResult()
@@ -219,9 +219,9 @@ def run_command(argv):
     if opt.output is None:
         print(result.get_xml())
     else:
-        with open(opt.output, "w") as f:
+        with open(opt.output, 'w') as f:
             f.write(result.get_xml())
 
     if not result.wasSuccessful():
-        print("Testsuite failed.")
+        print('Testsuite failed.')
         os.sys.exit(77)
diff --git a/elbepack/commands/toolchainextract.py b/elbepack/commands/toolchainextract.py
index 7966976f..c3140e7b 100644
--- a/elbepack/commands/toolchainextract.py
+++ b/elbepack/commands/toolchainextract.py
@@ -16,15 +16,15 @@ from elbepack.log import elbe_logging
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog toolchainextract [options]")
-    oparser.add_option("-p", "--path", dest="path",
-                       help="path to toolchain")
-    oparser.add_option("-o", "--output", dest="output",
-                       help="output repository path")
-    oparser.add_option("-c", "--codename", dest="codename",
-                       help="distro codename for repository")
-    oparser.add_option("-b", "--buildtype", dest="buildtype",
-                       help="Override the buildtype")
+    oparser = OptionParser(usage='usage: %prog toolchainextract [options]')
+    oparser.add_option('-p', '--path', dest='path',
+                       help='path to toolchain')
+    oparser.add_option('-o', '--output', dest='output',
+                       help='output repository path')
+    oparser.add_option('-c', '--codename', dest='codename',
+                       help='distro codename for repository')
+    oparser.add_option('-b', '--buildtype', dest='buildtype',
+                       help='Override the buildtype')
     (opt, _) = oparser.parse_args(argv)
 
     if not opt.path:
@@ -43,33 +43,33 @@ def run_command(argv):
     defaults = ElbeDefaults(opt.buildtype)
 
     toolchain = get_toolchain(
-        defaults["toolchaintype"],
+        defaults['toolchaintype'],
         opt.path,
-        defaults["arch"])
+        defaults['arch'])
 
     tmpdir = mkdtemp()
 
     for lib in toolchain.pkg_libs:
         files = toolchain.get_files_for_pkg(lib)
 
-        pkglibpath = os.path.join("usr/lib", defaults["triplet"])
+        pkglibpath = os.path.join('usr/lib', defaults['triplet'])
         fmap = [(f, pkglibpath) for f in files]
 
         build_binary_deb(
             lib,
-            defaults["arch"],
-            defaults["toolchainver"],
+            defaults['arch'],
+            defaults['toolchainver'],
             lib +
-            " extracted from toolchain",
+            ' extracted from toolchain',
             fmap,
             toolchain.pkg_deps[lib],
             tmpdir)
 
     pkgs = os.listdir(tmpdir)
 
-    with elbe_logging({"streams": sys.stdout}):
+    with elbe_logging({'streams': sys.stdout}):
 
-        repo = ToolchainRepo(defaults["arch"],
+        repo = ToolchainRepo(defaults['arch'],
                              opt.codename,
                              opt.output)
 
diff --git a/elbepack/commands/updated.py b/elbepack/commands/updated.py
index 7fcfae6b..cfaf7da9 100644
--- a/elbepack/commands/updated.py
+++ b/elbepack/commands/updated.py
@@ -32,33 +32,33 @@ def run_command(argv):
 
     status = UpdateStatus()
 
-    oparser = OptionParser(usage="usage: %prog updated [options] <filename>")
+    oparser = OptionParser(usage='usage: %prog updated [options] <filename>')
 
-    oparser.add_option("--directory", dest="update_dir",
-                       help="monitor dir (default is /var/cache/elbe/updates)",
-                       metavar="FILE")
+    oparser.add_option('--directory', dest='update_dir',
+                       help='monitor dir (default is /var/cache/elbe/updates)',
+                       metavar='FILE')
 
-    oparser.add_option("--repocache", dest="repo_dir",
-                       help="monitor dir (default is /var/cache/elbe/repos)",
-                       metavar="FILE")
+    oparser.add_option('--repocache', dest='repo_dir',
+                       help='monitor dir (default is /var/cache/elbe/repos)',
+                       metavar='FILE')
 
-    oparser.add_option("--host", dest="host", default="",
-                       help="listen host")
+    oparser.add_option('--host', dest='host', default='',
+                       help='listen host')
 
-    oparser.add_option("--port", dest="port", default=8088,
-                       help="listen port")
+    oparser.add_option('--port', dest='port', default=8088,
+                       help='listen port')
 
-    oparser.add_option("--nosign", action="store_true", dest="nosign",
+    oparser.add_option('--nosign', action='store_true', dest='nosign',
                        default=False,
-                       help="accept none signed files")
+                       help='accept unsigned files')
 
-    oparser.add_option("--verbose", action="store_true", dest="verbose",
+    oparser.add_option('--verbose', action='store_true', dest='verbose',
                        default=False,
-                       help="force output to stdout instead of syslog")
+                       help='force output to stdout instead of syslog')
 
-    oparser.add_option("--usb", action="store_true", dest="use_usb",
+    oparser.add_option('--usb', action='store_true', dest='use_usb',
                        default=False,
-                       help="monitor USB devices")
+                       help='monitor USB devices')
 
     (opt, _) = oparser.parse_args(argv)
 
@@ -66,12 +66,12 @@ def run_command(argv):
     status.verbose = opt.verbose
 
     if not opt.update_dir:
-        update_dir = "/var/cache/elbe/updates"
+        update_dir = '/var/cache/elbe/updates'
     else:
         update_dir = opt.update_dir
 
     if not opt.repo_dir:
-        status.repo_dir = "/var/cache/elbe/repos"
+        status.repo_dir = '/var/cache/elbe/repos'
     else:
         status.repo_dir = opt.repo_dir
 
@@ -88,8 +88,8 @@ def run_command(argv):
             status.monitors.append(um)
         else:
             status.log(
-                "USB Monitor has been requested. "
-                "This requires pyudev module which could not be imported.")
+                'USB Monitor has been requested. '
+                'This requires pyudev module which could not be imported.')
             sys.exit(1)
 
     signal.signal(signal.SIGTERM, shutdown)
@@ -110,7 +110,7 @@ def run_command(argv):
     try:
         status.soapserver.serve_forever()
     except BaseException:
-        shutdown(1, "now", status)
+        shutdown(1, 'now', status)
 
     for mon in status.monitors:
         mon.join()
diff --git a/elbepack/commands/validate.py b/elbepack/commands/validate.py
index 351758f4..c0114419 100644
--- a/elbepack/commands/validate.py
+++ b/elbepack/commands/validate.py
@@ -10,10 +10,10 @@ from elbepack.elbexml import ElbeXML, ValidationMode, ValidationError
 
 
 def run_command(argv):
-    oparser = OptionParser(usage="usage: %prog validate <xmlfile>")
-    oparser.add_option("--validate-urls", dest="validate_urls",
-                       help="try to access specified repositories",
-                       default=False, action="store_true")
+    oparser = OptionParser(usage='usage: %prog validate <xmlfile>')
+    oparser.add_option('--validate-urls', dest='validate_urls',
+                       help='try to access specified repositories',
+                       default=False, action='store_true')
 
     (opt, args) = oparser.parse_args(argv)
 
@@ -22,13 +22,13 @@ def run_command(argv):
         sys.exit(58)
 
     if not os.path.exists(args[0]):
-        print(f"{args[0]} - file not found")
+        print(f'{args[0]} - file not found')
         oparser.print_help()
         sys.exit(59)
 
     validation = validate_xml(args[0])
     if validation:
-        print("validation failed")
+        print('validation failed')
         for i in validation:
             print(i)
         sys.exit(60)
diff --git a/elbepack/commands/xsdtoasciidoc.py b/elbepack/commands/xsdtoasciidoc.py
index e3ce15b6..07578dd1 100644
--- a/elbepack/commands/xsdtoasciidoc.py
+++ b/elbepack/commands/xsdtoasciidoc.py
@@ -13,26 +13,26 @@ from elbepack.templates import write_template
 
 def run_command(argv):
     oparser = OptionParser(
-        usage="usage: %prog xsdtoasciidoc [options] <xsdfile>")
+        usage='usage: %prog xsdtoasciidoc [options] <xsdfile>')
 
-    oparser.add_option("--output", dest="out",
-                       help="specify output filename",
-                       metavar="FILE")
+    oparser.add_option('--output', dest='out',
+                       help='specify output filename',
+                       metavar='FILE')
 
     (opt, args) = oparser.parse_args(argv)
 
     if len(args) != 1:
-        print("Wrong number of arguments")
+        print('Wrong number of arguments')
         oparser.print_help()
         sys.exit(90)
 
     xml = etree(args[0])
 
     if not opt.out:
-        print("--output is mandatory")
+        print('--output is mandatory')
         sys.exit(91)
 
-    d = {"opt": opt,
-         "xml": xml}
+    d = {'opt': opt,
+         'xml': xml}
 
     write_template(opt.out, xsdtoasciidoc_mako_fname, d)
diff --git a/elbepack/config.py b/elbepack/config.py
index b3a46a02..54a1603e 100644
--- a/elbepack/config.py
+++ b/elbepack/config.py
@@ -8,15 +8,15 @@ import os
 class Config(dict):
     def __init__(self):
         dict.__init__(self)
-        self['soaphost'] = "localhost"
-        self['soapport'] = "7587"
+        self['soaphost'] = 'localhost'
+        self['soapport'] = '7587'
         self['soaptimeout'] = 90
-        self['sshport'] = "5022"
-        self['elbeuser'] = "root"
-        self['elbepass'] = "foo"
-        self['pbuilder_jobs'] = "auto"
-        self['initvm_domain'] = "initvm"
-        self['mirrorsed'] = ""
+        self['sshport'] = '5022'
+        self['elbeuser'] = 'root'
+        self['elbepass'] = 'foo'
+        self['pbuilder_jobs'] = 'auto'
+        self['initvm_domain'] = 'initvm'
+        self['mirrorsed'] = ''
 
         if 'ELBE_SOAPPORT' in os.environ:
             self['soapport'] = os.environ['ELBE_SOAPPORT']
diff --git a/elbepack/daemons/soap/__init__.py b/elbepack/daemons/soap/__init__.py
index 3b92caa3..87324a20 100644
--- a/elbepack/daemons/soap/__init__.py
+++ b/elbepack/daemons/soap/__init__.py
@@ -27,7 +27,7 @@ warnings.simplefilter('ignore', category=ResourceWarning)
 class EsoapApp(Application):
     def __init__(self, *args, **kargs):
         Application.__init__(self, *args, **kargs)
-        self.pm = ProjectManager("/var/cache/elbe")
+        self.pm = ProjectManager('/var/cache/elbe')
 
 
 class MySession (SessionMiddleware, SimplePlugin):
diff --git a/elbepack/daemons/soap/authentication.py b/elbepack/daemons/soap/authentication.py
index f1aa287e..1a92e0b8 100644
--- a/elbepack/daemons/soap/authentication.py
+++ b/elbepack/daemons/soap/authentication.py
@@ -92,7 +92,7 @@ def authenticated_uid(func):
             return func(self, uid, arg1, arg2, arg3, arg4, arg5)
         return wrapped
 
-    raise Exception(f"arg count {func.__code__.co_argcount} not implemented")
+    raise Exception(f'arg count {func.__code__.co_argcount} not implemented')
 
 
 def authenticated_admin(func):
@@ -166,4 +166,4 @@ def authenticated_admin(func):
             return func(self, arg1, arg2, arg3, arg4, arg5)
         return wrapped
 
-    raise Exception(f"arg count {func.__code__.co_argcount} not implemented")
+    raise Exception(f'arg count {func.__code__.co_argcount} not implemented')
diff --git a/elbepack/daemons/soap/esoap.py b/elbepack/daemons/soap/esoap.py
index 45d1eaea..c381c344 100644
--- a/elbepack/daemons/soap/esoap.py
+++ b/elbepack/daemons/soap/esoap.py
@@ -67,8 +67,8 @@ class ESoap (ServiceBase):
         # prerm/postinst scripts.
         # elbe daemon does it itself, because cherrypy
         # notices that.
-        hostfs.write_file("usr/sbin/policy-rc.d",
-                          0o755, "#!/bin/sh\nexit 101\n")
+        hostfs.write_file('usr/sbin/policy-rc.d',
+                          0o755, '#!/bin/sh\nexit 101\n')
         try:
             env = {'LANG': 'C',
                    'LANGUAGE': 'C',
@@ -76,7 +76,7 @@ class ESoap (ServiceBase):
                    'DEBIAN_FRONTEND': 'noninteractive',
                    'DEBCONF_NONINTERACTIVE_SEEN': 'true'}
 
-            cmd = ("apt-get update; "
+            cmd = ('apt-get update; '
                    f"apt-get install -y --allow-downgrades {' '.join(pkgs)}")
 
             ret, out = command_out(cmd, env_add=env)
@@ -122,9 +122,9 @@ class ESoap (ServiceBase):
         if part == 0:
             if self.app.pm.db.is_busy(builddir):
                 return -1
-            self.app.pm.db.set_busy(builddir, ["empty_project", "needs_build",
-                                               "has_changes", "build_done",
-                                               "build_failed"])
+            self.app.pm.db.set_busy(builddir, ['empty_project', 'needs_build',
+                                               'has_changes', 'build_done',
+                                               'build_failed'])
             # truncate file
             with open(fn, 'w') as fp:
                 fp.write('')
@@ -132,8 +132,8 @@ class ESoap (ServiceBase):
         if part == -1:
             with open(fn, 'a') as fp:
                 fp.flush()
-            self.app.pm.db.reset_busy(builddir, "has_changes")
-            if fname == "source.xml":
+            self.app.pm.db.reset_busy(builddir, 'has_changes')
+            if fname == 'source.xml':
                 # ensure that the project cache is reloaded
                 self.app.pm.close_current_project(uid)
                 self.app.pm.open_project(
@@ -154,21 +154,21 @@ class ESoap (ServiceBase):
 
         size = 1024 * 1024 * 5
         pos = size * part
-        file_name = builddir + "/" + filename
+        file_name = builddir + '/' + filename
         file_stat = os.stat(file_name)
 
         if pos >= file_stat.st_size:
-            return "EndOfFile"
+            return 'EndOfFile'
 
         with open(file_name, 'rb') as fp:
             if not fp:
-                return "FileNotFound"
+                return 'FileNotFound'
             try:
                 fp.seek(pos)
                 data = fp.read(size)
                 return binascii.b2a_base64(data)
             except BaseException:
-                return "EndOfFile"
+                return 'EndOfFile'
 
     @rpc(String)
     @authenticated_uid
@@ -228,10 +228,10 @@ class ESoap (ServiceBase):
         self.app.pm.open_project(
             uid, builddir, url_validation=ValidationMode.NO_CHECK)
 
-        cdrom_fname = os.path.join(builddir, "uploaded_cdrom.iso")
+        cdrom_fname = os.path.join(builddir, 'uploaded_cdrom.iso')
 
         # Now write empty File
-        fp = open(cdrom_fname, "w")
+        fp = open(cdrom_fname, 'w')
         fp.close()
 
     @rpc(String, String)
@@ -241,10 +241,10 @@ class ESoap (ServiceBase):
         self.app.pm.open_project(
             uid, builddir, url_validation=ValidationMode.NO_CHECK)
 
-        cdrom_fname = os.path.join(builddir, "uploaded_cdrom.iso")
+        cdrom_fname = os.path.join(builddir, 'uploaded_cdrom.iso')
 
         # Now append data to cdrom_file
-        fp = open(cdrom_fname, "ab")
+        fp = open(cdrom_fname, 'ab')
         fp.write(binascii.a2b_base64(data))
         fp.close()
 
@@ -262,10 +262,10 @@ class ESoap (ServiceBase):
     def start_pdebuild(self, uid, builddir):
         self.app.pm.open_project(uid, builddir)
 
-        pdebuild_fname = os.path.join(builddir, "current_pdebuild.tar.gz")
+        pdebuild_fname = os.path.join(builddir, 'current_pdebuild.tar.gz')
 
         # Now write empty File
-        fp = open(pdebuild_fname, "w")
+        fp = open(pdebuild_fname, 'w')
         fp.close()
 
     @rpc(String, String)
@@ -274,10 +274,10 @@ class ESoap (ServiceBase):
     def append_pdebuild(self, uid, builddir, data):
         self.app.pm.open_project(uid, builddir)
 
-        pdebuild_fname = os.path.join(builddir, "current_pdebuild.tar.gz")
+        pdebuild_fname = os.path.join(builddir, 'current_pdebuild.tar.gz')
 
         # Now write empty File
-        fp = open(pdebuild_fname, "ab")
+        fp = open(pdebuild_fname, 'ab')
         fp.write(binascii.a2b_base64(data))
         fp.close()
 
@@ -297,7 +297,7 @@ class ESoap (ServiceBase):
         orig_fname = os.path.join(builddir, fname)
 
         # Now write empty File
-        fp = open(orig_fname, "w")
+        fp = open(orig_fname, 'w')
         fp.close()
 
         self.app.pm.set_orig_fname(uid, fname)
@@ -311,7 +311,7 @@ class ESoap (ServiceBase):
         orig_fname = os.path.join(builddir, self.app.pm.get_orig_fname(uid))
 
         # Now append to File
-        fp = open(orig_fname, "ab")
+        fp = open(orig_fname, 'ab')
         fp.write(binascii.a2b_base64(data))
         fp.close()
 
@@ -369,7 +369,7 @@ class ESoap (ServiceBase):
     @authenticated_admin
     @soap_faults
     def shutdown_initvm(self):
-        system("systemctl --no-block poweroff")
+        system('systemctl --no-block poweroff')
 
     @rpc(String)
     @authenticated_uid
@@ -385,7 +385,7 @@ class ESoap (ServiceBase):
         self.app.pm.open_project(uid, builddir)
         s = ''
         for _, _, filenames in os.walk(
-                os.path.join(builddir, "repo/pool/main")):
+                os.path.join(builddir, 'repo/pool/main')):
             for filename in fnmatch.filter(filenames, '*.deb'):
                 s += filename + '\n'
         return s
@@ -395,12 +395,12 @@ class ESoap (ServiceBase):
     @soap_faults
     def tar_prjrepo(self, uid, builddir, filename):
         self.app.pm.open_project(uid, builddir)
-        with tarfile.open(os.path.join(builddir, filename), "w:gz") as tar:
+        with tarfile.open(os.path.join(builddir, filename), 'w:gz') as tar:
             tar.add(
                 os.path.join(
-                    builddir, "repo"), arcname=os.path.basename(
+                    builddir, 'repo'), arcname=os.path.basename(
                     os.path.join(
-                        builddir, "repo")))
+                        builddir, 'repo')))
 
     @rpc(String, String)
     @authenticated_uid
diff --git a/elbepack/daemons/soap/faults.py b/elbepack/daemons/soap/faults.py
index b61ea372..4cd3ac98 100644
--- a/elbepack/daemons/soap/faults.py
+++ b/elbepack/daemons/soap/faults.py
@@ -16,14 +16,14 @@ from elbepack.db import ElbeDBError, InvalidLogin
 
 class SoapElbeDBError(Fault):
     def __init__(self, dberr):
-        Fault.__init__(self, faultcode="ElbeDBError", faultstring=str(dberr))
+        Fault.__init__(self, faultcode='ElbeDBError', faultstring=str(dberr))
 
 
 class SoapElbeProjectError(Fault):
     def __init__(self, err):
         Fault.__init__(
             self,
-            faultcode="ElbeProjectError",
+            faultcode='ElbeProjectError',
             faultstring=str(err))
 
 
@@ -31,39 +31,39 @@ class SoapElbeAuthenticationFailed(Fault):
     def __init__(self):
         Fault.__init__(
             self,
-            faultcode="ElbeAuthenticationFailed",
-            faultstring="Authentication Failed")
+            faultcode='ElbeAuthenticationFailed',
+            faultstring='Authentication Failed')
 
 
 class SoapElbeNotLoggedIn(Fault):
     def __init__(self):
         Fault.__init__(
             self,
-            faultcode="ElbeNotLoggedIn",
-            faultstring="Not authenticated ! "
-                        "Cant let you perform this command.")
+            faultcode='ElbeNotLoggedIn',
+            faultstring='Not authenticated ! '
+                        'Cant let you perform this command.')
 
 
 class SoapElbeNotAuthorized(Fault):
     def __init__(self):
         Fault.__init__(
             self,
-            faultcode="ElbeNotAuthorized",
-            faultstring="Not Authorized ! Cant let you perform this command.")
+            faultcode='ElbeNotAuthorized',
+            faultstring='Not Authorized ! Cant let you perform this command.')
 
 
 class SoapElbeValidationError(Fault):
     def __init__(self, exc):
         Fault.__init__(
             self,
-            faultcode="ElbeValidationError",
+            faultcode='ElbeValidationError',
             faultstring=exc.__repr__())
 
 
 class SoapElbeInvalidState(Fault):
     def __init__(self):
-        Fault.__init__(self, faultcode="ElbeInvalidState",
-                       faultstring="Project is Busy ! Operation Invalid")
+        Fault.__init__(self, faultcode='ElbeInvalidState',
+                       faultstring='Project is Busy ! Operation Invalid')
 
 
 def soap_faults(func):
@@ -89,7 +89,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -109,7 +109,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -129,7 +129,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -149,7 +149,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -169,7 +169,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -189,7 +189,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -209,7 +209,7 @@ def soap_faults(func):
             except ElbeDBError as e:
                 raise SoapElbeDBError(str(e))
             except OSError as e:
-                raise SoapElbeProjectError("OSError: " + str(e))
+                raise SoapElbeProjectError('OSError: ' + str(e))
             except ValidationError as e:
                 raise SoapElbeValidationError(e)
             except InvalidLogin:
@@ -218,4 +218,4 @@ def soap_faults(func):
                 raise SoapElbeProjectError(format_exc())
         return wrapped
 
-    raise Exception(f"arg count {func.__code__.co_argcount} not implemented")
+    raise Exception(f'arg count {func.__code__.co_argcount} not implemented')
diff --git a/elbepack/db.py b/elbepack/db.py
index b7c5aaa4..532109e4 100644
--- a/elbepack/db.py
+++ b/elbepack/db.py
@@ -30,7 +30,7 @@ from elbepack.elbeproject import ElbeProject
 from elbepack.elbexml import (ElbeXML, ValidationMode)
 from elbepack.dosunix import dos2unix
 
-os.environ['SQLALCHEMY_SILENCE_UBER_WARNING'] = "1"
+os.environ['SQLALCHEMY_SILENCE_UBER_WARNING'] = '1'
 Base = declarative_base()
 
 
@@ -41,7 +41,7 @@ class ElbeDBError(Exception):
 
 class InvalidLogin(Exception):
     def __init__(self):
-        Exception.__init__(self, "Invalid login")
+        Exception.__init__(self, 'Invalid login')
 
 
 @contextmanager
@@ -51,7 +51,7 @@ def session_scope(session):
         try:
             session.commit()
         except OperationalError as e:
-            raise ElbeDBError("database commit failed: " + str(e))
+            raise ElbeDBError('database commit failed: ' + str(e))
     except BaseException:
         session.rollback()
         raise
@@ -129,20 +129,20 @@ class ElbeDB:
     def get_project_data(self, builddir):
         # Can throw: ElbeDBError
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
         with session_scope(self.session) as s:
             try:
                 p = s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
             return ProjectData(p)
 
     def set_postbuild(self, builddir, postbuild_file):
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
         with session_scope(self.session) as s:
             p = None
@@ -151,27 +151,27 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    "cannot set postbuild file while project "
-                    f"{builddir} is busy")
+                    'cannot set postbuild file while project '
+                    f'{builddir} is busy')
 
             p.edit = datetime.utcnow()
 
-            with open(builddir + "/postbuild.sh", 'w') as dst:
+            with open(builddir + '/postbuild.sh', 'w') as dst:
                 copyfileobj(postbuild_file, dst)
 
-            os.chmod(builddir + "/postbuild.sh", 0o755)
-            dos2unix(builddir + "/postbuild.sh")
+            os.chmod(builddir + '/postbuild.sh', 0o755)
+            dos2unix(builddir + '/postbuild.sh')
 
             return _update_project_file(s, builddir,
-                                        "postbuild.sh", "application/sh", "postbuild script")
+                                        'postbuild.sh', 'application/sh', 'postbuild script')
 
     def set_savesh(self, builddir, savesh_file):
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
         with session_scope(self.session) as s:
             p = None
@@ -180,31 +180,31 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    f"cannot set savesh file while project {builddir} is busy")
+                    f'cannot set savesh file while project {builddir} is busy')
 
             p.edit = datetime.utcnow()
-            if p.status == "empty_project" or p.status == "build_failed":
-                p.status = "needs_build"
-            elif p.status == "build_done":
-                p.status = "has_changes"
+            if p.status == 'empty_project' or p.status == 'build_failed':
+                p.status = 'needs_build'
+            elif p.status == 'build_done':
+                p.status = 'has_changes'
 
-            with open(builddir + "/save.sh", 'w') as dst:
+            with open(builddir + '/save.sh', 'w') as dst:
                 copyfileobj(savesh_file, dst)
 
-            os.chmod(builddir + "/save.sh", 0o755)
-            dos2unix(builddir + "/save.sh")
+            os.chmod(builddir + '/save.sh', 0o755)
+            dos2unix(builddir + '/save.sh')
 
             return _update_project_file(
                 s, builddir,
-                "save.sh", "application/sh", "version save script")
+                'save.sh', 'application/sh', 'version save script')
 
     def set_presh(self, builddir, presh_file):
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
         with session_scope(self.session) as s:
             p = None
@@ -213,29 +213,29 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    f"cannot set presh file while project {builddir} is busy")
+                    f'cannot set presh file while project {builddir} is busy')
 
             p.edit = datetime.utcnow()
-            if p.status == "empty_project" or p.status == "build_failed":
-                p.status = "needs_build"
-            elif p.status == "build_done":
-                p.status = "has_changes"
+            if p.status == 'empty_project' or p.status == 'build_failed':
+                p.status = 'needs_build'
+            elif p.status == 'build_done':
+                p.status = 'has_changes'
 
-            with open(builddir + "/pre.sh", 'w') as dst:
+            with open(builddir + '/pre.sh', 'w') as dst:
                 copyfileobj(presh_file, dst)
 
-            dos2unix(builddir + "/pre.sh")
+            dos2unix(builddir + '/pre.sh')
 
             return _update_project_file(
-                s, builddir, "pre.sh", "application/sh", "pre install script")
+                s, builddir, 'pre.sh', 'application/sh', 'pre install script')
 
     def set_postsh(self, builddir, postsh_file):
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
         with session_scope(self.session) as s:
             p = None
@@ -244,34 +244,34 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    f"cannot set postsh file while project {builddir} is busy")
+                    f'cannot set postsh file while project {builddir} is busy')
 
             p.edit = datetime.utcnow()
-            if p.status == "empty_project" or p.status == "build_failed":
-                p.status = "needs_build"
-            elif p.status == "build_done":
-                p.status = "has_changes"
+            if p.status == 'empty_project' or p.status == 'build_failed':
+                p.status = 'needs_build'
+            elif p.status == 'build_done':
+                p.status = 'has_changes'
 
-            with open(builddir + "/post.sh", 'w') as dst:
+            with open(builddir + '/post.sh', 'w') as dst:
                 copyfileobj(postsh_file, dst)
 
-            dos2unix(builddir + "/post.sh")
+            dos2unix(builddir + '/post.sh')
 
             return _update_project_file(
                 s, builddir,
-                "post.sh", "application/sh", "post install script")
+                'post.sh', 'application/sh', 'post install script')
 
     def set_xml(self, builddir, xml_file):
         # This method can throw: ElbeDBError, ValidationError, OSError
 
         if not os.path.exists(builddir):
-            raise ElbeDBError("project directory does not exist")
+            raise ElbeDBError('project directory does not exist')
 
-        srcxml_fname = os.path.join(builddir, "source.xml")
+        srcxml_fname = os.path.join(builddir, 'source.xml')
 
         if xml_file is None:
             xml_file = srcxml_fname
@@ -283,23 +283,23 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    f"cannot set XML file while project {builddir} is busy")
+                    f'cannot set XML file while project {builddir} is busy')
 
             xml = ElbeXML(
                 xml_file,
                 url_validation=ValidationMode.NO_CHECK)  # ValidationError
 
-            p.name = xml.text("project/name")
-            p.version = xml.text("project/version")
+            p.name = xml.text('project/name')
+            p.version = xml.text('project/version')
             p.edit = datetime.utcnow()
-            if p.status == "empty_project" or p.status == "build_failed":
-                p.status = "needs_build"
-            elif p.status == "build_done":
-                p.status = "has_changes"
+            if p.status == 'empty_project' or p.status == 'build_failed':
+                p.status = 'needs_build'
+            elif p.status == 'build_done':
+                p.status = 'has_changes'
 
             if xml_file != srcxml_fname:
                 copyfile(xml_file, srcxml_fname)  # OSError
@@ -307,9 +307,9 @@ class ElbeDB:
             _update_project_file(
                 s,
                 builddir,
-                "source.xml",
-                "application/xml",
-                "ELBE recipe of the project")
+                'source.xml',
+                'application/xml',
+                'ELBE recipe of the project')
 
     # TODO what about source.xml ? stored always in db ? version management ?
     #       build/needs_build state ? locking ?
@@ -323,7 +323,7 @@ class ElbeDB:
                 if s.query(Project).\
                         filter(Project.builddir == builddir).count() > 0:
                     raise ElbeDBError(
-                        f"project {builddir} already exists in database")
+                        f'project {builddir} already exists in database')
 
                 try:
                     os.makedirs(builddir)  # OSError
@@ -331,10 +331,10 @@ class ElbeDB:
                 except OSError as e:
                     if e.errno == errno.EEXIST:
                         raise ElbeDBError(
-                            f"project directory {builddir} already exists")
+                            f'project directory {builddir} already exists')
                     raise
 
-                p = Project(builddir=builddir, status="empty_project",
+                p = Project(builddir=builddir, status='empty_project',
                             owner_id=owner_id)
                 s.add(p)
         except BaseException:
@@ -353,11 +353,11 @@ class ElbeDB:
                 p = s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    f"cannot delete project {builddir} while it is busy")
+                    f'cannot delete project {builddir} while it is busy')
 
             if os.path.exists(builddir):
                 # delete project in background to avoid blocking caller for a
@@ -380,20 +380,20 @@ class ElbeDB:
                 p = s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            sourcexmlpath = os.path.join(builddir, "source.xml")
+            sourcexmlpath = os.path.join(builddir, 'source.xml')
             if os.path.exists(sourcexmlpath):
-                p.status = "needs_build"
+                p.status = 'needs_build'
             else:
-                p.status = "empty_project"
+                p.status = 'empty_project'
 
         if clean:
-            targetpath = os.path.join(builddir, "target")
+            targetpath = os.path.join(builddir, 'target')
             if os.path.exists(targetpath):
                 rmtree(targetpath)      # OSError
 
-            chrootpath = os.path.join(builddir, "chroot")
+            chrootpath = os.path.join(builddir, 'chroot')
             if os.path.exists(chrootpath):
                 rmtree(chrootpath)      # OSError
 
@@ -411,20 +411,20 @@ class ElbeDB:
 
             if not os.path.exists(ep.builddir):
                 os.makedirs(ep.builddir)
-            if not os.path.isfile(ep.builddir + "/source.xml") and ep.xml:
-                ep.xml.xml.write(ep.builddir + "/source.xml")
+            if not os.path.isfile(ep.builddir + '/source.xml') and ep.xml:
+                ep.xml.xml.write(ep.builddir + '/source.xml')
 
-            with open(ep.builddir + "/source.xml") as xml_file:
+            with open(ep.builddir + '/source.xml') as xml_file:
                 xml_str = xml_file.read()
                 if not project:
-                    project = Project(name=ep.xml.text("project/name"),
-                                      version=ep.xml.text("project/version"),
+                    project = Project(name=ep.xml.text('project/name'),
+                                      version=ep.xml.text('project/version'),
                                       builddir=ep.builddir,
                                       xml=xml_str)
                     s.add(project)
                 else:
                     project.edit = datetime.utcnow()
-                    project.version = ep.xml.text("project/version")
+                    project.version = ep.xml.text('project/version')
                     project.xml = xml_str
 
     def load_project(
@@ -474,24 +474,24 @@ class ElbeDB:
                                    url_validation=url_validation)
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
     def set_busy(self, builddir, allowed_status):
-        assert "busy" not in allowed_status
+        assert 'busy' not in allowed_status
         with session_scope(self.session) as s:
             try:
                 p = s.query(Project).with_for_update(). \
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
             if p.status not in allowed_status:
-                raise ElbeDBError("project: " + builddir +
-                                  " set_busy: invalid status: " + p.status)
+                raise ElbeDBError('project: ' + builddir +
+                                  ' set_busy: invalid status: ' + p.status)
 
             old_status = p.status
-            p.status = "busy"
+            p.status = 'busy'
             return old_status
 
     def is_busy(self, builddir):
@@ -501,12 +501,12 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            return p.status == "busy"
+            return p.status == 'busy'
 
     def reset_busy(self, builddir, new_status):
-        assert new_status in ("has_changes", "build_done", "build_failed")
+        assert new_status in ('has_changes', 'build_done', 'build_failed')
 
         with session_scope(self.session) as s:
             try:
@@ -514,13 +514,13 @@ class ElbeDB:
                     filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status != "busy":
+            if p.status != 'busy':
                 raise ElbeDBError(
-                    "project: " +
+                    'project: ' +
                     builddir +
-                    " reset_busy: invalid status: " +
+                    ' reset_busy: invalid status: ' +
                     p.status)
 
             p.status = new_status
@@ -532,9 +532,9 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            return p.status == "has_changes"
+            return p.status == 'has_changes'
 
     def get_owner_id(self, builddir):
         with session_scope(self.session) as s:
@@ -543,7 +543,7 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
             if p.owner_id is None:
                 return None
@@ -551,12 +551,12 @@ class ElbeDB:
             return int(p.owner_id)
 
     def set_project_version(self, builddir, new_version=None):
-        if new_version == "":
-            raise ElbeDBError("version number must not be empty")
+        if new_version == '':
+            raise ElbeDBError('version number must not be empty')
 
-        if not re.match("^[A-Za-z0-9_.-]{1,25}$", new_version):
+        if not re.match('^[A-Za-z0-9_.-]{1,25}$', new_version):
             raise ElbeDBError(
-                "version number must contain valid characters [A-Za-z0-9_-.]")
+                'version number must contain valid characters [A-Za-z0-9_-.]')
 
         with session_scope(self.session) as s:
             try:
@@ -564,23 +564,23 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "empty_project" or p.status == "busy":
+            if p.status == 'empty_project' or p.status == 'busy':
                 raise ElbeDBError(
-                    "project: " +
+                    'project: ' +
                     builddir +
-                    " set_project_version: invalid status: " +
+                    ' set_project_version: invalid status: ' +
                     p.status)
 
-            xmlpath = os.path.join(builddir, "source.xml")
+            xmlpath = os.path.join(builddir, 'source.xml')
             xml = ElbeXML(xmlpath, url_validation=ValidationMode.NO_CHECK)
 
             if new_version is not None:
-                xml.node("/project/version").set_text(new_version)
+                xml.node('/project/version').set_text(new_version)
                 xml.xml.write(xmlpath)
 
-            p.version = xml.text("/project/version")
+            p.version = xml.text('/project/version')
 
     def list_project_versions(self, builddir):
         with session_scope(self.session) as s:
@@ -589,7 +589,7 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
             return [ProjectVersionData(v) for v in p.versions]
 
@@ -600,24 +600,24 @@ class ElbeDB:
                     one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            assert p.status == "busy"
+            assert p.status == 'busy'
 
-            sourcexmlpath = os.path.join(builddir, "source.xml")
+            sourcexmlpath = os.path.join(builddir, 'source.xml')
             sourcexml = ElbeXML(sourcexmlpath,
                                 url_validation=ValidationMode.NO_CHECK)
 
-            version = sourcexml.text("project/version")
+            version = sourcexml.text('project/version')
             if s.query(ProjectVersion).\
                     filter(ProjectVersion.builddir == builddir).\
                     filter(ProjectVersion.version == version).count() > 0:
                 raise ElbeDBError(
-                    f"Version {version} already exists for project in "
-                    f"{builddir}, please change version number first")
+                    f'Version {version} already exists for project in '
+                    f'{builddir}, please change version number first')
 
             versionxmlname = get_versioned_filename(p.name, version,
-                                                    ".version.xml")
+                                                    '.version.xml')
             versionxmlpath = os.path.join(builddir, versionxmlname)
             copyfile(sourcexmlpath, versionxmlpath)
 
@@ -627,8 +627,8 @@ class ElbeDB:
             s.add(v)
 
             _update_project_file(s, builddir, versionxmlname,
-                                 "application/xml",
-                                 f"source.xml for version {version}")
+                                 'application/xml',
+                                 f'source.xml for version {version}')
 
     def set_version_description(self, builddir, version, description):
         with session_scope(self.session) as s:
@@ -638,7 +638,7 @@ class ElbeDB:
                     filter(ProjectVersion.version == version).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"no such project version: {builddir} (version {version})")
+                    f'no such project version: {builddir} (version {version})')
 
             v.description = description
 
@@ -650,13 +650,13 @@ class ElbeDB:
                     filter(ProjectVersion.version == version).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"no such project version: {builddir} (version {version})")
+                    f'no such project version: {builddir} (version {version})')
 
-            assert v.project.status == "busy"
+            assert v.project.status == 'busy'
 
-            sourcexmlpath = os.path.join(builddir, "source.xml")
+            sourcexmlpath = os.path.join(builddir, 'source.xml')
             versionxmlname = get_versioned_filename(v.project.name, version,
-                                                    ".version.xml")
+                                                    '.version.xml')
             versionxmlpath = os.path.join(builddir, versionxmlname)
 
             copyfile(versionxmlpath, sourcexmlpath)
@@ -670,16 +670,16 @@ class ElbeDB:
                     filter(ProjectVersion.version == version).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"no such project version: {builddir} (version {version})")
+                    f'no such project version: {builddir} (version {version})')
 
             if not force:
-                if v.project.status == "busy":
+                if v.project.status == 'busy':
                     raise ElbeDBError(
-                        f"cannot delete version of project in {builddir} while "
-                        "it is busy")
+                        f'cannot delete version of project in {builddir} while '
+                        'it is busy')
 
             xmlname = get_versioned_filename(v.project.name, version,
-                                             ".version.xml")
+                                             '.version.xml')
             xmlpath = os.path.join(builddir, xmlname)
             os.remove(xmlpath)
             s.delete(v)
@@ -695,10 +695,10 @@ class ElbeDB:
                     filter(ProjectVersion.version == version).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"no such project version: {builddir} (version {version})")
+                    f'no such project version: {builddir} (version {version})')
 
             xmlname = get_versioned_filename(v.project.name, version,
-                                             ".version.xml")
+                                             '.version.xml')
             return os.path.join(builddir, xmlname)
 
     def get_project_files(self, builddir):
@@ -708,13 +708,13 @@ class ElbeDB:
                 p = s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    "project: " +
+                    'project: ' +
                     builddir +
-                    " get_project_files: invalid status: " +
+                    ' get_project_files: invalid status: ' +
                     p.status)
 
             return [ProjectFileData(f) for f in p.files]
@@ -725,13 +725,13 @@ class ElbeDB:
                 p = s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
-            if p.status == "busy":
+            if p.status == 'busy':
                 raise ElbeDBError(
-                    "project: " +
+                    'project: ' +
                     builddir +
-                    " get_project_file: invalid status: " +
+                    ' get_project_file: invalid status: ' +
                     p.status)
 
             try:
@@ -740,7 +740,7 @@ class ElbeDB:
                     filter(ProjectFile.name == name).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"no file {name} in project {builddir} registered")
+                    f'no file {name} in project {builddir} registered')
 
             return ProjectFileData(f)
 
@@ -750,7 +750,7 @@ class ElbeDB:
                 s.query(Project).filter(Project.builddir == builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {builddir} is not registered in the database")
+                    f'project {builddir} is not registered in the database')
 
             _update_project_file(s, builddir, name, mime_type,
                                  description)
@@ -762,7 +762,7 @@ class ElbeDB:
                     filter(Project.builddir == ep.builddir).one()
             except NoResultFound:
                 raise ElbeDBError(
-                    f"project {ep.builddir} is not registered in the database")
+                    f'project {ep.builddir} is not registered in the database')
 
             # Delete no longer existing files from the database
             files = s.query(ProjectFile).\
@@ -778,77 +778,77 @@ class ElbeDB:
                 for img in images:
                     _update_project_file(
                         s, p.builddir, img,
-                        "application/octet-stream", "Image")
+                        'application/octet-stream', 'Image')
 
             # Add other generated files
-            _update_project_file(s, p.builddir, "source.xml",
-                                 "application/xml",
-                                 "Current source.xml of the project")
+            _update_project_file(s, p.builddir, 'source.xml',
+                                 'application/xml',
+                                 'Current source.xml of the project')
 
-            for name in ["chroot", "target", "sysroot-target", "sysroot-host"]:
+            for name in ['chroot', 'target', 'sysroot-target', 'sysroot-host']:
 
-                _update_project_file(s, p.builddir, f"licence-{name}.txt",
-                                     "text/plain; charset=utf-8",
-                                     "License file")
+                _update_project_file(s, p.builddir, f'licence-{name}.txt',
+                                     'text/plain; charset=utf-8',
+                                     'License file')
 
-                _update_project_file(s, p.builddir, f"licence-{name}.xml",
-                                     "application/xml",
-                                     "xml License file")
+                _update_project_file(s, p.builddir, f'licence-{name}.xml',
+                                     'application/xml',
+                                     'xml License file')
 
-            _update_project_file(s, p.builddir, "validation.txt",
-                                 "text/plain; charset=utf-8",
-                                 "Package list validation result")
+            _update_project_file(s, p.builddir, 'validation.txt',
+                                 'text/plain; charset=utf-8',
+                                 'Package list validation result')
 
-            _update_project_file(s, p.builddir, "elbe-report.txt",
-                                 "text/plain; charset=utf-8",
-                                 "Report")
+            _update_project_file(s, p.builddir, 'elbe-report.txt',
+                                 'text/plain; charset=utf-8',
+                                 'Report')
 
-            _update_project_file(s, p.builddir, "log.txt",
-                                 "text/plain; charset=utf-8",
-                                 "Log file")
+            _update_project_file(s, p.builddir, 'log.txt',
+                                 'text/plain; charset=utf-8',
+                                 'Log file')
 
-            _update_project_file(s, p.builddir, "sysroot.tar.xz",
-                                 "application/x-xz-compressed-tar",
-                                 "sysroot for cross-toolchains")
+            _update_project_file(s, p.builddir, 'sysroot.tar.xz',
+                                 'application/x-xz-compressed-tar',
+                                 'sysroot for cross-toolchains')
 
-            sdk = glob.glob(os.path.join(p.builddir, "setup-elbe-sdk-*.sh"))
+            sdk = glob.glob(os.path.join(p.builddir, 'setup-elbe-sdk-*.sh'))
             try:
                 # throws index error if no  setup-elbe-sdk-* file exists
                 # that's ok because it might not yet been built
                 sdkname = sdk[0].split('/')[-1]
 
                 _update_project_file(s, p.builddir, sdkname,
-                                     "application/x-shellscript",
-                                     "SDK Installer")
+                                     'application/x-shellscript',
+                                     'SDK Installer')
             except IndexError:
                 pass
 
-            _update_project_file(s, p.builddir, "chroot.tar.xz",
-                                 "application/x-xz-compressed-tar",
+            _update_project_file(s, p.builddir, 'chroot.tar.xz',
+                                 'application/x-xz-compressed-tar',
                                  "chroot for 'native' development")
 
             # Add Repository iso images
             for img in ep.repo_images:
                 name = os.path.basename(img)
                 _update_project_file(s, p.builddir, name,
-                                     "application/octet-stream",
-                                     "Repository IsoImage")
+                                     'application/octet-stream',
+                                     'Repository IsoImage')
 
             # Scan pbuilder/build directory if that exists
-            if os.path.exists(os.path.join(p.builddir, "pbuilder", "result")):
-                pbresult_path = os.path.join(p.builddir, "pbuilder", "result")
-                pfile_path = os.path.join("pbuilder", "result")
+            if os.path.exists(os.path.join(p.builddir, 'pbuilder', 'result')):
+                pbresult_path = os.path.join(p.builddir, 'pbuilder', 'result')
+                pfile_path = os.path.join('pbuilder', 'result')
             else:
-                pbresult_path = os.path.join(p.builddir, "pbuilder_cross",
-                                             "result")
-                pfile_path = os.path.join("pbuilder_cross", "result")
+                pbresult_path = os.path.join(p.builddir, 'pbuilder_cross',
+                                             'result')
+                pfile_path = os.path.join('pbuilder_cross', 'result')
 
             if os.path.isdir(pbresult_path):
                 for f in os.listdir(pbresult_path):
                     pfile = os.path.join(pfile_path, f)
                     _update_project_file(s, p.builddir, pfile,
-                                         "application/octet-stream",
-                                         "Pbuilder artifact")
+                                         'application/octet-stream',
+                                         'Pbuilder artifact')
 
     def add_user(self, name, fullname, password, email, admin):
 
@@ -862,7 +862,7 @@ class ElbeDB:
 
         with session_scope(self.session) as s:
             if s.query(User).filter(User.name == name).count() > 0:
-                raise ElbeDBError(f"user {name} already exists in the database")
+                raise ElbeDBError(f'user {name} already exists in the database')
             s.add(u)
 
     def modify_user(self, userid, name, fullname, email, admin,
@@ -872,13 +872,13 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.id == userid).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with id {userid}")
+                raise ElbeDBError(f'no user with id {userid}')
 
             # If a user name change is requested, check for uniqueness
             if name != u.name:
                 if s.query(User).filter(User.name == name).count() > 0:
                     raise ElbeDBError(
-                        f"user {name} already exists in the database")
+                        f'user {name} already exists in the database')
 
             u.name = name
             u.fullname = fullname
@@ -894,7 +894,7 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.id == userid).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with id {userid}")
+                raise ElbeDBError(f'no user with id {userid}')
 
             # Get a list of all projects owned by the user to delete. Set their
             # owner to nobody and return them to the caller later, so it can
@@ -930,7 +930,7 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.id == userid).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with id {userid}")
+                raise ElbeDBError(f'no user with id {userid}')
 
             return bool(u.admin)
 
@@ -939,7 +939,7 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.id == userid).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with id {userid}")
+                raise ElbeDBError(f'no user with id {userid}')
 
             return str(u.name)
 
@@ -948,7 +948,7 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.id == userid).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with id {userid}")
+                raise ElbeDBError(f'no user with id {userid}')
 
             return UserData(u)
 
@@ -957,7 +957,7 @@ class ElbeDB:
             try:
                 u = s.query(User).filter(User.name == name).one()
             except NoResultFound:
-                raise ElbeDBError(f"no user with name {name}")
+                raise ElbeDBError(f'no user with name {name}')
 
             return int(u.id)
 
@@ -990,7 +990,7 @@ class User(Base):
     pwhash = Column(String)
     email = Column(String)
     admin = Column(Boolean)
-    projects = relationship("Project", backref="owner")
+    projects = relationship('Project', backref='owner')
 
 
 class UserData:
@@ -1012,8 +1012,8 @@ class Project (Base):
     status = Column(String)
     edit = Column(DateTime, default=datetime.utcnow)
     owner_id = Column(Integer, ForeignKey('users.id'))
-    versions = relationship("ProjectVersion", backref="project")
-    files = relationship("ProjectFile", backref="project")
+    versions = relationship('ProjectVersion', backref='project')
+    files = relationship('ProjectFile', backref='project')
 
 
 class ProjectData:
diff --git a/elbepack/dbaction.py b/elbepack/dbaction.py
index 00b31bd3..7edd4faa 100644
--- a/elbepack/dbaction.py
+++ b/elbepack/dbaction.py
@@ -20,9 +20,9 @@ class DbAction:
 
     @classmethod
     def print_actions(cls):
-        print("available actions are:")
+        print('available actions are:')
         for a in cls.actiondict:
-            print(f"   {a}")
+            print(f'   {a}')
 
     def __new__(cls, node):
         action = cls.actiondict[node]
@@ -42,13 +42,13 @@ class InitAction(DbAction):
         DbAction.__init__(self, node)
 
     def execute(self, args):
-        oparser = OptionParser(usage="usage: %prog db init [options]")
-        oparser.add_option("--name", dest="name", default="root")
-        oparser.add_option("--fullname", dest="fullname", default="Admin")
-        oparser.add_option("--password", dest="password", default="foo")
-        oparser.add_option("--email", dest="email", default="root at localhost")
-        oparser.add_option("--noadmin", dest="admin", default=True,
-                           action="store_false")
+        oparser = OptionParser(usage='usage: %prog db init [options]')
+        oparser.add_option('--name', dest='name', default='root')
+        oparser.add_option('--fullname', dest='fullname', default='Admin')
+        oparser.add_option('--password', dest='password', default='foo')
+        oparser.add_option('--email', dest='email', default='root at localhost')
+        oparser.add_option('--noadmin', dest='admin', default=True,
+                           action='store_false')
 
         (opt, _) = oparser.parse_args(args)
 
@@ -67,17 +67,17 @@ class AddUserAction(DbAction):
 
     def execute(self, args):
         oparser = OptionParser(
-            usage="usage: %prog db add_user [options] <username>")
-        oparser.add_option("--fullname", dest="fullname")
-        oparser.add_option("--password", dest="password")
-        oparser.add_option("--email", dest="email")
-        oparser.add_option("--admin", dest="admin", default=False,
-                           action="store_true")
+            usage='usage: %prog db add_user [options] <username>')
+        oparser.add_option('--fullname', dest='fullname')
+        oparser.add_option('--password', dest='password')
+        oparser.add_option('--email', dest='email')
+        oparser.add_option('--admin', dest='admin', default=False,
+                           action='store_true')
 
         (opt, arg) = oparser.parse_args(args)
 
         if len(arg) != 1:
-            print("wrong number of arguments")
+            print('wrong number of arguments')
             oparser.print_help()
             return
 
@@ -101,22 +101,22 @@ class DelUserAction(DbAction):
 
     def execute(self, args):
         oparser = OptionParser(
-            usage="usage: %prog db del_user [options] <userid>")
-        oparser.add_option("--delete-projects", dest="delete_projects",
-                           default=False, action="store_true")
-        oparser.add_option("--quiet", dest="quiet",
-                           default=False, action="store_true")
+            usage='usage: %prog db del_user [options] <userid>')
+        oparser.add_option('--delete-projects', dest='delete_projects',
+                           default=False, action='store_true')
+        oparser.add_option('--quiet', dest='quiet',
+                           default=False, action='store_true')
 
         (opt, arg) = oparser.parse_args(args)
 
         if len(arg) != 1:
-            print("usage: elbe db del_user <userid>")
+            print('usage: elbe db del_user <userid>')
             return
 
         try:
             userid = int(arg[0])
         except ValueError as E:
-            print(f"userid must be an integer - {E}")
+            print(f'userid must be an integer - {E}')
             return
 
         db = ElbeDB()
@@ -126,18 +126,18 @@ class DelUserAction(DbAction):
         if projects:
             if not opt.quiet:
                 if opt.delete_projects:
-                    print("removing projects owned by the deleted user:")
+                    print('removing projects owned by the deleted user:')
                 else:
-                    print("keeping projects owned by the deleted user:")
+                    print('keeping projects owned by the deleted user:')
 
         for p in projects:
             if not opt.quiet:
-                print(f"{p.builddir}: {p.name} [{p.version}] {p.edit}")
+                print(f'{p.builddir}: {p.name} [{p.version}] {p.edit}')
             if opt.delete_projects:
                 try:
                     db.del_project(p.builddir)
                 except ElbeDBError as e:
-                    print(f"  ==> {e} ")
+                    print(f'  ==> {e} ')
 
 
 DbAction.register(DelUserAction)
@@ -155,7 +155,7 @@ class ListProjectsAction(DbAction):
         projects = db.list_projects()
 
         for p in projects:
-            print(f"{p.builddir}: {p.name} [{p.version}] {p.edit}")
+            print(f'{p.builddir}: {p.name} [{p.version}] {p.edit}')
 
 
 DbAction.register(ListProjectsAction)
@@ -173,7 +173,7 @@ class ListUsersAction(DbAction):
         users = db.list_users()
 
         for u in users:
-            print(f"{u.name}: {u.fullname} <{u.email}>")
+            print(f'{u.name}: {u.fullname} <{u.email}>')
 
 
 DbAction.register(ListUsersAction)
@@ -188,9 +188,9 @@ class CreateProjectAction(DbAction):
 
     def execute(self, args):
         oparser = OptionParser(
-            usage="usage: %prog db create_project [options] <project_dir>")
-        oparser.add_option("--user", dest="user",
-                           help="user name of the designated project owner")
+            usage='usage: %prog db create_project [options] <project_dir>')
+        oparser.add_option('--user', dest='user',
+                           help='user name of the designated project owner')
         (opt, arg) = oparser.parse_args(args)
 
         if len(arg) != 1:
@@ -214,7 +214,7 @@ class DeleteProjectAction(DbAction):
 
     def execute(self, args):
         if len(args) != 1:
-            print("usage: elbe db del_project <project_dir>")
+            print('usage: elbe db del_project <project_dir>')
             return
 
         db = ElbeDB()
@@ -233,7 +233,7 @@ class SetXmlAction(DbAction):
 
     def execute(self, args):
         if len(args) != 2:
-            print("usage: elbe db set_xml <project_dir> <xml>")
+            print('usage: elbe db set_xml <project_dir> <xml>')
             return
 
         db = ElbeDB()
@@ -252,22 +252,22 @@ class BuildAction(DbAction):
 
     def execute(self, args):
         if len(args) != 1:
-            print("usage: elbe db build <project_dir>")
+            print('usage: elbe db build <project_dir>')
             return
 
         db = ElbeDB()
-        db.set_busy(args[0], ["empty_project", "needs_build", "has_changes",
-                              "build_done", "build_failed"])
+        db.set_busy(args[0], ['empty_project', 'needs_build', 'has_changes',
+                              'build_done', 'build_failed'])
         try:
             ep = db.load_project(args[0])
             ep.build()
             db.update_project_files(ep)
         except Exception as e:
             db.update_project_files(ep)
-            db.reset_busy(args[0], "build_failed")
+            db.reset_busy(args[0], 'build_failed')
             print(str(e))
             return
-        db.reset_busy(args[0], "build_done")
+        db.reset_busy(args[0], 'build_done')
 
 
 DbAction.register(BuildAction)
@@ -282,14 +282,14 @@ class GetFilesAction(DbAction):
 
     def execute(self, args):
         if len(args) != 1:
-            print("usage: elbe db get_files <project_dir>")
+            print('usage: elbe db get_files <project_dir>')
             return
 
         db = ElbeDB()
         files = db.get_project_files(args[0])
         for f in files:
             if f.description:
-                print(f"{f.name:40}  {f.description}")
+                print(f'{f.name:40}  {f.description}')
             else:
                 print(f.name)
 
@@ -306,14 +306,14 @@ class ResetProjectAction(DbAction):
 
     def execute(self, args):
         oparser = OptionParser(
-            usage="usage: %prog db reset_project [options] <project_dir>")
-        oparser.add_option("--clean", dest="clean", default=False,
-                           action="store_true")
+            usage='usage: %prog db reset_project [options] <project_dir>')
+        oparser.add_option('--clean', dest='clean', default=False,
+                           action='store_true')
 
         (opt, arg) = oparser.parse_args(args)
 
         if len(arg) != 1:
-            print("wrong number of arguments")
+            print('wrong number of arguments')
             oparser.print_help()
             return
 
@@ -333,7 +333,7 @@ class SetProjectVersionAction(DbAction):
 
     def execute(self, args):
         if len(args) != 2:
-            print("usage: elbe db set_project_version <project_dir> <version>")
+            print('usage: elbe db set_project_version <project_dir> <version>')
             return
 
         db = ElbeDB()
@@ -352,7 +352,7 @@ class ListVersionsAction(DbAction):
 
     def execute(self, args):
         if len(args) != 1:
-            print("usage: elbe db list_versions <project_dir>")
+            print('usage: elbe db list_versions <project_dir>')
             return
 
         db = ElbeDB()
@@ -360,7 +360,7 @@ class ListVersionsAction(DbAction):
 
         for v in versions:
             if v.description:
-                print(f"{v.version}: {v.description}")
+                print(f'{v.version}: {v.description}')
             else:
                 print(v.version)
 
@@ -377,13 +377,13 @@ class SaveVersionAction(DbAction):
 
     def execute(self, args):
         oparser = OptionParser(
-            usage="usage: %prog db save_version <project_dir>")
-        oparser.add_option("--description", dest="description")
+            usage='usage: %prog db save_version <project_dir>')
+        oparser.add_option('--description', dest='description')
 
         (opt, arg) = oparser.parse_args(args)
 
         if len(arg) != 1:
-            print("wrong number of arguments")
+            print('wrong number of arguments')
             oparser.print_help()
             return
 
@@ -403,7 +403,7 @@ class DelVersionAction(DbAction):
 
     def execute(self, args):
         if len(args) != 2:
-            print("usage: elbe db del_version <project_dir> <version>")
+            print('usage: elbe db del_version <project_dir> <version>')
             return
 
         db = ElbeDB()
@@ -422,7 +422,7 @@ class PrintVersionXMLAction(DbAction):
 
     def execute(self, args):
         if len(args) != 2:
-            print("usage: elbe db print_version_xml <project_dir> <version>")
+            print('usage: elbe db print_version_xml <project_dir> <version>')
             return
 
         db = ElbeDB()
diff --git a/elbepack/debinstaller.py b/elbepack/debinstaller.py
index cd189d0e..e9bc0827 100644
--- a/elbepack/debinstaller.py
+++ b/elbepack/debinstaller.py
@@ -68,20 +68,20 @@ class SHA256SUMSFile(HashValidator):
                     # line contains a hash entry.
                     # check filename, whether we are interested in it
                     if m.group(2) in fname_list:
-                        self.insert_fname_hash("SHA256",
+                        self.insert_fname_hash('SHA256',
                                                m.group(2),
                                                m.group(1))
 
 
 def setup_apt_keyring(gpg_home, keyring_fname):
     ring_path = os.path.join(gpg_home, keyring_fname)
-    if not os.path.isdir("/etc/apt/trusted.gpg.d"):
+    if not os.path.isdir('/etc/apt/trusted.gpg.d'):
         print("/etc/apt/trusted.gpg.d doesn't exist")
-        print("apt-get install debian-archive-keyring may "
-              "fix this problem")
+        print('apt-get install debian-archive-keyring may '
+              'fix this problem')
         sys.exit(115)
 
-    if os.path.exists("/etc/apt/trusted.gpg"):
+    if os.path.exists('/etc/apt/trusted.gpg'):
         system(f'cp /etc/apt/trusted.gpg "{ring_path}"')
 
     gpg_options = f'--keyring "{ring_path}" --no-auto-check-trustdb ' \
@@ -89,9 +89,9 @@ def setup_apt_keyring(gpg_home, keyring_fname):
                   '--batch ' \
                   f'--homedir "{gpg_home}"'
 
-    trustkeys = os.listdir("/etc/apt/trusted.gpg.d")
+    trustkeys = os.listdir('/etc/apt/trusted.gpg.d')
     for key in trustkeys:
-        print(f"Import {key}: ")
+        print(f'Import {key}: ')
         try:
             system(
                 f'gpg {gpg_options} '
@@ -122,7 +122,7 @@ def verify_release(tmp, base_url):
     # directly.
     sig = urlopen(base_url + 'Release.gpg', None, 10)
     try:
-        with tmp.open("Release", "r") as signed:
+        with tmp.open('Release', 'r') as signed:
 
             overall_status = OverallStatus()
 
@@ -145,12 +145,12 @@ def verify_release(tmp, base_url):
 
 def download_kinitrd(tmp, suite, mirror, skip_signature=False):
     base_url = f"{mirror.replace('LOCALMACHINE', 'localhost')}/dists/{suite}/"
-    installer_path = "main/installer-amd64/current/images/"
+    installer_path = 'main/installer-amd64/current/images/'
 
     setup_apt_keyring(tmp.fname('/'), 'pubring.gpg')
 
     # download release file
-    download(base_url + "Release", tmp.fname('Release'))
+    download(base_url + 'Release', tmp.fname('Release'))
     if not skip_signature:
         verify_release(tmp, base_url)
 
@@ -186,50 +186,50 @@ def download_kinitrd(tmp, suite, mirror, skip_signature=False):
 
 
 def get_primary_mirror(prj):
-    if prj.has("mirror/primary_host"):
-        m = prj.node("mirror")
+    if prj.has('mirror/primary_host'):
+        m = prj.node('mirror')
 
-        mirror = m.text("primary_proto") + "://"
+        mirror = m.text('primary_proto') + '://'
         mirror += (
             f"{m.text('primary_host')}/{m.text('primary_path')}"
-            .replace("//", "/"))
+            .replace('//', '/'))
     else:
-        raise NoKinitrdException("Broken xml file: "
-                                 "no cdrom and no primary host")
+        raise NoKinitrdException('Broken xml file: '
+                                 'no cdrom and no primary host')
 
     return mirror
 
 
 def copy_kinitrd(prj, target_dir):
 
-    suite = prj.text("suite")
+    suite = prj.text('suite')
 
     try:
         tmp = TmpdirFilesystem()
-        if prj.has("mirror/cdrom"):
+        if prj.has('mirror/cdrom'):
             system(
                 f'7z x -o{tmp.fname("/")} "{prj.text("mirror/cdrom")}" '
                 'initrd-cdrom.gz vmlinuz')
 
             # initrd.gz needs to be cdrom version !
-            copyfile(tmp.fname("initrd-cdrom.gz"),
-                     os.path.join(target_dir, "initrd.gz"))
+            copyfile(tmp.fname('initrd-cdrom.gz'),
+                     os.path.join(target_dir, 'initrd.gz'))
         else:
             mirror = get_primary_mirror(prj)
-            download_kinitrd(tmp, suite, mirror, prj.has("noauth"))
+            download_kinitrd(tmp, suite, mirror, prj.has('noauth'))
 
-            copyfile(tmp.fname("initrd.gz"),
-                     os.path.join(target_dir, "initrd.gz"))
+            copyfile(tmp.fname('initrd.gz'),
+                     os.path.join(target_dir, 'initrd.gz'))
 
-        copyfile(tmp.fname("initrd-cdrom.gz"),
-                 os.path.join(target_dir, "initrd-cdrom.gz"))
+        copyfile(tmp.fname('initrd-cdrom.gz'),
+                 os.path.join(target_dir, 'initrd-cdrom.gz'))
 
-        copyfile(tmp.fname("vmlinuz"),
-                 os.path.join(target_dir, "vmlinuz"))
+        copyfile(tmp.fname('vmlinuz'),
+                 os.path.join(target_dir, 'vmlinuz'))
 
     except IOError as e:
-        raise NoKinitrdException(f"IoError {e}")
+        raise NoKinitrdException(f'IoError {e}')
     except InvalidSignature as e:
-        raise NoKinitrdException(f"InvalidSignature {e}")
+        raise NoKinitrdException(f'InvalidSignature {e}')
     except HashValidationFailed as e:
-        raise NoKinitrdException(f"HashValidationFailed {e}")
+        raise NoKinitrdException(f'HashValidationFailed {e}')
diff --git a/elbepack/debpkg.py b/elbepack/debpkg.py
index 1cd284fb..1cced6f8 100644
--- a/elbepack/debpkg.py
+++ b/elbepack/debpkg.py
@@ -32,7 +32,7 @@ def gen_controlfile(name, version, arch, description, deps):
 
 
 def write_file(fname, mode, cont):
-    f = open(fname, "w")
+    f = open(fname, 'w')
     f.write(cont)
     f.close()
     os.chmod(fname, mode)
@@ -48,15 +48,15 @@ def build_binary_deb(
         target_dir):
 
     tmpdir = mkdtemp()
-    pkgfname = f"{name}_{version}_{arch}"
+    pkgfname = f'{name}_{version}_{arch}'
     pkgdir = os.path.join(tmpdir, pkgfname)
 
     os.system(f'mkdir -p "{os.path.join(pkgdir, "DEBIAN")}"')
     write_file(
         os.path.join(
             pkgdir,
-            "DEBIAN",
-            "control"),
+            'DEBIAN',
+            'control'),
         0o644,
         gen_controlfile(
             name,
@@ -75,4 +75,4 @@ def build_binary_deb(
         f'cp -v "{os.path.join(tmpdir, pkgfname + ".deb")}" "{target_dir}"')
     os.system(f'rm -r "{tmpdir}"')
 
-    return pkgfname + ".deb"
+    return pkgfname + '.deb'
diff --git a/elbepack/directories.py b/elbepack/directories.py
index ebb1d373..136e2767 100644
--- a/elbepack/directories.py
+++ b/elbepack/directories.py
@@ -21,14 +21,14 @@ def init_directories(elbe_relpath):
     elbe_exe = os.path.abspath(os.path.realpath(elbe_relpath))
     elbe_dir = os.path.dirname(elbe_exe)
 
-    if not elbe_exe.startswith("/usr/bin/"):
+    if not elbe_exe.startswith('/usr/bin/'):
 
         # Set XML catalog if elbe is run from source
-        xmlcat = os.path.join(elbe_dir, "schema/catalog.xml")
+        xmlcat = os.path.join(elbe_dir, 'schema/catalog.xml')
         if os.environ.get('XML_CATALOG_FILES') is None:
             os.environ['XML_CATALOG_FILES'] = xmlcat
         else:
-            os.environ['XML_CATALOG_FILES'] += " "
+            os.environ['XML_CATALOG_FILES'] += ' '
             os.environ['XML_CATALOG_FILES'] += xmlcat
 
 
@@ -38,8 +38,8 @@ def get_cmdlist():
 
 pack_dir = elbepack.__path__[0]
 
-init_template_dir = os.path.join(pack_dir, "init")
-mako_template_dir = os.path.join(pack_dir, "makofiles")
+init_template_dir = os.path.join(pack_dir, 'init')
+mako_template_dir = os.path.join(pack_dir, 'makofiles')
 
-default_preseed_fname = os.path.join(pack_dir, "default-preseed.xml")
-xsdtoasciidoc_mako_fname = os.path.join(pack_dir, "xsdtoasciidoc.mako")
+default_preseed_fname = os.path.join(pack_dir, 'default-preseed.xml')
+xsdtoasciidoc_mako_fname = os.path.join(pack_dir, 'xsdtoasciidoc.mako')
diff --git a/elbepack/dump.py b/elbepack/dump.py
index e14debc8..f513916c 100644
--- a/elbepack/dump.py
+++ b/elbepack/dump.py
@@ -16,8 +16,8 @@ from elbepack.version import elbe_version
 from elbepack.aptpkgutils import APTPackage
 from elbepack.shellhelper import do
 
-report = logging.getLogger("report")
-validation = logging.getLogger("validation")
+report = logging.getLogger('report')
+validation = logging.getLogger('validation')
 
 
 def get_initvm_pkglist():
@@ -36,12 +36,12 @@ def dump_fullpkgs(xml, rfs, cache):
         xml.append_full_pkg(p)
 
     sources_list = xml.xml.ensure_child('sources_list')
-    slist = rfs.read_file("etc/apt/sources.list")
+    slist = rfs.read_file('etc/apt/sources.list')
     sources_list.set_text(slist)
 
     try:
         preferences = xml.xml.ensure_child('apt_prefs')
-        prefs = rfs.read_file("etc/apt/preferences")
+        prefs = rfs.read_file('etc/apt/preferences')
         preferences.set_text(prefs)
     except IOError:
         pass
@@ -63,12 +63,12 @@ def dump_initvmpkgs(xml):
         xml.append_initvm_pkg(p)
 
     sources_list = xml.xml.ensure_child('initvm_sources_list')
-    slist = hostfs.read_file("etc/apt/sources.list")
+    slist = hostfs.read_file('etc/apt/sources.list')
     sources_list.set_text(slist)
 
     try:
         preferences = xml.xml.ensure_child('initvm_apt_prefs')
-        prefs = hostfs.read_file("etc/apt/preferences")
+        prefs = hostfs.read_file('etc/apt/preferences')
         preferences.set_text(prefs)
     except IOError:
         pass
@@ -76,19 +76,19 @@ def dump_initvmpkgs(xml):
 
 def check_full_pkgs(pkgs, fullpkgs, cache):
 
-    validation.info("ELBE Package validation")
-    validation.info("=======================")
-    validation.info("")
-    validation.info("Package List validation")
-    validation.info("-----------------------")
-    validation.info("")
+    validation.info('ELBE Package validation')
+    validation.info('=======================')
+    validation.info('')
+    validation.info('Package List validation')
+    validation.info('-----------------------')
+    validation.info('')
 
     errors = 0
 
     if pkgs:
         for p in pkgs:
             name = p.et.text
-            nomulti_name = name.split(":")[0]
+            nomulti_name = name.split(':')[0]
             if not cache.has_pkg(nomulti_name):
                 validation.error("Package '%s' does not exist", nomulti_name)
                 errors += 1
@@ -108,14 +108,14 @@ def check_full_pkgs(pkgs, fullpkgs, cache):
                 continue
 
     if errors == 0:
-        validation.info("No Errors found")
+        validation.info('No Errors found')
 
     if not fullpkgs:
         return
 
-    validation.info("Full Packagelist validation")
-    validation.info("---------------------------")
-    validation.info("")
+    validation.info('Full Packagelist validation')
+    validation.info('---------------------------')
+    validation.info('')
     errors = 0
 
     pindex = {}
@@ -164,72 +164,72 @@ def check_full_pkgs(pkgs, fullpkgs, cache):
 
     for cp in cache.get_installed_pkgs():
         if cp.name not in pindex:
-            validation.error("Additional package %s installed, that was not requested",
+            validation.error('Additional package %s installed, that was not requested',
                              cp.name)
             errors += 1
 
     if errors == 0:
-        validation.info("No Errors found")
+        validation.info('No Errors found')
 
 
 def elbe_report(xml, buildenv, cache, targetfs):
 
     rfs = buildenv.rfs
 
-    report.info("ELBE Report for Project %s\n\n"
-                "Report timestamp: %s\n"
-                "elbe: %s",
-                xml.text("project/name"),
-                datetime.now().strftime("%Y%m%d-%H%M%S"),
+    report.info('ELBE Report for Project %s\n\n'
+                'Report timestamp: %s\n'
+                'elbe: %s',
+                xml.text('project/name'),
+                datetime.now().strftime('%Y%m%d-%H%M%S'),
                 str(elbe_version))
 
     slist = rfs.read_file('etc/apt/sources.list')
-    report.info("")
-    report.info("Apt Sources dump")
-    report.info("----------------")
-    report.info("")
-    report.info("%s", slist)
-    report.info("")
+    report.info('')
+    report.info('Apt Sources dump')
+    report.info('----------------')
+    report.info('')
+    report.info('%s', slist)
+    report.info('')
 
     try:
-        prefs = rfs.read_file("etc/apt/preferences")
+        prefs = rfs.read_file('etc/apt/preferences')
     except IOError:
-        prefs = ""
-
-    report.info("")
-    report.info("Apt Preferences dump")
-    report.info("--------------------")
-    report.info("")
-    report.info("%s", prefs)
-    report.info("")
-    report.info("Installed Packages List")
-    report.info("-----------------------")
-    report.info("")
+        prefs = ''
+
+    report.info('')
+    report.info('Apt Preferences dump')
+    report.info('--------------------')
+    report.info('')
+    report.info('%s', prefs)
+    report.info('')
+    report.info('Installed Packages List')
+    report.info('-----------------------')
+    report.info('')
 
     instpkgs = cache.get_installed_pkgs()
     for p in instpkgs:
-        report.info("|%s|%s|%s", p.name, p.installed_version, p.origin)
+        report.info('|%s|%s|%s', p.name, p.installed_version, p.origin)
 
     index = cache.get_fileindex(removeprefix='/usr')
     mt_index = targetfs.mtime_snap()
 
-    if xml.has("archive") and not xml.text("archive") is None:
-        with archive_tmpfile(xml.text("archive")) as fp:
+    if xml.has('archive') and not xml.text('archive') is None:
+        with archive_tmpfile(xml.text('archive')) as fp:
             do(f'tar xvfj "{fp.name}" -h -C "{targetfs.path}"')
         mt_index_postarch = targetfs.mtime_snap()
     else:
         mt_index_postarch = mt_index
 
-    if xml.has("target/finetuning"):
+    if xml.has('target/finetuning'):
         do_finetuning(xml, buildenv, targetfs)
         mt_index_post_fine = targetfs.mtime_snap()
     else:
         mt_index_post_fine = mt_index_postarch
 
-    report.info("")
-    report.info("File List")
-    report.info("---------")
-    report.info("")
+    report.info('')
+    report.info('File List')
+    report.info('---------')
+    report.info('')
 
     tgt_pkg_list = set()
 
@@ -239,28 +239,28 @@ def elbe_report(xml, buildenv, cache, targetfs):
             pkg = index[unprefixed]
             tgt_pkg_list.add(pkg)
         else:
-            pkg = "postinst generated"
+            pkg = 'postinst generated'
 
         if fpath in mt_index_post_fine:
             if fpath in mt_index_postarch:
                 if mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
-                    pkg = "modified finetuning"
+                    pkg = 'modified finetuning'
                 elif fpath in mt_index:
                     if mt_index_postarch[fpath] != mt_index[fpath]:
-                        pkg = "from archive"
+                        pkg = 'from archive'
                     # else leave pkg as is
                 else:
-                    pkg = "added in archive"
+                    pkg = 'added in archive'
             else:
-                pkg = "added in finetuning"
+                pkg = 'added in finetuning'
         # else leave pkg as is
 
-        report.info("|+%s+|%s", fpath, pkg)
+        report.info('|+%s+|%s', fpath, pkg)
 
-    report.info("")
-    report.info("Deleted Files")
-    report.info("-------------")
-    report.info("")
+    report.info('')
+    report.info('Deleted Files')
+    report.info('-------------')
+    report.info('')
 
     for fpath in list(mt_index.keys()):
         if fpath not in mt_index_post_fine:
@@ -268,50 +268,50 @@ def elbe_report(xml, buildenv, cache, targetfs):
             if unprefixed in index:
                 pkg = index[unprefixed]
             else:
-                pkg = "postinst generated"
-            report.info("|+%s+|%s", fpath, pkg)
+                pkg = 'postinst generated'
+            report.info('|+%s+|%s', fpath, pkg)
 
-    report.info("")
-    report.info("Target Package List")
-    report.info("-------------------")
-    report.info("")
+    report.info('')
+    report.info('Target Package List')
+    report.info('-------------------')
+    report.info('')
 
     instpkgs = cache.get_installed_pkgs()
     pkgindex = {}
     for p in instpkgs:
         pkgindex[p.name] = p
 
-    if xml.has("target/pkgversionlist"):
+    if xml.has('target/pkgversionlist'):
         targetfs.remove('etc/elbe_pkglist')
         f = targetfs.open('etc/elbe_pkglist', 'w')
     for pkg in tgt_pkg_list:
         p = pkgindex[pkg]
-        report.info("|%s|%s|%s|%s",
+        report.info('|%s|%s|%s|%s',
                     p.name,
                     p.installed_version,
                     p.is_auto_installed,
                     p.installed_md5)
-        if xml.has("target/pkgversionlist"):
-            f.write(f"{p.name} {p.installed_version} {p.installed_md5}\n")
+        if xml.has('target/pkgversionlist'):
+            f.write(f'{p.name} {p.installed_version} {p.installed_md5}\n')
 
-    if xml.has("target/pkgversionlist"):
+    if xml.has('target/pkgversionlist'):
         f.close()
 
-    if not xml.has("archive") or xml.text("archive") is None:
+    if not xml.has('archive') or xml.text('archive') is None:
         return list(tgt_pkg_list)
 
-    validation.info("")
-    validation.info("Archive validation")
-    validation.info("------------------")
-    validation.info("")
+    validation.info('')
+    validation.info('Archive validation')
+    validation.info('------------------')
+    validation.info('')
 
     for fpath in list(mt_index_postarch.keys()):
         if (fpath not in mt_index or
            mt_index_postarch[fpath] != mt_index[fpath]):
             if fpath not in mt_index_post_fine:
-                validation.warning("Archive file %s deleted in finetuning",
+                validation.warning('Archive file %s deleted in finetuning',
                                    fpath)
             elif mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
-                validation.warning("Archive file %s modified in finetuning",
+                validation.warning('Archive file %s modified in finetuning',
                                    fpath)
     return list(tgt_pkg_list)
diff --git a/elbepack/efilesystem.py b/elbepack/efilesystem.py
index f022e489..e3271c4c 100644
--- a/elbepack/efilesystem.py
+++ b/elbepack/efilesystem.py
@@ -83,7 +83,7 @@ def copy_filelist(src, file_lst, dst):
                     'cp -a --reflink=auto '
                     f'"{src.realpath(f)}" "{dst.realpath(f)}"')
             except CommandError as E:
-                logging.warning("Error while copying from %s to %s of file %s - %s",
+                logging.warning('Error while copying from %s to %s of file %s - %s',
                                 src.path, dst.path, f, E)
 
     # update utime which will change after a file has been copied into
@@ -96,12 +96,12 @@ def copy_filelist(src, file_lst, dst):
 def extract_target(src, xml, dst, cache):
 
     # create filelists describing the content of the target rfs
-    if xml.tgt.has("tighten") or xml.tgt.has("diet"):
+    if xml.tgt.has('tighten') or xml.tgt.has('diet'):
         pkglist = [n.et.text for n in xml.node(
             'target/pkg-list') if n.tag == 'pkg']
-        arch = xml.text("project/buildimage/arch", key="arch")
+        arch = xml.text('project/buildimage/arch', key='arch')
 
-        if xml.tgt.has("diet"):
+        if xml.tgt.has('diet'):
             withdeps = []
             for p in pkglist:
                 deps = cache.get_dependencies(p)
@@ -112,12 +112,12 @@ def extract_target(src, xml, dst, cache):
 
         file_list = []
         for line in pkglist:
-            file_list += src.cat_file(f"var/lib/dpkg/info/{line}.list")
-            file_list += src.cat_file(f"var/lib/dpkg/info/{line}.conffiles")
+            file_list += src.cat_file(f'var/lib/dpkg/info/{line}.list')
+            file_list += src.cat_file(f'var/lib/dpkg/info/{line}.conffiles')
 
-            file_list += src.cat_file(f"var/lib/dpkg/info/{line}:{arch}.list")
+            file_list += src.cat_file(f'var/lib/dpkg/info/{line}:{arch}.list')
             file_list += src.cat_file(
-                f"var/lib/dpkg/info/{line}:{arch}.conffiles")
+                f'var/lib/dpkg/info/{line}:{arch}.conffiles')
 
         file_list = sorted(set(file_list),
                            key=lambda k: k[4:] if k.startswith('/usr') else k)
@@ -125,42 +125,42 @@ def extract_target(src, xml, dst, cache):
     else:
         # first copy most diretories
         for f in src.listdir():
-            subprocess.call(["cp", "-a", "--reflink=auto", f, dst.fname('')])
+            subprocess.call(['cp', '-a', '--reflink=auto', f, dst.fname('')])
 
     try:
-        dst.mkdir_p("dev")
+        dst.mkdir_p('dev')
     except BaseException:
         pass
     try:
-        dst.mkdir_p("proc")
+        dst.mkdir_p('proc')
     except BaseException:
         pass
     try:
-        dst.mkdir_p("sys")
+        dst.mkdir_p('sys')
     except BaseException:
         pass
 
-    if xml.tgt.has("setsel"):
+    if xml.tgt.has('setsel'):
         pkglist = [n.et.text for n in xml.node(
             'target/pkg-list') if n.tag == 'pkg']
         psel = 'var/cache/elbe/pkg-selections'
 
         with open(dst.fname(psel), 'w+') as f:
             for item in pkglist:
-                f.write(f"{item}  install\n")
+                f.write(f'{item}  install\n')
 
-        host_arch = get_command_out("dpkg --print-architecture").strip()
+        host_arch = get_command_out('dpkg --print-architecture').strip()
         if xml.is_cross(host_arch):
-            ui = "/usr/share/elbe/qemu-elbe/" + str(xml.defs["userinterpr"])
+            ui = '/usr/share/elbe/qemu-elbe/' + str(xml.defs['userinterpr'])
             if not os.path.exists(ui):
-                ui = "/usr/bin/" + str(xml.defs["userinterpr"])
+                ui = '/usr/bin/' + str(xml.defs['userinterpr'])
             do(f"cp {ui} {dst.fname('usr/bin')}")
 
-        cmds = ["--clear-selections",
-                f"--set-selections < {dst.fname(psel)}",
-                "--purge -a"]
+        cmds = ['--clear-selections',
+                f'--set-selections < {dst.fname(psel)}',
+                '--purge -a']
         for cmd in cmds:
-            chroot(dst.path, f"/usr/bin/dpkg {cmd}")
+            chroot(dst.path, f'/usr/bin/dpkg {cmd}')
 
 
 class ElbeFilesystem(Filesystem):
@@ -168,20 +168,20 @@ class ElbeFilesystem(Filesystem):
         Filesystem.__init__(self, path, clean)
 
     def dump_elbeversion(self, xml):
-        f = self.open("etc/elbe_version", "w+")
+        f = self.open('etc/elbe_version', 'w+')
         f.write(f"{xml.prj.text('name')} {xml.prj.text('version')}\n")
-        f.write(f"this RFS was generated by elbe {elbe_version}\n")
-        f.write(time.strftime("%c\n"))
+        f.write(f'this RFS was generated by elbe {elbe_version}\n')
+        f.write(time.strftime('%c\n'))
         f.close()
 
-        version_file = self.open("etc/updated_version", "w")
-        version_file.write(xml.text("/project/version"))
+        version_file = self.open('etc/updated_version', 'w')
+        version_file.write(xml.text('/project/version'))
         version_file.close()
 
-        elbe_base = self.open("etc/elbe_base.xml", "wb")
+        elbe_base = self.open('etc/elbe_base.xml', 'wb')
         xml.xml.write(elbe_base)
         elbe_base.close()
-        self.chmod("etc/elbe_base.xml", stat.S_IREAD)
+        self.chmod('etc/elbe_base.xml', stat.S_IREAD)
 
     def write_licenses(self, f, pkglist, xml_fname=None):
         licence_xml = copyright_xml()
@@ -190,16 +190,16 @@ class ElbeFilesystem(Filesystem):
             copyright_fname = self.fname(copyright_file)
             if os.path.isfile(copyright_fname):
                 try:
-                    with io.open(copyright_fname, "r",
+                    with io.open(copyright_fname, 'r',
                                  encoding='utf-8', errors='replace') as lic:
                         lic_text = lic.read()
                 except IOError as e:
-                    logging.exception("Error while processing license file %s",
+                    logging.exception('Error while processing license file %s',
                                       copyright_fname)
                     lic_text = u"Error while processing license file %s: '%s'" % (
                         copyright_file, e.strerror)
             else:
-                logging.warning("License file does not exist, skipping %s",
+                logging.warning('License file does not exist, skipping %s',
                                 copyright_fname)
                 continue
             # in Python2 'pkg' is a binary string whereas in Python3 it is a
@@ -209,11 +209,11 @@ class ElbeFilesystem(Filesystem):
 
             if f is not None:
                 f.write(pkg)
-                f.write(":\n======================================"
-                        "==========================================")
-                f.write("\n")
+                f.write(':\n======================================'
+                        '==========================================')
+                f.write('\n')
                 f.write(lic_text)
-                f.write("\n\n")
+                f.write('\n\n')
 
             if xml_fname is not None:
                 licence_xml.add_copyright_file(pkg, lic_text)
@@ -256,33 +256,33 @@ class Excursion:
         self.dst = dst
 
     def _saved_to(self):
-        return f"{self.origin}.orig"
+        return f'{self.origin}.orig'
 
     def _do_excursion(self, rfs):
         if rfs.lexists(self.origin) and self.restore is True:
             save_to = self._saved_to()
-            system(f"mv {rfs.fname(self.origin)} {rfs.fname(save_to)}")
+            system(f'mv {rfs.fname(self.origin)} {rfs.fname(save_to)}')
         if os.path.exists(self.origin):
             if self.dst is not None:
                 dst = self.dst
             else:
                 dst = self.origin
-            system(f"cp {self.origin} {rfs.fname(dst)}")
+            system(f'cp {self.origin} {rfs.fname(dst)}')
 
     # This should be a method of rfs
     @staticmethod
     def _del_rfs_file(filename, rfs):
         if rfs.lexists(filename):
-            flags = "-f"
+            flags = '-f'
             if rfs.isdir(filename):
-                flags += "r"
-            system(f"rm {flags} {rfs.fname(filename)}")
+                flags += 'r'
+            system(f'rm {flags} {rfs.fname(filename)}')
 
     def _undo_excursion(self, rfs):
         saved_to = self._saved_to()
         self._del_rfs_file(self.origin, rfs)
         if self.restore is True and rfs.lexists(saved_to):
-            system(f"mv {rfs.fname(saved_to)} {rfs.fname(self.origin)}")
+            system(f'mv {rfs.fname(saved_to)} {rfs.fname(self.origin)}')
 
 
 class ChRootFilesystem(ElbeFilesystem):
@@ -290,7 +290,7 @@ class ChRootFilesystem(ElbeFilesystem):
     def __init__(self, path, interpreter=None, clean=False):
         ElbeFilesystem.__init__(self, path, clean)
         self.interpreter = interpreter
-        self.cwd = os.open("/", os.O_RDONLY)
+        self.cwd = os.open('/', os.O_RDONLY)
         self.inchroot = False
         self.protect_from_excursion = set()
 
@@ -299,26 +299,26 @@ class ChRootFilesystem(ElbeFilesystem):
 
     def __enter__(self):
         Excursion.begin(self)
-        Excursion.add(self, "/etc/resolv.conf")
-        Excursion.add(self, "/etc/apt/apt.conf")
-        Excursion.add(self, "/usr/sbin/policy-rc.d")
+        Excursion.add(self, '/etc/resolv.conf')
+        Excursion.add(self, '/etc/apt/apt.conf')
+        Excursion.add(self, '/usr/sbin/policy-rc.d')
 
         if self.interpreter:
-            if not self.exists("usr/bin"):
-                if self.islink("usr/bin"):
-                    Excursion.add(self, "/usr/bin")
+            if not self.exists('usr/bin'):
+                if self.islink('usr/bin'):
+                    Excursion.add(self, '/usr/bin')
 
-            ui = "/usr/share/elbe/qemu-elbe/" + self.interpreter
+            ui = '/usr/share/elbe/qemu-elbe/' + self.interpreter
             if not os.path.exists(ui):
-                ui = "/usr/bin/" + self.interpreter
+                ui = '/usr/bin/' + self.interpreter
 
-            Excursion.add(self, ui, False, "/usr/bin")
+            Excursion.add(self, ui, False, '/usr/bin')
 
         Excursion.do(self)
 
-        self.mkdir_p("usr/bin")
-        self.mkdir_p("usr/sbin")
-        self.write_file("usr/sbin/policy-rc.d", 0o755, "#!/bin/sh\nexit 101\n")
+        self.mkdir_p('usr/bin')
+        self.mkdir_p('usr/sbin')
+        self.write_file('usr/sbin/policy-rc.d', 0o755, '#!/bin/sh\nexit 101\n')
         self.mount()
         return self
 
@@ -338,10 +338,10 @@ class ChRootFilesystem(ElbeFilesystem):
         if self.path == '/':
             return
         try:
-            system(f"mount -t proc none {self.path}/proc")
-            system(f"mount -t sysfs none {self.path}/sys")
-            system(f"mount -o bind /dev {self.path}/dev")
-            system(f"mount -o bind /dev/pts {self.path}/dev/pts")
+            system(f'mount -t proc none {self.path}/proc')
+            system(f'mount -t sysfs none {self.path}/sys')
+            system(f'mount -o bind /dev {self.path}/dev')
+            system(f'mount -o bind /dev/pts {self.path}/dev/pts')
         except BaseException:
             self.umount()
             raise
@@ -349,9 +349,9 @@ class ChRootFilesystem(ElbeFilesystem):
     def enter_chroot(self):
         assert not self.inchroot
 
-        os.environ["LANG"] = "C"
-        os.environ["LANGUAGE"] = "C"
-        os.environ["LC_ALL"] = "C"
+        os.environ['LANG'] = 'C'
+        os.environ['LANGUAGE'] = 'C'
+        os.environ['LC_ALL'] = 'C'
 
         os.chdir(self.path)
         self.inchroot = True
@@ -364,17 +364,17 @@ class ChRootFilesystem(ElbeFilesystem):
     def _umount(self, path):
         path = os.path.join(self.path, path)
         if os.path.ismount(path):
-            system(f"umount {path}")
+            system(f'umount {path}')
 
     def umount(self):
         if self.path == '/':
             return
-        self._umount("proc/sys/fs/binfmt_misc")
-        self._umount("proc")
-        self._umount("sys")
-        self._umount("dev/pts")
+        self._umount('proc/sys/fs/binfmt_misc')
+        self._umount('proc')
+        self._umount('sys')
+        self._umount('dev/pts')
         time.sleep(0.5)
-        self._umount("dev")
+        self._umount('dev')
 
     def leave_chroot(self):
         assert self.inchroot
@@ -385,27 +385,27 @@ class ChRootFilesystem(ElbeFilesystem):
         if self.path == '/':
             return
 
-        os.chroot(".")
+        os.chroot('.')
 
 
 class TargetFs(ChRootFilesystem):
     def __init__(self, path, xml, clean=True):
-        ChRootFilesystem.__init__(self, path, xml.defs["userinterpr"], clean)
+        ChRootFilesystem.__init__(self, path, xml.defs['userinterpr'], clean)
         self.xml = xml
         self.images = []
         self.image_packers = {}
 
     def write_fstab(self, xml):
-        if not self.exists("etc"):
-            if self.islink("etc"):
-                self.mkdir(self.realpath("etc"))
+        if not self.exists('etc'):
+            if self.islink('etc'):
+                self.mkdir(self.realpath('etc'))
             else:
-                self.mkdir("etc")
+                self.mkdir('etc')
 
-        if xml.tgt.has("fstab"):
-            f = self.open("etc/fstab", "w")
-            for fs in xml.tgt.node("fstab"):
-                if not fs.has("nofstab"):
+        if xml.tgt.has('fstab'):
+            f = self.open('etc/fstab', 'w')
+            for fs in xml.tgt.node('fstab'):
+                if not fs.has('nofstab'):
                     fstab = fstabentry(xml, fs)
                     f.write(fstab.get_str())
             f.close()
@@ -423,13 +423,13 @@ class TargetFs(ChRootFilesystem):
             self.images.append(i)
             self.image_packers[i] = default_packer
 
-        if self.xml.has("target/package/tar"):
-            targz_name = self.xml.text("target/package/tar/name")
+        if self.xml.has('target/package/tar'):
+            targz_name = self.xml.text('target/package/tar/name')
             try:
                 options = ''
-                if self.xml.has("target/package/tar/options"):
-                    options = self.xml.text("target/package/tar/options")
-                cmd = "tar cfz %(dest)s/%(fname)s -C %(sdir)s %(options)s ."
+                if self.xml.has('target/package/tar/options'):
+                    options = self.xml.text('target/package/tar/options')
+                cmd = 'tar cfz %(dest)s/%(fname)s -C %(sdir)s %(options)s .'
                 args = dict(
                     options=options,
                     dest=targetdir,
@@ -443,14 +443,14 @@ class TargetFs(ChRootFilesystem):
                 # error was logged; continue creating cpio image
                 pass
 
-        if self.xml.has("target/package/cpio"):
+        if self.xml.has('target/package/cpio'):
             oldwd = os.getcwd()
-            cpio_name = self.xml.text("target/package/cpio/name")
+            cpio_name = self.xml.text('target/package/cpio/name')
             os.chdir(self.fname(''))
             try:
                 do(
-                    f"find . -print | cpio -ov -H newc >"
-                    f"{os.path.join(targetdir, cpio_name)}")
+                    'find . -print | cpio -ov -H newc >'
+                    f'{os.path.join(targetdir, cpio_name)}')
                 # only append filename if creating cpio was successful
                 self.images.append(cpio_name)
             except CommandError:
@@ -458,18 +458,18 @@ class TargetFs(ChRootFilesystem):
                 pass
             os.chdir(oldwd)
 
-        if self.xml.has("target/package/squashfs"):
+        if self.xml.has('target/package/squashfs'):
             oldwd = os.getcwd()
-            sfs_name = self.xml.text("target/package/squashfs/name")
+            sfs_name = self.xml.text('target/package/squashfs/name')
             os.chdir(self.fname(''))
             try:
                 options = ''
-                if self.xml.has("target/package/squashfs/options"):
-                    options = self.xml.text("target/package/squashfs/options")
+                if self.xml.has('target/package/squashfs/options'):
+                    options = self.xml.text('target/package/squashfs/options')
 
                 do(
                     f"mksquashfs {self.fname('')} {targetdir}/{sfs_name} "
-                    f"-noappend -no-progress {options}")
+                    f'-noappend -no-progress {options}')
                 # only append filename if creating mksquashfs was successful
                 self.images.append(sfs_name)
             except CommandError:
diff --git a/elbepack/egpg.py b/elbepack/egpg.py
index 303e9b0b..d5711812 100644
--- a/elbepack/egpg.py
+++ b/elbepack/egpg.py
@@ -80,17 +80,17 @@ def check_signature(ctx, signature):
     status = OverallStatus()
 
     if signature.summary & sigsum.KEY_MISSING:
-        print(f"Signature with unknown key: {signature.fpr}")
+        print(f'Signature with unknown key: {signature.fpr}')
         status.key_missing = 1
         return status
 
     # there should be a key
     key = ctx.get_key(signature.fpr, 0)
-    print(f"{key.uids[0].name} <{key.uids[0].email}> ({signature.fpr}):")
+    print(f'{key.uids[0].name} <{key.uids[0].email}> ({signature.fpr}):')
 
     if signature.summary & sigsum.VALID == sigsum.VALID:
         # signature fully valid and trusted
-        print("VALID (Trusted)")
+        print('VALID (Trusted)')
         status.valid = 1
         return status
 
@@ -98,38 +98,38 @@ def check_signature(ctx, signature):
     if signature.summary == 0:
         # Signature is valid, but the key is not ultimately trusted,
         # see: http://www.gossamer-threads.com/lists/gnupg/users/52350
-        print("VALID (Untrusted).")
+        print('VALID (Untrusted).')
         status.valid = 1
 
     if signature.summary & sigsum.SIG_EXPIRED == sigsum.SIG_EXPIRED:
-        print("SIGNATURE EXPIRED!")
+        print('SIGNATURE EXPIRED!')
         status.sig_expired = 1
         status.valid_threshold = 1
 
     if signature.summary & sigsum.KEY_EXPIRED == sigsum.KEY_EXPIRED:
-        print("KEY EXPIRED!")
+        print('KEY EXPIRED!')
         status.key_expired = 1
         status.valid_threshold = 1
 
     if signature.summary & sigsum.KEY_REVOKED == sigsum.KEY_REVOKED:
-        print("KEY REVOKED!")
+        print('KEY REVOKED!')
         status.key_revoked = 1
         status.valid_threshold = 1
 
     if signature.summary & sigsum.RED == sigsum.RED:
-        print("INVALID SIGNATURE!")
+        print('INVALID SIGNATURE!')
         status.invalid = 1
 
     if signature.summary & sigsum.CRL_MISSING == sigsum.CRL_MISSING:
-        print("CRL MISSING!")
+        print('CRL MISSING!')
         status.gpg_error = 1
 
     if signature.summary & sigsum.CRL_TOO_OLD == sigsum.CRL_TOO_OLD:
-        print("CRL TOO OLD!")
+        print('CRL TOO OLD!')
         status.gpg_error = 1
 
     if signature.summary & sigsum.BAD_POLICY == sigsum.BAD_POLICY:
-        print("UNMET POLICY REQUIREMENT!")
+        print('UNMET POLICY REQUIREMENT!')
         status.gpg_error = 1
 
     if signature.summary & sigsum.SYS_ERROR == sigsum.SYS_ERROR:
@@ -142,7 +142,7 @@ def check_signature(ctx, signature):
 def unsign_file(fname):
     # check for .gpg extension and create an output filename without it
     if len(fname) <= 4 or fname[len(fname) - 4:] != '.gpg':
-        print("The input file needs a .gpg extension")
+        print('The input file needs a .gpg extension')
         return None
 
     outfilename = fname[:len(fname) - 4]
@@ -159,7 +159,7 @@ def unsign_file(fname):
         infile = core.Data(file=fname)
         outfile = core.Data(file=outfilename)
     except (GPGMEError, ValueError) as E:
-        print(f"Error: Opening file {fname} or {outfilename} - {E}")
+        print(f'Error: Opening file {fname} or {outfilename} - {E}')
     else:
         # obtain signature and write unsigned file
         ctx.op_verify(infile, None, outfile)
@@ -184,9 +184,9 @@ def unlock_key(fingerprint):
                         '/var/cache/elbe/gnupg')
     key = ctx.get_key(fingerprint, secret=True)
     keygrip = key.subkeys[0].keygrip
-    system("/usr/lib/gnupg/gpg-preset-passphrase "
-           f"--preset -P requiredToAvoidUserInput {keygrip}",
-           env_add={"GNUPGHOME": "/var/cache/elbe/gnupg"})
+    system('/usr/lib/gnupg/gpg-preset-passphrase '
+           f'--preset -P requiredToAvoidUserInput {keygrip}',
+           env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
 
 
 def sign(infile, outfile, fingerprint):
@@ -206,7 +206,7 @@ def sign(infile, outfile, fingerprint):
     try:
         key = ctx.get_key(fingerprint, 0)
     except (KeyNotFound, GPGMEError, AssertionError) as E:
-        print(f"Error: No key with fingerprint {fingerprint} - {E}")
+        print(f'Error: No key with fingerprint {fingerprint} - {E}')
         return
     else:
         unlock_key(key.fpr)
@@ -216,15 +216,15 @@ def sign(infile, outfile, fingerprint):
     try:
         indata = core.Data(file=infile)
     except (GPGMEError, ValueError) as E:
-        print(f"Error: Opening file {infile} - {E}")
+        print(f'Error: Opening file {infile} - {E}')
     else:
         outdata = core.Data()
         try:
             ctx.op_sign(indata, outdata, sig.mode.NORMAL)
         except InvalidSigners as E:
-            print("Error: Invalid signer - %s", E)
+            print('Error: Invalid signer - %s', E)
         except GPGMEError as E:
-            print("Error: While signing - %s", E)
+            print('Error: While signing - %s', E)
         else:
             outdata.seek(0, os.SEEK_SET)
             signature = outdata.read()
@@ -260,11 +260,11 @@ EOT = 4294967295
 
 
 def generate_elbe_internal_key():
-    hostfs.mkdir_p("/var/cache/elbe/gnupg")
-    hostfs.write_file("/var/cache/elbe/gnupg/gpg-agent.conf", 0o600,
-                      "allow-preset-passphrase\n"
-                      f"default-cache-ttl {EOT}\n"
-                      f"max-cache-ttl {EOT}\n")
+    hostfs.mkdir_p('/var/cache/elbe/gnupg')
+    hostfs.write_file('/var/cache/elbe/gnupg/gpg-agent.conf', 0o600,
+                      'allow-preset-passphrase\n'
+                      f'default-cache-ttl {EOT}\n'
+                      f'max-cache-ttl {EOT}\n')
     ctx = core.Context()
     ctx.set_engine_info(PROTOCOL_OpenPGP,
                         None,
@@ -276,8 +276,8 @@ def generate_elbe_internal_key():
 
 
 def export_key(fingerprint, outfile):
-    system(f"/usr/bin/gpg -a -o {outfile} --export {fingerprint}",
-           env_add={"GNUPGHOME": "/var/cache/elbe/gnupg"})
+    system(f'/usr/bin/gpg -a -o {outfile} --export {fingerprint}',
+           env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
 
 
 def unarmor_openpgp_keyring(armored):
@@ -289,6 +289,6 @@ def unarmor_openpgp_keyring(armored):
         conv_cmd = get_command_out('/usr/bin/gpg --no-options --dearmor', stdin=armored)
     except CommandError as e:
         logging.error(e)
-        return b""
+        return b''
 
     return conv_cmd
diff --git a/elbepack/elbeproject.py b/elbepack/elbeproject.py
index 5ae068da..746bafe6 100644
--- a/elbepack/elbeproject.py
+++ b/elbepack/elbeproject.py
@@ -39,34 +39,34 @@ from elbepack.templates import write_pack_template
 from elbepack.finetuning import do_prj_finetuning
 
 
-validation = logging.getLogger("validation")
+validation = logging.getLogger('validation')
 
 
 class IncompatibleArchitectureException(Exception):
     def __init__(self, oldarch, newarch):
         Exception.__init__(
             self,
-            f"Cannot change architecture from {oldarch} to {newarch} in "
-            "existing project")
+            f'Cannot change architecture from {oldarch} to {newarch} in '
+            'existing project')
 
 
 class AptCacheUpdateError(Exception):
     def __init__(self, e):
-        Exception.__init__(self, f"Error Updating rpcaptcache: {e}")
+        Exception.__init__(self, f'Error Updating rpcaptcache: {e}')
 
 
 class AptCacheCommitError(Exception):
     def __init__(self, msg=''):
-        Exception.__init__(self, f"Error Committing rpcaptcache {msg}")
+        Exception.__init__(self, f'Error Committing rpcaptcache {msg}')
 
 
 class UnsupportedSDKException(Exception):
     def __init__(self, triplet):
-        Exception.__init__(self, f"SDK for {triplet} currently unsupported")
+        Exception.__init__(self, f'SDK for {triplet} currently unsupported')
 
 
 def test_gen_sdk_scripts():
-    system("mkdir -p /tmp/test/sdk")
+    system('mkdir -p /tmp/test/sdk')
     gen_sdk_scripts('armhf-linux-gnueabihf',
                     'ARM',
                     'testproject',
@@ -82,8 +82,8 @@ def gen_sdk_scripts(triplet,
                     builddir,
                     sdkpath):
 
-    prj_name = prj_name.replace(" ", "_")
-    prj_version = prj_version.replace(" ", "_")
+    prj_name = prj_name.replace(' ', '_')
+    prj_version = prj_version.replace(' ', '_')
 
     # generate the setup script
     sdkvalues = {'sdk_arch': 'x86_64',
@@ -125,11 +125,11 @@ class ElbeProject:
             savesh_file=None):
 
         self.builddir = os.path.abspath(str(builddir))
-        self.chrootpath = os.path.join(self.builddir, "chroot")
-        self.targetpath = os.path.join(self.builddir, "target")
-        self.sysrootpath = os.path.join(self.builddir, "sysroot")
-        self.sdkpath = os.path.join(self.builddir, "sdk")
-        self.validationpath = os.path.join(self.builddir, "validation.txt")
+        self.chrootpath = os.path.join(self.builddir, 'chroot')
+        self.targetpath = os.path.join(self.builddir, 'target')
+        self.sysrootpath = os.path.join(self.builddir, 'sysroot')
+        self.sdkpath = os.path.join(self.builddir, 'sdk')
+        self.validationpath = os.path.join(self.builddir, 'validation.txt')
 
         self.name = name
         self.override_buildtype = override_buildtype
@@ -162,21 +162,21 @@ class ElbeProject:
                 skip_validate=skip_validate,
                 url_validation=url_validation)
         else:
-            sourcexmlpath = os.path.join(self.builddir, "source.xml")
+            sourcexmlpath = os.path.join(self.builddir, 'source.xml')
             self.xml = ElbeXML(
                 sourcexmlpath,
                 buildtype=override_buildtype,
                 skip_validate=skip_validate,
                 url_validation=url_validation)
 
-        self.arch = self.xml.text("project/arch", key="arch")
-        self.codename = self.xml.text("project/suite")
+        self.arch = self.xml.text('project/arch', key='arch')
+        self.codename = self.xml.text('project/suite')
 
         if not self.name:
-            self.name = self.xml.text("project/name")
+            self.name = self.xml.text('project/name')
 
         self.repo = ProjectRepo(self.arch, self.codename,
-                                os.path.join(self.builddir, "repo"))
+                                os.path.join(self.builddir, 'repo'))
 
         # Create BuildEnv instance, if the chroot directory exists and
         # has an etc/elbe_version
@@ -204,14 +204,14 @@ class ElbeProject:
         self.host_sysrootenv = None
 
     def build_chroottarball(self):
-        do(f"tar cJf {self.builddir}/chroot.tar.xz "
-           "--exclude=./tmp/*  --exclude=./dev/* "
-           "--exclude=./run/*  --exclude=./sys/* "
-           "--exclude=./proc/* --exclude=./var/cache/* "
-           f"-C {self.chrootpath} .")
+        do(f'tar cJf {self.builddir}/chroot.tar.xz '
+           '--exclude=./tmp/*  --exclude=./dev/* '
+           '--exclude=./run/*  --exclude=./sys/* '
+           '--exclude=./proc/* --exclude=./var/cache/* '
+           f'-C {self.chrootpath} .')
 
     def get_sysroot_paths(self):
-        triplet = self.xml.defs["triplet"]
+        triplet = self.xml.defs['triplet']
 
         paths = [
             './usr/include',
@@ -243,7 +243,7 @@ class ElbeProject:
                                    clean=True)
         # Import keyring
         self.sysrootenv.import_keys()
-        logging.info("Keys imported")
+        logging.info('Keys imported')
 
         self.xml.add_target_package("libc-bin")
         self.xml.add_target_package("libc6-dbg")
@@ -253,12 +253,12 @@ class ElbeProject:
 
         # ignore packages from debootstrap
         tpkgs = self.xml.get_target_packages()
-        bspkgs = self.xml.node("debootstrappkgs")
+        bspkgs = self.xml.node('debootstrappkgs')
         ignore_pkgs = [p.et.text for p in bspkgs if p.et.text not in tpkgs]
         ignore_dev_pkgs = []
         if self.xml.has('target/pkg-blacklist/sysroot'):
             ignore_dev_pkgs = [p.et.text for p in self.xml.node(
-                "target/pkg-blacklist/sysroot")]
+                'target/pkg-blacklist/sysroot')]
 
         with self.sysrootenv:
             try:
@@ -271,29 +271,29 @@ class ElbeProject:
                 cache.mark_install_devpkgs(set(ignore_pkgs),
                                            set(ignore_dev_pkgs))
             except SystemError:
-                logging.exception("Mark install devpkgs failed")
+                logging.exception('Mark install devpkgs failed')
             try:
                 cache.commit()
             except SystemError as e:
-                logging.exception("Commiting changes failed")
+                logging.exception('Commiting changes failed')
                 raise AptCacheCommitError(str(e))
 
-            self.gen_licenses("sysroot-target", self.sysrootenv,
+            self.gen_licenses('sysroot-target', self.sysrootenv,
                               [p.name for p in cache.get_installed_pkgs()])
 
         try:
             self.sysrootenv.rfs.dump_elbeversion(self.xml)
         except IOError:
-            logging.exception("Dump elbeversion into sysroot failed")
+            logging.exception('Dump elbeversion into sysroot failed')
 
-        sysrootfilelist = os.path.join(self.builddir, "sysroot-filelist")
+        sysrootfilelist = os.path.join(self.builddir, 'sysroot-filelist')
 
         with self.sysrootenv.rfs:
-            chroot(self.sysrootpath, "/usr/bin/symlinks -cr /usr/lib")
+            chroot(self.sysrootpath, '/usr/bin/symlinks -cr /usr/lib')
 
         paths = self.get_sysroot_paths()
 
-        do(f"rm {sysrootfilelist}", allow_fail=True)
+        do(f'rm {sysrootfilelist}', allow_fail=True)
         os.chdir(self.sysrootpath)
         for p in paths:
             do(f'find -path "{p}" >> {sysrootfilelist}')
@@ -306,8 +306,8 @@ class ElbeProject:
                 filelist_fd.write('./sbin\n')
 
         do(
-            f"tar cfJ {self.builddir}/sysroot.tar.xz "
-            f"-C {self.sysrootpath} -T {sysrootfilelist}")
+            f'tar cfJ {self.builddir}/sysroot.tar.xz '
+            f'-C {self.sysrootpath} -T {sysrootfilelist}')
 
     def build_host_sysroot(self, pkgs, hostsysrootpath):
         do(f'rm -rf {hostsysrootpath}; mkdir "{hostsysrootpath}"')
@@ -315,11 +315,11 @@ class ElbeProject:
         self.host_sysrootenv = BuildEnv(self.xml,
                                         hostsysrootpath,
                                         clean=True,
-                                        arch="amd64",
+                                        arch='amd64',
                                         hostsysroot=True)
         # Import keyring
         self.host_sysrootenv.import_keys()
-        logging.info("Keys imported")
+        logging.info('Keys imported')
 
         with self.host_sysrootenv:
 
@@ -334,19 +334,19 @@ class ElbeProject:
                 try:
                     cache.mark_install(p, None)
                 except KeyError:
-                    logging.exception("No Package %s", p)
+                    logging.exception('No Package %s', p)
                 except SystemError:
-                    logging.exception("Unable to correct problems in "
-                                      "package %s",
+                    logging.exception('Unable to correct problems in '
+                                      'package %s',
                                       p)
 
             try:
                 cache.commit()
             except SystemError as e:
-                logging.exception("Commiting changes failed")
+                logging.exception('Commiting changes failed')
                 raise AptCacheCommitError(str(e))
 
-            self.gen_licenses("sysroot-host", self.host_sysrootenv,
+            self.gen_licenses('sysroot-host', self.host_sysrootenv,
                               [p.name for p in cache.get_installed_pkgs()])
 
         # This is just a sysroot, some directories
@@ -368,8 +368,8 @@ class ElbeProject:
         self.host_sysrootenv.rfs.rmtree('/var')
 
     def build_sdk(self):
-        triplet = self.xml.defs["triplet"]
-        elfcode = self.xml.defs["elfcode"]
+        triplet = self.xml.defs['triplet']
+        elfcode = self.xml.defs['elfcode']
 
         host_pkglist = []
         if self.xml.tgt.has('hostsdk-pkg-list'):
@@ -378,7 +378,7 @@ class ElbeProject:
                     host_pkglist.append(p.et.text.strip())
         else:
             try:
-                host_pkglist.append(self.xml.defs["sdkgccpkg"])
+                host_pkglist.append(self.xml.defs['sdkgccpkg'])
             except KeyError:
                 raise UnsupportedSDKException(triplet)
 
@@ -386,9 +386,9 @@ class ElbeProject:
 
         # build target sysroot including libs and headers for the target
         self.build_sysroot()
-        sdktargetpath = os.path.join(self.sdkpath, "sysroots", "target")
-        do(f"mkdir -p {sdktargetpath}")
-        do(f"tar xJf {self.builddir}/sysroot.tar.xz -C {sdktargetpath}")
+        sdktargetpath = os.path.join(self.sdkpath, 'sysroots', 'target')
+        do(f'mkdir -p {sdktargetpath}')
+        do(f'tar xJf {self.builddir}/sysroot.tar.xz -C {sdktargetpath}')
         # build host sysroot including cross compiler
         hostsysrootpath = os.path.join(self.sdkpath, 'sysroots', 'host')
 
@@ -397,32 +397,32 @@ class ElbeProject:
         n = gen_sdk_scripts(triplet,
                             elfcode,
                             self.name,
-                            self.xml.text("project/version"),
+                            self.xml.text('project/version'),
                             self.builddir,
                             self.sdkpath)
 
         # create sdk tar and append it to setup script
-        do(f"cd {self.sdkpath}; tar cJf ../sdk.txz .")
-        do(f"cd {self.builddir}; rm -rf sdk")
-        do(f"cd {self.builddir}; cat sdk.txz >> {n}")
-        do(f"cd {self.builddir}; chmod +x {n}")
-        do(f"cd {self.builddir}; rm sdk.txz")
+        do(f'cd {self.sdkpath}; tar cJf ../sdk.txz .')
+        do(f'cd {self.builddir}; rm -rf sdk')
+        do(f'cd {self.builddir}; cat sdk.txz >> {n}')
+        do(f'cd {self.builddir}; chmod +x {n}')
+        do(f'cd {self.builddir}; rm sdk.txz')
 
     def pbuild(self, p):
         self.pdebuild_init()
-        os.mkdir(os.path.join(self.builddir, "pdebuilder"))
-        src_path = os.path.join(self.builddir, "pdebuilder", "current")
+        os.mkdir(os.path.join(self.builddir, 'pdebuilder'))
+        src_path = os.path.join(self.builddir, 'pdebuilder', 'current')
 
-        src_uri = p.text('.').replace("LOCALMACHINE", "10.0.2.2").strip()
-        logging.info("Retrieve pbuild sources: %s",  src_uri)
+        src_uri = p.text('.').replace('LOCALMACHINE', '10.0.2.2').strip()
+        logging.info('Retrieve pbuild sources: %s',  src_uri)
         if p.tag == 'git':
-            do(f"git clone {src_uri} {src_path}")
+            do(f'git clone {src_uri} {src_path}')
             try:
                 do(f"cd {src_path}; git reset --hard {p.et.attrib['revision']}")
             except IndexError:
                 pass
         elif p.tag == 'svn':
-            do(f"svn co --non-interactive {src_uri} {src_path}")
+            do(f'svn co --non-interactive {src_uri} {src_path}')
         elif p.tag == 'src-pkg':
             apt_args = '--yes -q --download-only'
             if self.xml.prj.has('noauth'):
@@ -432,10 +432,10 @@ class ElbeProject:
 
             do(f'dpkg-source -x {self.chrootpath}/*.dsc "{src_path}"; rm {self.chrootpath}/*.dsc')
         else:
-            logging.info("Unknown pbuild source: %s", p.tag)
+            logging.info('Unknown pbuild source: %s', p.tag)
 
         # pdebuild_build(-1) means use all cpus
-        self.pdebuild_build(cpuset=-1, profile="", cross=False)
+        self.pdebuild_build(cpuset=-1, profile='', cross=False)
 
     def build_cdroms(self, build_bin=True,
                      build_sources=False, cdrom_size=None,
@@ -444,9 +444,9 @@ class ElbeProject:
         self.repo_images = []
 
         env = None
-        sysrootstr = ""
+        sysrootstr = ''
         if os.path.exists(self.sysrootpath):
-            sysrootstr = "(including sysroot packages)"
+            sysrootstr = '(including sysroot packages)'
             env = BuildEnv(self.xml, self.sysrootpath,
                            build_sources=build_sources, clean=False)
         else:
@@ -465,7 +465,7 @@ class ElbeProject:
             init_codename = self.xml.get_initvm_codename()
 
             if build_bin:
-                validation.info("Binary CD %s", sysrootstr)
+                validation.info('Binary CD %s', sysrootstr)
 
                 self.repo_images += mk_binary_cdrom(env.rfs,
                                                     self.arch,
@@ -474,34 +474,34 @@ class ElbeProject:
                                                     self.xml,
                                                     self.builddir)
             if build_sources:
-                if not cdrom_size and self.xml.has("src-cdrom/size"):
-                    cdrom_size = size_to_int(self.xml.text("src-cdrom/size"))
+                if not cdrom_size and self.xml.has('src-cdrom/size'):
+                    cdrom_size = size_to_int(self.xml.text('src-cdrom/size'))
 
-                validation.info("Source CD %s", sysrootstr)
+                validation.info('Source CD %s', sysrootstr)
 
                 # Target component
                 cache = self.get_rpcaptcache(env=self.buildenv)
                 tgt_lst = cache.get_corresponding_source_packages(pkg_lst=tgt_pkg_lst)
-                components = {"target": (self.buildenv.rfs, cache, tgt_lst)}
+                components = {'target': (self.buildenv.rfs, cache, tgt_lst)}
 
                 # Main component
                 main_lst = []
                 if self.xml is not None:
                     tmp_lst = []
-                    for pkg_node in self.xml.node("debootstrappkgs"):
+                    for pkg_node in self.xml.node('debootstrappkgs'):
                         pkg = XMLPackage(pkg_node, self.arch)
                         tmp_lst.append(pkg.name)
                     main_lst = cache.get_corresponding_source_packages(pkg_lst=tmp_lst)
-                components["main"] = (env.rfs, cache, main_lst)
+                components['main'] = (env.rfs, cache, main_lst)
 
                 # Added component
-                other_components = [(env, "added")]
+                other_components = [(env, 'added')]
 
                 # Let's build a list of (build_env, name) for the
                 # other RFS if they exist
-                host_sysroot_path = os.path.join(self.sdkpath, "sysroots", "host")
-                for path, name in [(self.chrootpath, "chroot"),
-                                   (host_sysroot_path, "sysroot-host")]:
+                host_sysroot_path = os.path.join(self.sdkpath, 'sysroots', 'host')
+                for path, name in [(self.chrootpath, 'chroot'),
+                                   (host_sysroot_path, 'sysroot-host')]:
                     if os.path.exists(path) and env.path != path:
                         tmp_env = BuildEnv(self.xml, path)
                         with tmp_env:
@@ -519,12 +519,12 @@ class ElbeProject:
                     # Using kwargs here allows us to avoid making
                     # special case for when self.xml is None
                     kwargs = {
-                        "cdrom_size": cdrom_size,
-                        "xml": self.xml
+                        'cdrom_size': cdrom_size,
+                        'xml': self.xml
                         }
 
                     if self.xml is not None:
-                        kwargs["mirror"] = self.xml.get_primary_mirror(env.rfs.fname("cdrom"))
+                        kwargs['mirror'] = self.xml.get_primary_mirror(env.rfs.fname('cdrom'))
 
                     for iso in mk_source_cdrom(components,
                                                self.codename,
@@ -562,12 +562,12 @@ class ElbeProject:
 
         # Import keyring
         self.buildenv.import_keys()
-        logging.info("Keys imported")
+        logging.info('Keys imported')
 
         if self.xml.has('target/pbuilder') and not skip_pbuild:
-            if not os.path.exists(os.path.join(self.builddir, "pbuilder")):
+            if not os.path.exists(os.path.join(self.builddir, 'pbuilder')):
                 self.create_pbuilder(cross=False, noccache=False,
-                                     ccachesize="10G")
+                                     ccachesize='10G')
             for p in self.xml.node('target/pbuilder'):
                 self.pbuild(p)
                 # the package might be needed by a following pbuild, so update
@@ -587,7 +587,7 @@ class ElbeProject:
         try:
             self.buildenv.rfs.dump_elbeversion(self.xml)
         except IOError:
-            logging.exception("Dump elbeversion failed")
+            logging.exception('Dump elbeversion failed')
 
         # Extract target FS. We always create a new instance here with
         # clean=true, because we want a pristine directory.
@@ -599,9 +599,9 @@ class ElbeProject:
 
         # Package validation and package list
         if not skip_pkglist:
-            pkgs = self.xml.xml.node("/target/pkg-list")
-            if self.xml.has("fullpkgs"):
-                check_full_pkgs(pkgs, self.xml.xml.node("/fullpkgs"),
+            pkgs = self.xml.xml.node('/target/pkg-list')
+            if self.xml.has('fullpkgs'):
+                check_full_pkgs(pkgs, self.xml.xml.node('/fullpkgs'),
                                 self.get_rpcaptcache())
             else:
                 check_full_pkgs(pkgs, None, self.get_rpcaptcache())
@@ -615,7 +615,7 @@ class ElbeProject:
         try:
             self.targetfs.dump_elbeversion(self.xml)
         except MemoryError:
-            logging.exception("Dump elbeversion failed")
+            logging.exception('Dump elbeversion failed')
 
         # install packages for buildenv
         if not skip_pkglist:
@@ -623,45 +623,45 @@ class ElbeProject:
 
         # Write source.xml
         try:
-            sourcexmlpath = os.path.join(self.builddir, "source.xml")
+            sourcexmlpath = os.path.join(self.builddir, 'source.xml')
             self.xml.xml.write(sourcexmlpath)
         except MemoryError:
-            logging.exception("Write source.xml failed (archive to huge?)")
+            logging.exception('Write source.xml failed (archive to huge?)')
 
         # Elbe report
         cache = self.get_rpcaptcache()
         tgt_pkgs = elbe_report(self.xml, self.buildenv, cache, self.targetfs)
 
         # chroot' licenses
-        self.gen_licenses("chroot", self.buildenv,
+        self.gen_licenses('chroot', self.buildenv,
                           [p.name for p in cache.get_installed_pkgs()])
 
-        self.gen_licenses("target", self.buildenv, tgt_pkgs)
+        self.gen_licenses('target', self.buildenv, tgt_pkgs)
 
         # Use some handwaving to determine grub version
-        grub_arch = "ia32" if self.arch == "i386" else self.arch
+        grub_arch = 'ia32' if self.arch == 'i386' else self.arch
         grub_fw_type = []
         grub_version = 0
         if self.get_rpcaptcache().is_installed('grub-pc'):
             grub_version = 202
-            grub_fw_type.append("bios")
+            grub_fw_type.append('bios')
         if self.get_rpcaptcache().is_installed(f'grub-efi-{grub_arch}-bin'):
             grub_version = 202
-            grub_tgt = "x86_64" if self.arch == "amd64" else self.arch
-            grub_fw_type.extend(["efi", grub_tgt + "-efi"])
+            grub_tgt = 'x86_64' if self.arch == 'amd64' else self.arch
+            grub_fw_type.extend(['efi', grub_tgt + '-efi'])
         if (self.get_rpcaptcache().is_installed('shim-signed') and
                 self.get_rpcaptcache().is_installed(
                     f'grub-efi-{grub_arch}-signed')):
             grub_version = 202
-            grub_fw_type.append("shimfix")
+            grub_fw_type.append('shimfix')
         if self.get_rpcaptcache().is_installed('grub-legacy'):
-            logging.warning("package grub-legacy is installed, "
-                            "this is obsolete.")
+            logging.warning('package grub-legacy is installed, '
+                            'this is obsolete.')
             grub_version = 97
-            grub_fw_type.append("bios")
+            grub_fw_type.append('bios')
         elif not grub_fw_type:
-            logging.warning("neither package grub-pc nor grub-efi-%s-bin "
-                            "are installed, skipping grub",
+            logging.warning('neither package grub-pc nor grub-efi-%s-bin '
+                            'are installed, skipping grub',
                             grub_arch)
 
         self.targetfs.part_target(self.builddir, grub_version, grub_fw_type)
@@ -669,7 +669,7 @@ class ElbeProject:
         self.build_cdroms(build_bin, build_sources, cdrom_size, tgt_pkg_lst=tgt_pkgs)
 
         if self.postbuild_file:
-            logging.info("Postbuild script")
+            logging.info('Postbuild script')
             cmd = (f' "{self.builddir} {self.xml.text("project/version")} '
                    f'{self.xml.text("project/name")}"')
             do(self.postbuild_file + cmd, allow_fail=True)
@@ -694,7 +694,7 @@ class ElbeProject:
             f'"{os.path.join(self.builddir, "pbuilder_cross","result")}"')
 
         # Recreate the directories removed
-        if os.path.exists(os.path.join(self.builddir, "pbuilder_cross")):
+        if os.path.exists(os.path.join(self.builddir, 'pbuilder_cross')):
             do(
                 'mkdir -p '
                 f'"{os.path.join(self.builddir, "pbuilder_cross","result")}"')
@@ -704,30 +704,30 @@ class ElbeProject:
                 f'"{os.path.join(self.builddir, "pbuilder", "result")}"')
 
     def pdebuild(self, cpuset, profile, cross):
-        cross_pbuilderrc = os.path.join(self.builddir, "cross_pbuilderrc")
+        cross_pbuilderrc = os.path.join(self.builddir, 'cross_pbuilderrc')
         if cross and not os.path.exists(cross_pbuilderrc):
-            logging.error("Please make sure that you create the pbuilder "
-                          "environment with the --cross option if you want to "
-                          "use the build command with --cross.")
+            logging.error('Please make sure that you create the pbuilder '
+                          'environment with the --cross option if you want to '
+                          'use the build command with --cross.')
             sys.exit(116)
 
         if os.path.exists(cross_pbuilderrc) and not cross:
-            logging.error("Please make sure that if you created the pbuilder "
-                          "environment without the --cross option, you use the "
-                          "build command without --cross too.")
+            logging.error('Please make sure that if you created the pbuilder '
+                          'environment without the --cross option, you use the '
+                          'build command without --cross too.')
             sys.exit(117)
 
         self.pdebuild_init()
 
-        pbdir = os.path.join(self.builddir, "pdebuilder", "current")
+        pbdir = os.path.join(self.builddir, 'pdebuilder', 'current')
         do(f'mkdir -p "{os.path.join(pbdir)}"')
 
         # create .gitconfig and declare pdebuilder/current directory as safe
-        git_file_name = os.path.join(self.builddir, "pdebuilder", ".gitconfig")
-        git_safe_dir = os.path.join(self.builddir, "pdebuilder", "current")
-        with open(git_file_name, "w", encoding="ascii") as git_file:
-            git_file.write("[safe]\n")
-            git_file.write(f"\tdirectory = {git_safe_dir}\n")
+        git_file_name = os.path.join(self.builddir, 'pdebuilder', '.gitconfig')
+        git_safe_dir = os.path.join(self.builddir, 'pdebuilder', 'current')
+        with open(git_file_name, 'w', encoding='ascii') as git_file:
+            git_file.write('[safe]\n')
+            git_file.write(f'\tdirectory = {git_safe_dir}\n')
 
         # Untar current_pdebuild.tar.gz into pdebuilder/current
         do(
@@ -749,24 +749,24 @@ class ElbeProject:
             # cpuset == -1 means empty cpuset_cmd
             cpuset_cmd = ''
 
-        profile_list = profile.split(",")
-        deb_build_opts = [i for i in profile_list if i in ("nodoc", "nocheck")]
+        profile_list = profile.split(',')
+        deb_build_opts = [i for i in profile_list if i in ('nodoc', 'nocheck')]
 
-        pdebuilder_current = os.path.join(self.builddir, "pdebuilder", "current")
+        pdebuilder_current = os.path.join(self.builddir, 'pdebuilder', 'current')
 
-        formatfile = ""
+        formatfile = ''
 
-        if os.path.exists(os.path.join(pdebuilder_current, "debian", "source", "format")):
+        if os.path.exists(os.path.join(pdebuilder_current, 'debian', 'source', 'format')):
             formatfile = open(os.path.join(pdebuilder_current,
-                              "debian", "source", "format"), "r").read()
+                              'debian', 'source', 'format'), 'r').read()
 
         src_pkg_name = open(os.path.join(pdebuilder_current,
-                            "debian", "changelog"), "r").readline().split()[0]
+                            'debian', 'changelog'), 'r').readline().split()[0]
 
-        if "3.0 (quilt)" in formatfile and not self.orig_files:
-            do(f"cd {pdebuilder_current}; origtargz --download-only --tar-only")
+        if '3.0 (quilt)' in formatfile and not self.orig_files:
+            do(f'cd {pdebuilder_current}; origtargz --download-only --tar-only')
             self.orig_files = glob.glob(
-                f"{pdebuilder_current}/../{src_pkg_name}*.orig.*")
+                f'{pdebuilder_current}/../{src_pkg_name}*.orig.*')
         else:
             try:
                 for orig_fname in self.orig_files:
@@ -788,33 +788,33 @@ class ElbeProject:
                    f'--basetgz "{os.path.join(self.builddir, "pbuilder_cross", "base.tgz")}" '
                    f'--buildresult "{os.path.join(self.builddir, "pbuilder_cross", "result")}" '
                    '../*.dsc',
-                   env_add={'DEB_BUILD_PROFILES': profile.replace(",", " "),
-                            'DEB_BUILD_OPTIONS': " ".join(deb_build_opts)})
-                pbuilderdir = "pbuilder_cross"
+                   env_add={'DEB_BUILD_PROFILES': profile.replace(',', ' '),
+                            'DEB_BUILD_OPTIONS': ' '.join(deb_build_opts)})
+                pbuilderdir = 'pbuilder_cross'
             else:
                 do(f'cd "{os.path.join(self.builddir, "pdebuilder", "current")}"; '
                    f'{cpuset_cmd} pdebuild --debbuildopts "-j{cfg["pbuilder_jobs"]} -sa" '
                    f'--configfile "{os.path.join(self.builddir, "pbuilderrc")}" '
                    '--use-pdebuild-internal '
                    f'--buildresult "{os.path.join(self.builddir, "pbuilder", "result")}"',
-                   env_add={'DEB_BUILD_PROFILES': profile.replace(",", " "),
-                            'DEB_BUILD_OPTIONS': " ".join(deb_build_opts)})
-                pbuilderdir = "pbuilder"
+                   env_add={'DEB_BUILD_PROFILES': profile.replace(',', ' '),
+                            'DEB_BUILD_OPTIONS': ' '.join(deb_build_opts)})
+                pbuilderdir = 'pbuilder'
 
             self.repo.remove(os.path.join(self.builddir,
-                                          "pdebuilder",
-                                          "current",
-                                          "debian",
-                                          "control"))
+                                          'pdebuilder',
+                                          'current',
+                                          'debian',
+                                          'control'))
 
             self.repo.include(os.path.join(self.builddir,
                                            pbuilderdir,
-                                           "result",
-                                           "*.changes"))
+                                           'result',
+                                           '*.changes'))
         except CommandError:
-            logging.exception("Package fails to build.\n"
-                              "Please make sure, that the submitted package "
-                              "builds in pbuilder")
+            logging.exception('Package fails to build.\n'
+                              'Please make sure, that the submitted package '
+                              'builds in pbuilder')
         finally:
             self.orig_fname = None
             self.orig_files = []
@@ -840,11 +840,11 @@ class ElbeProject:
         do(f'mkdir -p "{os.path.join(self.builddir, "aptconfdir", "apt.conf.d")}"')
 
         if not noccache:
-            ccache_path = os.path.join(self.builddir, "ccache")
+            ccache_path = os.path.join(self.builddir, 'ccache')
             do(f'mkdir -p "{ccache_path}"')
             do(f'chmod a+w "{ccache_path}"')
-            ccache_fp = open(os.path.join(ccache_path, "ccache.conf"), "w")
-            ccache_fp.write(f"max_size = {ccachesize}")
+            ccache_fp = open(os.path.join(ccache_path, 'ccache.conf'), 'w')
+            ccache_fp.write(f'max_size = {ccachesize}')
             ccache_fp.close()
 
         # write config files
@@ -861,9 +861,9 @@ class ElbeProject:
         pbuilder_write_apt_conf(self.builddir, self.xml)
 
         # Run pbuilder --create
-        no_check_gpg = ""
+        no_check_gpg = ''
         if self.xml.prj.has('noauth'):
-            no_check_gpg = "--debootstrapopts --no-check-gpg"
+            no_check_gpg = '--debootstrapopts --no-check-gpg'
         if cross:
             do('pbuilder --create '
                f'--buildplace "{os.path.join(self.builddir, "pbuilder_cross")}" '
@@ -878,10 +878,10 @@ class ElbeProject:
 
     def sync_xml_to_disk(self):
         try:
-            sourcexmlpath = os.path.join(self.builddir, "source.xml")
+            sourcexmlpath = os.path.join(self.builddir, 'source.xml')
             self.xml.xml.write(sourcexmlpath)
         except MemoryError:
-            logging.exception("write source.xml failed (archive to huge?)")
+            logging.exception('write source.xml failed (archive to huge?)')
 
     def get_rpcaptcache(self, env=None, norecommend=None):
         if not env:
@@ -890,7 +890,7 @@ class ElbeProject:
         if norecommend is None:
             norecommend = not self.xml.prj.has('install-recommends')
 
-        if env.arch == "default":
+        if env.arch == 'default':
             arch = self.arch
         else:
             arch = env.arch
@@ -910,12 +910,12 @@ class ElbeProject:
     def has_full_buildenv(self):
         if os.path.exists(self.chrootpath):
             elbeversionpath = os.path.join(self.chrootpath,
-                                           "etc", "elbe_version")
+                                           'etc', 'elbe_version')
             if os.path.isfile(elbeversionpath):
                 return True
 
-            logging.warning("%s exists, but it does not have "
-                            "an etc/elbe_version file.", self.chrootpath)
+            logging.warning('%s exists, but it does not have '
+                            'an etc/elbe_version file.', self.chrootpath)
             # Apparently we do not have a functional build environment
             return False
 
@@ -924,15 +924,15 @@ class ElbeProject:
     def set_xml(self, xmlpath):
         # Use supplied XML file, if given, otherwise change to source.xml
         if not xmlpath:
-            xmlpath = os.path.join(self.builddir, "source.xml")
+            xmlpath = os.path.join(self.builddir, 'source.xml')
 
         newxml = ElbeXML(xmlpath, buildtype=self.override_buildtype,
                          skip_validate=self.skip_validate,
                          url_validation=self.url_validation)
 
         # New XML file has to have the same architecture
-        oldarch = self.xml.text("project/arch", key="arch")
-        newarch = newxml.text("project/arch", key="arch")
+        oldarch = self.xml.text('project/arch', key='arch')
+        newarch = newxml.text('project/arch', key='arch')
         if newarch != oldarch:
             raise IncompatibleArchitectureException(oldarch, newarch)
 
@@ -965,39 +965,39 @@ class ElbeProject:
 
     def write_log_header(self):
 
-        logging.info("ELBE Report for Project %s\n"
-                     "Report timestamp: %s", self.name,
-                     datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
+        logging.info('ELBE Report for Project %s\n'
+                     'Report timestamp: %s', self.name,
+                     datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))
 
     def copy_initvmnode(self):
-        source_path = "/var/cache/elbe/source.xml"
+        source_path = '/var/cache/elbe/source.xml'
         try:
             initxml = ElbeXML(source_path,
                               skip_validate=self.skip_validate,
                               url_validation=ValidationMode.NO_CHECK)
             self.xml.get_initvmnode_from(initxml)
         except ValidationError:
-            logging.exception("%s validation failed.  "
-                              "Will not copy initvm node", source_path)
+            logging.exception('%s validation failed.  '
+                              'Will not copy initvm node', source_path)
         except IOError:
-            logging.exception("%s not available.  "
-                              "Can not copy initvm node", source_path)
+            logging.exception('%s not available.  '
+                              'Can not copy initvm node', source_path)
         except NoInitvmNode:
-            logging.exception("%s is available.  But it does not "
-                              "contain an initvm node", source_path)
+            logging.exception('%s is available.  But it does not '
+                              'contain an initvm node', source_path)
 
     def install_packages(self, target, buildenv=False):
 
         # to workaround debian bug no. 872543
         if self.xml.prj.has('noauth'):
-            inrelease = glob.glob(f"{self.chrootpath}/var/lib/apt/lists/*InRelease")
-            release_gpg = glob.glob(f"{self.chrootpath}/var/lib/apt/lists/*.gpg")
+            inrelease = glob.glob(f'{self.chrootpath}/var/lib/apt/lists/*InRelease')
+            release_gpg = glob.glob(f'{self.chrootpath}/var/lib/apt/lists/*.gpg')
             if inrelease:
-                system(f"rm {inrelease[0]};")
-                logging.info("Removed InRelease file!")
+                system(f'rm {inrelease[0]};')
+                logging.info('Removed InRelease file!')
             if release_gpg:
-                system(f"rm {release_gpg[0]};")
-                logging.info("Removed Release.gpg file!")
+                system(f'rm {release_gpg[0]};')
+                logging.info('Removed Release.gpg file!')
 
         with target:
             # First update the apt cache
@@ -1016,7 +1016,7 @@ class ElbeProject:
 
                 self.copy_initvmnode()
             else:
-                sourcepath = os.path.join(self.builddir, "source.xml")
+                sourcepath = os.path.join(self.builddir, 'source.xml')
                 source = ElbeXML(sourcepath,
                                  buildtype=self.override_buildtype,
                                  skip_validate=self.skip_validate,
@@ -1026,8 +1026,8 @@ class ElbeProject:
                 try:
                     self.xml.get_initvmnode_from(source)
                 except NoInitvmNode:
-                    logging.warning("source.xml is available. "
-                                    "But it does not contain an initvm node")
+                    logging.warning('source.xml is available. '
+                                    'But it does not contain an initvm node')
                     self.copy_initvmnode()
 
             # Seed /etc, we need /etc/hosts for hostname -f to work correctly
@@ -1037,7 +1037,7 @@ class ElbeProject:
             # remove all non-essential packages to ensure that on a incremental
             # build packages can be removed
             debootstrap_pkgs = []
-            for p in self.xml.node("debootstrappkgs"):
+            for p in self.xml.node('debootstrappkgs'):
                 debootstrap_pkgs.append(p.et.text)
 
             pkgs = target.xml.get_target_packages() + debootstrap_pkgs
@@ -1050,10 +1050,10 @@ class ElbeProject:
                 try:
                     self.get_rpcaptcache(env=target).mark_install(p, None)
                 except KeyError:
-                    logging.exception("No Package %s", p)
+                    logging.exception('No Package %s', p)
                 except SystemError:
-                    logging.exception("Unable to correct problems "
-                                      "in package %s",
+                    logging.exception('Unable to correct problems '
+                                      'in package %s',
                                       p)
 
             # temporary disabled because of
@@ -1064,13 +1064,13 @@ class ElbeProject:
             try:
                 self.get_rpcaptcache(env=target).commit()
             except SystemError as e:
-                logging.exception("Commiting changes failed")
+                logging.exception('Commiting changes failed')
                 raise AptCacheCommitError(str(e))
 
     def gen_licenses(self, rfs, env, pkg_list):
 
-        lic_txt_fname = os.path.join(self.builddir, f"licence-{rfs}.txt")
-        lic_xml_fname = os.path.join(self.builddir, f"licence-{rfs}.xml")
+        lic_txt_fname = os.path.join(self.builddir, f'licence-{rfs}.txt')
+        lic_xml_fname = os.path.join(self.builddir, f'licence-{rfs}.xml')
         pkg_list.sort()
 
         with io.open(lic_txt_fname, 'w+',
diff --git a/elbepack/elbexml.py b/elbepack/elbexml.py
index 393ed2f5..12f92b55 100644
--- a/elbepack/elbexml.py
+++ b/elbepack/elbexml.py
@@ -25,13 +25,13 @@ class ValidationError(Exception):
         self.validation = validation
 
     def __repr__(self):
-        rep = "Elbe XML Validation Error\n"
+        rep = 'Elbe XML Validation Error\n'
         for v in self.validation:
             rep += (v + '\n')
         return rep
 
     def __str__(self):
-        retval = ""
+        retval = ''
         for v in self.validation:
             retval += (v + '\n')
         return retval
@@ -49,11 +49,11 @@ class ValidationMode:
 
 def replace_localmachine(mirror, initvm=True):
     if initvm:
-        localmachine = "10.0.2.2"
+        localmachine = '10.0.2.2'
     else:
-        localmachine = "localhost"
+        localmachine = 'localhost'
 
-    return mirror.replace("LOCALMACHINE", localmachine)
+    return mirror.replace('LOCALMACHINE', localmachine)
 
 
 class ElbeXML:
@@ -70,19 +70,19 @@ class ElbeXML:
                 raise ValidationError(validation)
 
         self.xml = etree(fname)
-        self.prj = self.xml.node("/project")
-        self.tgt = self.xml.node("/target")
+        self.prj = self.xml.node('/project')
+        self.tgt = self.xml.node('/target')
 
         if buildtype:
             pass
-        elif self.xml.has("project/buildtype"):
-            buildtype = self.xml.text("/project/buildtype")
+        elif self.xml.has('project/buildtype'):
+            buildtype = self.xml.text('/project/buildtype')
         else:
-            buildtype = "nodefaults"
+            buildtype = 'nodefaults'
         self.defs = ElbeDefaults(buildtype)
 
         if not skip_validate and url_validation != ValidationMode.NO_CHECK:
-            self.validate_apt_sources(url_validation, self.defs["arch"])
+            self.validate_apt_sources(url_validation, self.defs['arch'])
 
     def text(self, txt, key=None):
         if key:
@@ -100,43 +100,43 @@ class ElbeXML:
 
     def is_cross(self, host_arch):
 
-        target = self.text("project/buildimage/arch", key="arch")
+        target = self.text('project/buildimage/arch', key='arch')
 
         if host_arch == target:
             return False
 
-        if (host_arch == "amd64") and (target == "i386"):
+        if (host_arch == 'amd64') and (target == 'i386'):
             return False
 
-        if (host_arch == "armhf") and (target == "armel"):
+        if (host_arch == 'armhf') and (target == 'armel'):
             return False
 
         return True
 
     def get_initvm_primary_mirror(self, cdrompath):
-        if self.xml.has("initvm/mirror/primary_host"):
-            m = self.node("initvm/mirror")
+        if self.xml.has('initvm/mirror/primary_host'):
+            m = self.node('initvm/mirror')
 
-            mirror = m.text("primary_proto") + "://"
-            mirror += f"{m.text('primary_host')}/{m.text('primary_path')}".replace("//", "/")
+            mirror = m.text('primary_proto') + '://'
+            mirror += f"{m.text('primary_host')}/{m.text('primary_path')}".replace('//', '/')
 
-        elif self.xml.has("initvm/mirror/cdrom") and cdrompath:
-            mirror = f"file://{cdrompath}"
+        elif self.xml.has('initvm/mirror/cdrom') and cdrompath:
+            mirror = f'file://{cdrompath}'
 
-        return mirror.replace("LOCALMACHINE", "10.0.2.2")
+        return mirror.replace('LOCALMACHINE', '10.0.2.2')
 
     def get_primary_mirror(self, cdrompath, initvm=True, hostsysroot=False):
-        if self.prj.has("mirror/primary_host"):
-            m = self.prj.node("mirror")
+        if self.prj.has('mirror/primary_host'):
+            m = self.prj.node('mirror')
 
-            if hostsysroot and self.prj.has("mirror/host"):
-                mirror = m.text("host")
+            if hostsysroot and self.prj.has('mirror/host'):
+                mirror = m.text('host')
             else:
-                mirror = m.text("primary_proto") + "://"
-                mirror += f"{m.text('primary_host')}/{m.text('primary_path')}".replace("//", "/")
+                mirror = m.text('primary_proto') + '://'
+                mirror += f"{m.text('primary_host')}/{m.text('primary_path')}".replace('//', '/')
 
-        elif self.prj.has("mirror/cdrom") and cdrompath:
-            mirror = f"file://{cdrompath}"
+        elif self.prj.has('mirror/cdrom') and cdrompath:
+            mirror = f'file://{cdrompath}'
 
         return replace_localmachine(mirror, initvm)
 
@@ -144,30 +144,30 @@ class ElbeXML:
     def create_apt_sources_list(self, build_sources=False, initvm=True, hostsysroot=False):
 
         if self.prj is None:
-            return "# No Project"
+            return '# No Project'
 
-        if not self.prj.has("mirror") and not self.prj.has("mirror/cdrom"):
-            return "# no mirrors configured"
+        if not self.prj.has('mirror') and not self.prj.has('mirror/cdrom'):
+            return '# no mirrors configured'
 
         goptions = []
         mirrors = []
-        suite = self.prj.text("suite")
+        suite = self.prj.text('suite')
 
-        if self.prj.has("mirror/primary_host"):
+        if self.prj.has('mirror/primary_host'):
 
             pmirror = self.get_primary_mirror(None, hostsysroot=hostsysroot)
 
-            if self.prj.has("mirror/options"):
+            if self.prj.has('mirror/options'):
                 poptions = [opt.et.text.strip(' \t\n')
                             for opt
-                            in self.prj.all("mirror/options/option")]
+                            in self.prj.all('mirror/options/option')]
             else:
                 poptions = []
 
             if hostsysroot:
-                arch = self.text("project/buildimage/sdkarch", key="sdkarch")
+                arch = self.text('project/buildimage/sdkarch', key='sdkarch')
             else:
-                arch = self.text("project/buildimage/arch", key="arch")
+                arch = self.text('project/buildimage/arch', key='arch')
 
             poptions = goptions + poptions
 
@@ -175,44 +175,44 @@ class ElbeXML:
                 mirrors.append(
                     f"deb-src [{' '.join(poptions)}] {pmirror} {suite} main")
 
-            poptions.append(f"arch={arch}")
+            poptions.append(f'arch={arch}')
 
             mirrors.append(f"deb [{' '.join(poptions)}] {pmirror} {suite} main")
 
-            if self.prj.has("mirror/url-list"):
+            if self.prj.has('mirror/url-list'):
 
-                for url in self.prj.node("mirror/url-list"):
+                for url in self.prj.node('mirror/url-list'):
 
-                    if url.has("options"):
+                    if url.has('options'):
                         options = [opt.et.text.strip(' \t\n')
                                    for opt
-                                   in url.all("options/option")]
+                                   in url.all('options/option')]
                     else:
                         options = []
 
                     options = goptions + options
 
-                    if url.has("binary"):
-                        bin_url = url.text("binary").strip()
+                    if url.has('binary'):
+                        bin_url = url.text('binary').strip()
                         mirrors.append(f"deb [{' '.join(options)}] {bin_url}")
 
-                    if url.has("source"):
-                        src_url = url.text("source").strip()
+                    if url.has('source'):
+                        src_url = url.text('source').strip()
                         mirrors.append(
                             f"deb-src [{' '.join(options)}] {src_url}")
 
-        if self.prj.has("mirror/cdrom"):
-            mirrors.append(f"deb copy:///cdrom/targetrepo {suite} main added")
+        if self.prj.has('mirror/cdrom'):
+            mirrors.append(f'deb copy:///cdrom/targetrepo {suite} main added')
 
         return replace_localmachine('\n'.join(mirrors), initvm)
 
     @staticmethod
     def validate_repo(r):
         try:
-            fp = urlopen(r["url"] + "InRelease", None, 30)
+            fp = urlopen(r['url'] + 'InRelease', None, 30)
         except URLError:
             try:
-                fp = urlopen(r["url"] + "Release", None, 30)
+                fp = urlopen(r['url'] + 'Release', None, 30)
             except URLError:
                 return False
             except socket.timeout:
@@ -221,15 +221,15 @@ class ElbeXML:
             return False
 
         ret = False
-        if "srcstr" in r:
+        if 'srcstr' in r:
             for line in fp:
-                needle = r["srcstr"].encode(encoding='utf-8')
+                needle = r['srcstr'].encode(encoding='utf-8')
                 if line.find(needle) != -1:
                     ret = True
                     break
-        elif "binstr" in r:
+        elif 'binstr' in r:
             for line in fp:
-                needle = r["binstr"].encode(encoding='utf-8')
+                needle = r['binstr'].encode(encoding='utf-8')
                 if line.find(needle) != -1:
                     ret = True
                     break
@@ -248,24 +248,24 @@ class ElbeXML:
         repos = []
         for line in sources_lines:
             line = re.sub(r'\[.*\] ', '', line)
-            if line.startswith("deb copy:"):
+            if line.startswith('deb copy:'):
                 # This is a cdrom, we dont verify it
                 pass
-            elif line.startswith("deb-src copy:"):
+            elif line.startswith('deb-src copy:'):
                 # This is a cdrom, we dont verify it
                 pass
-            elif line.startswith("deb ") or line.startswith("deb-src "):
+            elif line.startswith('deb ') or line.startswith('deb-src '):
                 # first check the validation mode, and
                 # only add the repo, when it matches
                 # the valudation mode
                 if url_validation == ValidationMode.NO_CHECK:
                     continue
 
-                if line.startswith("deb-src ") and \
+                if line.startswith('deb-src ') and \
                    url_validation != ValidationMode.CHECK_ALL:
                     continue
 
-                lsplit = line.split(" ")
+                lsplit = line.split(' ')
                 url = lsplit[1]
                 suite = lsplit[2]
                 r = {}
@@ -277,9 +277,9 @@ class ElbeXML:
                 # deb http://mirror foo/ --> URI-Prefix: http://mirror/foo
                 #
                 if suite.endswith('/'):
-                    r["url"] = f"{url}/{suite}"
+                    r['url'] = f'{url}/{suite}'
                 else:
-                    r["url"] = f"{url}/dists/{suite}/"
+                    r['url'] = f'{url}/dists/{suite}/'
 
                 #
                 # Try to get sections.
@@ -288,32 +288,32 @@ class ElbeXML:
                 #
                 try:
                     section = lsplit[3]
-                    if line.startswith("deb "):
-                        r["binstr"] = (f"{section}/binary-{arch}/Packages")
+                    if line.startswith('deb '):
+                        r['binstr'] = (f'{section}/binary-{arch}/Packages')
                     else:
-                        r["srcstr"] = (f"{section}/source/Sources")
+                        r['srcstr'] = (f'{section}/source/Sources')
                 except IndexError:
-                    if line.startswith("deb "):
-                        r["binstr"] = "Packages"
+                    if line.startswith('deb '):
+                        r['binstr'] = 'Packages'
                     else:
-                        r["srcstr"] = "Sources"
+                        r['srcstr'] = 'Sources'
 
                 repos.append(r)
 
         if not self.prj:
             return
 
-        if self.prj.has("mirror/primary_proxy"):
-            os.environ["no_proxy"] = "10.0.2.2,localhost,127.0.0.1"
+        if self.prj.has('mirror/primary_proxy'):
+            os.environ['no_proxy'] = '10.0.2.2,localhost,127.0.0.1'
             proxy = self.prj.text(
-                "mirror/primary_proxy").strip().replace("LOCALMACHINE",
-                                                        "10.0.2.2")
-            os.environ["http_proxy"] = proxy
-            os.environ["https_proxy"] = proxy
+                'mirror/primary_proxy').strip().replace('LOCALMACHINE',
+                                                        '10.0.2.2')
+            os.environ['http_proxy'] = proxy
+            os.environ['https_proxy'] = proxy
         else:
-            os.environ["http_proxy"] = ""
-            os.environ["https_proxy"] = ""
-            os.environ["no_proxy"] = ""
+            os.environ['http_proxy'] = ''
+            os.environ['https_proxy'] = ''
+            os.environ['no_proxy'] = ''
 
         passman = HTTPPasswordMgrWithDefaultRealm()
         authhandler = HTTPBasicAuthHandler(passman)
@@ -321,34 +321,34 @@ class ElbeXML:
         install_opener(opener)
 
         for r in repos:
-            if '@' in r["url"]:
-                t = r["url"].split('@')
+            if '@' in r['url']:
+                t = r['url'].split('@')
                 if '://' in t[0]:
                     scheme, auth = t[0].split('://')
                     scheme = scheme + '://'
                 else:
                     scheme = ''
                     auth = t[0]
-                r["url"] = scheme + t[1]
+                r['url'] = scheme + t[1]
                 usr, passwd = auth.split(':')
-                passman.add_password(None, r["url"], usr, passwd)
+                passman.add_password(None, r['url'], usr, passwd)
             if not self.validate_repo(r):
-                if "srcstr" in r:
+                if 'srcstr' in r:
                     raise ValidationError(
                         [f"Repository {r['url']}, {r['srcstr']} can not be validated"])
-                if "binstr" in r:
+                if 'binstr' in r:
                     raise ValidationError(
                         [f"Repository {r['url']}, {r['binstr']} can not be validated"])
                 raise ValidationError(
                     [f"Repository {r['url']} can not be validated"])
 
     def get_target_packages(self):
-        if not self.xml.has("/target/pkg-list"):
+        if not self.xml.has('/target/pkg-list'):
             return []
-        return [p.et.text for p in self.xml.node("/target/pkg-list")]
+        return [p.et.text for p in self.xml.node('/target/pkg-list')]
 
     def add_target_package(self, pkg):
-        plist = self.xml.ensure_child("/target/pkg-list")
+        plist = self.xml.ensure_child('/target/pkg-list')
 
         # only add package once
         for p in plist:
@@ -360,7 +360,7 @@ class ElbeXML:
         pak.et.tail = '\n'
 
     def set_target_packages(self, pkglist):
-        plist = self.xml.ensure_child("/target/pkg-list")
+        plist = self.xml.ensure_child('/target/pkg-list')
         plist.clear()
         for p in pkglist:
             pak = plist.append('pkg')
@@ -369,8 +369,8 @@ class ElbeXML:
 
     def get_buildenv_packages(self):
         retval = []
-        if self.prj.has("buildimage/pkg-list"):
-            retval = [p.et.text for p in self.prj.node("buildimage/pkg-list")]
+        if self.prj.has('buildimage/pkg-list'):
+            retval = [p.et.text for p in self.prj.node('buildimage/pkg-list')]
 
         return retval
 
@@ -446,14 +446,14 @@ class ElbeXML:
         self.xml.set_child_position(tree, 0)
 
     def get_initvm_codename(self):
-        if self.has("initvm/suite"):
-            return self.text("initvm/suite")
+        if self.has('initvm/suite'):
+            return self.text('initvm/suite')
         return None
 
     def set_cdrom_mirror(self, abspath):
-        mirror = self.node("project/mirror")
+        mirror = self.node('project/mirror')
         mirror.clear()
-        cdrom = mirror.ensure_child("cdrom")
+        cdrom = mirror.ensure_child('cdrom')
         cdrom.set_text(abspath)
 
     def dump_elbe_version(self):
@@ -468,4 +468,4 @@ class ElbeXML:
     def get_elbe_version(self):
         if self.has('elbe_version'):
             return self.text('elbe_version')
-        return "no version"
+        return 'no version'
diff --git a/elbepack/filesystem.py b/elbepack/filesystem.py
index 6def25a7..5746a394 100644
--- a/elbepack/filesystem.py
+++ b/elbepack/filesystem.py
@@ -18,31 +18,31 @@ def size_to_int(size):
     if size[-1] in digits:
         return int(size)
 
-    if size.endswith("M"):
+    if size.endswith('M'):
         unit = 1000 * 1000
         s = size[:-1]
-    elif size.endswith("MiB"):
+    elif size.endswith('MiB'):
         unit = 1024 * 1024
         s = size[:-3]
-    elif size.endswith("MB"):
+    elif size.endswith('MB'):
         unit = 1000 * 1000
         s = size[:-2]
-    if size.endswith("G"):
+    if size.endswith('G'):
         unit = 1000 * 1000 * 1000
         s = size[:-1]
-    elif size.endswith("GiB"):
+    elif size.endswith('GiB'):
         unit = 1024 * 1024 * 1024
         s = size[:-3]
-    elif size.endswith("GB"):
+    elif size.endswith('GB'):
         unit = 1000 * 1000 * 1000
         s = size[:-2]
-    if size.endswith("k"):
+    if size.endswith('k'):
         unit = 1000
         s = size[:-1]
-    elif size.endswith("kiB"):
+    elif size.endswith('kiB'):
         unit = 1024
         s = size[:-3]
-    elif size.endswith("kB"):
+    elif size.endswith('kB'):
         unit = 1000
         s = size[:-2]
 
@@ -73,7 +73,7 @@ class Filesystem:
             path = path[1:]
         return os.path.join(self.path, path)
 
-    def open(self, path, mode="r"):
+    def open(self, path, mode='r'):
         """
         >>> this.open("open") # doctest: +ELLIPSIS
         Traceback (most recent call last):
@@ -87,7 +87,7 @@ class Filesystem:
         """
         return open(self.fname(path), mode)
 
-    def open_gz(self, path, mode="r"):
+    def open_gz(self, path, mode='r'):
         """
         >>> this.open_gz("open_gz") # doctest: +ELLIPSIS
         Traceback (most recent call last):
@@ -369,18 +369,18 @@ class Filesystem:
 
     def write_file(self, path, mode, cont):
         path = self.realpath(path)
-        self._write_file(path, open(path, "w"), cont, mode)
+        self._write_file(path, open(path, 'w'), cont, mode)
 
     def append_file(self, path, cont, mode=None):
         path = self.realpath(path)
-        self._write_file(path, open(path, "a"), cont, mode)
+        self._write_file(path, open(path, 'a'), cont, mode)
 
     def read_file(self, path, gz=False):
         path = self.realpath(path)
         if gz:
-            fp = gzip.open(path, "r")
+            fp = gzip.open(path, 'r')
         else:
-            fp = open(path, "r")
+            fp = open(path, 'r')
 
         with fp:
             retval = fp.read()
@@ -415,7 +415,7 @@ class Filesystem:
             pass
         elif self.isfile(newdir):
             raise OSError(
-                "a file with the same name as the desired "
+                'a file with the same name as the desired '
                 f"dir, '{newdir}', already exists. in RFS {self.path}")
         else:
             self.mkdir(newdir)
@@ -425,7 +425,7 @@ class Filesystem:
         if self.exists(fname):
             self.utime(fname)
         else:
-            fp = self.open(fname, "w")
+            fp = self.open(fname, 'w')
             fp.close()
 
     def walk_files(self, directory='', exclude_dirs=None):
@@ -433,14 +433,14 @@ class Filesystem:
             exclude_dirs = []
 
         dirname = self.fname(directory)
-        if dirname == "/":
+        if dirname == '/':
             striplen = 0
         else:
             striplen = len(dirname)
         for dirpath, dirnames, filenames in os.walk(dirname):
             subpath = dirpath[striplen:]
             if not subpath:
-                subpath = "/"
+                subpath = '/'
 
             deldirs = []
             for d in dirnames:
@@ -454,7 +454,7 @@ class Filesystem:
             for f in filenames:
                 fpath = os.path.join(subpath, f)
                 realpath = os.path.join(dirpath, f)
-                yield "/" + fpath, realpath
+                yield '/' + fpath, realpath
 
     def mtime_snap(self, dirname='', exclude_dirs=None):
         if not exclude_dirs:
diff --git a/elbepack/finetuning.py b/elbepack/finetuning.py
index db598db1..a7ad4d6c 100644
--- a/elbepack/finetuning.py
+++ b/elbepack/finetuning.py
@@ -43,7 +43,7 @@ class FinetuningAction:
 
     def __new__(cls, node):
         if node.tag not in cls.actiondict:
-            raise FinetuningException(f"Invalid finetuning action {node.tag}")
+            raise FinetuningException(f'Invalid finetuning action {node.tag}')
         action = cls.actiondict[node.tag]
         return object.__new__(action)
 
@@ -65,7 +65,7 @@ class ImageFinetuningAction(FinetuningAction):
 
     def execute(self, _buildenv, _target):
         raise NotImplementedError(
-            f"<{self.tag}> may only be used in <image-finetuning>")
+            f'<{self.tag}> may only be used in <image-finetuning>')
 
     def execute_img(self, _buildenv, _target, _builddir, _loop_dev):
         raise NotImplementedError('execute_img() not implemented')
@@ -99,7 +99,7 @@ class MkdirAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, target):
-        do(f"mkdir -p {target.fname(self.node.et.text)}")
+        do(f'mkdir -p {target.fname(self.node.et.text)}')
 
 
 @FinetuningAction.register('mknod')
@@ -110,7 +110,7 @@ class MknodAction(FinetuningAction):
 
     def execute(self, _buildenv, target):
         do(
-            f"mknod {target.fname(self.node.et.text)} "
+            f'mknod {target.fname(self.node.et.text)} '
             f"{self.node.et.attrib['opts']}")
 
 
@@ -121,7 +121,7 @@ class BuildenvMkdirAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, buildenv, _target):
-        do(f"mkdir -p {buildenv.rfs.fname(self.node.et.text)}")
+        do(f'mkdir -p {buildenv.rfs.fname(self.node.et.text)}')
 
 
 @FinetuningAction.register('cp')
@@ -132,7 +132,7 @@ class CpAction(FinetuningAction):
 
     def execute(self, _buildenv, target):
         src = target.glob(self.node.et.attrib['path'])
-        cmd = f"cp -av %s {target.fname(self.node.et.text)}"
+        cmd = f'cp -av %s {target.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -145,7 +145,7 @@ class BuildenvCpAction(FinetuningAction):
 
     def execute(self, buildenv, _target):
         src = buildenv.glob(self.node.et.attrib['path'])
-        cmd = f"cp -av %s {buildenv.rfs.fname(self.node.et.text)}"
+        cmd = f'cp -av %s {buildenv.rfs.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -158,7 +158,7 @@ class B2TCpAction(FinetuningAction):
 
     def execute(self, buildenv, target):
         src = buildenv.rfs.glob(self.node.et.attrib['path'])
-        cmd = f"cp -av %s {target.fname(self.node.et.text)}"
+        cmd = f'cp -av %s {target.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -171,7 +171,7 @@ class T2BCpAction(FinetuningAction):
 
     def execute(self, buildenv, target):
         src = target.glob(self.node.et.attrib['path'])
-        cmd = f"cp -av %s {buildenv.rfs.fname(self.node.et.text)}"
+        cmd = f'cp -av %s {buildenv.rfs.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -190,7 +190,7 @@ class T2PMvAction(FinetuningAction):
         dest = os.path.join('..', dest)
 
         src = target.glob(self.node.et.attrib['path'])
-        cmd = f"mv -v %s {dest}"
+        cmd = f'mv -v %s {dest}'
         for f in src:
             do(cmd % f)
 
@@ -203,7 +203,7 @@ class MvAction(FinetuningAction):
 
     def execute(self, _buildenv, target):
         src = target.glob(self.node.et.attrib['path'])
-        cmd = f"mv -v %s {target.fname(self.node.et.text)}"
+        cmd = f'mv -v %s {target.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -231,7 +231,7 @@ class BuildenvMvAction(FinetuningAction):
 
     def execute(self, buildenv, _target):
         src = buildenv.rfs.glob(self.node.et.attrib['path'])
-        cmd = f"mv -v %s {buildenv.rfs.fname(self.node.et.text)}"
+        cmd = f'mv -v %s {buildenv.rfs.fname(self.node.et.text)}'
         for f in src:
             do(cmd % f)
 
@@ -245,7 +245,7 @@ class AddUserAction(FinetuningAction):
     def execute(self, _buildenv, target):
         with target:
             att = self.node.et.attrib
-            options = ""
+            options = ''
             if 'groups' in att:
                 options += f'-G "{att["groups"]}" '
             if 'shell' in att:
@@ -274,7 +274,7 @@ class AddUserAction(FinetuningAction):
             if 'passwd_hashed' in att:
                 chroot(
                     target.path,
-                    "chpasswd --encrypted",
+                    'chpasswd --encrypted',
                     stdin=f"{self.node.et.text}:{att['passwd_hashed']}")
 
 
@@ -288,7 +288,7 @@ class AddGroupAction(FinetuningAction):
         with target:
             att = self.node.et.attrib
             # we use -f always
-            options = "-f "
+            options = '-f '
             if 'gid' in att:
                 options += f'-g "{att["gid"]}" '
             if 'system' in att and att['system'] == 'True':
@@ -306,35 +306,35 @@ class AddFileAction(FinetuningAction):
 
     @staticmethod
     def decode(text, encoding):
-        if encoding == "plain":
-            msg = "\n".join([line.lstrip(" \t")
+        if encoding == 'plain':
+            msg = '\n'.join([line.lstrip(' \t')
                              for line in text.splitlines()[1:-1]])
-        elif encoding == "raw":
-            msg = "\n".join(text.splitlines()[1:-1])
-        elif encoding == "base64":
+        elif encoding == 'raw':
+            msg = '\n'.join(text.splitlines()[1:-1])
+        elif encoding == 'base64':
             msg = base64.standard_b64decode(text)
         else:
-            raise FinetuningException(f"Invalid encoding {encoding}")
+            raise FinetuningException(f'Invalid encoding {encoding}')
         return msg
 
     def execute(self, _buildenv, target):
 
         att = self.node.et.attrib
-        dst = att["dst"]
+        dst = att['dst']
         content = self.node.et.text
-        encoding = "plain"
+        encoding = 'plain'
         owner = None
         group = None
         mode = None
 
-        if "encoding" in att:
-            encoding = att["encoding"]
-        if "owner" in att:
-            owner = att["owner"]
-        if "group" in att:
-            group = att["group"]
-        if "mode" in att:
-            mode = att["mode"]
+        if 'encoding' in att:
+            encoding = att['encoding']
+        if 'owner' in att:
+            owner = att['owner']
+        if 'group' in att:
+            group = att['group']
+        if 'mode' in att:
+            mode = att['mode']
 
         try:
             target.mkdir_p(os.path.dirname(dst))
@@ -344,7 +344,7 @@ class AddFileAction(FinetuningAction):
 
         content = AddFileAction.decode(content, encoding)
 
-        if "append" in att and att["append"] == "true":
+        if 'append' in att and att['append'] == 'true':
             target.append_file(dst, content)
         else:
             target.write_file(dst, None, content)
@@ -378,24 +378,24 @@ class CmdAction(ImageFinetuningAction):
 
     def execute_img(self, _buildenv, _target, builddir, loop_dev):
 
-        script = '\n'.join(line.lstrip(" \t")
+        script = '\n'.join(line.lstrip(' \t')
                            for line
-                           in self.node.et.text.strip("\n").splitlines())
+                           in self.node.et.text.strip('\n').splitlines())
 
         mnt = os.path.join(builddir, 'imagemnt')
         dev = f"{loop_dev}p{self.node.et.attrib['part']}"
 
         if self.node.bool_attr('nomount'):
-            do("/bin/sh", stdin=script,
-               env_add={"ELBE_DEV": dev})
+            do('/bin/sh', stdin=script,
+               env_add={'ELBE_DEV': dev})
         else:
             with ImgMountFilesystem(mnt, dev) as fs:
-                do("/bin/sh", stdin=script,
-                   env_add={"ELBE_MNT": fs.path})
+                do('/bin/sh', stdin=script,
+                   env_add={'ELBE_MNT': fs.path})
 
     def execute(self, _buildenv, target):
         with target:
-            chroot(target.path, "/bin/sh", stdin=self.node.et.text)
+            chroot(target.path, '/bin/sh', stdin=self.node.et.text)
 
 
 @FinetuningAction.register('buildenv_command')
@@ -406,7 +406,7 @@ class BuildenvCmdAction(FinetuningAction):
 
     def execute(self, buildenv, _target):
         with buildenv:
-            chroot(buildenv.path, "/bin/sh", stdin=self.node.et.text)
+            chroot(buildenv.path, '/bin/sh', stdin=self.node.et.text)
 
 
 @FinetuningAction.register('purge')
@@ -417,7 +417,7 @@ class PurgeAction(FinetuningAction):
 
     def execute(self, _buildenv, target):
         with target:
-            chroot(target.path, f"dpkg --purge {self.node.et.text}")
+            chroot(target.path, f'dpkg --purge {self.node.et.text}')
 
 
 @FinetuningAction.register('updated')
@@ -431,7 +431,7 @@ class UpdatedAction(FinetuningAction):
         if self.node.et.text:
             fp = self.node.et.text
 
-            logging.info("transfert gpg key to target: %s", fp)
+            logging.info('transfert gpg key to target: %s', fp)
 
             gpgdata = core.Data()
             ctx = core.Context()
@@ -448,15 +448,15 @@ class UpdatedAction(FinetuningAction):
             with open((target.path + '/pub.key'), 'wb') as tkey:
                 tkey.write(key)
 
-            target.mkdir_p("/var/cache/elbe/gnupg", mode=0o700)
+            target.mkdir_p('/var/cache/elbe/gnupg', mode=0o700)
             with target:
                 do(
-                    f"gpg --import {target.path}/pub.key",
-                    env_add={'GNUPGHOME': f"{target.path}/var/cache/elbe/gnupg"})
+                    f'gpg --import {target.path}/pub.key',
+                    env_add={'GNUPGHOME': f'{target.path}/var/cache/elbe/gnupg'})
 
-        logging.info("generate base repo")
+        logging.info('generate base repo')
 
-        arch = target.xml.text("project/arch", key="arch")
+        arch = target.xml.text('project/arch', key='arch')
 
         buildenv.rfs.mkdir_p('/tmp/pkgs')
         with buildenv:
@@ -468,13 +468,13 @@ class UpdatedAction(FinetuningAction):
                     cache.download_binary(
                         pkg.name, '/tmp/pkgs', pkg.installed_version)
                 except ValueError:
-                    logging.exception("No package %s-%s",
+                    logging.exception('No package %s-%s',
                                       pkg.name, pkg.installed_version)
                 except FetchError:
-                    logging.exception("Package %s-%s could not be downloaded",
+                    logging.exception('Package %s-%s could not be downloaded',
                                       pkg.name, pkg.installed_version)
                 except TypeError:
-                    logging.exception("Package %s-%s missing name or version",
+                    logging.exception('Package %s-%s missing name or version',
                                       pkg.name, pkg.installed_version)
         r = UpdateRepo(target.xml,
                        target.path + '/var/cache/elbe/repos/base')
@@ -485,8 +485,8 @@ class UpdatedAction(FinetuningAction):
 
         slist = target.path + '/etc/apt/sources.list.d/base.list'
         slist_txt = 'deb [trusted=yes] file:///var/cache/elbe/repos/base '
-        slist_txt += target.xml.text("/project/suite")
-        slist_txt += " main"
+        slist_txt += target.xml.text('/project/suite')
+        slist_txt += ' main'
 
         with open(slist, 'w') as apt_source:
             apt_source.write(slist_txt)
@@ -504,14 +504,14 @@ class ArtifactAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, target):
-        if os.path.isfile("../target/" + self.node.et.text):
+        if os.path.isfile('../target/' + self.node.et.text):
             target.images.append('target' + self.node.et.text)
         else:
             logging.error("The specified artifact: '%s' doesn't exist",
                           self.node.et.text)
 
     def execute_prj(self, _buildenv, target, _builddir):
-        if os.path.isfile("../" + self.node.et.text):
+        if os.path.isfile('../' + self.node.et.text):
             target.images.append(self.node.et.text)
         else:
             logging.error("The specified artifact: '%s' doesn't exist",
@@ -525,8 +525,8 @@ class RmArtifactAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<rm_artifact> may only be "
-                                  "used in <project-finetuning>")
+        raise NotImplementedError('<rm_artifact> may only be '
+                                  'used in <project-finetuning>')
 
     def execute_prj(self, _buildenv, target, _builddir):
         try:
@@ -543,8 +543,8 @@ class LosetupAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<losetup> may only be "
-                                  "used in <project-finetuning>")
+        raise NotImplementedError('<losetup> may only be '
+                                  'used in <project-finetuning>')
 
     def execute_prj(self, buildenv, target, builddir):
         imgname = self.node.et.attrib['img']
@@ -567,8 +567,8 @@ class ImgConvertAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<img_convert> may only be "
-                                  "used in <project-finetuning>")
+        raise NotImplementedError('<img_convert> may only be '
+                                  'used in <project-finetuning>')
 
     def execute_prj(self, _buildenv, target, builddir):
         src = self.node.et.text
@@ -577,8 +577,8 @@ class ImgConvertAction(FinetuningAction):
 
         if src not in target.images:
             logging.error("Artifact '%s' does not exist.\n"
-                          "Valid Artifcact are: %s",
-                          src, ", ".join([str(i) for i in target.images]))
+                          'Valid Artifcact are: %s',
+                          src, ', '.join([str(i) for i in target.images]))
             raise FinetuningException(f"Artifact '{src}' does not exist")
 
         src_fname = os.path.join(builddir, src)
@@ -601,8 +601,8 @@ class SetPackerAction(FinetuningAction):
         FinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<set_packer> may only be "
-                                  "used in <project-finetuning>")
+        raise NotImplementedError('<set_packer> may only be '
+                                  'used in <project-finetuning>')
 
     def execute_prj(self, _buildenv, target, _builddir):
         img = self.node.et.text
@@ -618,8 +618,8 @@ class ExtractPartitionAction(ImageFinetuningAction):
         ImageFinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<extract_partition> may only be "
-                                  "used in <losetup>")
+        raise NotImplementedError('<extract_partition> may only be '
+                                  'used in <losetup>')
 
     def execute_img(self, _buildenv, target, builddir, loop_dev):
         part_nr = self.node.et.attrib['part']
@@ -638,15 +638,15 @@ class CopyFromPartition(ImageFinetuningAction):
         ImageFinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<copy_from_partition> may only be "
-                                  "used in <losetup>")
+        raise NotImplementedError('<copy_from_partition> may only be '
+                                  'used in <losetup>')
 
     def execute_img(self, _buildenv, target, builddir, loop_dev):
         part_nr = self.node.et.attrib['part']
         aname = self.node.et.attrib['artifact']
 
         img_mnt = os.path.join(builddir, 'imagemnt')
-        device = f"{loop_dev}p{part_nr}"
+        device = f'{loop_dev}p{part_nr}'
 
         with ImgMountFilesystem(img_mnt, device) as mnt_fs:
             fname = mnt_fs.glob(self.node.et.text)
@@ -673,15 +673,15 @@ class CopyToPartition(ImageFinetuningAction):
         ImageFinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<copy_to_partition> may only be "
-                                  "used in <losetup>")
+        raise NotImplementedError('<copy_to_partition> may only be '
+                                  'used in <losetup>')
 
     def execute_img(self, _buildenv, _target, builddir, loop_dev):
         part_nr = self.node.et.attrib['part']
         aname = self.node.et.attrib['artifact']
 
         img_mnt = os.path.join(builddir, 'imagemnt')
-        device = f"{loop_dev}p{part_nr}"
+        device = f'{loop_dev}p{part_nr}'
 
         with ImgMountFilesystem(img_mnt, device) as mnt_fs:
             fname = mnt_fs.fname(self.node.et.text)
@@ -695,8 +695,8 @@ class SetPartitionTypeAction(ImageFinetuningAction):
         ImageFinetuningAction.__init__(self, node)
 
     def execute(self, _buildenv, _target):
-        raise NotImplementedError("<set_partition_type> may only be "
-                                  "used in <losetup>")
+        raise NotImplementedError('<set_partition_type> may only be '
+                                  'used in <losetup>')
 
     def execute_img(self, _buildenv, _target, _builddir, loop_dev):
         part_nr = self.node.et.attrib['part']
@@ -708,14 +708,14 @@ class SetPartitionTypeAction(ImageFinetuningAction):
         do(cmd, stdin=inp)
 
 
-@FinetuningAction.register("unit-tests")
+@FinetuningAction.register('unit-tests')
 class TestSuites(FinetuningAction):
 
-    elbe_junit = "elbe-junit.xml"
+    elbe_junit = 'elbe-junit.xml'
 
     def execute(self, _buildenv, _target):
         raise NotImplementedError(
-            f"<{self.tag}> can only be used in the context of a project")
+            f'<{self.tag}> can only be used in the context of a project')
 
     def execute_prj(self, buildenv, target, builddir):
 
@@ -733,21 +733,21 @@ class TestSuites(FinetuningAction):
         TestSuite.to_file(output, tss)
 
 
-@FinetuningAction.register("rm_apt_source")
+@FinetuningAction.register('rm_apt_source')
 class RmAptSource(FinetuningAction):
 
     def execute(self, buildenv, _target):
 
-        src_path = f"{buildenv.path}/../target/etc/apt/sources.list"
+        src_path = f'{buildenv.path}/../target/etc/apt/sources.list'
 
-        with open(src_path, "r") as f:
-            src_lst = f.read().split("\n")
+        with open(src_path, 'r') as f:
+            src_lst = f.read().split('\n')
 
-        rm_src = self.node.et.text.replace("LOCALMACHINE", "10.0.2.2")
+        rm_src = self.node.et.text.replace('LOCALMACHINE', '10.0.2.2')
         src_lst = [src for src in src_lst if rm_src not in src]
 
-        with open(src_path, "w") as f:
-            f.write("\n".join(src_lst))
+        with open(src_path, 'w') as f:
+            f.write('\n'.join(src_lst))
 
 
 def do_finetuning(xml, buildenv, target):
diff --git a/elbepack/fstab.py b/elbepack/fstab.py
index 67441525..c9452845 100644
--- a/elbepack/fstab.py
+++ b/elbepack/fstab.py
@@ -10,32 +10,32 @@ from elbepack.shellhelper import do, get_command_out, CommandError
 
 
 def get_mtdnum(xml, label):
-    tgt = xml.node("target")
-    if not tgt.has("images"):
-        raise Exception("No images tag in target")
+    tgt = xml.node('target')
+    if not tgt.has('images'):
+        raise Exception('No images tag in target')
 
-    for i in tgt.node("images"):
-        if i.tag != "mtd":
+    for i in tgt.node('images'):
+        if i.tag != 'mtd':
             continue
 
-        if not i.has("ubivg"):
+        if not i.has('ubivg'):
             continue
 
-        for v in i.node("ubivg"):
-            if v.tag != "ubi":
+        for v in i.node('ubivg'):
+            if v.tag != 'ubi':
                 continue
 
-            if v.text("label") == label:
-                return i.text("nr")
+            if v.text('label') == label:
+                return i.text('nr')
 
-    raise Exception("No ubi volume with label " + label + " found")
+    raise Exception('No ubi volume with label ' + label + ' found')
 
 
 def get_devicelabel(xml, node):
-    if node.text("fs/type") == "ubifs":
+    if node.text('fs/type') == 'ubifs':
         return f"ubi{get_mtdnum(xml, node.text('label'))}:{node.text('label')}"
 
-    return "LABEL=" + node.text("label")
+    return 'LABEL=' + node.text('label')
 
 
 class mountpoint_dict (dict):
@@ -85,7 +85,7 @@ class hdpart:
         self.size = ppart.getLength() * sector_size
         self.filename = disk.device.path
         self.partnum = ppart.number
-        self.number = f"{disk.type}{ppart.number}"
+        self.number = f'{disk.type}{ppart.number}'
 
     def losetup(self):
 
@@ -112,34 +112,34 @@ class fstabentry(hdpart):
     def __init__(self, xml, entry, fsid=0):
         super().__init__()
 
-        if entry.has("source"):
-            self.source = entry.text("source")
+        if entry.has('source'):
+            self.source = entry.text('source')
         else:
             self.source = get_devicelabel(xml, entry)
 
-        if entry.has("label"):
-            self.label = entry.text("label")
+        if entry.has('label'):
+            self.label = entry.text('label')
 
-        self.mountpoint = entry.text("mountpoint")
-        self.options = entry.text("options", default="defaults")
-        if entry.has("fs"):
-            self.fstype = entry.text("fs/type")
-            self.mkfsopt = entry.text("fs/mkfs", default="")
-            self.passno = entry.text("fs/passno", default="0")
+        self.mountpoint = entry.text('mountpoint')
+        self.options = entry.text('options', default='defaults')
+        if entry.has('fs'):
+            self.fstype = entry.text('fs/type')
+            self.mkfsopt = entry.text('fs/mkfs', default='')
+            self.passno = entry.text('fs/passno', default='0')
 
             self.fs_device_commands = []
             self.fs_path_commands = []
-            for command in entry.node("fs/fs-finetuning") or []:
-                if command.tag == "device-command":
-                    self.fs_device_commands.append(command.text("."))
-                elif command.tag == "path-command":
-                    self.fs_path_commands.append(command.text("."))
+            for command in entry.node('fs/fs-finetuning') or []:
+                if command.tag == 'device-command':
+                    self.fs_device_commands.append(command.text('.'))
+                elif command.tag == 'path-command':
+                    self.fs_path_commands.append(command.text('.'))
 
         self.id = str(fsid)
 
     def get_str(self):
-        return (f"{self.source} {self.mountpoint} {self.fstype} {self.options} "
-                f"0 {self.passno}\n")
+        return (f'{self.source} {self.mountpoint} {self.fstype} {self.options} '
+                f'0 {self.passno}\n')
 
     def mountdepth(self):
         h = self.mountpoint
@@ -152,8 +152,8 @@ class fstabentry(hdpart):
             depth += 1
 
     def get_label_opt(self):
-        if self.fstype in ("ext4", "ext3", "ext2", "btrfs"):
-            return "-L " + self.label
-        if self.fstype == "vfat":
-            return "-n " + self.label
-        return ""
+        if self.fstype in ('ext4', 'ext3', 'ext2', 'btrfs'):
+            return '-L ' + self.label
+        if self.fstype == 'vfat':
+            return '-n ' + self.label
+        return ''
diff --git a/elbepack/hashes.py b/elbepack/hashes.py
index 690f2320..5e61b579 100644
--- a/elbepack/hashes.py
+++ b/elbepack/hashes.py
@@ -12,7 +12,7 @@ class HashValidationFailed(Exception):
 
 def validate_sha256(fname, expected_hash):
     m = hashlib.sha256()
-    with open(fname, "rb") as f:
+    with open(fname, 'rb') as f:
         buf = f.read(65536)
         while buf:
             m.update(buf)
@@ -45,6 +45,6 @@ class HashValidator:
         try:
             system(f'wget -O "{local_fname}" "{url}"')
         except CommandError:
-            raise HashValidationFailed(f"Failed to download {url}")
+            raise HashValidationFailed(f'Failed to download {url}')
 
         self.validate_file(upstream_fname, local_fname)
diff --git a/elbepack/hdimg.py b/elbepack/hdimg.py
index 3b457c6c..19027b71 100644
--- a/elbepack/hdimg.py
+++ b/elbepack/hdimg.py
@@ -20,34 +20,34 @@ def mkfs_mtd(mtd, fslabel, target):
     # generated files
     img_files = []
 
-    if not mtd.has("ubivg"):
+    if not mtd.has('ubivg'):
         return img_files
 
-    ubivg = mtd.node("ubivg")
+    ubivg = mtd.node('ubivg')
     for v in ubivg:
-        if not v.tag == "ubi":
+        if not v.tag == 'ubi':
             continue
 
-        if v.has("empty"):
+        if v.has('empty'):
             continue
 
-        if v.has("binary"):
+        if v.has('binary'):
             continue
 
-        label = v.text("label")
+        label = v.text('label')
         if label not in fslabel:
             continue
 
         try:
-            do(f"mkfs.ubifs "
+            do(f'mkfs.ubifs '
                f"-r {os.path.join(target, 'filesystems', fslabel[label].id)} "
-               f"-o {os.path.join(target, label)}.ubifs "
+               f'-o {os.path.join(target, label)}.ubifs '
                f"-m {ubivg.text('miniosize')} "
                f"-e {ubivg.text('logicaleraseblocksize')} "
                f"-c {ubivg.text('maxlogicaleraseblockcount')} "
-               f"{fslabel[label].mkfsopt}")
+               f'{fslabel[label].mkfsopt}')
             # only append the ubifs file if creation didn't fail
-            img_files.append(f"{label}.ubifs")
+            img_files.append(f'{label}.ubifs')
         except CommandError:
             # continue creating further ubifs filesystems
             pass
@@ -59,65 +59,65 @@ def build_image_mtd(mtd, target):
 
     img_files = []
 
-    if not mtd.has("ubivg"):
+    if not mtd.has('ubivg'):
         return img_files
 
-    ubivg = mtd.node("ubivg")
+    ubivg = mtd.node('ubivg')
 
     cfgfilename = f"{mtd.text('name')}_{mtd.node('ubivg').text('label')}.cfg"
-    fp = open(os.path.join(target, cfgfilename), "w")
+    fp = open(os.path.join(target, cfgfilename), 'w')
 
-    for vol in mtd.node("ubivg"):
-        if vol.has("label"):
+    for vol in mtd.node('ubivg'):
+        if vol.has('label'):
             fp.write(f"[{vol.text('label')}]\n")
-            fp.write("mode=ubi\n")
-            if not vol.has("empty"):
-                if vol.has("binary"):
-                    tmp = ""
+            fp.write('mode=ubi\n')
+            if not vol.has('empty'):
+                if vol.has('binary'):
+                    tmp = ''
                     # copy from buildenv if path starts with /
-                    if vol.text("binary")[0] == '/':
-                        tmp = target + "/" + "chroot" + vol.text("binary")
+                    if vol.text('binary')[0] == '/':
+                        tmp = target + '/' + 'chroot' + vol.text('binary')
                     # copy from project directory
                     else:
-                        tmp = target + "/" + vol.text("binary")
+                        tmp = target + '/' + vol.text('binary')
                     do(f"cp {tmp} {target}/{vol.text('label')}.ubibin")
-                    img_files.append(vol.text("label") + ".ubibin")
+                    img_files.append(vol.text('label') + '.ubibin')
                     fp.write(
                         f"image={os.path.join(target, vol.text('label'))}.ubibin\n")
                 else:
                     fp.write(
                         f"image={os.path.join(target, vol.text('label'))}.ubifs\n")
             else:
-                empt = open("/tmp/empty", "w")
-                empt.write("EMPTY")
+                empt = open('/tmp/empty', 'w')
+                empt.write('EMPTY')
                 empt.close()
-                fp.write("image=/tmp/empty\n")
+                fp.write('image=/tmp/empty\n')
 
             fp.write(f"vol_type={vol.text('type')}\n")
             fp.write(f"vol_id={vol.text('id')}\n")
             fp.write(f"vol_name={vol.text('label')}\n")
 
-            if vol.text("size") != "remain":
+            if vol.text('size') != 'remain':
                 fp.write(f"vol_size={size_to_int(vol.text('size'))}\n")
             else:
-                fp.write("vol_flags=autoresize\n")
+                fp.write('vol_flags=autoresize\n')
 
     fp.close()
 
-    if ubivg.has("subpagesize"):
-        subp = "-s " + ubivg.text("subpagesize")
+    if ubivg.has('subpagesize'):
+        subp = '-s ' + ubivg.text('subpagesize')
     else:
-        subp = ""
+        subp = ''
 
     try:
         do(
-            f"ubinize {subp} "
+            f'ubinize {subp} '
             f"-o {os.path.join(target, mtd.text('name'))} "
             f"-p {ubivg.text('physicaleraseblocksize')} "
             f"-m {ubivg.text('miniosize')} "
             f"{target}/{mtd.text('name')}_{ubivg.text('label')}.cfg")
         # only add file to list if ubinize command was successful
-        img_files.append(mtd.text("name"))
+        img_files.append(mtd.text('name'))
 
     except CommandError:
         # continue with generating further images
@@ -151,12 +151,12 @@ class grubinstaller202(grubinstaller_base):
         if '/' not in self.fs:
             return
 
-        imagemnt = os.path.join(target, "imagemnt")
+        imagemnt = os.path.join(target, 'imagemnt')
         imagemntfs = Filesystem(imagemnt)
         try:
             loopdev = self.losetup(self.fs['/'].filename)
-            loopnum = loopdev.replace("/dev/loop", "")
-            poopdev = "/dev/poop" + loopnum
+            loopnum = loopdev.replace('/dev/loop', '')
+            poopdev = '/dev/poop' + loopnum
 
             do(f'cp -a {loopdev} {poopdev}')
             do(f'kpartx -as {poopdev}')
@@ -173,37 +173,37 @@ class grubinstaller202(grubinstaller_base):
 
             do(f'mkdir -p "{imagemntfs.fname("boot/grub")}"')
 
-            devmap = open(imagemntfs.fname("boot/grub/device.map"), "w")
-            devmap.write(f"(hd0) {poopdev}\n")
+            devmap = open(imagemntfs.fname('boot/grub/device.map'), 'w')
+            devmap.write(f'(hd0) {poopdev}\n')
             devmap.close()
 
-            chroot(imagemnt, "update-grub2")
+            chroot(imagemnt, 'update-grub2')
 
-            if "efi" in self.fw_type:
-                grub_tgt = next(t for t in self.fw_type if t.endswith("-efi"))
+            if 'efi' in self.fw_type:
+                grub_tgt = next(t for t in self.fw_type if t.endswith('-efi'))
                 do(
-                    f"chroot {imagemnt} "
-                    f"grub-install {user_args} --target={grub_tgt} --removable "
-                    f"--no-floppy {poopdev}")
-            if "shimfix" in self.fw_type:
+                    f'chroot {imagemnt} '
+                    f'grub-install {user_args} --target={grub_tgt} --removable '
+                    f'--no-floppy {poopdev}')
+            if 'shimfix' in self.fw_type:
                 # grub-install is heavily dependent on the running system having
                 # a BIOS or EFI.  The initvm is BIOS-based, so fix the resulting
                 # shim installation.
                 do(f"chroot {imagemnt}  /bin/bash -c '"
-                   "cp -r /boot/efi/EFI/BOOT /boot/efi/EFI/debian && "
-                   "cd /usr/lib/shim && f=( shim*.efi.signed ) && cp "
+                   'cp -r /boot/efi/EFI/BOOT /boot/efi/EFI/debian && '
+                   'cd /usr/lib/shim && f=( shim*.efi.signed ) && cp '
                    "${f[0]} /boot/efi/EFI/debian/${f[0]%%.signed}'")
-            if not self.fw_type or "bios" in self.fw_type:
+            if not self.fw_type or 'bios' in self.fw_type:
                 do(
-                    f"chroot {imagemnt} "
-                    f"grub-install {user_args} --target=i386-pc "
-                    f"--no-floppy {poopdev}")
+                    f'chroot {imagemnt} '
+                    f'grub-install {user_args} --target=i386-pc '
+                    f'--no-floppy {poopdev}')
 
         except CommandError as E:
-            logging.error("Fail installing grub device: %s", E)
+            logging.error('Fail installing grub device: %s', E)
 
         finally:
-            os.unlink(imagemntfs.fname("boot/grub/device.map"))
+            os.unlink(imagemntfs.fname('boot/grub/device.map'))
             do(f"umount {imagemntfs.fname('dev')}", allow_fail=True)
             do(f"umount {imagemntfs.fname('proc')}", allow_fail=True)
             do(f"umount {imagemntfs.fname('sys')}", allow_fail=True)
@@ -213,8 +213,8 @@ class grubinstaller202(grubinstaller_base):
                     f'umount /dev/mapper/poop{loopnum}p{entry.partnum}',
                     allow_fail=True)
 
-            do(f"kpartx -d {poopdev}", allow_fail=True)
-            do(f"losetup -d {poopdev}", allow_fail=True)
+            do(f'kpartx -d {poopdev}', allow_fail=True)
+            do(f'losetup -d {poopdev}', allow_fail=True)
 
 
 class grubinstaller97(grubinstaller_base):
@@ -223,12 +223,12 @@ class grubinstaller97(grubinstaller_base):
         if '/' not in self.fs:
             return
 
-        imagemnt = os.path.join(target, "imagemnt")
+        imagemnt = os.path.join(target, 'imagemnt')
         imagemntfs = Filesystem(imagemnt)
         try:
             loopdev = self.losetup(self.fs['/'].filename)
-            loopnum = loopdev.replace("/dev/loop", "")
-            poopdev = "/dev/poop" + loopnum
+            loopnum = loopdev.replace('/dev/loop', '')
+            poopdev = '/dev/poop' + loopnum
 
             do(f'cp -a {loopdev} {poopdev}')
             do(f'kpartx -as {poopdev}')
@@ -236,7 +236,7 @@ class grubinstaller97(grubinstaller_base):
             bootentry = 0
 
             for entry in self.fs.depthlist():
-                if entry.mountpoint.startswith("/boot"):
+                if entry.mountpoint.startswith('/boot'):
                     bootentry_label = entry.label
                     bootentry = int(entry.partnum)
                 do(
@@ -254,8 +254,8 @@ class grubinstaller97(grubinstaller_base):
 
             do(f'mkdir -p "{imagemntfs.fname("boot/grub")}"')
 
-            devmap = open(imagemntfs.fname("boot/grub/device.map"), "w")
-            devmap.write(f"(hd0) {poopdev}\n")
+            devmap = open(imagemntfs.fname('boot/grub/device.map'), 'w')
+            devmap.write(f'(hd0) {poopdev}\n')
             devmap.close()
 
             # Replace groot and kopt because else they will be given
@@ -267,17 +267,17 @@ class grubinstaller97(grubinstaller_base):
             do(rf'chroot {imagemnt} sed -in "s/^# groot=.*$/# groot=\(hd0,{bootentry - 1}\)/" /boot/grub/menu.lst')  # noqa: E501
             do(rf'chroot {imagemnt} sed -in "s/^# kopt=.*$/# kopt=root=LABEL={bootentry_label}/" /boot/grub/menu.lst')  # noqa: E501
 
-            chroot(imagemnt, "update-grub")
+            chroot(imagemnt, 'update-grub')
 
             do(
-                f"chroot {imagemnt} "
-                f"grub-install {user_args} --no-floppy {poopdev}")
+                f'chroot {imagemnt} '
+                f'grub-install {user_args} --no-floppy {poopdev}')
 
         except CommandError as E:
-            logging.error("Fail installing grub device: %s", E)
+            logging.error('Fail installing grub device: %s', E)
 
         finally:
-            os.unlink(imagemntfs.fname("boot/grub/device.map"))
+            os.unlink(imagemntfs.fname('boot/grub/device.map'))
             do(f"umount {imagemntfs.fname('dev')}", allow_fail=True)
             do(f"umount {imagemntfs.fname('proc')}", allow_fail=True)
             do(f"umount {imagemntfs.fname('sys')}", allow_fail=True)
@@ -287,8 +287,8 @@ class grubinstaller97(grubinstaller_base):
                     f'umount /dev/mapper/poop{loopnum}p{entry.partnum}',
                     allow_fail=True)
 
-            do(f"kpartx -d {poopdev}", allow_fail=True)
-            do(f"losetup -d {poopdev}", allow_fail=True)
+            do(f'kpartx -d {poopdev}', allow_fail=True)
+            do(f'losetup -d {poopdev}', allow_fail=True)
 
 
 class simple_fstype:
@@ -305,35 +305,35 @@ def create_partition(
         current_sector):
 
     sector_size = 512
-    if part.text("size") == "remain" and disk.type == "gpt":
+    if part.text('size') == 'remain' and disk.type == 'gpt':
         sz = size_in_sectors - 35 - current_sector
-    elif part.text("size") == "remain":
+    elif part.text('size') == 'remain':
         sz = size_in_sectors - current_sector
     else:
-        sz = size_to_int(part.text("size")) // sector_size
+        sz = size_to_int(part.text('size')) // sector_size
 
     g = parted.Geometry(device=disk.device, start=current_sector, length=sz)
     if ptype != parted.PARTITION_EXTENDED and \
-       part.text("label") in fslabel and \
-       fslabel[part.text("label")].fstype == "vfat":
+       part.text('label') in fslabel and \
+       fslabel[part.text('label')].fstype == 'vfat':
 
-        fs = simple_fstype("fat32")
+        fs = simple_fstype('fat32')
         ppart = parted.Partition(disk, ptype, fs, geometry=g)
-        if disk.type != "gpt":
+        if disk.type != 'gpt':
             ppart.setFlag(_ped.PARTITION_LBA)
     else:
         ppart = parted.Partition(disk, ptype, geometry=g)
 
-    if disk.type == "gpt" and part.has("name"):
-        ppart.set_name(part.text("name"))
+    if disk.type == 'gpt' and part.has('name'):
+        ppart.set_name(part.text('name'))
 
     cons = parted.Constraint(exactGeom=g)
     disk.addPartition(ppart, cons)
 
-    if part.has("bootable"):
+    if part.has('bootable'):
         ppart.setFlag(_ped.PARTITION_BOOT)
 
-    if part.has("biosgrub"):
+    if part.has('biosgrub'):
         ppart.setFlag(_ped.PARTITION_BIOS_GRUB)
 
     return ppart
@@ -341,7 +341,7 @@ def create_partition(
 
 def create_label(disk, part, ppart, fslabel, target, grub):
 
-    entry = fslabel[part.text("label")]
+    entry = fslabel[part.text('label')]
     entry.set_geometry(ppart, disk)
 
     grub.add_fs_entry(entry)
@@ -350,13 +350,13 @@ def create_label(disk, part, ppart, fslabel, target, grub):
 
     try:
         do(
-            f"mkfs.{entry.fstype} {entry.mkfsopt} {entry.get_label_opt()} "
-            f"{loopdev}")
+            f'mkfs.{entry.fstype} {entry.mkfsopt} {entry.get_label_opt()} '
+            f'{loopdev}')
 
         _execute_fs_commands(entry.fs_device_commands, dict(device=loopdev))
 
-        mount_path = Path(target, "imagemnt")
-        do(f"mount {loopdev} {mount_path}")
+        mount_path = Path(target, 'imagemnt')
+        do(f'mount {loopdev} {mount_path}')
 
         _execute_fs_commands(entry.fs_path_commands, dict(path=mount_path))
 
@@ -366,9 +366,9 @@ def create_label(disk, part, ppart, fslabel, target, grub):
                 f'"{mount_path}/"',
                 allow_fail=True)
         finally:
-            do(f"umount {loopdev}")
+            do(f'umount {loopdev}')
     finally:
-        do(f"losetup -d {loopdev}")
+        do(f'losetup -d {loopdev}')
 
     return ppart
 
@@ -392,11 +392,11 @@ def create_binary(disk, part, ppart, target):
 
     try:
         # copy from buildenv if path starts with /
-        if part.text("binary")[0] == '/':
-            tmp = target + "/" + "chroot" + part.text("binary")
+        if part.text('binary')[0] == '/':
+            tmp = target + '/' + 'chroot' + part.text('binary')
         # copy from project directory
         else:
-            tmp = target + "/" + part.text("binary")
+            tmp = target + '/' + part.text('binary')
 
         do(f'dd if="{tmp}" of="{loopdev}"')
     finally:
@@ -414,7 +414,7 @@ def create_logical_partitions(disk,
     size_in_sectors = current_sector + epart.geometry.length
 
     for logical in extended:
-        if logical.tag != "logical":
+        if logical.tag != 'logical':
             continue
 
         current_sector += 2048
@@ -427,7 +427,7 @@ def create_logical_partitions(disk,
             current_sector)
         if logical.has('binary'):
             create_binary(disk, logical, lpart, target)
-        elif logical.has("label") and logical.text("label") in fslabel:
+        elif logical.has('label') and logical.text('label') in fslabel:
             create_label(disk, logical, lpart, fslabel, target, grub)
 
         current_sector += lpart.getLength()
@@ -436,20 +436,20 @@ def create_logical_partitions(disk,
 def do_image_hd(hd, fslabel, target, grub_version, grub_fw_type=None):
 
     sector_size = 512
-    s = size_to_int(hd.text("size"))
+    s = size_to_int(hd.text('size'))
     size_in_sectors = s // sector_size
 
-    imagename = os.path.join(target, hd.text("name"))
+    imagename = os.path.join(target, hd.text('name'))
     do(f'rm -f "{imagename}"', allow_fail=True)
-    f = open(imagename, "wb")
+    f = open(imagename, 'wb')
     f.truncate(size_in_sectors * sector_size)
     f.close()
 
     imag = parted.Device(imagename)
-    if hd.tag == "gpthd":
-        disk = parted.freshDisk(imag, "gpt")
+    if hd.tag == 'gpthd':
+        disk = parted.freshDisk(imag, 'gpt')
     else:
-        disk = parted.freshDisk(imag, "msdos")
+        disk = parted.freshDisk(imag, 'msdos')
 
     if grub_version == 202:
         grub = grubinstaller202(grub_fw_type)
@@ -458,12 +458,12 @@ def do_image_hd(hd, fslabel, target, grub_version, grub_fw_type=None):
     else:
         grub = grubinstaller_base()
 
-    current_sector = size_to_int(hd.text("first_partition_sector",
-                                         default="2048"))
+    current_sector = size_to_int(hd.text('first_partition_sector',
+                                         default='2048'))
 
     for part in hd:
 
-        if part.tag == "partition":
+        if part.tag == 'partition':
             ppart = create_partition(
                 disk,
                 part,
@@ -471,11 +471,11 @@ def do_image_hd(hd, fslabel, target, grub_version, grub_fw_type=None):
                 fslabel,
                 size_in_sectors,
                 current_sector)
-            if part.has("binary"):
+            if part.has('binary'):
                 create_binary(disk, part, ppart, target)
-            elif part.text("label") in fslabel:
+            elif part.text('label') in fslabel:
                 create_label(disk, part, ppart, fslabel, target, grub)
-        elif part.tag == "extended":
+        elif part.tag == 'extended':
             ppart = create_partition(
                 disk,
                 part,
@@ -492,27 +492,27 @@ def do_image_hd(hd, fslabel, target, grub_version, grub_fw_type=None):
 
     disk.commit()
 
-    if hd.has("grub-install") and grub_version:
-        grub.install(target, hd.text("grub-install"))
+    if hd.has('grub-install') and grub_version:
+        grub.install(target, hd.text('grub-install'))
 
-    return hd.text("name")
+    return hd.text('name')
 
 
 def add_binary_blob(hd, target):
 
-    imagename = os.path.join(target, hd.text("name"))
+    imagename = os.path.join(target, hd.text('name'))
 
     for binary in hd:
-        if binary.tag != "binary":
+        if binary.tag != 'binary':
             continue
 
         try:
-            offset = binary.et.attrib["offset"]
+            offset = binary.et.attrib['offset']
         except KeyError:
             offset = 0
 
         try:
-            bs = binary.et.attrib["blocksize"]
+            bs = binary.et.attrib['blocksize']
         except KeyError:
             bs = 1
 
@@ -535,15 +535,15 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
     img_files = []
 
     # Check whether we have any images first
-    if not xml.tgt.has("images"):
+    if not xml.tgt.has('images'):
         return img_files
 
     # Build a dictonary of mount points
     fslabel = {}
     mountpoints = mountpoint_dict()
 
-    for fs in xml.tgt.node("fstab"):
-        if fs.tag != "bylabel":
+    for fs in xml.tgt.node('fstab'):
+        if fs.tag != 'bylabel':
             continue
 
         # Create fstabentry Object
@@ -553,17 +553,17 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
         # this also sets the id field
         mountpoints.register(e)
 
-        fslabel[fs.text("label")] = e
+        fslabel[fs.text('label')] = e
 
     # Get the sorted list of mountpoints
     fslist = mountpoints.depthlist()
 
     # create directories, where we want our
     # filesystems later
-    fspath = os.path.join(target, "filesystems")
+    fspath = os.path.join(target, 'filesystems')
     do(f'mkdir -p {fspath}')
 
-    imagemnt = os.path.join(target, "imagemnt")
+    imagemnt = os.path.join(target, 'imagemnt')
     do(f'mkdir -p {imagemnt}')
 
     # now move all mountpoints into own directories
@@ -579,8 +579,8 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
 
     try:
         # Now iterate over all images and create filesystems and partitions
-        for i in xml.tgt.node("images"):
-            if i.tag == "msdoshd":
+        for i in xml.tgt.node('images'):
+            if i.tag == 'msdoshd':
                 img = do_image_hd(i,
                                   fslabel,
                                   target,
@@ -588,7 +588,7 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
                                   grub_fw_type)
                 img_files.append(img)
 
-            if i.tag == "gpthd":
+            if i.tag == 'gpthd':
                 img = do_image_hd(i,
                                   fslabel,
                                   target,
@@ -596,7 +596,7 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
                                   grub_fw_type)
                 img_files.append(img)
 
-            if i.tag == "mtd":
+            if i.tag == 'mtd':
                 imgs = mkfs_mtd(i, fslabel, target)
                 img_files.extend(imgs)
     finally:
@@ -610,14 +610,14 @@ def do_hdimg(xml, target, rfs, grub_version, grub_fw_type=None):
                    allow_fail=True)
 
     # Files are now moved back. ubinize needs files in place, so we run it now.
-    for i in xml.tgt.node("images"):
-        if i.tag == "mtd":
+    for i in xml.tgt.node('images'):
+        if i.tag == 'mtd':
             imgs = build_image_mtd(i, target)
             img_files.extend(imgs)
 
     # dd binary blobs onto images
-    for i in xml.tgt.node("images"):
-        if (i.tag == "msdoshd") or (i.tag == "gpthd"):
+    for i in xml.tgt.node('images'):
+        if (i.tag == 'msdoshd') or (i.tag == 'gpthd'):
             add_binary_blob(i, target)
 
     # use set() to remove duplicates, but
diff --git a/elbepack/initvmaction.py b/elbepack/initvmaction.py
index eb62a0bd..a250f209 100644
--- a/elbepack/initvmaction.py
+++ b/elbepack/initvmaction.py
@@ -21,12 +21,12 @@ from elbepack.xmlpreprocess import PreprocessWrapper
 
 
 def is_soap_local():
-    return cfg["soaphost"] in ("localhost", "127.0.0.1")
+    return cfg['soaphost'] in ('localhost', '127.0.0.1')
 
 
 def cmd_exists(x):
     return any(os.access(os.path.join(path, x), os.X_OK)
-               for path in os.environ["PATH"].split(os.pathsep))
+               for path in os.environ['PATH'].split(os.pathsep))
 
 # Create download directory with timestamp,
 # if necessary
@@ -37,7 +37,7 @@ def ensure_outdir(opt):
         opt.outdir = (
             f"elbe-build-{datetime.datetime.now().strftime('%Y%m%d-%H%M%S')}")
 
-    print(f"Saving generated Files to {opt.outdir}")
+    print(f'Saving generated Files to {opt.outdir}')
 
 
 class InitVMError(Exception):
@@ -58,9 +58,9 @@ class InitVMAction:
 
     @classmethod
     def print_actions(cls):
-        print("available subcommands are:", file=sys.stderr)
+        print('available subcommands are:', file=sys.stderr)
         for a in cls.actiondict:
-            print(f"   {a}", file=sys.stderr)
+            print(f'   {a}', file=sys.stderr)
 
     def __new__(cls, node):
         action = cls.actiondict[node]
@@ -82,7 +82,7 @@ class InitVMAction:
         # The tag initvmNeeded is required in order to be able to run `elbe
         # initvm create`
         try:
-            self.conn = libvirt.open("qemu:///system")
+            self.conn = libvirt.open('qemu:///system')
         except libvirt.libvirtError as verr:
             if not isinstance(verr.args[0], str):
                 raise
@@ -92,7 +92,7 @@ class InitVMAction:
                     retries -= 1
                     time.sleep(10)
                     try:
-                        self.conn = libvirt.open("qemu:///system")
+                        self.conn = libvirt.open('qemu:///system')
                     except libvirt.libvirtError as verr:
                         if not isinstance(verr.args[0], str):
                             raise
@@ -103,29 +103,29 @@ class InitVMAction:
                         break
 
                 if not self.conn:
-                    print("", file=sys.stderr)
-                    print("Accessing libvirt provider system not possible.", file=sys.stderr)
-                    print("Even after waiting 180 seconds.", file=sys.stderr)
+                    print('', file=sys.stderr)
+                    print('Accessing libvirt provider system not possible.', file=sys.stderr)
+                    print('Even after waiting 180 seconds.', file=sys.stderr)
                     print("Make sure that package 'libvirt-daemon-system' is", file=sys.stderr)
-                    print("installed, and the service is running properly", file=sys.stderr)
+                    print('installed, and the service is running properly', file=sys.stderr)
                     sys.exit(118)
 
             elif verr.args[0].startswith('authentication unavailable'):
-                print("", file=sys.stderr)
-                print("Accessing libvirt provider system not allowed.", file=sys.stderr)
-                print("Users which want to use elbe"
+                print('', file=sys.stderr)
+                print('Accessing libvirt provider system not allowed.', file=sys.stderr)
+                print('Users which want to use elbe '
                       "need to be members of the 'libvirt' group.", file=sys.stderr)
                 print("'gpasswd -a <user> libvirt' and logging in again,", file=sys.stderr)
-                print("should fix the problem.", file=sys.stderr)
+                print('should fix the problem.', file=sys.stderr)
                 sys.exit(119)
 
             elif verr.args[0].startswith('error from service: CheckAuthorization'):
-                print("", file=sys.stderr)
-                print("Accessing libvirt failed.", file=sys.stderr)
-                print("Probably entering the password for accssing libvirt", file=sys.stderr)
+                print('', file=sys.stderr)
+                print('Accessing libvirt failed.', file=sys.stderr)
+                print('Probably entering the password for accessing libvirt', file=sys.stderr)
                 print("timed out. If this occured after 'elbe initvm create'", file=sys.stderr)
                 print("it should be safe to use 'elbe initvm start' to", file=sys.stderr)
-                print("continue.", file=sys.stderr)
+                print('continue.', file=sys.stderr)
                 sys.exit(120)
 
             else:
@@ -186,10 +186,10 @@ class StartAction(InitVMAction):
             # TODO: Instead of waiting for five seconds
             # check whether SOAP server is reachable.
             for _ in range(1, 5):
-                sys.stdout.write("*")
+                sys.stdout.write('*')
                 sys.stdout.flush()
                 time.sleep(1)
-            print("*")
+            print('*')
 
 
 @InitVMAction.register('ensure')
@@ -216,12 +216,12 @@ class EnsureAction(InitVMAction):
                 if cmd[0] == 0:
                     break
                 if time.time() > stop:
-                    print(f"Waited for 5 minutes and the daemon is still not active: {cmd[2]}",
+                    print(f'Waited for 5 minutes and the daemon is still not active: {cmd[2]}',
                           file=sys.stderr)
                     sys.exit(123)
                 time.sleep(10)
         else:
-            print("Elbe initvm in bad state.")
+            print('Elbe initvm in bad state.')
             sys.exit(124)
 
 
@@ -240,7 +240,7 @@ class StopAction(InitVMAction):
 
         while True:
 
-            sys.stdout.write("*")
+            sys.stdout.write('*')
             sys.stdout.flush()
             time.sleep(1)
 
@@ -257,7 +257,7 @@ class StopAction(InitVMAction):
             except libvirt.libvirtError as e:
                 raise e
 
-        print("\nInitvm shutoff")
+        print('\nInitvm shutoff')
 
 
 @InitVMAction.register('attach')
@@ -284,7 +284,7 @@ def submit_with_repodir_and_dl_result(xmlfile, cdrom, opt):
         with Repodir(xmlfile, preprocess_xmlfile):
             submit_and_dl_result(preprocess_xmlfile, cdrom, opt)
     except RepodirError as err:
-        print("elbe repodir failed", file=sys.stderr)
+        print('elbe repodir failed', file=sys.stderr)
         print(err, file=sys.stderr)
         sys.exit(127)
     finally:
@@ -300,9 +300,9 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
             ret, prjdir, err = command_out_stderr(
                 f'{sys.executable} {elbe_exe} control create_project')
             if ret != 0:
-                print("elbe control create_project failed.", file=sys.stderr)
+                print('elbe control create_project failed.', file=sys.stderr)
                 print(err, file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(128)
 
             prjdir = prjdir.strip()
@@ -310,31 +310,31 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
             cmd = f'{sys.executable} {elbe_exe} control set_xml {prjdir} {xmlfile}'
             ret, _, err = command_out_stderr(cmd)
             if ret != 0:
-                print("elbe control set_xml failed2", file=sys.stderr)
+                print('elbe control set_xml failed2', file=sys.stderr)
                 print(err, file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(129)
     except CommandError:
         # this is the failure from PreprocessWrapper
         # it already printed the error message from
         # elbe preprocess
-        print("Giving up", file=sys.stderr)
+        print('Giving up', file=sys.stderr)
         sys.exit(130)
 
     if opt.writeproject:
-        with open(opt.writeproject, "w") as wpf:
+        with open(opt.writeproject, 'w') as wpf:
             wpf.write(prjdir)
 
     if cdrom is not None:
-        print("Uploading CDROM. This might take a while")
+        print('Uploading CDROM. This might take a while')
         try:
             system(f'{sys.executable} {elbe_exe} control set_cdrom "{prjdir}" "{cdrom}"')
         except CommandError:
-            print("elbe control set_cdrom Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control set_cdrom Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(131)
 
-        print("Upload finished")
+        print('Upload finished')
 
     build_opts = ''
     if opt.build_bin:
@@ -347,11 +347,11 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
     try:
         system(f'{sys.executable} {elbe_exe} control build "{prjdir}" {build_opts}')
     except CommandError:
-        print("elbe control build Failed", file=sys.stderr)
-        print("Giving up", file=sys.stderr)
+        print('elbe control build Failed', file=sys.stderr)
+        print('Giving up', file=sys.stderr)
         sys.exit(132)
 
-    print("Build started, waiting till it finishes")
+    print('Build started, waiting till it finishes')
 
     try:
         system(f'{sys.executable} {elbe_exe} control wait_busy "{prjdir}"')
@@ -365,27 +365,27 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
         print(
             f'{elbe_exe} control get_files --output "{opt.outdir}" "{prjdir}"',
             file=sys.stderr)
-        print("", file=sys.stderr)
+        print('', file=sys.stderr)
         print('The project can then be removed using:',
               file=sys.stderr)
         print(f'{elbe_exe} control del_project "{prjdir}"',
               file=sys.stderr)
-        print("", file=sys.stderr)
+        print('', file=sys.stderr)
         sys.exit(133)
 
-    print("")
-    print("Build finished !")
-    print("")
+    print('')
+    print('Build finished !')
+    print('')
 
     if opt.build_sdk:
         try:
             system(f'{sys.executable} {elbe_exe} control build_sdk "{prjdir}" {build_opts}')
         except CommandError:
-            print("elbe control build_sdk Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control build_sdk Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(134)
 
-        print("SDK Build started, waiting till it finishes")
+        print('SDK Build started, waiting till it finishes')
 
         try:
             system(f'{sys.executable} {elbe_exe} control wait_busy "{prjdir}"')
@@ -401,50 +401,50 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
                 f'{elbe_exe} control get_files --output "{opt.outdir}" '
                 f'"{prjdir}"',
                 file=sys.stderr)
-            print("", file=sys.stderr)
+            print('', file=sys.stderr)
             print('The project can then be removed using:',
                   file=sys.stderr)
             print(f'{elbe_exe} control del_project "{prjdir}"',
                   file=sys.stderr)
-            print("", file=sys.stderr)
+            print('', file=sys.stderr)
             sys.exit(135)
 
-        print("")
-        print("SDK Build finished !")
-        print("")
+        print('')
+        print('SDK Build finished !')
+        print('')
 
     try:
         system(f'{sys.executable} {elbe_exe} control dump_file "{prjdir}" validation.txt')
     except CommandError:
         print(
-            "Project failed to generate validation.txt",
+            'Project failed to generate validation.txt',
             file=sys.stderr)
-        print("Getting log.txt", file=sys.stderr)
+        print('Getting log.txt', file=sys.stderr)
         try:
             system(f'{sys.executable} {elbe_exe} control dump_file "{prjdir}" log.txt')
         except CommandError:
 
-            print("Failed to dump log.txt", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('Failed to dump log.txt', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
         sys.exit(136)
 
     if opt.skip_download:
-        print("")
-        print("Listing available files:")
-        print("")
+        print('')
+        print('Listing available files:')
+        print('')
         try:
             system(f'{sys.executable} {elbe_exe} control get_files "{prjdir}"')
         except CommandError:
-            print("elbe control get_files Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control get_files Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(137)
 
-        print("")
+        print('')
         print(f'Get Files with: elbe control get_file "{prjdir}" <filename>')
     else:
-        print("")
-        print("Getting generated Files")
-        print("")
+        print('')
+        print('Getting generated Files')
+        print('')
 
         ensure_outdir(opt)
 
@@ -453,15 +453,15 @@ def submit_and_dl_result(xmlfile, cdrom, opt):
                 f'{sys.executable} {elbe_exe} control get_files --output "{opt.outdir}" '
                 f'"{prjdir}"')
         except CommandError:
-            print("elbe control get_files Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control get_files Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(138)
 
         if not opt.keep_files:
             try:
                 system(f'{sys.executable} {elbe_exe} control del_project "{prjdir}"')
             except CommandError:
-                print("remove project from initvm failed",
+                print('remove project from initvm failed',
                       file=sys.stderr)
                 sys.exit(139)
 
@@ -473,7 +473,7 @@ def extract_cdrom(cdrom):
     """
 
     tmp = TmpdirFilesystem()
-    in_iso_name = "source.xml"
+    in_iso_name = 'source.xml'
     try:
         import pycdlib
         iso = pycdlib.PyCdlib()
@@ -484,17 +484,17 @@ def extract_cdrom(cdrom):
     except ImportError:
         system(f'7z x -o{tmp.path} "{cdrom}" {in_iso_name}')
 
-    print("", file=sys.stderr)
+    print('', file=sys.stderr)
 
     if not tmp.isfile('source.xml'):
         print(
-            "Iso image does not contain a source.xml file",
+            'Iso image does not contain a source.xml file',
             file=sys.stderr)
         print(
             "This is not supported by 'elbe initvm'",
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Exiting !!!", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Exiting !!!', file=sys.stderr)
         sys.exit(140)
 
     try:
@@ -503,18 +503,18 @@ def extract_cdrom(cdrom):
             url_validation=ValidationMode.NO_CHECK)
     except ValidationError as e:
         print(
-            "Iso image does contain a source.xml file.",
+            'Iso image does contain a source.xml file.',
             file=sys.stderr)
         print(
-            "But that xml does not validate correctly",
+            'But that xml does not validate correctly',
             file=sys.stderr)
-        print("", file=sys.stderr)
-        print("Exiting !!!", file=sys.stderr)
+        print('', file=sys.stderr)
+        print('Exiting !!!', file=sys.stderr)
         print(e)
         sys.exit(141)
 
-    print("Iso Image with valid source.xml detected !")
-    print(f"Image was generated using Elbe Version {exml.get_elbe_version()}")
+    print('Iso Image with valid source.xml detected !')
+    print(f'Image was generated using Elbe Version {exml.get_elbe_version()}')
 
     return tmp
 
@@ -529,25 +529,25 @@ class CreateAction(InitVMAction):
 
         if self.initvm is not None:
             print(f"Initvm is already defined for the libvirt domain '{cfg['initvm_domain']}'.\n")
-            print("If you want to build in your old initvm, use `elbe initvm submit <xml>`.")
-            print("If you want to remove your old initvm from libvirt "
+            print('If you want to build in your old initvm, use `elbe initvm submit <xml>`.')
+            print('If you want to remove your old initvm from libvirt '
                   f"run `virsh --connect qemu:///system undefine {cfg['initvm_domain']}`.\n")
-            print("You can specify another libvirt domain by setting the "
-                  "ELBE_INITVM_DOMAIN environment variable to an unused domain name.\n")
-            print("Note:")
-            print("\t1) You can reimport your old initvm via "
-                  "`virsh --connect qemu:///system define <file>`")
-            print("\t   where <file> is the corresponding libvirt.xml")
-            print("\t2) virsh --connect qemu:///system undefine does not delete the image "
-                  "of your old initvm.")
+            print('You can specify another libvirt domain by setting the '
+                  'ELBE_INITVM_DOMAIN environment variable to an unused domain name.\n')
+            print('Note:')
+            print('\t1) You can reimport your old initvm via '
+                  '`virsh --connect qemu:///system define <file>`')
+            print('\t   where <file> is the corresponding libvirt.xml')
+            print('\t2) virsh --connect qemu:///system undefine does not delete the image '
+                  'of your old initvm.')
             sys.exit(142)
 
         # Upgrade from older versions which used tmux
         try:
-            system("tmux has-session -t ElbeInitVMSession 2>/dev/null")
-            print("ElbeInitVMSession exists in tmux. "
-                  "It may belong to an old elbe version. "
-                  "Please stop it to prevent interfering with this version.", file=sys.stderr)
+            system('tmux has-session -t ElbeInitVMSession 2>/dev/null')
+            print('ElbeInitVMSession exists in tmux. '
+                  'It may belong to an old elbe version. '
+                  'Please stop it to prevent interfering with this version.', file=sys.stderr)
             sys.exit(143)
         except CommandError:
             pass
@@ -562,11 +562,11 @@ class CreateAction(InitVMAction):
                 try:
                     xml = etree(xmlfile)
                 except ValidationError as e:
-                    print(f"XML file is invalid: {e}")
+                    print(f'XML file is invalid: {e}')
                 # Use default XML if no initvm was specified
-                if not xml.has("initvm"):
+                if not xml.has('initvm'):
                     xmlfile = os.path.join(
-                        elbepack.__path__[0], "init/default-init.xml")
+                        elbepack.__path__[0], 'init/default-init.xml')
 
             elif args[0].endswith('.iso'):
                 # We have an iso image, extract xml from there.
@@ -576,14 +576,14 @@ class CreateAction(InitVMAction):
                 cdrom = args[0]
             else:
                 print(
-                    "Unknown file ending (use either xml or iso)",
+                    'Unknown file ending (use either xml or iso)',
                     file=sys.stderr)
                 sys.exit(144)
         else:
             # No xml File was specified, build the default elbe-init-with-ssh
             xmlfile = os.path.join(
                 elbepack.__path__[0],
-                "init/default-init.xml")
+                'init/default-init.xml')
 
         try:
             init_opts = ''
@@ -612,7 +612,7 @@ class CreateAction(InitVMAction):
 
         except CommandError:
             print("'elbe init' Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(145)
 
         # Read xml file for libvirt
@@ -625,7 +625,7 @@ class CreateAction(InitVMAction):
         except CommandError:
             print('Registering initvm in libvirt failed', file=sys.stderr)
             print(f"Try `virsh --connect qemu:///system undefine {cfg['initvm_domain']}`"
-                  "to delete existing initvm",
+                  'to delete existing initvm',
                   file=sys.stderr)
             sys.exit(146)
 
@@ -633,15 +633,15 @@ class CreateAction(InitVMAction):
         try:
             system(f'cd "{initvmdir}"; make')
         except CommandError:
-            print("Building the initvm Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('Building the initvm Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(147)
 
         try:
             system(f'{sys.executable} {elbe_exe} initvm start')
         except CommandError:
-            print("Starting the initvm Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('Starting the initvm Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(148)
 
         if len(args) == 1:
@@ -652,7 +652,7 @@ class CreateAction(InitVMAction):
                 try:
                     x = etree(args[0])
                 except ValidationError as e:
-                    print(f"XML file is invalid: {e}")
+                    print(f'XML file is invalid: {e}')
                     sys.exit(149)
                 if not x.has('project'):
                     print("elbe initvm ready: use 'elbe initvm submit "
@@ -676,8 +676,8 @@ class SubmitAction(InitVMAction):
         try:
             system(f'{sys.executable} {elbe_exe} initvm ensure')
         except CommandError:
-            print("Starting the initvm Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('Starting the initvm Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(150)
 
         # Init cdrom to None, if we detect it, we set it
@@ -695,7 +695,7 @@ class SubmitAction(InitVMAction):
                 cdrom = args[0]
             else:
                 print(
-                    "Unknown file ending (use either xml or iso)",
+                    'Unknown file ending (use either xml or iso)',
                     file=sys.stderr)
                 sys.exit(151)
 
@@ -711,11 +711,11 @@ class SyncAction(InitVMAction):
     def execute(self, _initvmdir, opt, args):
         top_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
         try:
-            system("rsync --info=name1,stats1  --archive --times "
+            system('rsync --info=name1,stats1  --archive --times '
                    "--exclude='.git*' --exclude='*.pyc' --exclude='elbe-build*' "
                    "--exclude='initvm' --exclude='__pycache__' --exclude='docs' "
                    "--exclude='examples' "
                    f"--rsh='ssh -p {cfg['sshport']}' --chown=root:root "
-                   f"{top_dir}/ root at localhost:/var/cache/elbe/devel")
+                   f'{top_dir}/ root at localhost:/var/cache/elbe/devel')
         except CommandError as E:
             print(E)
diff --git a/elbepack/isooptions.py b/elbepack/isooptions.py
index ffaad4c9..732d0b4f 100644
--- a/elbepack/isooptions.py
+++ b/elbepack/isooptions.py
@@ -6,20 +6,20 @@ import logging
 
 # https://wiki.osdev.org/ISO_9660
 iso_options = {
-    "sysid":     ("-sysid",      32, "Specifies the system ID",              "strA"),
-    "volid":     ("-V",          32, "Specifies the volume ID",              "strD"),
-    "volset":    ("-volset",    128, "Specifies the volume set ID",          "strD"),
-    "publisher": ("-publisher", 128, "Specifies the publisher ID",           "strA"),
-    "preparer":  ("-p",         128, "Specifies the preparer ID",            "strA"),
-    "app":       ("-A",         128, "Specifies the application ID",         "strA"),
-    "copyright": ("-copyright",  38, "Specifies copyright filename on disc", "strD"),
-    "abstract":  ("-abstract",   36, "Specifies the abstract filename",      "strD"),
-    "biblio":    ("-biblio",     37, "Specifies the bibliographic filename", "strD"),
+    'sysid':     ('-sysid',      32, 'Specifies the system ID',              'strA'),
+    'volid':     ('-V',          32, 'Specifies the volume ID',              'strD'),
+    'volset':    ('-volset',    128, 'Specifies the volume set ID',          'strD'),
+    'publisher': ('-publisher', 128, 'Specifies the publisher ID',           'strA'),
+    'preparer':  ('-p',         128, 'Specifies the preparer ID',            'strA'),
+    'app':       ('-A',         128, 'Specifies the application ID',         'strA'),
+    'copyright': ('-copyright',  38, 'Specifies copyright filename on disc', 'strD'),
+    'abstract':  ('-abstract',   36, 'Specifies the abstract filename',      'strD'),
+    'biblio':    ('-biblio',     37, 'Specifies the bibliographic filename', 'strD'),
 }
 
 encoding = {
-    "strA": """ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_!"%&'()*+,-./:;<=>? """,
-    "strD": """ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"""
+    'strA': """ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_!"%&'()*+,-./:;<=>? """,
+    'strD': """ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"""
 }
 
 
@@ -37,14 +37,14 @@ def iso_option_valid(opt_name, text):
 
 def get_iso_options(xml):
     options = []
-    src_opts = xml.node("src-cdrom/src-opts")
+    src_opts = xml.node('src-cdrom/src-opts')
     if src_opts is None:
-        return ""
+        return ''
     for node in src_opts:
         if node.tag not in iso_options:
             continue
         option = iso_options[node.tag]
-        logging.info("Adding option %s\n%s", node.tag, option[2])
+        logging.info('Adding option %s\n%s', node.tag, option[2])
         text = node.et.text[:option[1]]
         options.append('%s "%s"' % (option[0], text.replace('"', '\\"')))
-    return " ".join(options)
+    return ' '.join(options)
diff --git a/elbepack/junit.py b/elbepack/junit.py
index 65e37632..2d50c6bd 100644
--- a/elbepack/junit.py
+++ b/elbepack/junit.py
@@ -36,7 +36,7 @@ class TestSuite:
 
     def do_test(self, node, target):
         if node.tag not in self.test_dict:
-            raise TestException(f"Invalid Test {node.tag}")
+            raise TestException(f'Invalid Test {node.tag}')
         test = self.test_dict[node.tag]
         return test(node, target)()
 
@@ -47,12 +47,12 @@ class TestSuite:
                 test_cases.append(self.do_test(test, self.target))
             except TestException:
                 pass  # TODO - Handle me!
-        ts = junit.TestSuite(name=self.node.et.attrib["name"],
+        ts = junit.TestSuite(name=self.node.et.attrib['name'],
                              test_cases=test_cases)
         return ts
 
 
- at TestSuite.register("BaseTest", register=False)
+ at TestSuite.register('BaseTest', register=False)
 class BaseTest:
 
     tag = None
@@ -63,15 +63,15 @@ class BaseTest:
         self.target = target
 
     def __call__(self):
-        raise TestException(f"Unimplemented Test {self.tag}")
+        raise TestException(f'Unimplemented Test {self.tag}')
 
 
- at TestSuite.register("file-exists")
+ at TestSuite.register('file-exists')
 class TestFileExists(BaseTest):
 
     def __call__(self):
         path = self.node.et.text
         test = junit.TestCase(name=path, classname=self.tag)
         if not self.target.exists(path):
-            test.add_failure_info(message="FAILED")
+            test.add_failure_info(message='FAILED')
         return test
diff --git a/elbepack/licencexml.py b/elbepack/licencexml.py
index 8e52448d..b750189e 100644
--- a/elbepack/licencexml.py
+++ b/elbepack/licencexml.py
@@ -22,10 +22,10 @@ def do_heuristics(fp):
     c = Copyright()
     num_licenses = 0
     for lic in fp.readlines():
-        if lic.startswith("License:"):
+        if lic.startswith('License:'):
             num_licenses += 1
-            _, v = lic.split(":", 1)
-            data = {"License": v.strip()}
+            _, v = lic.split(':', 1)
+            data = {'License': v.strip()}
             lic_para = LicenseParagraph(data)
             c.add_license_paragraph(lic_para)
 
@@ -123,4 +123,4 @@ class copyright_xml:
         return
 
     def write(self, fname):
-        self.outxml.write(fname, encoding="iso-8859-1")
+        self.outxml.write(fname, encoding='iso-8859-1')
diff --git a/elbepack/log.py b/elbepack/log.py
index 59d9ba0d..4e3409ed 100644
--- a/elbepack/log.py
+++ b/elbepack/log.py
@@ -14,8 +14,8 @@ from contextlib import contextmanager
 root = logging.getLogger()
 root.setLevel(logging.DEBUG)
 local = threading.local()
-context_fmt = logging.Formatter("%(context)s%(message)s")
-msgonly_fmt = logging.Formatter("%(message)s")
+context_fmt = logging.Formatter('%(context)s%(message)s')
+msgonly_fmt = logging.Formatter('%(message)s')
 
 logging_methods = []
 
@@ -93,7 +93,7 @@ class ThreadFilter(logging.Filter):
             thread = record.thread
         retval = record.name in self.allowed and thread == self.thread
         if retval and not hasattr(record, 'context'):
-            record.context = f"[{record.levelname}]"
+            record.context = f'[{record.levelname}]'
         return retval
 
 
@@ -117,7 +117,7 @@ def logging_method(name):
     return decorator
 
 
- at logging_method("streams")
+ at logging_method('streams')
 @with_list
 def add_stream_handlers(streams):
 
@@ -133,14 +133,14 @@ def add_stream_handlers(streams):
         yield [out]
 
 
- at logging_method("projects")
+ at logging_method('projects')
 @with_list
 def add_project_handlers(projects):
 
     for proj in projects:
-        validation = logging.FileHandler(os.path.join(proj, "validation.txt"))
-        report = logging.FileHandler(os.path.join(proj, "elbe-report.txt"))
-        log = logging.FileHandler(os.path.join(proj, "log.txt"))
+        validation = logging.FileHandler(os.path.join(proj, 'validation.txt'))
+        report = logging.FileHandler(os.path.join(proj, 'elbe-report.txt'))
+        log = logging.FileHandler(os.path.join(proj, 'log.txt'))
         echo = QHandler(proj)
         soap = QHandler(proj)
 
@@ -159,7 +159,7 @@ def add_project_handlers(projects):
         yield [validation, report, log, echo, soap]
 
 
-@logging_method("files")
+@logging_method('files')
 @with_list
 def add_file_handlers(files):
 
@@ -179,7 +179,7 @@ def add_file_handlers(files):
         yield [out]
 
 
-@logging_method("projectsQ")
+@logging_method('projectsQ')
 @with_list
 def add_projectQ_handlers(projects):
 
@@ -214,7 +214,7 @@ def open_logging(targets):
 
 
 def close_logging():
-    if hasattr(local, "handlers"):
+    if hasattr(local, 'handlers'):
         for h in local.handlers:
             root.removeHandler(h)
             h.close()
@@ -228,8 +228,8 @@ class AsyncLogging:
         self.atmost = atmost
         self.fd = None
         calling_thread = threading.current_thread().ident
-        extra = {"_thread": calling_thread}
-        extra["context"] = ""
+        extra = {'_thread': calling_thread}
+        extra['context'] = ''
         self.stream = logging.LoggerAdapter(stream, extra)
         self.block = logging.LoggerAdapter(block, extra)
 
@@ -242,11 +242,11 @@ class AsyncLogging:
             os.close(r)
 
     def run(self):
-        rest = ""
+        rest = ''
 
         while True:
 
-            buf = os.read(self.fd, self.atmost).decode("utf-8", errors="replace")
+            buf = os.read(self.fd, self.atmost).decode('utf-8', errors='replace')
 
             # Pipe broke
             if not buf:
@@ -265,7 +265,7 @@ class AsyncLogging:
 
             # Log the line now for echo back
             if cnt:
-                logbuf = "\n".join(self.lines[-cnt:])
+                logbuf = '\n'.join(self.lines[-cnt:])
 
                 # filter out ansi sequences.
                 logbuf = re.sub('\u001b[.*?[@-~]', '', logbuf)
@@ -278,7 +278,7 @@ class AsyncLogging:
 
         if self.lines:
             self.lines[-1] += rest
-            self.block.info("\n".join(self.lines))
+            self.block.info('\n'.join(self.lines))
 
 
 def async_logging(r, w, stream, block, atmost=4096):
diff --git a/elbepack/pbuilder.py b/elbepack/pbuilder.py
index e99df97b..81645d0b 100644
--- a/elbepack/pbuilder.py
+++ b/elbepack/pbuilder.py
@@ -11,8 +11,8 @@ from elbepack.filesystem import Filesystem
 
 def pbuilder_write_config(builddir, xml, noccache):
     distname = xml.prj.text('suite')
-    pbuilderrc_fname = os.path.join(builddir, "pbuilderrc")
-    fp = open(pbuilderrc_fname, "w")
+    pbuilderrc_fname = os.path.join(builddir, 'pbuilderrc')
+    fp = open(pbuilderrc_fname, 'w')
 
     fp.write('#!/bin/sh\n')
     fp.write('set -e\n')
@@ -25,7 +25,7 @@ def pbuilder_write_config(builddir, xml, noccache):
     fp.write(f'HOOKDIR="{os.path.join(builddir, "pbuilder", "hooks.d")}"\n')
     fp.write('PATH="/usr/share/elbe/qemu-elbe:$PATH"\n')
 
-    if xml.text("project/arch", key="arch") != 'amd64':
+    if xml.text('project/arch', key='arch') != 'amd64':
         fp.write(f'ARCHITECTURE="{xml.text("project/buildimage/arch", key="arch")}"\n')
         fp.write('DEBOOTSTRAP="qemu-debootstrap"\n')
         fp.write('DEBOOTSTRAPOPTS=("${DEBOOTSTRAPOPTS[@]}" '
@@ -54,8 +54,8 @@ def pbuilder_write_config(builddir, xml, noccache):
 
 def pbuilder_write_cross_config(builddir, xml, noccache):
     distname = xml.prj.text('suite')
-    pbuilderrc_fname = os.path.join(builddir, "cross_pbuilderrc")
-    fp = open(pbuilderrc_fname, "w")
+    pbuilderrc_fname = os.path.join(builddir, 'cross_pbuilderrc')
+    fp = open(pbuilderrc_fname, 'w')
 
     fp.write('#!/bin/sh\n')
     fp.write('set -e\n')
@@ -96,8 +96,8 @@ def pbuilder_write_apt_conf(builddir, xml):
 
     # noauth is set
     # create pbuilder/aptconfdir/apt.conf.d/16allowuntrusted
-    aptconf_dir = os.path.join(builddir, "aptconfdir", "apt.conf.d")
-    fp = open(os.path.join(aptconf_dir, "16allowuntrusted"), "w")
+    aptconf_dir = os.path.join(builddir, 'aptconfdir', 'apt.conf.d')
+    fp = open(os.path.join(aptconf_dir, '16allowuntrusted'), 'w')
 
     # Make apt-get use --force-yes which is not specified by
     # pbuilder-satisfy-depends
@@ -115,7 +115,7 @@ def pbuilder_write_apt_conf(builddir, xml):
 
 
 def mirror_script_add_key_url(key_url):
-    key_url = key_url.replace("LOCALMACHINE", "10.0.2.2")
+    key_url = key_url.replace('LOCALMACHINE', '10.0.2.2')
     key_conn = urlopen(key_url, None, 10)
     key_text = key_conn.read()
     key_conn.close()
@@ -126,14 +126,14 @@ def mirror_script_add_key_url(key_url):
 def pbuilder_write_repo_hook(builddir, xml, cross):
 
     if cross:
-        pbuilder_hook_dir = os.path.join(builddir, "pbuilder_cross", "hooks.d")
+        pbuilder_hook_dir = os.path.join(builddir, 'pbuilder_cross', 'hooks.d')
     else:
-        pbuilder_hook_dir = os.path.join(builddir, "pbuilder", "hooks.d")
+        pbuilder_hook_dir = os.path.join(builddir, 'pbuilder', 'hooks.d')
 
-    with open(os.path.join(pbuilder_hook_dir, "H10elbe_apt_update"), "w") as f:
-        f.write("#!/bin/sh\napt update\n")
+    with open(os.path.join(pbuilder_hook_dir, 'H10elbe_apt_update'), 'w') as f:
+        f.write('#!/bin/sh\napt update\n')
 
-    with open(os.path.join(pbuilder_hook_dir, "G10elbe_apt_sources"), "w") as f:
+    with open(os.path.join(pbuilder_hook_dir, 'G10elbe_apt_sources'), 'w') as f:
 
         local_http = f"deb http://127.0.0.1:8080{builddir}/repo {xml.prj.text('suite')} main\n"
         mirrors = xml.create_apt_sources_list(hostsysroot=cross)
@@ -141,45 +141,45 @@ def pbuilder_write_repo_hook(builddir, xml, cross):
 
         keys = get_apt_keys(builddir, xml)
 
-        f.write("#!/bin/sh\n")
+        f.write('#!/bin/sh\n')
 
         # cat reads from stdin (-) and redirect (>) to
         # /etc/apt/sources.list
-        f.write(f"cat -> /etc/apt/sources.list <<EOF\n{mirrors}\nEOF\n")
+        f.write(f'cat -> /etc/apt/sources.list <<EOF\n{mirrors}\nEOF\n')
 
         for key in keys:
-            f.write(f"cat << EOF | apt-key add -\n{key}\nEOF\n")
+            f.write(f'cat << EOF | apt-key add -\n{key}\nEOF\n')
 
-        f.write("apt-get update\n")
+        f.write('apt-get update\n')
 
 
 def get_apt_keys(builddir, xml):
 
     if xml.prj is None:
-        return (["# No Project"], [])
+        return (['# No Project'], [])
 
-    if not xml.prj.has("mirror") and not xml.prj.has("mirror/cdrom"):
-        return (["# No mirrors configured"], [])
+    if not xml.prj.has('mirror') and not xml.prj.has('mirror/cdrom'):
+        return (['# No mirrors configured'], [])
 
-    keys = [Filesystem(builddir).read_file("repo/repo.pub")]
+    keys = [Filesystem(builddir).read_file('repo/repo.pub')]
 
-    if xml.prj.has("mirror/primary_host") and xml.prj.has("mirror/url-list"):
+    if xml.prj.has('mirror/primary_host') and xml.prj.has('mirror/url-list'):
 
-        for url in xml.prj.node("mirror/url-list"):
+        for url in xml.prj.node('mirror/url-list'):
 
-            if url.has("options"):
-                options = "[%s]" % ' '.join([opt.et.text.strip(' \t\n')
+            if url.has('options'):
+                options = '[%s]' % ' '.join([opt.et.text.strip(' \t\n')
                                              for opt
-                                             in url.all("options/option")])
+                                             in url.all('options/option')])
             else:
-                options = ""
+                options = ''
 
-            if "trusted=yes" in options:
+            if 'trusted=yes' in options:
                 continue
 
-            if url.has("raw-key"):
+            if url.has('raw-key'):
 
-                key = "\n".join(line.strip(" \t")
+                key = '\n'.join(line.strip(' \t')
                                 for line
                                 in url.text('raw-key').splitlines()[1:-1])
 
diff --git a/elbepack/pbuilderaction.py b/elbepack/pbuilderaction.py
index 9404a769..a35a5e27 100644
--- a/elbepack/pbuilderaction.py
+++ b/elbepack/pbuilderaction.py
@@ -13,7 +13,7 @@ from elbepack.xmlpreprocess import PreprocessWrapper
 
 def cmd_exists(x):
     return any(os.access(os.path.join(path, x), os.X_OK)
-               for path in os.environ["PATH"].split(os.pathsep))
+               for path in os.environ['PATH'].split(os.pathsep))
 
 # Create download directory with timestamp,
 # if necessary
@@ -21,9 +21,9 @@ def cmd_exists(x):
 
 def ensure_outdir(opt):
     if opt.outdir is None:
-        opt.outdir = ".."
+        opt.outdir = '..'
 
-    print(f"Saving generated Files to {opt.outdir}")
+    print(f'Saving generated Files to {opt.outdir}')
 
 
 class PBuilderError(Exception):
@@ -40,9 +40,9 @@ class PBuilderAction:
 
     @classmethod
     def print_actions(cls):
-        print("available subcommands are:", file=sys.stderr)
+        print('available subcommands are:', file=sys.stderr)
         for a in cls.actiondict:
-            print(f"   {a}", file=sys.stderr)
+            print(f'   {a}', file=sys.stderr)
 
     def __new__(cls, node):
         action = cls.actiondict[node]
@@ -63,14 +63,14 @@ class CreateAction(PBuilderAction):
         PBuilderAction.__init__(self, node)
 
     def execute(self, opt, _args):
-        crossopt = ""
+        crossopt = ''
         if opt.cross:
-            crossopt = "--cross"
+            crossopt = '--cross'
         if opt.noccache:
-            ccacheopt = "--no-ccache"
-            ccachesize = ""
+            ccacheopt = '--no-ccache'
+            ccachesize = ''
         else:
-            ccacheopt = "--ccache-size"
+            ccacheopt = '--ccache-size'
             ccachesize = opt.ccachesize
 
         if opt.xmlfile:
@@ -79,10 +79,10 @@ class CreateAction(PBuilderAction):
                     ret, prjdir, err = command_out_stderr(
                         f'{sys.executable} {elbe_exe} control create_project')
                     if ret != 0:
-                        print("elbe control create_project failed.",
+                        print('elbe control create_project failed.',
                               file=sys.stderr)
                         print(err, file=sys.stderr)
-                        print("Giving up", file=sys.stderr)
+                        print('Giving up', file=sys.stderr)
                         sys.exit(152)
 
                     prjdir = prjdir.strip()
@@ -90,48 +90,48 @@ class CreateAction(PBuilderAction):
                         f'{sys.executable} {elbe_exe} control set_xml "{prjdir}" "{ppw.preproc}"')
 
                     if ret != 0:
-                        print("elbe control set_xml failed.", file=sys.stderr)
+                        print('elbe control set_xml failed.', file=sys.stderr)
                         print(err, file=sys.stderr)
-                        print("Giving up", file=sys.stderr)
+                        print('Giving up', file=sys.stderr)
                         sys.exit(153)
             except CommandError:
                 # this is the failure from PreprocessWrapper
                 # it already printed the error message from
                 # elbe preprocess
-                print("Giving up", file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(154)
 
             if opt.writeproject:
-                wpf = open(opt.writeproject, "w")
+                wpf = open(opt.writeproject, 'w')
                 wpf.write(prjdir)
                 wpf.close()
 
         elif opt.project:
             prjdir = opt.project
         else:
-            print("you need to specify --project option", file=sys.stderr)
+            print('you need to specify --project option', file=sys.stderr)
             sys.exit(155)
 
-        print("Creating pbuilder")
+        print('Creating pbuilder')
 
         try:
             system(f"""'{sys.executable} {elbe_exe} control
                     build_pbuilder "{prjdir}" {crossopt} {ccacheopt} {ccachesize}'""")
         except CommandError:
-            print("elbe control build_pbuilder Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control build_pbuilder Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(156)
 
         try:
             system(f'{sys.executable} {elbe_exe} control wait_busy "{prjdir}"')
         except CommandError:
-            print("elbe control wait_busy Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control wait_busy Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(157)
 
-        print("")
-        print("Building Pbuilder finished !")
-        print("")
+        print('')
+        print('Building Pbuilder finished !')
+        print('')
 
 
 PBuilderAction.register(CreateAction)
@@ -147,23 +147,23 @@ class UpdateAction(PBuilderAction):
     def execute(self, opt, _args):
 
         if not opt.project:
-            print("you need to specify --project option", file=sys.stderr)
+            print('you need to specify --project option', file=sys.stderr)
             sys.exit(158)
 
         prjdir = opt.project
 
-        print("Updating pbuilder")
+        print('Updating pbuilder')
 
         try:
             system(f'{sys.executable} {elbe_exe} control update_pbuilder "{prjdir}"')
         except CommandError:
-            print("elbe control update_pbuilder Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control update_pbuilder Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(159)
 
-        print("")
-        print("Updating Pbuilder finished !")
-        print("")
+        print('')
+        print('Updating Pbuilder finished !')
+        print('')
 
 
 PBuilderAction.register(CreateAction)
@@ -178,18 +178,18 @@ class BuildAction(PBuilderAction):
 
     def execute(self, opt, _args):
 
-        crossopt = ""
+        crossopt = ''
         if opt.cross:
-            crossopt = "--cross"
+            crossopt = '--cross'
         tmp = TmpdirFilesystem()
 
         if opt.xmlfile:
             ret, prjdir, err = command_out_stderr(
                 f'{sys.executable} {elbe_exe} control create_project --retries 60 "{opt.xmlfile}"')
             if ret != 0:
-                print("elbe control create_project failed.", file=sys.stderr)
+                print('elbe control create_project failed.', file=sys.stderr)
                 print(err, file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(160)
 
             prjdir = prjdir.strip()
@@ -197,54 +197,54 @@ class BuildAction(PBuilderAction):
             try:
                 system(f'{sys.executable} {elbe_exe} control build_pbuilder "{prjdir}"')
             except CommandError:
-                print("elbe control build_pbuilder Failed", file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('elbe control build_pbuilder Failed', file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(161)
 
             try:
                 system(f'{sys.executable} {elbe_exe} control wait_busy "{prjdir}"')
             except CommandError:
-                print("elbe control wait_busy Failed", file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('elbe control wait_busy Failed', file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(162)
 
-            print("")
-            print("Building Pbuilder finished !")
-            print("")
+            print('')
+            print('Building Pbuilder finished !')
+            print('')
         elif opt.project:
             prjdir = opt.project
             system(f'{sys.executable} {elbe_exe} control rm_log {prjdir}')
         else:
             print(
-                "you need to specify --project or --xmlfile option",
+                'you need to specify --project or --xmlfile option',
                 file=sys.stderr)
             sys.exit(163)
 
-        print("")
-        print("Packing Source into tmp archive")
-        print("")
+        print('')
+        print('Packing Source into tmp archive')
+        print('')
         try:
             system(f'tar cfz "{tmp.fname("pdebuild.tar.gz")}" .')
         except CommandError:
-            print("tar Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('tar Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(164)
 
         for of in opt.origfile:
-            print("")
+            print('')
             print(f"Pushing orig file '{of}' into pbuilder")
-            print("")
+            print('')
             try:
                 system(
                     f'{sys.executable} {elbe_exe} control set_orig "{prjdir}" "{of}"')
             except CommandError:
-                print("elbe control set_orig Failed", file=sys.stderr)
-                print("Giving up", file=sys.stderr)
+                print('elbe control set_orig Failed', file=sys.stderr)
+                print('Giving up', file=sys.stderr)
                 sys.exit(165)
 
-        print("")
-        print("Pushing source into pbuilder")
-        print("")
+        print('')
+        print('Pushing source into pbuilder')
+        print('')
 
         try:
             system(
@@ -252,46 +252,46 @@ class BuildAction(PBuilderAction):
                 f'--profile "{opt.profile}" {crossopt} '
                 f'"{prjdir}" "{tmp.fname("pdebuild.tar.gz")}"')
         except CommandError:
-            print("elbe control set_pdebuild Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control set_pdebuild Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(166)
         try:
             system(f'{sys.executable} {elbe_exe} control wait_busy "{prjdir}"')
         except CommandError:
-            print("elbe control wait_busy Failed", file=sys.stderr)
-            print("Giving up", file=sys.stderr)
+            print('elbe control wait_busy Failed', file=sys.stderr)
+            print('Giving up', file=sys.stderr)
             sys.exit(167)
-        print("")
-        print("Pdebuild finished !")
-        print("")
+        print('')
+        print('Pdebuild finished !')
+        print('')
 
         if opt.skip_download:
-            print("")
-            print("Listing available files:")
-            print("")
+            print('')
+            print('Listing available files:')
+            print('')
             try:
                 system(
                     f'{sys.executable} {elbe_exe} control --pbuilder-only get_files "{prjdir}"')
             except CommandError:
-                print("elbe control get_files Failed", file=sys.stderr)
-                print("", file=sys.stderr)
-                print("dumping logfile", file=sys.stderr)
+                print('elbe control get_files Failed', file=sys.stderr)
+                print('', file=sys.stderr)
+                print('dumping logfile', file=sys.stderr)
 
                 try:
                     system(f'{sys.executable} {elbe_exe} control dump_file "{prjdir}" log.txt')
                 except CommandError:
-                    print("elbe control dump_file Failed", file=sys.stderr)
-                    print("", file=sys.stderr)
-                    print("Giving up", file=sys.stderr)
+                    print('elbe control dump_file Failed', file=sys.stderr)
+                    print('', file=sys.stderr)
+                    print('Giving up', file=sys.stderr)
 
                 sys.exit(168)
 
-            print("")
+            print('')
             print(f"Get Files with: 'elbe control get_file {prjdir} <filename>'")
         else:
-            print("")
-            print("Getting generated Files")
-            print("")
+            print('')
+            print('Getting generated Files')
+            print('')
 
             ensure_outdir(opt)
 
@@ -300,16 +300,16 @@ class BuildAction(PBuilderAction):
                     f'{sys.executable} {elbe_exe} control --pbuilder-only get_files '
                     f'--output "{opt.outdir}" "{prjdir}"')
             except CommandError:
-                print("elbe control get_files Failed", file=sys.stderr)
-                print("", file=sys.stderr)
-                print("dumping logfile", file=sys.stderr)
+                print('elbe control get_files Failed', file=sys.stderr)
+                print('', file=sys.stderr)
+                print('dumping logfile', file=sys.stderr)
 
                 try:
                     system(f'{sys.executable} {elbe_exe} control dump_file "{prjdir}" log.txt')
                 except CommandError:
-                    print("elbe control dump_file Failed", file=sys.stderr)
-                    print("", file=sys.stderr)
-                    print("Giving up", file=sys.stderr)
+                    print('elbe control dump_file Failed', file=sys.stderr)
+                    print('', file=sys.stderr)
+                    print('Giving up', file=sys.stderr)
 
                 sys.exit(169)
 
diff --git a/elbepack/pkgarchive.py b/elbepack/pkgarchive.py
index e98b2b0c..0ad969a4 100644
--- a/elbepack/pkgarchive.py
+++ b/elbepack/pkgarchive.py
@@ -13,8 +13,8 @@ from elbepack.repomanager import RepoBase, RepoAttributes
 class ArchiveRepo(RepoBase):
     def __init__(self, xml, pathname, origin, description, components):
 
-        arch = xml.text("project/arch", key="arch")
-        codename = xml.text("project/suite")
+        arch = xml.text('project/arch', key='arch')
+        codename = xml.text('project/suite')
 
         repo_attrs = RepoAttributes(codename, arch, components)
 
@@ -40,8 +40,8 @@ def gen_binpkg_archive(ep, repodir):
 
     try:
         # Repository containing all packages currently installed
-        repo = ArchiveRepo(ep.xml, repopath, "Elbe",
-                           "Elbe package archive", ["main"])
+        repo = ArchiveRepo(ep.xml, repopath, 'Elbe',
+                           'Elbe package archive', ['main'])
 
         c = ep.get_rpcaptcache()
         pkglist = c.get_installed_pkgs()
@@ -58,7 +58,7 @@ def gen_binpkg_archive(ep, repodir):
                                 'in var/cache/apt/archives, downloading it',
                                 filename)
                 abs_path = ep.buildenv.rfs.fname(rel_path)
-                pkg_id = f"{pkg.name}-{pkg.installed_version}"
+                pkg_id = f'{pkg.name}-{pkg.installed_version}'
                 try:
                     abs_path = c.download_binary(pkg.name,
                                                  '/var/cache/elbe/pkgarchive',
@@ -75,7 +75,7 @@ def gen_binpkg_archive(ep, repodir):
 
             # Add package to repository
             # XXX Use correct component
-            repo.includedeb(abs_path, "main")
+            repo.includedeb(abs_path, 'main')
 
         repo.finalize()
 
@@ -97,12 +97,12 @@ def checkout_binpkg_archive(ep, repodir):
         try:
             # Copy the package archive into the buildenv,
             # so the RPCAptCache can access it
-            logging.info("Copying package archive into build environment")
+            logging.info('Copying package archive into build environment')
             copytree(repopath, pkgarchive)
 
             # Move original etc/apt/sources.list and etc/apt/sources.list.d out
             # of the way
-            logging.info("Moving original APT configuration out of the way")
+            logging.info('Moving original APT configuration out of the way')
             if path.isfile(sources_list):
                 move(sources_list, sources_list_backup)
             if path.isdir(sources_list_d):
@@ -110,24 +110,24 @@ def checkout_binpkg_archive(ep, repodir):
 
             # Now create our own, with the package archive being the only
             # source
-            logging.info("Creating new /etc/apt/sources.list")
-            deb = "deb file:///var/cache/elbe/pkgarchive "
-            deb += ep.xml.text("/project/suite")
-            deb += " main"
+            logging.info('Creating new /etc/apt/sources.list')
+            deb = 'deb file:///var/cache/elbe/pkgarchive '
+            deb += ep.xml.text('/project/suite')
+            deb += ' main'
             with open(sources_list, 'w') as f:
                 f.write(deb)
 
             # We need to update the APT cache to apply the changed package
             # source
-            logging.info("Updating APT cache to use package archive")
+            logging.info('Updating APT cache to use package archive')
             ep.drop_rpcaptcache()
             c = ep.get_rpcaptcache()
             c.update()
 
             # Iterate over all packages, and mark them for installation or
             # deletion, using the same logic as in commands/updated.py
-            logging.info("Calculating packages to install/remove")
-            fpl = ep.xml.node("fullpkgs")
+            logging.info('Calculating packages to install/remove')
+            fpl = ep.xml.node('fullpkgs')
             pkgs = c.get_pkglist('all')
 
             for p in pkgs:
@@ -146,13 +146,13 @@ def checkout_binpkg_archive(ep, repodir):
                     c.mark_delete(p.name)
 
             # Now commit the changes
-            logging.info("Commiting package changes")
+            logging.info('Commiting package changes')
             c.commit()
         finally:
             # If we changed the package sources, move back the backup
             if path.isdir(sources_list_d_backup) or \
                     path.isfile(sources_list_backup):
-                logging.info("Moving back original APT configuration")
+                logging.info('Moving back original APT configuration')
                 update_needed = True
             else:
                 update_needed = False
@@ -167,11 +167,11 @@ def checkout_binpkg_archive(ep, repodir):
 
             # Remove the package archive from the buildenv
             if path.isdir(pkgarchive):
-                logging.info("Removing package archive from build environment")
+                logging.info('Removing package archive from build environment')
                 rmtree(pkgarchive)
 
             # Update APT cache, if we modified the package sources
             if update_needed:
-                logging.info("Updating APT cache to use original package sources")
+                logging.info('Updating APT cache to use original package sources')
                 ep.drop_rpcaptcache()
                 ep.get_rpcaptcache().update()
diff --git a/elbepack/pkgutils.py b/elbepack/pkgutils.py
index 522b54f5..cee7f72c 100644
--- a/elbepack/pkgutils.py
+++ b/elbepack/pkgutils.py
@@ -16,35 +16,35 @@ class NoPackageException(Exception):
 
 def get_sources_list(prj):
 
-    suite = prj.text("suite")
+    suite = prj.text('suite')
 
-    slist = ""
-    if prj.has("mirror/primary_host"):
+    slist = ''
+    if prj.has('mirror/primary_host'):
         protocl = f"{prj.text('mirror/primary_proto')}"
         host = f"{prj.text('mirror/primary_host').replace('LOCALMACHINE', '10.0.2.2')}"
         path = f"{prj.text('mirror/primary_path')}"
-        mirror = f"{protocl}://{host}/{path}"
-        slist += f"deb {mirror} {suite} main\n"
-        slist += f"deb-src {mirror} {suite} main\n"
-
-    if prj.node("mirror/url-list"):
-        for n in prj.node("mirror/url-list"):
-            if n.has("binary"):
-                tmp = n.text("binary").replace("LOCALMACHINE", "10.0.2.2")
-                slist += f"deb {tmp.strip()}\n"
-            if n.has("source"):
-                tmp = n.text("source").replace("LOCALMACHINE", "10.0.2.2")
-                slist += f"deb-src {tmp.strip()}\n"
+        mirror = f'{protocl}://{host}/{path}'
+        slist += f'deb {mirror} {suite} main\n'
+        slist += f'deb-src {mirror} {suite} main\n'
+
+    if prj.node('mirror/url-list'):
+        for n in prj.node('mirror/url-list'):
+            if n.has('binary'):
+                tmp = n.text('binary').replace('LOCALMACHINE', '10.0.2.2')
+                slist += f'deb {tmp.strip()}\n'
+            if n.has('source'):
+                tmp = n.text('source').replace('LOCALMACHINE', '10.0.2.2')
+                slist += f'deb-src {tmp.strip()}\n'
 
     return slist
 
 
 def get_key_list(prj):
     retval = []
-    if prj.node("mirror/url-list"):
-        for n in prj.node("mirror/url-list"):
-            if n.has("key"):
-                tmp = n.text("key").replace("LOCALMACHINE", "10.0.2.2")
+    if prj.node('mirror/url-list'):
+        for n in prj.node('mirror/url-list'):
+            if n.has('key'):
+                tmp = n.text('key').replace('LOCALMACHINE', '10.0.2.2')
                 retval.append(tmp.strip())
 
     return retval
@@ -82,7 +82,7 @@ def extract_pkg_changelog(fname, extra_pkg=None):
     pkgname = m.group('name')
     pkgarch = m.group('arch')
 
-    print(f"pkg: {pkgname}, arch: {pkgarch}")
+    print(f'pkg: {pkgname}, arch: {pkgarch}')
 
     fs = TmpdirFilesystem()
 
@@ -92,17 +92,17 @@ def extract_pkg_changelog(fname, extra_pkg=None):
 
     system(f'dpkg -x "{fname}" "{fs.fname("/")}"')
 
-    dch_dir = f"/usr/share/doc/{pkgname}"
+    dch_dir = f'/usr/share/doc/{pkgname}'
 
     if fs.islink(dch_dir) and not extra_pkg:
         lic = fs.readlink(dch_dir)
         print(dch_dir, lic)
         raise ChangelogNeedsDependency(lic)
 
-    dch_bin = f"/usr/share/doc/{pkgname}/changelog.Debian.{pkgarch}.gz"
-    dch_src = f"/usr/share/doc/{pkgname}/changelog.Debian.gz"
+    dch_bin = f'/usr/share/doc/{pkgname}/changelog.Debian.{pkgarch}.gz'
+    dch_src = f'/usr/share/doc/{pkgname}/changelog.Debian.gz'
 
-    ret = ""
+    ret = ''
 
     if fs.exists(dch_bin):
         ret += fs.read_file(dch_bin, gz=True).decode(encoding='utf-8',
diff --git a/elbepack/projectmanager.py b/elbepack/projectmanager.py
index 43576c58..564368fd 100644
--- a/elbepack/projectmanager.py
+++ b/elbepack/projectmanager.py
@@ -32,18 +32,18 @@ class ProjectManagerError(Exception):
 class AlreadyOpen(ProjectManagerError):
     def __init__(self, builddir, username):
         ProjectManagerError.__init__(
-            self, f"project in {builddir} is already opened by {username}")
+            self, f'project in {builddir} is already opened by {username}')
 
 
 class PermissionDenied(ProjectManagerError):
     def __init__(self, builddir):
         ProjectManagerError.__init__(
-            self, f"permission denied for project in {builddir}")
+            self, f'permission denied for project in {builddir}')
 
 
 class NoOpenProject(ProjectManagerError):
     def __init__(self):
-        ProjectManagerError.__init__(self, "must open a project first")
+        ProjectManagerError.__init__(self, 'must open a project first')
 
 
 class InvalidState(ProjectManagerError):
@@ -242,7 +242,7 @@ class ProjectManager:
             ep = self._get_current_project(userid, allow_busy=False)
 
             self.db.set_project_version(ep.builddir, new_version)
-            ep.xml.node("/project/version").set_text(new_version)
+            ep.xml.node('/project/version').set_text(new_version)
 
     def list_current_project_versions(self, userid):
         with self.lock:
@@ -271,11 +271,11 @@ class ProjectManager:
         with self.lock:
             ep = self._get_current_project(userid, allow_busy=False)
 
-            name = ep.xml.text("project/name")
+            name = ep.xml.text('project/name')
             self.db.del_version(ep.builddir, version)
 
             # Delete corresponding package archive, if existing
-            pkgarchive = get_versioned_filename(name, version, ".pkgarchive")
+            pkgarchive = get_versioned_filename(name, version, '.pkgarchive')
             pkgarchive_path = path.join(ep.builddir, pkgarchive)
             try:
                 rmtree(pkgarchive_path)
@@ -308,8 +308,8 @@ class ProjectManager:
     def build_current_pdebuild(self, userid, cpuset, profile, cross):
         with self.lock:
             ep = self._get_current_project(userid, allow_busy=False)
-            if (not path.isdir(path.join(ep.builddir, "pbuilder")) and
-                    not path.isdir(path.join(ep.builddir, "pbuilder_cross"))):
+            if (not path.isdir(path.join(ep.builddir, 'pbuilder')) and
+                    not path.isdir(path.join(ep.builddir, 'pbuilder_cross'))):
                 raise InvalidState('No pbuilder exists: run "elbe pbuilder '
                                    f'create --project {ep.builddir}" first')
 
@@ -318,8 +318,8 @@ class ProjectManager:
     def set_orig_fname(self, userid, fname):
         with self.lock:
             ep = self._get_current_project(userid, allow_busy=False)
-            if (not path.isdir(path.join(ep.builddir, "pbuilder")) and
-                    not path.isdir(path.join(ep.builddir, "pbuilder_cross"))):
+            if (not path.isdir(path.join(ep.builddir, 'pbuilder')) and
+                    not path.isdir(path.join(ep.builddir, 'pbuilder_cross'))):
                 raise InvalidState('No pbuilder exists: run "elbe pbuilder '
                                    f'create --project {ep.builddir}" first')
 
@@ -329,8 +329,8 @@ class ProjectManager:
     def get_orig_fname(self, userid):
         with self.lock:
             ep = self._get_current_project(userid, allow_busy=False)
-            if (not path.isdir(path.join(ep.builddir, "pbuilder")) and
-                    not path.isdir(path.join(ep.builddir, "pbuilder_cross"))):
+            if (not path.isdir(path.join(ep.builddir, 'pbuilder')) and
+                    not path.isdir(path.join(ep.builddir, 'pbuilder_cross'))):
                 raise InvalidState('No pbuilder exists: run "elbe pbuilder '
                                    f'create --project {ep.builddir}" first')
 
@@ -361,8 +361,8 @@ class ProjectManager:
             c = self._get_current_project_apt_cache(userid)
             if c.get_changes():
                 raise InvalidState(
-                    "project %s has uncommited package changes, "
-                    "please commit them first")
+                    'project %s has uncommitted package changes, '
+                    'please commit them first'
 
             ep = self._get_current_project(userid)
             self.worker.enqueue(GenUpdateJob(ep, base_version))
@@ -407,7 +407,7 @@ class ProjectManager:
             ep = self._get_current_project(userid)
 
             debootstrap_pkgs = []
-            for p in ep.xml.xml.node("debootstrappkgs"):
+            for p in ep.xml.xml.node('debootstrappkgs'):
                 debootstrap_pkgs.append(p.et.text)
 
             return debootstrap_pkgs
@@ -476,8 +476,8 @@ class ProjectManager:
     def read_current_project_log(self, userid):
         with self.lock:
             ep = self._get_current_project(userid)
-            logpath = path.join(ep.builddir, "log.txt")
-            f = open(logpath, "r")
+            logpath = path.join(ep.builddir, 'log.txt')
+            f = open(logpath, 'r')
         try:
             data = f.read()
         finally:
@@ -527,7 +527,7 @@ class ProjectManager:
 
         if not allow_busy:
             if self.db.is_busy(ep.builddir):
-                raise InvalidState(f"project {ep.builddir} is busy")
+                raise InvalidState(f'project {ep.builddir} is busy')
 
         return ep
 
@@ -538,9 +538,9 @@ class ProjectManager:
             builddir = self.userid2project[userid].builddir
             if self.db.is_busy(builddir):
                 raise InvalidState(
-                    f"project in directory {builddir} of user "
-                    f"{self.db.get_username(userid)} is "
-                    "currently busy and cannot be closed")
+                    f'project in directory {builddir} of user '
+                    f'{self.db.get_username(userid)} is '
+                    'currently busy and cannot be closed')
 
             del self.builddir2userid[builddir]
             del self.userid2project[userid]
@@ -562,7 +562,7 @@ class ProjectManager:
 
         if not ep.has_full_buildenv():
             raise InvalidState(
-                f"project in directory {ep.builddir} does not have a "
-                "functional build environment")
+                f'project in directory {ep.builddir} does not have a '
+                'functional build environment')
 
         return ep.get_rpcaptcache()
diff --git a/elbepack/repodir.py b/elbepack/repodir.py
index 2cdda4d0..c9359b5d 100644
--- a/elbepack/repodir.py
+++ b/elbepack/repodir.py
@@ -27,37 +27,37 @@ def preprocess_repodir(xml, xmldir):
     for repodir in xml.iterfind('.//mirror/url-list/repodir'):
         repo = repodir.text.split(maxsplit=1)
         if len(repo) != 2:
-            raise RepodirError("A <repodir> must consist of a file path,"
-                               "a suite name, and components")
+            raise RepodirError('A <repodir> must consist of a file path,'
+                               'a suite name, and components')
 
         hostdir = os.path.join(xmldir, repo[0])
         httpd = HTTPServer(('localhost', 0),
                            functools.partial(SimpleHTTPRequestHandler, directory=hostdir))
 
-        url_element = Element("url")
+        url_element = Element('url')
         # Keep the variant attribute for later processing
         if 'variant' in repodir.attrib:
             url_element.attrib['variant'] = repodir.attrib['variant']
 
-        bin_el = Element("binary")
-        bin_el.text = f"http://LOCALMACHINE:{httpd.server_address[1]} {repo[1]}"
+        bin_el = Element('binary')
+        bin_el.text = f'http://LOCALMACHINE:{httpd.server_address[1]} {repo[1]}'
         url_element.append(bin_el)
-        src_el = Element("source")
+        src_el = Element('source')
         src_el.text = bin_el.text
         url_element.append(src_el)
 
         if 'signed-by' in repodir.attrib:
             try:
                 keyfile = os.path.join(hostdir, repodir.attrib['signed-by'])
-                auth_el = Element("raw-key")
-                auth_el.text = "\n" + open(keyfile, encoding='ascii').read()
+                auth_el = Element('raw-key')
+                auth_el.text = '\n' + open(keyfile, encoding='ascii').read()
             except Exception:
                 raise RepodirError(
-                    f"{keyfile} is not a valid ascii-armored OpenPGP keyring")
+                    f'{keyfile} is not a valid ascii-armored OpenPGP keyring')
         else:
-            auth_el = Element("options")
-            option_el = Element("option")
-            option_el.text = "trusted=yes"
+            auth_el = Element('options')
+            option_el = Element('option')
+            option_el.text = 'trusted=yes'
             auth_el.append(option_el)
         url_element.append(auth_el)
 
@@ -87,7 +87,7 @@ class Repodir:
 
             xml.write(
                 self.output,
-                encoding="UTF-8",
+                encoding='UTF-8',
                 pretty_print=True)
 
             for httpd in self.httpds:
@@ -96,10 +96,10 @@ class Repodir:
             return self
 
         except XMLSyntaxError:
-            raise RepodirError(f"XML Parse error\n{sys.exc_info()[1]}")
+            raise RepodirError(f'XML Parse error\n{sys.exc_info()[1]}')
         except BaseException:
             raise RepodirError(
-                f"Unknown Exception during validation\n{str(sys.exc_info()[1])}")
+                f'Unknown Exception during validation\n{str(sys.exc_info()[1])}')
 
     def __exit__(self, _typ, _value, _traceback):
         for httpd in self.httpds:
diff --git a/elbepack/repomanager.py b/elbepack/repomanager.py
index 5ed5221c..c54de0a0 100644
--- a/elbepack/repomanager.py
+++ b/elbepack/repomanager.py
@@ -74,10 +74,10 @@ class RepoBase:
 
         # if repo exists retrive the keyid otherwise
         # generate a new key and generate repository config
-        if self.fs.isdir("/"):
-            repo_conf = self.fs.read_file("conf/distributions")
+        if self.fs.isdir('/'):
+            repo_conf = self.fs.read_file('conf/distributions')
             for lic in repo_conf.splitlines():
-                if lic.startswith("SignWith"):
+                if lic.startswith('SignWith'):
                     self.keyid = lic.split()[1]
                     unlock_key(self.keyid)
         else:
@@ -93,7 +93,7 @@ class RepoBase:
                 # negative numbers represent the volumes counted from last
                 # (-1: last, -2: second last, ...)
                 volume_no = self.volume_count + 1 + volume
-            volname = os.path.join(self.vol_path, f"vol{volume_no:02}")
+            volname = os.path.join(self.vol_path, f'vol{volume_no:02}')
             return Filesystem(volname)
 
         return Filesystem(self.vol_path)
@@ -104,63 +104,63 @@ class RepoBase:
         self.gen_repo_conf()
 
     def gen_repo_conf(self):
-        self.fs.mkdir_p("conf")
-        fp = self.fs.open("conf/distributions", "w")
+        self.fs.mkdir_p('conf')
+        fp = self.fs.open('conf/distributions', 'w')
 
         need_update = False
 
         for att in self.attrs:
-            fp.write("Origin: " + self.origin + "\n")
-            fp.write("Label: " + self.origin + "\n")
-            fp.write("Codename: " + att.codename + "\n")
-            fp.write("Architectures: " + " ".join(att.arch) + "\n")
-            fp.write("Components: " + " ".join(att.components.difference(
-                set(["main/debian-installer"]))) + "\n")
-            fp.write("UDebComponents: " + " ".join(att.components.difference(
-                set(["main/debian-installer"]))) + "\n")
-            fp.write("Description: " + self.description + "\n")
-            fp.write("SignWith: " + self.keyid + "\n")
+            fp.write('Origin: ' + self.origin + '\n')
+            fp.write('Label: ' + self.origin + '\n')
+            fp.write('Codename: ' + att.codename + '\n')
+            fp.write('Architectures: ' + ' '.join(att.arch) + '\n')
+            fp.write('Components: ' + ' '.join(att.components.difference(
+                set(['main/debian-installer']))) + '\n')
+            fp.write('UDebComponents: ' + ' '.join(att.components.difference(
+                set(['main/debian-installer']))) + '\n')
+            fp.write('Description: ' + self.description + '\n')
+            fp.write('SignWith: ' + self.keyid + '\n')
 
             if 'main/debian-installer' in att.components:
-                fp.write("Update: di\n")
+                fp.write('Update: di\n')
 
-                ufp = self.fs.open("conf/updates", "w")
+                ufp = self.fs.open('conf/updates', 'w')
 
-                ufp.write("Name: di\n")
-                ufp.write("Method: " + att.mirror + "\n")
-                ufp.write("VerifyRelease: blindtrust\n")
-                ufp.write("Components: \n")
-                ufp.write("GetInRelease: no\n")
+                ufp.write('Name: di\n')
+                ufp.write('Method: ' + att.mirror + '\n')
+                ufp.write('VerifyRelease: blindtrust\n')
+                ufp.write('Components: \n')
+                ufp.write('GetInRelease: no\n')
                 # It would be nicer, to use this
                 # ufp.write( "Architectures: " + " ".join (att.arch) + "\n" )
                 # But we end up with 'armel amd64' sometimes.
                 # So lets just use the init_attr...
                 if self.init_attr:
                     ufp.write(
-                        "Architectures: " +
-                        " ".join(
+                        'Architectures: ' +
+                        ' '.join(
                             self.init_attr.arch) +
-                        "\n")
+                        '\n')
                 else:
-                    ufp.write("Architectures: " + " ".join(att.arch) + "\n")
+                    ufp.write('Architectures: ' + ' '.join(att.arch) + '\n')
 
-                ufp.write("UDebComponents: main>main\n")
+                ufp.write('UDebComponents: main>main\n')
                 ufp.close()
 
                 need_update = True
 
-            fp.write("\n")
+            fp.write('\n')
         fp.close()
 
-        export_key(self.keyid, self.fs.fname("/repo.pub"))
+        export_key(self.keyid, self.fs.fname('/repo.pub'))
 
         if need_update:
             cmd = f'reprepro --export=force --basedir "{self.fs.path}" update'
-            do(cmd, env_add={'GNUPGHOME': "/var/cache/elbe/gnupg"})
+            do(cmd, env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
         else:
             for att in self.attrs:
                 do(f'reprepro --basedir "{self.fs.path}" export {att.codename}',
-                   env_add={'GNUPGHOME': "/var/cache/elbe/gnupg"})
+                   env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
 
     def finalize(self):
         for att in self.attrs:
@@ -169,12 +169,12 @@ class RepoBase:
 
     def _includedeb(self, path, codename, components=None, prio=None):
         if self.maxsize:
-            new_size = self.fs.disk_usage("") + os.path.getsize(path)
+            new_size = self.fs.disk_usage('') + os.path.getsize(path)
             if new_size > self.maxsize:
                 self.new_repo_volume()
 
-        global_opt = ["--keepunreferencedfiles",
-                      "--export=silent-never",
+        global_opt = ['--keepunreferencedfiles',
+                      '--export=silent-never',
                       f'--basedir "{self.fs.path}"']
 
         if prio is not None:
@@ -188,7 +188,7 @@ class RepoBase:
 
         global_opt = ' '.join(global_opt)
 
-        do(f"reprepro {global_opt} includedeb {codename} {path}")
+        do(f'reprepro {global_opt} includedeb {codename} {path}')
 
     def includedeb(self, path, components=None, pkgname=None, force=False, prio=None):
         # pkgname needs only to be specified if force is enabled
@@ -212,13 +212,13 @@ class RepoBase:
 
     def _include(self, path, codename, components=None):
 
-        global_opt = ["--ignore=wrongdistribution",
-                      "--ignore=surprisingbinary",
-                      "--keepunreferencedfiles",
-                      "--export=silent-never",
+        global_opt = ['--ignore=wrongdistribution',
+                      '--ignore=surprisingbinary',
+                      '--keepunreferencedfiles',
+                      '--export=silent-never',
                       f'--basedir "{self.fs.path}"',
-                      "--priority normal",
-                      "--section misc"]
+                      '--priority normal',
+                      '--section misc']
 
         if components is not None:
             # Compatibility with old callers
@@ -228,7 +228,7 @@ class RepoBase:
 
         global_opt = ' '.join(global_opt)
 
-        do(f"reprepro {global_opt} include {codename} {path}")
+        do(f'reprepro {global_opt} include {codename} {path}')
 
     def _removedeb(self, pkgname, codename, components=None):
 
@@ -242,15 +242,15 @@ class RepoBase:
 
         global_opt = ' '.join(global_opt)
 
-        do(f"reprepro {global_opt} remove {codename} {pkgname}",
-           env_add={"GNUPGHOME": "/var/cache/elbe/gnupg"})
+        do(f'reprepro {global_opt} remove {codename} {pkgname}',
+           env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
 
     def removedeb(self, pkgname, components=None):
         self._removedeb(pkgname, self.repo_attr.codename, components)
 
     def _removesrc(self, srcname, codename, components=None):
 
-        global_opt = [f"--basedir {self.fs.path}"]
+        global_opt = [f'--basedir {self.fs.path}']
 
         if components is not None:
             # Compatibility with old callers
@@ -260,8 +260,8 @@ class RepoBase:
 
         global_opt = ' '.join(global_opt)
 
-        do(f"reprepro {global_opt} removesrc {codename} {srcname}",
-           env_add={"GNUPGHOME": "/var/cache/elbe/gnupg"})
+        do(f'reprepro {global_opt} removesrc {codename} {srcname}',
+           env_add={'GNUPGHOME': '/var/cache/elbe/gnupg'})
 
     def removesrc(self, path, components=None):
         with open(path) as fp:
@@ -284,18 +284,18 @@ class RepoBase:
 
     def _includedsc(self, path, codename, components=None):
         if self.maxsize:
-            new_size = self.fs.disk_usage("") + get_dsc_size(path)
+            new_size = self.fs.disk_usage('') + get_dsc_size(path)
             if new_size > self.maxsize:
                 self.new_repo_volume()
 
-        if self.maxsize and (self.fs.disk_usage("") > self.maxsize):
+        if self.maxsize and (self.fs.disk_usage('') > self.maxsize):
             self.new_repo_volume()
 
-        global_opt = ["--keepunreferencedfiles",
-                      "--export=silent-never",
+        global_opt = ['--keepunreferencedfiles',
+                      '--export=silent-never',
                       f'--basedir "{self.fs.path}"',
-                      "--priority normal",
-                      "--section misc"]
+                      '--priority normal',
+                      '--section misc']
 
         if components is not None:
             # Compatibility with old callers
@@ -305,7 +305,7 @@ class RepoBase:
 
         global_opt = ' '.join(global_opt)
 
-        do(f"reprepro {global_opt} includedsc {codename} {path}")
+        do(f'reprepro {global_opt} includedsc {codename} {path}')
 
     def includedsc(self, path, components=None, force=False):
         try:
@@ -333,18 +333,18 @@ class RepoBase:
     def include_init_dsc(self, path, components=None):
         self._includedsc(path, self.init_attr.codename, components)
 
-    def buildiso(self, fname, options=""):
+    def buildiso(self, fname, options=''):
         files = []
         if self.volume_count == 0:
             new_path = '"' + self.fs.path + '"'
-            do(f"genisoimage {options} -o {fname} -J -joliet-long -R {new_path}")
+            do(f'genisoimage {options} -o {fname} -J -joliet-long -R {new_path}')
             files.append(fname)
         else:
             for i in self.volume_indexes:
                 volfs = self.get_volume_fs(i)
-                newname = fname + (f"{i:02}")
-                do(f"genisoimage {options} -o {newname} -J -joliet-long "
-                   f"-R {volfs.path}")
+                newname = fname + (f'{i:02}')
+                do(f'genisoimage {options} -o {newname} -J -joliet-long '
+                   f'-R {volfs.path}')
                 files.append(newname)
 
         return files
@@ -358,17 +358,17 @@ class UpdateRepo(RepoBase):
     def __init__(self, xml, path):
         self.xml = xml
 
-        arch = xml.text("project/arch", key="arch")
-        codename = xml.text("project/suite")
+        arch = xml.text('project/arch', key='arch')
+        codename = xml.text('project/suite')
 
-        repo_attrs = RepoAttributes(codename, arch, "main")
+        repo_attrs = RepoAttributes(codename, arch, 'main')
 
         RepoBase.__init__(self,
                           path,
                           None,
                           repo_attrs,
-                          "Update",
-                          "Update")
+                          'Update',
+                          'Update')
 
 
 class CdromInitRepo(RepoBase):
@@ -376,15 +376,15 @@ class CdromInitRepo(RepoBase):
                  mirror='http://ftp.de.debian.org/debian'):
 
         init_attrs = RepoAttributes(
-            init_codename, "amd64", [
-                "main", "main/debian-installer"], mirror)
+            init_codename, 'amd64', [
+                'main', 'main/debian-installer'], mirror)
 
         RepoBase.__init__(self,
                           path,
                           None,
                           init_attrs,
-                          "Elbe",
-                          "Elbe InitVM Cdrom Repo")
+                          'Elbe',
+                          'Elbe InitVM Cdrom Repo')
 
 
 class CdromBinRepo(RepoBase):
@@ -396,11 +396,11 @@ class CdromBinRepo(RepoBase):
             path,
             mirror='http://ftp.debian.org/debian'):
 
-        repo_attrs = RepoAttributes(codename, arch, ["main", "added"], mirror)
+        repo_attrs = RepoAttributes(codename, arch, ['main', 'added'], mirror)
         if init_codename is not None:
             init_attrs = RepoAttributes(
-                init_codename, "amd64", [
-                    "main", "main/debian-installer"], mirror)
+                init_codename, 'amd64', [
+                    'main', 'main/debian-installer'], mirror)
         else:
             init_attrs = None
 
@@ -408,8 +408,8 @@ class CdromBinRepo(RepoBase):
                           path,
                           init_attrs,
                           repo_attrs,
-                          "Elbe",
-                          "Elbe Binary Cdrom Repo")
+                          'Elbe',
+                          'Elbe Binary Cdrom Repo')
 
 
 class CdromSrcRepo(RepoBase):
@@ -417,18 +417,18 @@ class CdromSrcRepo(RepoBase):
                  mirror='http://ftp.debian.org/debian'):
 
         repo_attrs = RepoAttributes(codename,
-                                    "source",
-                                    ["main",
-                                     "added",
-                                     "target",
-                                     "chroot",
-                                     "sysroot-host"],
+                                    'source',
+                                    ['main',
+                                     'added',
+                                     'target',
+                                     'chroot',
+                                     'sysroot-host'],
                                     mirror)
 
         if init_codename is not None:
             init_attrs = RepoAttributes(init_codename,
-                                        "source",
-                                        ["initvm"],
+                                        'source',
+                                        ['initvm'],
                                         mirror)
         else:
             init_attrs = None
@@ -437,28 +437,28 @@ class CdromSrcRepo(RepoBase):
                           path,
                           init_attrs,
                           repo_attrs,
-                          "Elbe",
-                          "Elbe Source Cdrom Repo",
+                          'Elbe',
+                          'Elbe Source Cdrom Repo',
                           maxsize)
 
 
 class ToolchainRepo(RepoBase):
     def __init__(self, arch, codename, path):
-        repo_attrs = RepoAttributes(codename, arch, "main")
+        repo_attrs = RepoAttributes(codename, arch, 'main')
         RepoBase.__init__(self,
                           path,
                           None,
                           repo_attrs,
-                          "toolchain",
-                          "Toolchain binary packages Repo")
+                          'toolchain',
+                          'Toolchain binary packages Repo')
 
 
 class ProjectRepo(RepoBase):
     def __init__(self, arch, codename, path):
-        repo_attrs = RepoAttributes(codename, [arch, 'amd64', 'source'], "main")
+        repo_attrs = RepoAttributes(codename, [arch, 'amd64', 'source'], 'main')
         RepoBase.__init__(self,
                           path,
                           None,
                           repo_attrs,
-                          "Local",
-                          "Self build packages Repo")
+                          'Local',
+                          'Self build packages Repo')
diff --git a/elbepack/rfs.py b/elbepack/rfs.py
index 09eb1048..a4c5c31f 100644
--- a/elbepack/rfs.py
+++ b/elbepack/rfs.py
@@ -17,12 +17,12 @@ from elbepack.shellhelper import CommandError, do, chroot, get_command_out
 
 def create_apt_prefs(xml, rfs):
 
-    filename = "etc/apt/preferences"
+    filename = 'etc/apt/preferences'
 
     if rfs.lexists(filename):
         rfs.remove(filename)
 
-    rfs.mkdir_p("/etc/apt")
+    rfs.mkdir_p('/etc/apt')
 
     pinned_origins = []
     if xml.has('project/mirror/url-list'):
@@ -45,22 +45,22 @@ def create_apt_prefs(xml, rfs):
                        'package': package}
             pinned_origins.append(pinning)
 
-    d = {"xml": xml,
-         "prj": xml.node("/project"),
-         "pkgs": xml.node("/target/pkg-list"),
-         "porgs": pinned_origins}
+    d = {'xml': xml,
+         'prj': xml.node('/project'),
+         'pkgs': xml.node('/target/pkg-list'),
+         'porgs': pinned_origins}
 
-    write_pack_template(rfs.fname(filename), "preferences.mako", d)
+    write_pack_template(rfs.fname(filename), 'preferences.mako', d)
 
 
 class DebootstrapException (Exception):
     def __init__(self):
-        Exception.__init__(self, "Debootstrap Failed")
+        Exception.__init__(self, 'Debootstrap Failed')
 
 
 class BuildEnv:
     def __init__(self, xml, path, build_sources=False,
-                 clean=False, arch="default", hostsysroot=False):
+                 clean=False, arch='default', hostsysroot=False):
 
         self.xml = xml
         self.path = path
@@ -68,21 +68,21 @@ class BuildEnv:
         self.arch = arch
         self.hostsysroot = hostsysroot
 
-        self.rfs = BuildImgFs(path, xml.defs["userinterpr"])
+        self.rfs = BuildImgFs(path, xml.defs['userinterpr'])
 
         if clean:
-            self.rfs.rmtree("")
+            self.rfs.rmtree('')
 
         # TODO think about reinitialization if elbe_version differs
-        if not self.rfs.isfile("etc/elbe_version"):
+        if not self.rfs.isfile('etc/elbe_version'):
             # avoid starting daemons inside the buildenv
-            self.rfs.mkdir_p("usr/sbin")
+            self.rfs.mkdir_p('usr/sbin')
             # grub-legacy postinst will fail if /boot/grub does not exist
-            self.rfs.mkdir_p("boot/grub")
+            self.rfs.mkdir_p('boot/grub')
             self.rfs.write_file(
-                "usr/sbin/policy-rc.d",
+                'usr/sbin/policy-rc.d',
                 0o755,
-                "#!/bin/sh\nexit 101\n")
+                '#!/bin/sh\nexit 101\n')
             self.debootstrap(arch)
             self.fresh_debootstrap = True
             self.need_dumpdebootstrap = True
@@ -94,15 +94,15 @@ class BuildEnv:
         create_apt_prefs(self.xml, self.rfs)
 
     def cdrom_umount(self):
-        if self.xml.prj.has("mirror/cdrom"):
-            cdrompath = self.rfs.fname("cdrom")
+        if self.xml.prj.has('mirror/cdrom'):
+            cdrompath = self.rfs.fname('cdrom')
             do(f'umount "{cdrompath}"')
-            do(f"rm -f {self.path}/etc/apt/trusted.gpg.d/elbe-cdrepo.gpg")
-            do(f"rm -f {self.path}/etc/apt/trusted.gpg.d/elbe-cdtargetrepo.gpg")
+            do(f'rm -f {self.path}/etc/apt/trusted.gpg.d/elbe-cdrepo.gpg')
+            do(f'rm -f {self.path}/etc/apt/trusted.gpg.d/elbe-cdtargetrepo.gpg')
 
     def cdrom_mount(self):
-        if self.xml.has("project/mirror/cdrom"):
-            cdrompath = self.rfs.fname("cdrom")
+        if self.xml.has('project/mirror/cdrom'):
+            cdrompath = self.rfs.fname('cdrom')
             do(f'mkdir -p "{cdrompath}"')
             do(f'mount -o loop "{self.xml.text("project/mirror/cdrom")}" "cdrompath"')
 
@@ -114,7 +114,7 @@ class BuildEnv:
 
     def __enter__(self):
         if os.path.exists(self.path + '/../repo/pool'):
-            do(f"mv {self.path}/../repo {self.path}")
+            do(f'mv {self.path}/../repo {self.path}')
             do(f'echo "deb copy:///repo {self.xml.text("project/suite")} main" > '
                f'{self.path}/etc/apt/sources.list.d/local.list')
             do(f'echo "deb-src copy:///repo {self.xml.text("project/suite")} main" >> '
@@ -123,7 +123,7 @@ class BuildEnv:
         self.cdrom_mount()
         self.rfs.__enter__()
 
-        if self.xml.has("project/mirror/cdrom"):
+        if self.xml.has('project/mirror/cdrom'):
             self.convert_asc_to_gpg('/cdrom/repo.pub', '/etc/apt/trusted.gpg.d/elbe-cdrepo.gpg')
             self.convert_asc_to_gpg('/cdrom/targetrepo/repo.pub',
                                     '/etc/apt/trusted.gpg.d/elbe-cdtargetrepo.gpg')
@@ -137,64 +137,64 @@ class BuildEnv:
         self.rfs.__exit__(typ, value, traceback)
         self.cdrom_umount()
         if os.path.exists(self.path + '/repo'):
-            do(f"mv {self.path}/repo {self.path}/../")
-            do(f"rm {self.path}/etc/apt/sources.list.d/local.list")
-            do(f"rm {self.path}/etc/apt/trusted.gpg.d/elbe-localrepo.gpg")
+            do(f'mv {self.path}/repo {self.path}/../')
+            do(f'rm {self.path}/etc/apt/sources.list.d/local.list')
+            do(f'rm {self.path}/etc/apt/trusted.gpg.d/elbe-localrepo.gpg')
 
-    def debootstrap(self, arch="default"):
+    def debootstrap(self, arch='default'):
 
         cleanup = False
-        suite = self.xml.prj.text("suite")
+        suite = self.xml.prj.text('suite')
 
         primary_mirror = self.xml.get_primary_mirror(
             self.rfs.fname('/cdrom/targetrepo'), hostsysroot=self.hostsysroot)
 
-        if self.xml.prj.has("mirror/primary_proxy"):
-            os.environ["no_proxy"] = "10.0.2.2,localhost,127.0.0.1"
-            proxy = self.xml.prj.text("mirror/primary_proxy")
-            proxy = proxy.strip().replace("LOCALMACHINE", "10.0.2.2")
-            os.environ["http_proxy"] = proxy
-            os.environ["https_proxy"] = proxy
+        if self.xml.prj.has('mirror/primary_proxy'):
+            os.environ['no_proxy'] = '10.0.2.2,localhost,127.0.0.1'
+            proxy = self.xml.prj.text('mirror/primary_proxy')
+            proxy = proxy.strip().replace('LOCALMACHINE', '10.0.2.2')
+            os.environ['http_proxy'] = proxy
+            os.environ['https_proxy'] = proxy
         else:
-            os.environ["no_proxy"] = ""
-            os.environ["http_proxy"] = ""
-            os.environ["https_proxy"] = ""
+            os.environ['no_proxy'] = ''
+            os.environ['http_proxy'] = ''
+            os.environ['https_proxy'] = ''
 
-        os.environ["LANG"] = "C"
-        os.environ["LANGUAGE"] = "C"
-        os.environ["LC_ALL"] = "C"
-        os.environ["DEBIAN_FRONTEND"] = "noninteractive"
-        os.environ["DEBONF_NONINTERACTIVE_SEEN"] = "true"
+        os.environ['LANG'] = 'C'
+        os.environ['LANGUAGE'] = 'C'
+        os.environ['LC_ALL'] = 'C'
+        os.environ['DEBIAN_FRONTEND'] = 'noninteractive'
+        os.environ['DEBONF_NONINTERACTIVE_SEEN'] = 'true'
 
-        logging.info("Debootstrap log")
+        logging.info('Debootstrap log')
 
-        if arch == "default":
-            arch = self.xml.text("project/buildimage/arch", key="arch")
+        if arch == 'default':
+            arch = self.xml.text('project/buildimage/arch', key='arch')
 
-        host_arch = get_command_out("dpkg --print-architecture").strip().decode()
+        host_arch = get_command_out('dpkg --print-architecture').strip().decode()
 
-        strapcmd = "debootstrap"
+        strapcmd = 'debootstrap'
 
         # Should we use a special bootstrap variant?
-        if self.xml.has("target/debootstrap/variant"):
+        if self.xml.has('target/debootstrap/variant'):
             strapcmd += f" --variant={self.xml.text('target/debootstrap/variant')}"
 
         # Should we include additional packages into bootstrap?
-        if self.xml.has("target/debootstrap/include"):
+        if self.xml.has('target/debootstrap/include'):
             strapcmd += f" --include=\"{self.xml.text('target/debootstrap/include')}\""
 
         # Should we exclude some packages from bootstrap?
-        if self.xml.has("target/debootstrap/exclude"):
+        if self.xml.has('target/debootstrap/exclude'):
             strapcmd += f" --exclude=\"{self.xml.text('target/debootstrap/exclude')}\""
 
         keyring = ''
 
         if not self.xml.is_cross(host_arch):
-            if self.xml.has("project/noauth"):
+            if self.xml.has('project/noauth'):
                 cmd = (f'{strapcmd} --no-check-gpg --arch={arch} '
                        f'"{suite}" "{self.rfs.path}" "{primary_mirror}"')
             else:
-                if self.xml.has("project/mirror/cdrom"):
+                if self.xml.has('project/mirror/cdrom'):
                     keyring = f' --keyring="{self.rfs.fname("/elbe.keyring")}"'
 
                 cmd = (f'{strapcmd} --arch={arch} '
@@ -203,7 +203,7 @@ class BuildEnv:
             try:
                 self.cdrom_mount()
                 if keyring:
-                    self.convert_asc_to_gpg("/cdrom/targetrepo/repo.pub", "/elbe.keyring")
+                    self.convert_asc_to_gpg('/cdrom/targetrepo/repo.pub', '/elbe.keyring')
                 do(cmd)
             except CommandError:
                 cleanup = True
@@ -211,15 +211,15 @@ class BuildEnv:
             finally:
                 self.cdrom_umount()
                 if cleanup:
-                    self.rfs.rmtree("/")
+                    self.rfs.rmtree('/')
 
             return
 
-        if self.xml.has("project/noauth"):
+        if self.xml.has('project/noauth'):
             cmd = (f'{strapcmd} --no-check-gpg --foreign --arch={arch} '
                    f'"{suite}" "{self.rfs.path}" "{primary_mirror}"')
         else:
-            if self.xml.has("project/mirror/cdrom"):
+            if self.xml.has('project/mirror/cdrom'):
                 keyring = f' --keyring="{self.rfs.fname("/elbe.keyring")}"'
 
             cmd = (f'{strapcmd} --foreign --arch={arch} '
@@ -228,17 +228,17 @@ class BuildEnv:
         try:
             self.cdrom_mount()
             if keyring:
-                self.convert_asc_to_gpg("/cdrom/targetrepo/repo.pub", "/elbe.keyring")
+                self.convert_asc_to_gpg('/cdrom/targetrepo/repo.pub', '/elbe.keyring')
             do(cmd)
 
-            ui = "/usr/share/elbe/qemu-elbe/" + self.xml.defs["userinterpr"]
+            ui = '/usr/share/elbe/qemu-elbe/' + self.xml.defs['userinterpr']
 
             if not os.path.exists(ui):
-                ui = "/usr/bin/" + self.xml.defs["userinterpr"]
+                ui = '/usr/bin/' + self.xml.defs['userinterpr']
 
             do(f'cp {ui} {self.rfs.fname("usr/bin")}')
 
-            if self.xml.has("project/noauth"):
+            if self.xml.has('project/noauth'):
                 chroot(self.rfs.path,
                        '/debootstrap/debootstrap --no-check-gpg --second-stage')
             else:
@@ -253,22 +253,22 @@ class BuildEnv:
         finally:
             self.cdrom_umount()
             if cleanup:
-                self.rfs.rmtree("/")
+                self.rfs.rmtree('/')
 
     def virtapt_init_dirs(self):
-        self.rfs.mkdir_p("/cache/archives/partial")
-        self.rfs.mkdir_p("/etc/apt/preferences.d")
-        self.rfs.mkdir_p("/db")
-        self.rfs.mkdir_p("/log")
-        self.rfs.mkdir_p("/state/lists/partial")
-        self.rfs.touch_file("/state/status")
+        self.rfs.mkdir_p('/cache/archives/partial')
+        self.rfs.mkdir_p('/etc/apt/preferences.d')
+        self.rfs.mkdir_p('/db')
+        self.rfs.mkdir_p('/log')
+        self.rfs.mkdir_p('/state/lists/partial')
+        self.rfs.touch_file('/state/status')
 
     def add_key(self, key, keyname):
         """
         Adds the binary OpenPGP keyring 'key' as a trusted apt keyring
         with file name 'keyname'.
         """
-        with open(self.rfs.fname(f"/etc/apt/trusted.gpg.d/{keyname}"), "wb") as outfile:
+        with open(self.rfs.fname(f'/etc/apt/trusted.gpg.d/{keyname}'), 'wb') as outfile:
             outfile.write(key)
 
     def import_keys(self):
@@ -281,35 +281,35 @@ class BuildEnv:
             # I could make a none global 'noauth' flag for mirrors
             for i, url in enumerate(self.xml.node('project/mirror/url-list')):
                 if url.has('raw-key'):
-                    key = "\n".join(line.strip(" \t") for line in url.text('raw-key').splitlines())
-                    self.add_key(unarmor_openpgp_keyring(key), f"elbe-xml-raw-key{i}.gpg")
+                    key = '\n'.join(line.strip(' \t') for line in url.text('raw-key').splitlines())
+                    self.add_key(unarmor_openpgp_keyring(key), f'elbe-xml-raw-key{i}.gpg')
 
     def initialize_dirs(self, build_sources=False):
         mirror = self.xml.create_apt_sources_list(build_sources=build_sources,
                                                   hostsysroot=self.hostsysroot)
 
-        if self.rfs.lexists("etc/apt/sources.list"):
-            self.rfs.remove("etc/apt/sources.list")
+        if self.rfs.lexists('etc/apt/sources.list'):
+            self.rfs.remove('etc/apt/sources.list')
 
-        self.rfs.write_file("etc/apt/sources.list", 0o644, mirror)
+        self.rfs.write_file('etc/apt/sources.list', 0o644, mirror)
 
-        self.rfs.mkdir_p("var/cache/elbe")
+        self.rfs.mkdir_p('var/cache/elbe')
 
         preseed = get_preseed(self.xml)
         preseed_txt = preseed_to_text(preseed)
-        self.rfs.write_file("var/cache/elbe/preseed.txt", 0o644, preseed_txt)
+        self.rfs.write_file('var/cache/elbe/preseed.txt', 0o644, preseed_txt)
         with self.rfs:
             cmd = (f'debconf-set-selections < {self.rfs.fname("var/cache/elbe/preseed.txt")}')
             chroot(self.rfs.path, cmd)
 
     def seed_etc(self):
-        passwd = self.xml.text("target/passwd_hashed")
-        stdin = f"root:{passwd}"
-        chroot(self.rfs.path, "chpasswd --encrypted", stdin=stdin)
+        passwd = self.xml.text('target/passwd_hashed')
+        stdin = f'root:{passwd}'
+        chroot(self.rfs.path, 'chpasswd --encrypted', stdin=stdin)
 
-        hostname = self.xml.text("target/hostname")
+        hostname = self.xml.text('target/hostname')
         fqdn = hostname
-        if self.xml.has("target/domain"):
+        if self.xml.has('target/domain'):
             fqdn = (f"{hostname}.{self.xml.text('target/domain')}")
 
         chroot(self.rfs.path,
@@ -326,9 +326,9 @@ class BuildEnv:
                f"""/bin/sh -c 'echo "{fqdn}" > """
                """/etc/mailname'""")
 
-        if self.xml.has("target/console"):
+        if self.xml.has('target/console'):
             serial_con, serial_baud = self.xml.text(
-                "target/console").split(',')
+                'target/console').split(',')
             if serial_baud:
                 chroot(self.rfs.path,
                        """/bin/sh -c '[ -f /etc/inittab ] && """
@@ -345,5 +345,5 @@ class BuildEnv:
                        f"""serial-getty@{serial_con}.service'""",
                        allow_fail=True)
             else:
-                logging.error("parsing console tag failed, needs to be of "
+                logging.error('parsing console tag failed, needs to be of '
                               "'/dev/ttyS0,115200' format.")
diff --git a/elbepack/rpcaptcache.py b/elbepack/rpcaptcache.py
index 78e0cbf6..7ba54fc4 100644
--- a/elbepack/rpcaptcache.py
+++ b/elbepack/rpcaptcache.py
@@ -20,8 +20,8 @@ from elbepack.aptpkgutils import getalldeps, APTPackage, fetch_binary
 from elbepack.log import async_logging
 
 
-log = logging.getLogger("log")
-soap = logging.getLogger("soap")
+log = logging.getLogger('log')
+soap = logging.getLogger('soap')
 
 
 class MyMan(BaseManager):
@@ -42,8 +42,8 @@ class MyMan(BaseManager):
         os.dup2(w, os.sys.stderr.fileno())
         # Buffering of 1 because in Python3 buffering of 0 is illegal
         # for non binary mode ..
-        os.sys.stdout = os.fdopen(os.sys.stdout.fileno(), "w", 1)
-        os.sys.stderr = os.fdopen(os.sys.stderr.fileno(), "w", 1)
+        os.sys.stdout = os.fdopen(os.sys.stdout.fileno(), 'w', 1)
+        os.sys.stderr = os.fdopen(os.sys.stderr.fileno(), 'w', 1)
         os.sys.__stdout__ = os.sys.stdout
         os.sys.__stderr__ = os.sys.stderr
 
@@ -61,7 +61,7 @@ class InChRootObject:
         self.finalizer = Finalize(self, self.rfs.leave_chroot, exitpriority=10)
 
 
-@MyMan.register("RPCAPTCache")
+@MyMan.register('RPCAPTCache')
 class RPCAPTCache(InChRootObject):
 
     def __init__(self, rfs, arch,
@@ -70,16 +70,16 @@ class RPCAPTCache(InChRootObject):
         InChRootObject.__init__(self, rfs)
 
         self.notifier = notifier
-        config.set("APT::Architecture", arch)
+        config.set('APT::Architecture', arch)
         if norecommend:
-            config.set("APT::Install-Recommends", "0")
+            config.set('APT::Install-Recommends', '0')
         else:
-            config.set("APT::Install-Recommends", "1")
+            config.set('APT::Install-Recommends', '1')
 
         if noauth:
-            config.set("APT::Get::AllowUnauthenticated", "1")
+            config.set('APT::Get::AllowUnauthenticated', '1')
         else:
-            config.set("APT::Get::AllowUnauthenticated", "0")
+            config.set('APT::Get::AllowUnauthenticated', '0')
 
         self.cache = Cache(progress=ElbeOpProgress())
         self.cache.open(progress=ElbeOpProgress())
@@ -89,13 +89,13 @@ class RPCAPTCache(InChRootObject):
         with open(f'{filename}_{ts.tm_hour:02}{ts.tm_min:02}{ts.tm_sec:02}', 'w') as dbg:
             for p in self.cache:
                 dbg.write(
-                    f"{p.name} {p.candidate.version} {p.marked_keep} "
-                    f"{p.marked_delete} {p.marked_upgrade} "
-                    f" {p.marked_downgrade} {p.marked_install} "
-                    f" {p.marked_reinstall} {p.is_auto_installed} "
-                    f" {p.is_installed} {p.is_auto_removable} "
-                    f"{p.is_now_broken} {p.is_inst_broken} "
-                    f"{p.is_upgradable}\n")
+                    f'{p.name} {p.candidate.version} {p.marked_keep} '
+                    f'{p.marked_delete} {p.marked_upgrade} '
+                    f' {p.marked_downgrade} {p.marked_install} '
+                    f' {p.marked_reinstall} {p.is_auto_installed} '
+                    f' {p.is_installed} {p.is_auto_removable} '
+                    f'{p.is_now_broken} {p.is_inst_broken} '
+                    f'{p.is_upgradable}\n')
 
     def get_sections(self):
         ret = list({p.section for p in self.cache})
@@ -164,7 +164,7 @@ class RPCAPTCache(InChRootObject):
 
         for pkg in self.cache:
 
-            if not pkg.name.endswith("-dev"):
+            if not pkg.name.endswith('-dev'):
                 continue
 
             src_name = pkg.candidate.source_name
@@ -179,7 +179,7 @@ class RPCAPTCache(InChRootObject):
 
             dev_lst.append(pkg)
 
-        mark_install(dev_lst, "-dev")
+        mark_install(dev_lst, '-dev')
 
         # ensure that the symlinks package will be installed (it's
         # needed for fixing links inside the sysroot
@@ -194,12 +194,12 @@ class RPCAPTCache(InChRootObject):
 
             if pkg.is_installed or pkg.marked_install:
 
-                dbg_pkg = f"{pkg.name}-dbgsym"
+                dbg_pkg = f'{pkg.name}-dbgsym'
 
                 if dbg_pkg in self.cache:
                     dbgsym_lst.append(self.cache[dbg_pkg])
 
-        mark_install(dbgsym_lst, "-dbgsym")
+        mark_install(dbgsym_lst, '-dbgsym')
 
     def cleanup(self, exclude_pkgs):
         for p in self.cache:
@@ -232,9 +232,9 @@ class RPCAPTCache(InChRootObject):
         self.cache.open(progress=ElbeOpProgress())
 
     def commit(self):
-        os.environ["DEBIAN_FRONTEND"] = "noninteractive"
-        os.environ["DEBONF_NONINTERACTIVE_SEEN"] = "true"
-        print("Commiting changes ...")
+        os.environ['DEBIAN_FRONTEND'] = 'noninteractive'
+        os.environ['DEBONF_NONINTERACTIVE_SEEN'] = 'true'
+        print('Commiting changes ...')
         self.cache.commit(ElbeAcquireProgress(),
                           ElbeInstallProgress(fileno=sys.stdout.fileno()))
         self.cache.open(progress=ElbeOpProgress())
@@ -331,10 +331,10 @@ class RPCAPTCache(InChRootObject):
 
                 src_set.add((tmp.source_name, tmp.source_version))
 
-                if "Built-Using" not in section:
+                if 'Built-Using' not in section:
                     continue
 
-                built_using_lst = section["Built-Using"].split(', ')
+                built_using_lst = section['Built-Using'].split(', ')
                 for built_using in built_using_lst:
                     name, version = built_using.split(' ', 1)
                     version = version.strip('(= )')
@@ -359,7 +359,7 @@ class RPCAPTCache(InChRootObject):
 
     def download_source(self, src_name, src_version, dest_dir):
 
-        allow_untrusted = config.find_b("APT::Get::AllowUnauthenticated",
+        allow_untrusted = config.find_b('APT::Get::AllowUnauthenticated',
                                         False)
 
         rec = SourceRecords()
@@ -371,7 +371,7 @@ class RPCAPTCache(InChRootObject):
             # End of the list?
             if not next_p:
                 raise ValueError(
-                    f"No source found for {src_name}_{src_version}")
+                    f'No source found for {src_name}_{src_version}')
             if src_version == rec.version:
                 break
 
@@ -380,7 +380,7 @@ class RPCAPTCache(InChRootObject):
         if not (allow_untrusted or rec.index.is_trusted):
             raise FetchError(
                 f"Can't fetch source {src_name}_{src_version}; "
-                f"Source {rec.index.describe} is not trusted")
+                f'Source {rec.index.describe} is not trusted')
 
         # Copy from src to dst all files of the source package
         dsc = None
@@ -405,7 +405,7 @@ class RPCAPTCache(InChRootObject):
         acq.run()
 
         if dsc is None:
-            raise ValueError(f"No source found for {src_name}_{src_version}")
+            raise ValueError(f'No source found for {src_name}_{src_version}')
 
         for item in acq.items:
             if item.STAT_DONE != item.status:
diff --git a/elbepack/shellhelper.py b/elbepack/shellhelper.py
index 8bcacea6..698a1a07 100644
--- a/elbepack/shellhelper.py
+++ b/elbepack/shellhelper.py
@@ -12,8 +12,8 @@ from io import TextIOWrapper, BytesIO
 
 from elbepack.log import async_logging
 
-log = logging.getLogger("log")
-soap = logging.getLogger("soap")
+log = logging.getLogger('log')
+soap = logging.getLogger('soap')
 
 
 class CommandError(Exception):
@@ -24,7 +24,7 @@ class CommandError(Exception):
         self.cmd = cmd
 
     def __str__(self):
-        return f"Error: {self.returncode} returned from Command {self.cmd}"
+        return f'Error: {self.returncode} returned from Command {self.cmd}'
 
 
 def system(cmd, allow_fail=False, env_add=None):
@@ -240,7 +240,7 @@ def do(cmd, allow_fail=False, stdin=None, env_add=None):
     if isinstance(stdin, str):
         stdin = stdin.encode()
 
-    logging.info(cmd, extra={"context": "[CMD] "})
+    logging.info(cmd, extra={'context': '[CMD] '})
 
     r, w = os.pipe()
 
@@ -273,12 +273,12 @@ def chroot(directory, cmd, env_add=None, **kwargs):
     elbepack.shellhelper.CommandError: ...
     """
 
-    new_env = {"LANG": "C",
-               "LANGUAGE": "C",
-               "LC_ALL": "C"}
+    new_env = {'LANG': 'C',
+               'LANGUAGE': 'C',
+               'LC_ALL': 'C'}
     if env_add:
         new_env.update(env_add)
-    do(f"chroot {directory} {cmd}", env_add=new_env, **kwargs)
+    do(f'chroot {directory} {cmd}', env_add=new_env, **kwargs)
 
 
 def get_command_out(cmd, stdin=None, allow_fail=False, env_add=None):
@@ -318,7 +318,7 @@ def get_command_out(cmd, stdin=None, allow_fail=False, env_add=None):
     if isinstance(stdin, str):
         stdin = stdin.encode()
 
-    logging.info(cmd, extra={"context": "[CMD] "})
+    logging.info(cmd, extra={'context': '[CMD] '})
 
     r, w = os.pipe()
 
diff --git a/elbepack/soapclient.py b/elbepack/soapclient.py
index 02d76085..434486fa 100644
--- a/elbepack/soapclient.py
+++ b/elbepack/soapclient.py
@@ -48,7 +48,7 @@ class ElbeSoapClient:
         set_suds_debug(debug)
 
         # Attributes
-        self.wsdl = "http://" + host + ":" + str(port) + "/soap/?wsdl"
+        self.wsdl = 'http://' + host + ':' + str(port) + '/soap/?wsdl'
         self.control = None
         self.retries = 0
 
@@ -78,7 +78,7 @@ class ElbeSoapClient:
         self.service.login(user, passwd)
 
     def download_file(self, builddir, filename, dst_fname):
-        fp = open(dst_fname, "wb")
+        fp = open(dst_fname, 'wb')
         part = 0
 
         # XXX the retry logic might get removed in the future, if the error
@@ -92,20 +92,20 @@ class ElbeSoapClient:
             except BadStatusLine as e:
                 retry = retry - 1
 
-                print(f"get_file part {part} failed, retry {retry} times",
+                print(f'get_file part {part} failed, retry {retry} times',
                       file=sys.stderr)
                 print(str(e), file=sys.stderr)
                 print(repr(e.line), file=sys.stderr)
 
                 if not retry:
                     fp.close()
-                    print("file transfer failed", file=sys.stderr)
+                    print('file transfer failed', file=sys.stderr)
                     sys.exit(170)
 
-            if ret == "FileNotFound":
+            if ret == 'FileNotFound':
                 print(ret, file=sys.stderr)
                 sys.exit(171)
-            if ret == "EndOfFile":
+            if ret == 'EndOfFile':
                 fp.close()
                 return
 
@@ -122,9 +122,9 @@ class ClientAction:
 
     @classmethod
     def print_actions(cls):
-        print("available subcommands are:", file=sys.stderr)
+        print('available subcommands are:', file=sys.stderr)
         for a in cls.actiondict:
-            print(f"   {a}", file=sys.stderr)
+            print(f'   {a}', file=sys.stderr)
 
     def __new__(cls, node):
         action = cls.actiondict[node]
@@ -138,7 +138,7 @@ class ClientAction:
 
         size = 1024 * 1024
 
-        with open(filename, "rb") as f:
+        with open(filename, 'rb') as f:
 
             while True:
 
@@ -146,7 +146,7 @@ class ClientAction:
                 data = binascii.b2a_base64(bin_data)
 
                 if not isinstance(data, str):
-                    data = data.decode("ascii")
+                    data = data.decode('ascii')
 
                 append(build_dir, data)
 
@@ -166,7 +166,7 @@ class RemoveLogAction(ClientAction):
 
     def execute(self, client, _opt, args):
         if len(args) != 1:
-            print("usage: elbe control rm_log <project_dir>", file=sys.stderr)
+            print('usage: elbe control rm_log <project_dir>', file=sys.stderr)
             sys.exit(172)
 
         builddir = args[0]
@@ -189,10 +189,10 @@ class ListProjectsAction(ClientAction):
         try:
             for p in projects.SoapProject:
                 print(
-                    f"{p.builddir}\t{p.name}\t{p.version}\t{p.status}\t"
-                    f"{p.edit}")
+                    f'{p.builddir}\t{p.name}\t{p.version}\t{p.status}\t'
+                    f'{p.edit}')
         except AttributeError:
-            print("No projects configured in initvm")
+            print('No projects configured in initvm')
 
 
 ClientAction.register(ListProjectsAction)
@@ -224,7 +224,7 @@ class AddUserAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 4:
             print(
-                "usage: elbe control add_user <name> <fullname> <password> <email>",
+                'usage: elbe control add_user <name> <fullname> <password> <email>',
                 file=sys.stderr)
             sys.exit(173)
 
@@ -275,7 +275,7 @@ class ResetProjectAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control reset_project <project_dir>",
+                'usage: elbe control reset_project <project_dir>',
                 file=sys.stderr)
             sys.exit(174)
 
@@ -296,7 +296,7 @@ class DeleteProjectAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control del_project <project_dir>",
+                'usage: elbe control del_project <project_dir>',
                 file=sys.stderr)
             sys.exit(175)
 
@@ -317,7 +317,7 @@ class SetXmlAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 2:
             print(
-                "usage: elbe control set_xml <project_dir> <xml>",
+                'usage: elbe control set_xml <project_dir> <xml>',
                 file=sys.stderr)
             sys.exit(176)
 
@@ -330,7 +330,7 @@ class SetXmlAction(ClientAction):
                 skip_validate=True,
                 url_validation=ValidationMode.NO_CHECK)
         except IOError:
-            print(f"{filename} is not a valid elbe xml file")
+            print(f'{filename} is not a valid elbe xml file')
             sys.exit(177)
 
         if not x.has('target'):
@@ -340,7 +340,7 @@ class SetXmlAction(ClientAction):
 
         size = 1024 * 1024
         part = 0
-        with open(filename, "rb") as fp:
+        with open(filename, 'rb') as fp:
             while True:
 
                 xml_base64 = binascii.b2a_base64(fp.read(size))
@@ -351,19 +351,19 @@ class SetXmlAction(ClientAction):
                 # finish upload
                 if len(xml_base64) == 1:
                     part = client.service.upload_file(builddir,
-                                                      "source.xml",
+                                                      'source.xml',
                                                       xml_base64,
                                                       -1)
                 else:
                     part = client.service.upload_file(builddir,
-                                                      "source.xml",
+                                                      'source.xml',
                                                       xml_base64,
                                                       part)
                 if part == -1:
-                    print("project busy, upload not allowed")
+                    print('project busy, upload not allowed')
                     return part
                 if part == -2:
-                    print("upload of xml finished")
+                    print('upload of xml finished')
                     return 0
 
 
@@ -379,7 +379,7 @@ class BuildAction(ClientAction):
 
     def execute(self, client, opt, args):
         if len(args) != 1:
-            print("usage: elbe control build <project_dir>", file=sys.stderr)
+            print('usage: elbe control build <project_dir>', file=sys.stderr)
             sys.exit(179)
 
         builddir = args[0]
@@ -400,7 +400,7 @@ class BuildSysrootAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control build-sysroot <project_dir>",
+                'usage: elbe control build-sysroot <project_dir>',
                 file=sys.stderr)
             sys.exit(180)
 
@@ -421,7 +421,7 @@ class BuildSDKAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control build-sdk <project_dir>",
+                'usage: elbe control build-sdk <project_dir>',
                 file=sys.stderr)
             sys.exit(181)
 
@@ -442,13 +442,13 @@ class BuildCDROMsAction(ClientAction):
     def execute(self, client, opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control build-cdroms [--build-bin|--build-sources] <project_dir>",
+                'usage: elbe control build-cdroms [--build-bin|--build-sources] <project_dir>',
                 file=sys.stderr)
             sys.exit(182)
 
         if (not opt.build_bin) and (not opt.build_sources):
             print(
-                "usage: elbe control build-cdroms [--build-bin|--build-sources] <project_dir>",
+                'usage: elbe control build-cdroms [--build-bin|--build-sources] <project_dir>',
                 file=sys.stderr)
             sys.exit(183)
 
@@ -469,7 +469,7 @@ class GetFileAction(ClientAction):
     def execute(self, client, opt, args):
         if len(args) != 2:
             print(
-                "usage: elbe control get_file <project_dir> <file>",
+                'usage: elbe control get_file <project_dir> <file>',
                 file=sys.stderr)
             sys.exit(184)
 
@@ -484,7 +484,7 @@ class GetFileAction(ClientAction):
             dst_fname = str(os.path.join(dst, filename)).encode()
 
         client.download_file(builddir, filename, dst_fname)
-        print(f"{dst_fname} saved")
+        print(f'{dst_fname} saved')
 
 
 ClientAction.register(GetFileAction)
@@ -500,7 +500,7 @@ class BuildChrootAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control build_chroot_tarball <project_dir>",
+                'usage: elbe control build_chroot_tarball <project_dir>',
                 file=sys.stderr)
             sys.exit(185)
 
@@ -522,7 +522,7 @@ class DumpFileAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 2:
             print(
-                "usage: elbe control dump_file <project_dir> <file>",
+                'usage: elbe control dump_file <project_dir> <file>',
                 file=sys.stderr)
             sys.exit(186)
 
@@ -532,10 +532,10 @@ class DumpFileAction(ClientAction):
         part = 0
         while True:
             ret = client.service.get_file(builddir, filename, part)
-            if ret == "FileNotFound":
+            if ret == 'FileNotFound':
                 print(ret, file=sys.stderr)
                 sys.exit(187)
-            if ret == "EndOfFile":
+            if ret == 'EndOfFile':
                 return
 
             os.write(sys.stdout.fileno(), binascii.a2b_base64(ret))
@@ -555,7 +555,7 @@ class GetFilesAction(ClientAction):
     def execute(self, client, opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control get_files <project_dir>",
+                'usage: elbe control get_files <project_dir>',
                 file=sys.stderr)
             sys.exit(188)
 
@@ -574,9 +574,9 @@ class GetFilesAction(ClientAction):
 
             nfiles += 1
             try:
-                print(f"{f.name} \t({f.description})")
+                print(f'{f.name} \t({f.description})')
             except AttributeError:
-                print(f"{f.name}")
+                print(f'{f.name}')
 
             if opt.output:
                 fs = Filesystem('/')
@@ -602,7 +602,7 @@ class WaitProjectBusyAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control wait_busy <project_dir>",
+                'usage: elbe control wait_busy <project_dir>',
                 file=sys.stderr)
             sys.exit(190)
 
@@ -618,7 +618,7 @@ class WaitProjectBusyAction(ClientAction):
             # here
             except socket.error as e:
                 print(str(e), file=sys.stderr)
-                print("socket error during wait busy occured, retry..",
+                print('socket error during wait busy occured, retry..',
                       file=sys.stderr)
                 continue
 
@@ -635,10 +635,10 @@ class WaitProjectBusyAction(ClientAction):
         # check, whether everything is ok.
 
         prj = client.service.get_project(builddir)
-        if prj.status != "build_done":
+        if prj.status != 'build_done':
             print(
-                "Project build was not successful, current status: "
-                f"{prj.status}",
+                'Project build was not successful, current status: '
+                f'{prj.status}',
                 file=sys.stderr)
             sys.exit(191)
 
@@ -657,7 +657,7 @@ class SetCdromAction(ClientAction):
 
         if len(args) != 2:
             print(
-                "usage: elbe control set_cdrom <project_dir> <cdrom file>",
+                'usage: elbe control set_cdrom <project_dir> <cdrom file>',
                 file=sys.stderr)
             sys.exit(192)
 
@@ -683,7 +683,7 @@ class SetOrigAction(ClientAction):
 
         if len(args) != 2:
             print(
-                "usage: elbe control set_orig <project_dir> <orig file>",
+                'usage: elbe control set_orig <project_dir> <orig file>',
                 file=sys.stderr)
             sys.exit(193)
 
@@ -707,7 +707,7 @@ class ShutdownInitvmAction(ClientAction):
 
     def execute(self, client, _opt, args):
         if args:
-            print("usage: elbe control shutdown_initvm", file=sys.stderr)
+            print('usage: elbe control shutdown_initvm', file=sys.stderr)
             sys.exit(194)
 
         # if shutdown kills the daemon before it can answer the request
@@ -730,8 +730,8 @@ class SetPdebuilderAction(ClientAction):
     def execute(self, client, opt, args):
 
         if len(args) != 2 and len(args) != 3:
-            print("usage: elbe control set_pdebuild "
-                  "<project_dir> <pdebuild file>", file=sys.stderr)
+            print('usage: elbe control set_pdebuild '
+                  '<project_dir> <pdebuild file>', file=sys.stderr)
             sys.exit(195)
 
         builddir = args[0]
@@ -756,7 +756,7 @@ class BuildPbuilderAction(ClientAction):
     def execute(self, client, opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control build_pbuilder <project_dir>",
+                'usage: elbe control build_pbuilder <project_dir>',
                 file=sys.stderr)
             sys.exit(196)
 
@@ -778,7 +778,7 @@ class UpdatePbuilderAction(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe control update_pbuilder <project_dir>",
+                'usage: elbe control update_pbuilder <project_dir>',
                 file=sys.stderr)
             sys.exit(197)
 
@@ -799,7 +799,7 @@ class InstallElbeVersion(ClientAction):
     def execute(self, client, _opt, args):
         if len(args) > 1:
             print(
-                "usage: elbe control install_elbe_version [version]",
+                'usage: elbe control install_elbe_version [version]',
                 file=sys.stderr)
             sys.exit(198)
 
@@ -816,7 +816,7 @@ class InstallElbeVersion(ClientAction):
         if result.ret == 0:
             print('\nSuccess !!!')
         else:
-            print(f"\nError: apt returns {result.ret}")
+            print(f'\nError: apt returns {result.ret}')
 
 
 ClientAction.register(InstallElbeVersion)
@@ -831,9 +831,9 @@ class RepoAction(ClientAction):
 
     @classmethod
     def print_actions(cls):
-        print("available subcommands are:", file=sys.stderr)
+        print('available subcommands are:', file=sys.stderr)
         for a in cls.repoactiondict:
-            print(f"   {a}", file=sys.stderr)
+            print(f'   {a}', file=sys.stderr)
 
     def __new__(cls, node):
         action = cls.repoactiondict[node]
@@ -853,7 +853,7 @@ class ListPackagesAction(RepoAction):
     def execute(self, client, _opt, args):
         if len(args) != 1:
             print(
-                "usage: elbe prjrepo list_packages <project_dir>",
+                'usage: elbe prjrepo list_packages <project_dir>',
                 file=sys.stderr)
             sys.exit(199)
 
@@ -873,22 +873,22 @@ class DownloadAction(RepoAction):
 
     def execute(self, client, _opt, args):
         if len(args) != 1:
-            print("usage: elbe prjrepo download <project_dir>",
+            print('usage: elbe prjrepo download <project_dir>',
                   file=sys.stderr)
             sys.exit(200)
 
         builddir = args[0]
-        filename = "repo.tar.gz"
+        filename = 'repo.tar.gz'
         client.service.tar_prjrepo(builddir, filename)
 
         dst_fname = os.path.join(
-            ".",
-            "elbe-projectrepo-" +
-            datetime.now().strftime("%Y%m%d-%H%M%S") +
-            ".tar.gz")
+            '.',
+            'elbe-projectrepo-' +
+            datetime.now().strftime('%Y%m%d-%H%M%S') +
+            '.tar.gz')
 
         client.download_file(builddir, filename, dst_fname)
-        print(f"{dst_fname} saved")
+        print(f'{dst_fname} saved')
 
 
 RepoAction.register(DownloadAction)
@@ -906,13 +906,13 @@ class UploadPackageAction(RepoAction):
         # Uploads file f into builddir in intivm
         size = 1024 * 1024
         part = 0
-        with open(f, "rb") as fp:
+        with open(f, 'rb') as fp:
             while True:
 
                 xml_base64 = binascii.b2a_base64(fp.read(size))
 
                 if not isinstance(xml_base64, str):
-                    xml_base64 = xml_base64.decode("ascii")
+                    xml_base64 = xml_base64.decode('ascii')
 
                 # finish upload
                 if len(xml_base64) == 1:
@@ -926,32 +926,32 @@ class UploadPackageAction(RepoAction):
                                                       xml_base64,
                                                       part)
                 if part == -1:
-                    print("project busy, upload not allowed")
+                    print('project busy, upload not allowed')
                     return -1
                 if part == -2:
-                    print("Upload of package finished.")
+                    print('Upload of package finished.')
                     break
 
     def execute(self, client, _opt, args):
         if len(args) != 2:
             print(
-                "usage: elbe prjrepo upload_pkg <project_dir> <deb/dsc/changes file>",
+                'usage: elbe prjrepo upload_pkg <project_dir> <deb/dsc/changes file>',
                 file=sys.stderr)
             sys.exit(201)
 
         builddir = args[0]
         filename = args[1]
 
-        print("\n--------------------------")
-        print("Upload and Include Package")
-        print("--------------------------")
-        print("Check files...")
+        print('\n--------------------------')
+        print('Upload and Include Package')
+        print('--------------------------')
+        print('Check files...')
 
         filetype = os.path.splitext(filename)[1]
 
         # Check filetype
         if filetype not in ['.dsc', '.deb', '.changes']:
-            print("Error: Only .dsc, .deb or .changes files allowed to upload.")
+            print('Error: Only .dsc, .deb or .changes files allowed to upload.')
             sys.exit(202)
 
         files = [filename]  # list of all files which will be uploaded
@@ -969,18 +969,18 @@ class UploadPackageAction(RepoAction):
         abort = False
         for f in files:
             if not os.path.isfile(f):
-                print(f"File {f} not found.")
+                print(f'File {f} not found.')
                 abort = True
         # Abort if one or more source files are missing
         if abort:
             sys.exit(203)
 
-        print("Start uploading file(s)...")
+        print('Start uploading file(s)...')
         for f in files:
-            print(f"Upload {f}...")
+            print(f'Upload {f}...')
             self.upload_file(client, f, builddir)
 
-        print("Including Package in initvm...")
+        print('Including Package in initvm...')
         client.service.include_package(builddir, os.path.basename(filename))
 
 
diff --git a/elbepack/templates.py b/elbepack/templates.py
index 0908749e..577c98dc 100644
--- a/elbepack/templates.py
+++ b/elbepack/templates.py
@@ -30,7 +30,7 @@ def template(fname, d, linebreak=False):
 
 
 def write_template(outname, fname, d, linebreak=False):
-    outfile = open(outname, "w")
+    outfile = open(outname, 'w')
     outfile.write(template(fname, d, linebreak))
     outfile.close()
 
@@ -45,18 +45,18 @@ def get_preseed(xml):
     def_xml = etree(default_preseed_fname)
 
     preseed = {}
-    for c in def_xml.node("/preseed"):
-        k = (c.et.attrib["owner"], c.et.attrib["key"])
-        v = (c.et.attrib["type"], c.et.attrib["value"])
+    for c in def_xml.node('/preseed'):
+        k = (c.et.attrib['owner'], c.et.attrib['key'])
+        v = (c.et.attrib['type'], c.et.attrib['value'])
 
         preseed[k] = v
 
-    if not xml.has("./project/preseed"):
+    if not xml.has('./project/preseed'):
         return preseed
 
-    for c in xml.node("/project/preseed"):
-        k = (c.et.attrib["owner"], c.et.attrib["key"])
-        v = (c.et.attrib["type"], c.et.attrib["value"])
+    for c in xml.node('/project/preseed'):
+        k = (c.et.attrib['owner'], c.et.attrib['key'])
+        v = (c.et.attrib['type'], c.et.attrib['value'])
 
         preseed[k] = v
 
@@ -67,18 +67,18 @@ def get_initvm_preseed(xml):
     def_xml = etree(default_preseed_fname)
 
     preseed = {}
-    for c in def_xml.node("/preseed"):
-        k = (c.et.attrib["owner"], c.et.attrib["key"])
-        v = (c.et.attrib["type"], c.et.attrib["value"])
+    for c in def_xml.node('/preseed'):
+        k = (c.et.attrib['owner'], c.et.attrib['key'])
+        v = (c.et.attrib['type'], c.et.attrib['value'])
 
         preseed[k] = v
 
-    if not xml.has("./initvm/preseed"):
+    if not xml.has('./initvm/preseed'):
         return preseed
 
-    for c in xml.node("/initvm/preseed"):
-        k = (c.et.attrib["owner"], c.et.attrib["key"])
-        v = (c.et.attrib["type"], c.et.attrib["value"])
+    for c in xml.node('/initvm/preseed'):
+        k = (c.et.attrib['owner'], c.et.attrib['key'])
+        v = (c.et.attrib['type'], c.et.attrib['value'])
 
         preseed[k] = v
 
@@ -86,8 +86,8 @@ def get_initvm_preseed(xml):
 
 
 def preseed_to_text(pres):
-    retval = ""
+    retval = ''
     for k, v in pres.items():
-        retval += f"{k[0]}\t{k[1]}\t{v[0]}\t{v[1]}\n"
+        retval += f'{k[0]}\t{k[1]}\t{v[0]}\t{v[1]}\n'
 
     return retval
diff --git a/elbepack/tests/notest_pylint.py b/elbepack/tests/notest_pylint.py
index f5eb4a07..5d918a7a 100644
--- a/elbepack/tests/notest_pylint.py
+++ b/elbepack/tests/notest_pylint.py
@@ -12,31 +12,31 @@ from elbepack.directories import pack_dir, elbe_exe, elbe_dir
 class TestPylint(ElbeTestCase):
     global elbe_dir
 
-    elbe_dir = os.path.join(os.path.dirname(__file__), "../..")
+    elbe_dir = os.path.join(os.path.dirname(__file__), '../..')
 
-    pylint_opts = ["--reports=n",
-                   "--score=n",
+    pylint_opts = ['--reports=n',
+                   '--score=n',
                    f"--rcfile={os.path.join(elbe_dir, '.pylintrc')}",
-                   "--disable=W0511,R0801"]
+                   '--disable=W0511,R0801']
 
     failure_set = {os.path.join(pack_dir, path)
                    for path
                    in [
-                       "daemons/soap/esoap.py",
+                       'daemons/soap/esoap.py',
 
                        # FIXME: This one is an actual bug to be fixed
                        # 274:30: W0631: Using possibly undefined loop variable
                        # 'entry' (undefined-loop-variable)
                        # 276:26: W0631: Using possibly undefined loop variable
                        # 'entry' (undefined-loop-variable)
-                       "hdimg.py",
-
-                       "initvmaction.py",
-                       "log.py",
-                       "pbuilderaction.py",
-                       "repomanager.py",
-                       "rfs.py",
-                       "rpcaptcache.py",
+                       'hdimg.py',
+
+                       'initvmaction.py',
+                       'log.py',
+                       'pbuilderaction.py',
+                       'repomanager.py',
+                       'rfs.py',
+                       'rpcaptcache.py',
                    ]}
 
     @staticmethod
@@ -53,9 +53,9 @@ class TestPylint(ElbeTestCase):
             if self.param in TestPylint.failure_set:
                 self.stdout = e.out
                 self.skipTest(
-                    f"Pylint test for {self.param} is expected to fail")
+                    f'Pylint test for {self.param} is expected to fail')
             else:
                 raise
         else:
             if self.param in TestPylint.failure_set:
-                raise Exception(f"Pylint test for {self.param} is expected to fail, but did not !")
+                raise Exception(f'Pylint test for {self.param} is expected to fail, but did not !')
diff --git a/elbepack/tests/test_doctest.py b/elbepack/tests/test_doctest.py
index fb346d03..8d6ffcea 100644
--- a/elbepack/tests/test_doctest.py
+++ b/elbepack/tests/test_doctest.py
@@ -22,12 +22,12 @@ class ElbeDocTest(ElbeTestCase):
         self.kwargs = {}
 
         if self.param is filesystem:
-            self.kwargs["extraglobs"] = {"this": filesystem.TmpdirFilesystem()}
+            self.kwargs['extraglobs'] = {'this': filesystem.TmpdirFilesystem()}
 
     def tearDown(self):
 
         if self.param is filesystem:
-            self.kwargs["extraglobs"]["this"].delete()
+            self.kwargs['extraglobs']['this'].delete()
 
     def test_doctest(self):
         fail, _ = doctest.testmod(self.param, **self.kwargs)
diff --git a/elbepack/tests/test_flake8.py b/elbepack/tests/test_flake8.py
index 0b95cb53..39dec13f 100644
--- a/elbepack/tests/test_flake8.py
+++ b/elbepack/tests/test_flake8.py
@@ -12,10 +12,10 @@ from elbepack.directories import pack_dir, elbe_exe
 class TestPylint(ElbeTestCase):
     global elbe_dir      # pylint: disable=global-statement
 
-    elbe_dir = os.path.join(os.path.dirname(__file__), "../..")
+    elbe_dir = os.path.join(os.path.dirname(__file__), '../..')
 
-    flake8_opts = ["--max-line-length=100",
-                   "--show-source"]
+    flake8_opts = ['--max-line-length=100',
+                   '--show-source']
 
     @staticmethod
     def params():
diff --git a/elbepack/tests/test_preproc.py b/elbepack/tests/test_preproc.py
index c1c13fa5..0219032b 100644
--- a/elbepack/tests/test_preproc.py
+++ b/elbepack/tests/test_preproc.py
@@ -12,18 +12,18 @@ from elbepack.directories import elbe_exe, elbe_dir
 class TestPreproc(ElbeTestCase):
     global elbe_dir
 
-    elbe_dir = os.path.join(os.path.dirname(__file__), "../..")
+    elbe_dir = os.path.join(os.path.dirname(__file__), '../..')
 
     failure_set = {os.path.join(elbe_dir, path)
                    for path
                    in [
-                       "tests/preproc-01.xml"
+                       'tests/preproc-01.xml'
                    ]}
 
-    params = [os.path.join(elbe_dir, "tests", fname)
+    params = [os.path.join(elbe_dir, 'tests', fname)
               for fname
-              in os.listdir(os.path.join(elbe_dir, "tests"))
-              if fname.startswith("preproc") and fname.endswith(".xml")]
+              in os.listdir(os.path.join(elbe_dir, 'tests'))
+              if fname.startswith('preproc') and fname.endswith('.xml')]
 
     def test_preproc(self):
 
@@ -33,10 +33,10 @@ class TestPreproc(ElbeTestCase):
             if self.param in TestPreproc.failure_set:
                 self.stdout = e.out
                 self.skipTest(
-                    f"Preproc test for {self.param} is expected to fail")
+                    f'Preproc test for {self.param} is expected to fail')
             else:
                 raise
         else:
             if self.param in TestPreproc.failure_set:
-                raise Exception(f"Preproc test for {self.param}"
-                      "is expected to fail, but did not !")
+                raise Exception(f'Preproc test for {self.param}'
+                      'is expected to fail, but did not !')
diff --git a/elbepack/tests/test_version.py b/elbepack/tests/test_version.py
index 3daf13d0..583b59c0 100644
--- a/elbepack/tests/test_version.py
+++ b/elbepack/tests/test_version.py
@@ -11,7 +11,7 @@ from elbepack.version import elbe_version
 class TestElbepackVersion(unittest.TestCase):
 
     # This is a read-only state that is the same for every tests
-    expected_version = "14.9.3"
+    expected_version = '14.9.3'
 
     def setUp(self):
         # This is a mutable state that is different for every tests
diff --git a/elbepack/tests/test_xml.py b/elbepack/tests/test_xml.py
index 4377d97c..11e7df31 100644
--- a/elbepack/tests/test_xml.py
+++ b/elbepack/tests/test_xml.py
@@ -12,22 +12,22 @@ from elbepack.commands.test import ElbeTestCase, ElbeTestLevel, system
 
 
 @unittest.skipIf(ElbeTestCase.level < ElbeTestLevel.INITVM,
-                 "Test level not set to INITVM")
+                 'Test level not set to INITVM')
 class TestSimpleXML(ElbeTestCase):
     global elbe_dir
 
-    elbe_dir = os.path.join(os.path.dirname(__file__), "../..")
+    elbe_dir = os.path.join(os.path.dirname(__file__), '../..')
 
-    params = [os.path.join(elbe_dir, "tests", fname)
+    params = [os.path.join(elbe_dir, 'tests', fname)
               for fname
-              in os.listdir(os.path.join(elbe_dir, "tests"))
-              if fname.startswith("simple") and fname.endswith(".xml")]
+              in os.listdir(os.path.join(elbe_dir, 'tests'))
+              if fname.startswith('simple') and fname.endswith('.xml')]
 
     def test_simple_build(self):
 
-        with tempfile.TemporaryDirectory(prefix="elbe-test-simple-xml-") as build_dir:
+        with tempfile.TemporaryDirectory(prefix='elbe-test-simple-xml-') as build_dir:
 
-            prj = os.path.join(build_dir, "uuid.prj")
+            prj = os.path.join(build_dir, 'uuid.prj')
             uuid = None
 
             try:
@@ -37,12 +37,12 @@ class TestSimpleXML(ElbeTestCase):
                     f'--build-sdk --writeproject "{prj}"')
 
                 # Ensure project build is done
-                with open(prj, "r") as f:
+                with open(prj, 'r') as f:
                     uuid = f.read()
-                    system(f"{sys.executable} {elbe_exe} control list_projects | "
-                           f"grep {uuid} | grep build_done || false")
+                    system(f'{sys.executable} {elbe_exe} control list_projects | '
+                           f'grep {uuid} | grep build_done || false')
 
-                for cmd in ("cdrom", "img", "sdk", "rebuild"):
+                for cmd in ('cdrom', 'img', 'sdk', 'rebuild'):
                     with self.subTest(f'check build {cmd}'):
                         system(f'{sys.executable} {elbe_exe} check-build {cmd} "{build_dir}"')
 
@@ -55,19 +55,19 @@ class TestSimpleXML(ElbeTestCase):
 
 
 @unittest.skipIf(ElbeTestCase.level < ElbeTestLevel.INITVM,
-                 "Test level not set to INITVM")
+                 'Test level not set to INITVM')
 class TestPbuilder(ElbeTestCase):
 
-    params = [os.path.join(elbe_dir, "tests", fname)
+    params = [os.path.join(elbe_dir, 'tests', fname)
               for fname
-              in os.listdir(os.path.join(elbe_dir, "tests"))
-              if fname.startswith("pbuilder") and fname.endswith(".xml")]
+              in os.listdir(os.path.join(elbe_dir, 'tests'))
+              if fname.startswith('pbuilder') and fname.endswith('.xml')]
 
     def test_pbuilder_build(self):
 
-        with tempfile.TemporaryDirectory(prefix="elbe-test-pbuilder-xml-") as build_dir:
+        with tempfile.TemporaryDirectory(prefix='elbe-test-pbuilder-xml-') as build_dir:
 
-            prj = os.path.join(build_dir, "uuid.prj")
+            prj = os.path.join(build_dir, 'uuid.prj')
             uuid = None
 
             try:
@@ -76,7 +76,7 @@ class TestPbuilder(ElbeTestCase):
                 system(f'cd "{build_dir}"; \
                          git clone https://github.com/Linutronix/libgpio.git')
 
-                with open(prj, "r") as f:
+                with open(prj, 'r') as f:
                     uuid = f.read()
                     system(f'cd "{build_dir}/libgpio"; \
                              {sys.executable} {elbe_exe} pbuilder build --project {uuid}')
diff --git a/elbepack/toolchain.py b/elbepack/toolchain.py
index f274f132..1e426659 100644
--- a/elbepack/toolchain.py
+++ b/elbepack/toolchain.py
@@ -7,12 +7,12 @@ import string
 import glob
 import os
 
-arch2triple = {"armhf": "arm-linux-gnueabihf", "armel": "arm-linux-gnueabi"}
+arch2triple = {'armhf': 'arm-linux-gnueabihf', 'armel': 'arm-linux-gnueabi'}
 
 
 class Toolchain:
-    libc_path = "${triple}/libc"
-    gcc_libpath = "${triple}/lib"
+    libc_path = '${triple}/libc'
+    gcc_libpath = '${triple}/lib'
     pkg_libs = {}
 
     def __init__(self, path, arch, triple=None):
@@ -24,7 +24,7 @@ class Toolchain:
             self.triple = arch2triple[arch]
 
     def get_fullpath(self, path):
-        replace = {"triple": self.triple}
+        replace = {'triple': self.triple}
         tpl = string.Template(path)
         p = tpl.substitute(replace)
 
@@ -42,36 +42,36 @@ class Toolchain:
 
 
 class LinaroToolchain(Toolchain):
-    pkg_libs = {"libasan0": ["libasan.so.*"],
-                "libatomic1": ["libatomic.so.*"],
-                "libgcc1": ["libgcc_s.so.*"],
-                "libgfortran3": ["libgfortran.so.*"],
-                "libgomp1": ["libgomp.so.*"],
-                "libmudflap0": ["libmudflap.so.*", "libmudflapth.so.*"],
-                "libssp0": ["libssp.so.*"],
-                "libstdc++6": ["libstdc++.so.*"]}
+    pkg_libs = {'libasan0': ['libasan.so.*'],
+                'libatomic1': ['libatomic.so.*'],
+                'libgcc1': ['libgcc_s.so.*'],
+                'libgfortran3': ['libgfortran.so.*'],
+                'libgomp1': ['libgomp.so.*'],
+                'libmudflap0': ['libmudflap.so.*', 'libmudflapth.so.*'],
+                'libssp0': ['libssp.so.*'],
+                'libstdc++6': ['libstdc++.so.*']}
 
     pkg_deps = {
-        "libasan0": "libc6 (>= 2.13-28), "
-                    "libstdc++ (>= 4.8.3), "
-                    "libgcc1 (>= 4.8.3)",
-        "libatomic1": "libc6 (>= 2.13-28)",
-        "libgcc1": "libc6 (>= 2.13-28)",
-        "libgfortran3": "libgcc1 (>= 4.8.3)",
-        "libgomp1": "libc6 (>= 2.13-28)",
-        "libmudflap0": "libc6 (>= 2.13-28)",
-        "libssp0": "libc6 (>= 2.13-28)",
-        "libstdc++6": "libc6 (>= 2.13-28), libgcc1 (>= 4.8.3)"}
+        'libasan0': 'libc6 (>= 2.13-28), '
+                    'libstdc++ (>= 4.8.3), '
+                    'libgcc1 (>= 4.8.3)',
+        'libatomic1': 'libc6 (>= 2.13-28)',
+        'libgcc1': 'libc6 (>= 2.13-28)',
+        'libgfortran3': 'libgcc1 (>= 4.8.3)',
+        'libgomp1': 'libc6 (>= 2.13-28)',
+        'libmudflap0': 'libc6 (>= 2.13-28)',
+        'libssp0': 'libc6 (>= 2.13-28)',
+        'libstdc++6': 'libc6 (>= 2.13-28), libgcc1 (>= 4.8.3)'}
 
 
 class LinaroToolchainArmel(LinaroToolchain):
-    gcc_libpath = "arm-linux-gnueabihf/lib/arm-linux-gnueabi"
+    gcc_libpath = 'arm-linux-gnueabihf/lib/arm-linux-gnueabi'
 
 
 def get_toolchain(typ, path, arch):
-    if typ == "linaro":
+    if typ == 'linaro':
         return LinaroToolchain(path, arch)
-    if typ == "linaro_armel":
+    if typ == 'linaro_armel':
         return LinaroToolchainArmel(path, arch)
 
     raise Exception
diff --git a/elbepack/treeutils.py b/elbepack/treeutils.py
index acdcd7b6..83cd48ed 100644
--- a/elbepack/treeutils.py
+++ b/elbepack/treeutils.py
@@ -33,24 +33,24 @@ class ebase:
         self.et = et
 
     def text(self, path, **kwargs):
-        el = self.et.find("./" + path)
+        el = self.et.find('./' + path)
         if el is None:
-            if "default" in kwargs:
-                default = kwargs["default"]
-                if hasattr(default, "__getitem__") and "key" in kwargs:
-                    return default[kwargs["key"]]
+            if 'default' in kwargs:
+                default = kwargs['default']
+                if hasattr(default, '__getitem__') and 'key' in kwargs:
+                    return default[kwargs['key']]
                 return default
 
-            raise Exception(f"Cant find path {path}")
+            raise Exception(f'Cant find path {path}')
 
-        return el.text or ""
+        return el.text or ''
 
     @property
     def tag(self):
         return self.et.tag
 
     def node(self, path):
-        retval = self.et.find("./" + path)
+        retval = self.et.find('./' + path)
         if retval is not None:
             return elem(retval)
         return None
@@ -79,7 +79,7 @@ class elem(ebase):
         ebase.__init__(self, el)
 
     def ensure_child(self, tag):
-        retval = self.et.find("./" + tag)
+        retval = self.et.find('./' + tag)
         if retval is not None:
             return elem(retval)
 
@@ -126,7 +126,7 @@ class etree(ebase):
         return self.et.tostring()
 
     def ensure_child(self, tag):
-        retval = self.et.find("./" + tag)
+        retval = self.et.find('./' + tag)
         if retval is not None:
             return elem(retval)
         return elem(SubElement(self.et.getroot(), tag))
diff --git a/elbepack/updated.py b/elbepack/updated.py
index 6bf54d6e..309babf6 100644
--- a/elbepack/updated.py
+++ b/elbepack/updated.py
@@ -43,7 +43,7 @@ class UpdateStatus:
         self.step = 0
         self.nosign = False
         self.verbose = False
-        self.repo_dir = ""
+        self.repo_dir = ''
         self.status_file = '/var/cache/elbe/update_state.txt'
         with rw_access_file(self.status_file, self) as f:
             f.write('ready')
@@ -57,11 +57,11 @@ class UpdateStatus:
 
     def set_progress(self, step, percent=''):
         self.step = step
-        self.write_status(f"in_progress\t{step}\t{percent}")
+        self.write_status(f'in_progress\t{step}\t{percent}')
 
     def set_finished(self, result):
         self.step = 0
-        self.write_status(f"finished\t{result}")
+        self.write_status(f'finished\t{result}')
 
     def log(self, msg):
         if not msg.endswith('\n'):
@@ -73,12 +73,12 @@ class UpdateStatus:
             self.set_progress(3, msg_a[0])
 
         if self.step:
-            msg = "(" + str(self.step) + "/3) " + msg
+            msg = '(' + str(self.step) + '/3) ' + msg
         if self.monitor:
             try:
                 self.monitor.service.msg(msg)
             except BaseException:
-                print("logging to monitor failed, removing monitor connection")
+                print('logging to monitor failed, removing monitor connection')
                 self.monitor = None
                 print(msg)
         try:
@@ -105,39 +105,39 @@ class UpdateService (ServiceBase):
     def list_snapshots(self):
         # use comma separated string because array of strings triggers a bug in
         # python suds :(
-        snapshots = ""
+        snapshots = ''
 
-        if os.path.isfile("/etc/elbe_base.xml"):
-            snapshots += "base_version,"
+        if os.path.isfile('/etc/elbe_base.xml'):
+            snapshots += 'base_version,'
 
-        lists = os.listdir("/etc/apt/sources.list.d")
+        lists = os.listdir('/etc/apt/sources.list.d')
 
         for lic in lists:
-            snapshots += lic[:len(lic) - 5] + ","
+            snapshots += lic[:len(lic) - 5] + ','
 
         return snapshots
 
     @rpc(String, _returns=String)
     def apply_snapshot(self, ver):
-        if ver == "base_version":
-            fname = "/etc/elbe_base.xml"
+        if ver == 'base_version':
+            fname = '/etc/elbe_base.xml'
         else:
-            fname = self.app.status.repo_dir + "/" + ver + "/new.xml"
+            fname = self.app.status.repo_dir + '/' + ver + '/new.xml'
 
         try:
             apply_update(fname, self.app.status)
         except Exception as err:
-            print(f"{err}")
+            print(f'{err}')
             self.app.status.set_finished('error')
-            return f"apply snapshot {ver} failed"
+            return f'apply snapshot {ver} failed'
 
         self.app.status.set_finished('OK')
-        return f"snapshot {ver} applied"
+        return f'snapshot {ver} applied'
 
     @rpc(String)
     def register_monitor(self, wsdl_url):
         self.app.status.monitor = Client(wsdl_url, timeout=cfg['soaptimeout'])
-        self.app.status.log("connection established")
+        self.app.status.log('connection established')
 
 
 class rw_access_file:
@@ -166,21 +166,21 @@ class rw_access:
 
     def __enter__(self):
         if self.mount_orig == 'ro':
-            self.status.log(f"remount {self.mount} read/writeable")
+            self.status.log(f'remount {self.mount} read/writeable')
             try:
-                system(f"mount -o remount,rw {self.mount}")
+                system(f'mount -o remount,rw {self.mount}')
             except CommandError as e:
                 self.status.log(repr(e))
 
     def __exit__(self, _typ, _value, _traceback):
         if self.mount_orig == 'ro':
-            self.status.log(f"remount {self.mount} readonly")
+            self.status.log(f'remount {self.mount} readonly')
             try:
-                system("sync")
+                system('sync')
             except CommandError as e:
                 self.status.log(repr(e))
             try:
-                system(f"mount -o remount,ro {self.mount}")
+                system(f'mount -o remount,ro {self.mount}')
             except CommandError as e:
                 self.status.log(repr(e))
 
@@ -211,10 +211,10 @@ class rw_access:
 
 
 def fname_replace(s):
-    allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-    allowed += "0123456789"
-    allowed += "_-."
-    res = ""
+    allowed = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+    allowed += '0123456789'
+    allowed += '_-.'
+    res = ''
     for c in s:
         if c in allowed:
             res += c
@@ -229,13 +229,13 @@ def update_sourceslist(xml, update_dir, status):
         status.log('invalid repository, not added to sources.list')
         return
 
-    deb = "deb [trusted=yes] file://" + \
-        update_dir + " " + xml.text("/project/suite")
-    deb += " main\n"
-    fname = "/etc/apt/sources.list.d/"
-    fname += fname_replace(xml.text("/project/name")) + "_"
-    fname += fname_replace(xml.text("/project/version"))
-    fname += ".list"
+    deb = 'deb [trusted=yes] file://' + \
+        update_dir + ' ' + xml.text('/project/suite')
+    deb += ' main\n'
+    fname = '/etc/apt/sources.list.d/'
+    fname += fname_replace(xml.text('/project/name')) + '_'
+    fname += fname_replace(xml.text('/project/version'))
+    fname += '.list'
 
     with rw_access_file(fname, status) as f:
         f.write(deb)
@@ -248,8 +248,8 @@ def mark_install(depcache, pkg, ver, auto, status):
             depcache.mark_install(pkg, False, not auto)
             return
 
-    status.log("ERROR: " + pkg.name + ver +
-               " is not available in the cache")
+    status.log('ERROR: ' + pkg.name + ver +
+               ' is not available in the cache')
 
 
 def _apply_update(fname, status):
@@ -257,19 +257,19 @@ def _apply_update(fname, status):
     try:
         xml = etree(fname)
     except BaseException:
-        raise Exception(f"reading {fname} failed ")
+        raise Exception(f'reading {fname} failed ')
 
-    fpl = xml.node("fullpkgs")
+    fpl = xml.node('fullpkgs')
 
     sources = apt_pkg.SourceList()
     sources.read_main_list()
 
-    status.log("initialize apt")
+    status.log('initialize apt')
     apt_pkg.init()
     cache = apt_pkg.Cache(progress=ElbeOpProgress(cb=status.log))
 
     status.set_progress(1)
-    status.log("updating package cache")
+    status.log('updating package cache')
     cache.update(ElbeAcquireProgress(cb=status.log), sources)
     # quote from python-apt api doc: "A call to this method does not affect the
     # current Cache object, instead a new one should be created in order to use
@@ -284,7 +284,7 @@ def _apply_update(fname, status):
     #  if it is not mentioned in the fullpkg list purge the package out of the
     #  system.
     status.set_progress(2)
-    status.log("calculating packages to install/remove")
+    status.log('calculating packages to install/remove')
     count = len(hl_cache)
     step = count / 10
     i = 0
@@ -293,8 +293,8 @@ def _apply_update(fname, status):
         i = i + 1
         if not i % step:
             percent = percent + 10
-            status.log(str(percent) + "% - " + str(i) + "/" + str(count))
-            status.set_progress(2, str(percent) + "%")
+            status.log(str(percent) + '% - ' + str(i) + '/' + str(count))
+            status.set_progress(2, str(percent) + '%')
 
         pkg = cache[p.name]
         marked = False
@@ -310,7 +310,7 @@ def _apply_update(fname, status):
             depcache.mark_delete(pkg, True)
 
     status.set_progress(3)
-    status.log("applying snapshot")
+    status.log('applying snapshot')
     depcache.commit(ElbeAcquireProgress(cb=status.log),
                     ElbeInstallProgress(cb=status.log))
     del depcache
@@ -318,8 +318,8 @@ def _apply_update(fname, status):
     del cache
     del sources
 
-    version_file = open("/etc/updated_version", "w")
-    version_file.write(xml.text("/project/version"))
+    version_file = open('/etc/updated_version', 'w')
+    version_file.write(xml.text('/project/version'))
     version_file.close()
 
 
@@ -356,28 +356,28 @@ def post_sh(current_version, target_version, status):
 
 def get_target_version(fname):
     xml = etree(fname)
-    return xml.text("/project/version")
+    return xml.text('/project/version')
 
 
 def get_current_version():
-    with open("/etc/updated_version", "r") as version_file:
+    with open('/etc/updated_version', 'r') as version_file:
         return version_file.read()
 
 
 def get_base_version():
-    xml = etree("/etc/elbe_base.xml")
-    return xml.text("/project/version")
+    xml = etree('/etc/elbe_base.xml')
+    return xml.text('/project/version')
 
 
 def is_downgrade(target_version, current_version, base_version):
     current = current_version
-    if current == "":
+    if current == '':
         current = base_version
     return version.parse(target_version) < version.parse(current)
 
 
 def is_downgrade_allowed():
-    return os.path.isfile("/var/cache/elbe/.downgrade_allowed")
+    return os.path.isfile('/var/cache/elbe/.downgrade_allowed')
 
 
 def reject_downgrade(status, new_xml_file):
@@ -388,7 +388,7 @@ def reject_downgrade(status, new_xml_file):
         c_ver = get_current_version()
     except IOError as e:
         status.log('get current version failed: ' + str(e))
-        c_ver = ""
+        c_ver = ''
 
     if is_downgrade(t_ver, c_ver, b_ver) and not is_downgrade_allowed():
         status.log('Update is a downgrade and downgrades are not allowed')
@@ -404,7 +404,7 @@ def apply_update(fname, status):
     # process termination an we can remount the filesystem readonly
     # without errors.
     p = Process(target=_apply_update, args=(fname, status))
-    with rw_access("/", status):
+    with rw_access('/', status):
         try:
             t_ver = get_target_version(fname)
         except BaseException:
@@ -415,54 +415,54 @@ def apply_update(fname, status):
             c_ver = get_current_version()
         except IOError as e:
             status.log('get current version failed: ' + str(e))
-            c_ver = ""
+            c_ver = ''
 
         pre_sh(c_ver, t_ver, status)
         p.start()
         p.join()
-        status.log("cleanup /var/cache/apt/archives")
+        status.log('cleanup /var/cache/apt/archives')
         # don't use execute() here, it results in an error that the apt-cache
         # is locked. We currently don't understand this behaviour :(
         try:
-            system("apt-get clean")
+            system('apt-get clean')
         except CommandError as e:
             status.log(repr(e))
         if p.exitcode != 0:
             raise Exception(
-                "Applying update failed. See logfile for more information")
+                'Applying update failed. See logfile for more information')
         post_sh(c_ver, t_ver, status)
 
 
 def action_select(upd_file, status):
 
-    status.log("updating: " + upd_file)
+    status.log('updating: ' + upd_file)
 
     try:
         upd_file_z = ZipFile(upd_file)
     except BadZipfile:
-        status.log(f"update aborted (bad zip file: {upd_file})")
+        status.log(f'update aborted (bad zip file: {upd_file})')
         return
 
-    if "new.xml" not in upd_file_z.namelist():
-        status.log("update invalid (new.xml missing)")
+    if 'new.xml' not in upd_file_z.namelist():
+        status.log('update invalid (new.xml missing)')
         return
 
-    with rw_access("/tmp", status):
-        upd_file_z.extract("new.xml", "/tmp/")
+    with rw_access('/tmp', status):
+        upd_file_z.extract('new.xml', '/tmp/')
 
     # prevent downgrades (if available)
     try:
-        if reject_downgrade(status, "/tmp/new.xml"):
+        if reject_downgrade(status, '/tmp/new.xml'):
             return
     except Exception as e:
         status.log('Error while reading XML files occurred: ' + str(e))
         return
 
-    xml = etree("/tmp/new.xml")
-    prefix = status.repo_dir + "/" + fname_replace(xml.text("/project/name"))
-    prefix += "_" + fname_replace(xml.text("/project/version")) + "/"
+    xml = etree('/tmp/new.xml')
+    prefix = status.repo_dir + '/' + fname_replace(xml.text('/project/name'))
+    prefix += '_' + fname_replace(xml.text('/project/version')) + '/'
 
-    status.log("preparing update: " + prefix)
+    status.log('preparing update: ' + prefix)
 
     with rw_access(prefix, status):
         for i in upd_file_z.namelist():
@@ -472,10 +472,10 @@ def action_select(upd_file, status):
                 upd_file_z.extract(zi, prefix)
                 os.chmod(prefix + '/' + i, zi.external_attr >> 16)
             except OSError:
-                status.log(f"extraction failed: {sys.exc_info()[1]}")
+                status.log(f'extraction failed: {sys.exc_info()[1]}')
                 return
 
-    with rw_access("/var/cache/elbe", status):
+    with rw_access('/var/cache/elbe', status):
         if os.path.isfile(prefix + '/' + 'pre.sh'):
             try:
                 copy(prefix + '/' + 'pre.sh', '/var/cache/elbe/' + 'pre.sh')
@@ -488,25 +488,25 @@ def action_select(upd_file, status):
             except (OSError, IOError) as e:
                 status.log('postsh-copy failed: ' + str(e))
 
-    if os.path.isdir(prefix + "conf"):
-        status.log("copying config files:")
-        for path, _, filenames in os.walk(prefix + "conf"):
-            dst = path[len(prefix + "conf"):]
+    if os.path.isdir(prefix + 'conf'):
+        status.log('copying config files:')
+        for path, _, filenames in os.walk(prefix + 'conf'):
+            dst = path[len(prefix + 'conf'):]
             with rw_access(dst, status):
                 for f in filenames:
                     src = os.path.join(path, f)
-                    status.log("cp " + src + " " + dst)
+                    status.log('cp ' + src + ' ' + dst)
                     try:
                         mkdir_p(dst)
                         copyfile(src, dst + '/' + f)
                     except (OSError, IOError) as e:
                         status.log('failed: ' + str(e))
-        with rw_access(prefix + "conf", status):
-            rmtree(prefix + "conf")
+        with rw_access(prefix + 'conf', status):
+            rmtree(prefix + 'conf')
 
-    if os.path.isdir(prefix + "cmd"):
-        status.log("executing scripts:")
-        for path, _, filenames in os.walk(prefix + "cmd"):
+    if os.path.isdir(prefix + 'cmd'):
+        status.log('executing scripts:')
+        for path, _, filenames in os.walk(prefix + 'cmd'):
             for f in filenames:
                 cmd = os.path.join(path, f)
                 if os.path.isfile(cmd):
@@ -515,33 +515,33 @@ def action_select(upd_file, status):
                         execute(cmd, status)
                     except OSError as e:
                         status.log('exec: ' + cmd + ' - ' + str(e))
-        with rw_access(prefix + "cmd", status):
-            rmtree(prefix + "cmd")
+        with rw_access(prefix + 'cmd', status):
+            rmtree(prefix + 'cmd')
 
-    if os.path.isdir(prefix + "repo"):
+    if os.path.isdir(prefix + 'repo'):
         try:
-            update_sourceslist(xml, prefix + "repo", status)
+            update_sourceslist(xml, prefix + 'repo', status)
         except Exception as err:
             status.log(str(err))
             status.set_finished('error')
-            status.log("update apt sources list failed: " + prefix)
+            status.log('update apt sources list failed: ' + prefix)
             return
 
         try:
-            apply_update("/tmp/new.xml", status)
+            apply_update('/tmp/new.xml', status)
         except Exception as err:
             status.log(str(err))
             status.set_finished('error')
-            status.log("apply update failed: " + prefix)
+            status.log('apply update failed: ' + prefix)
             return
 
         status.set_finished('OK')
-        status.log("update done: " + prefix)
+        status.log('update done: ' + prefix)
 
 
 def is_update_file(upd_file):
     _, extension = os.path.splitext(upd_file)
-    if extension == ".gpg":
+    if extension == '.gpg':
         return True
 
     try:
@@ -549,7 +549,7 @@ def is_update_file(upd_file):
     except BadZipfile:
         return False
 
-    if "new.xml" not in upd_file_z.namelist():
+    if 'new.xml' not in upd_file_z.namelist():
         return False
 
     return True
@@ -560,10 +560,10 @@ update_lock = threading.Lock()
 
 def handle_update_file(upd_file, status, remove=False):
     with update_lock:
-        status.log("checking file: " + str(upd_file))
+        status.log('checking file: ' + str(upd_file))
         _, extension = os.path.splitext(upd_file)
 
-        if extension == ".gpg":
+        if extension == '.gpg':
             fname = unsign_file(upd_file)
             if remove:
                 os.remove(upd_file)
@@ -572,14 +572,14 @@ def handle_update_file(upd_file, status, remove=False):
                 if remove:
                     os.remove(fname)
             else:
-                status.log("checking signature failed: " + str(upd_file))
+                status.log('checking signature failed: ' + str(upd_file))
 
         elif status.nosign:
             action_select(upd_file, status)
             if remove:
                 os.remove(upd_file)
         else:
-            status.log("ignore file: " + str(upd_file))
+            status.log('ignore file: ' + str(upd_file))
 
 
 def shutdown(_signum, _fname, status):
diff --git a/elbepack/updated_monitors.py b/elbepack/updated_monitors.py
index 4b3314a8..12fd1ab9 100644
--- a/elbepack/updated_monitors.py
+++ b/elbepack/updated_monitors.py
@@ -42,7 +42,7 @@ class USBMonitor (UpdateMonitor):
             mnt = self.get_mountpoint_for_device(device.device_node)
             if not mnt:
                 self.status.log(
-                    "Detected USB drive but it was not mounted.")
+                    'Detected USB drive but it was not mounted.')
                 return
 
             for (dirpath, dirnames, filenames) in os.walk(mnt):
@@ -63,7 +63,7 @@ class USBMonitor (UpdateMonitor):
                     break
 
     def start(self):
-        self.status.log("monitoring USB")
+        self.status.log('monitoring USB')
         self.observer.start()
 
     def stop(self):
@@ -74,7 +74,7 @@ class USBMonitor (UpdateMonitor):
 
     @staticmethod
     def get_mountpoint_for_device(dev):
-        with open("/proc/mounts") as f:
+        with open('/proc/mounts') as f:
             for line in f:
                 fields = line.split()
                 try:
@@ -97,12 +97,12 @@ class FileMonitor (UpdateMonitor):
 
     class ObserverThread (threading.Thread):
         def __init__(self, status, monitor):
-            threading.Thread.__init__(self, name="ObserverThread")
+            threading.Thread.__init__(self, name='ObserverThread')
             self.status = status
             self.monitor = monitor
 
         def run(self):
-            self.status.log("monitoring updated dir")
+            self.status.log('monitoring updated dir')
 
             while 1:
                 if self.monitor.notifier.check_events(timeout=1000):
diff --git a/elbepack/updatepkg.py b/elbepack/updatepkg.py
index 69b77c58..5d136677 100644
--- a/elbepack/updatepkg.py
+++ b/elbepack/updatepkg.py
@@ -37,14 +37,14 @@ def gen_update_pkg(project, xml_filename, upd_filename,
         xml = ElbeXML(xml_filename, buildtype=override_buildtype,
                       skip_validate=skip_validate)
 
-        if not xml.has("fullpkgs"):
-            raise MissingData("Xml does not have fullpkgs list")
+        if not xml.has('fullpkgs'):
+            raise MissingData('Xml does not have fullpkgs list')
 
-        if not project.xml.has("fullpkgs"):
-            raise MissingData("Source Xml does not have fullpkgs list")
+        if not project.xml.has('fullpkgs'):
+            raise MissingData('Source Xml does not have fullpkgs list')
 
         if not project.buildenv.rfs:
-            raise MissingData("Target does not have a build environment")
+            raise MissingData('Target does not have a build environment')
 
         cache = project.get_rpcaptcache()
 
@@ -54,7 +54,7 @@ def gen_update_pkg(project, xml_filename, upd_filename,
         for p in instpkgs:
             instindex[p.name] = p
 
-        xmlpkgs = xml.node("/fullpkgs")
+        xmlpkgs = xml.node('/fullpkgs')
         xmlindex = {}
 
         fnamelist = []
@@ -66,7 +66,7 @@ def gen_update_pkg(project, xml_filename, upd_filename,
             xmlindex[name] = p
 
             if name not in instindex:
-                logging.info("Package removed: %s", name)
+                logging.info('Package removed: %s', name)
                 continue
 
             ipkg = instindex[name]
@@ -75,42 +75,42 @@ def gen_update_pkg(project, xml_filename, upd_filename,
             pfname = ipkg.installed_deb
 
             if comp == 0:
-                logging.info("Package ok: %s-%s", name, ipkg.installed_version)
+                logging.info('Package ok: %s-%s', name, ipkg.installed_version)
                 if debug:
                     fnamelist.append(pfname)
                 continue
 
             if comp > 0:
-                logging.info("Package upgrade: %s", pfname)
+                logging.info('Package upgrade: %s', pfname)
                 fnamelist.append(pfname)
             else:
-                logging.info("Package downgrade: %s-%s",
+                logging.info('Package downgrade: %s-%s',
                              name, ipkg.installed_version)
 
         for p in instpkgs:
             if p.name in xmlindex:
                 continue
 
-            logging.info("Package %s newly installed", p.name)
+            logging.info('Package %s newly installed', p.name)
             pfname = p.installed_deb
             fnamelist.append(pfname)
 
-    update = os.path.join(project.builddir, "update")
+    update = os.path.join(project.builddir, 'update')
 
     if os.path.exists(update):
         rmtree(update)
 
-    system(f"mkdir -p {update}")
+    system(f'mkdir -p {update}')
 
     if xml_filename:
-        repodir = os.path.join(update, "repo")
+        repodir = os.path.join(update, 'repo')
 
         repo = UpdateRepo(xml, repodir)
 
         for fname in fnamelist:
             path = os.path.join(
                 project.chrootpath,
-                "var/cache/apt/archives",
+                'var/cache/apt/archives',
                 fname)
             repo.includedeb(path)
 
@@ -118,10 +118,10 @@ def gen_update_pkg(project, xml_filename, upd_filename,
 
         dump_fullpkgs(project.xml, project.buildenv.rfs, cache)
 
-        project.xml.xml.write(os.path.join(update, "new.xml"))
+        project.xml.xml.write(os.path.join(update, 'new.xml'))
         system(f"cp {xml_filename} {os.path.join(update, 'base.xml')}")
     else:
-        system("cp source.xml update/new.xml")
+        system('cp source.xml update/new.xml')
 
     if project.presh_file:
         copyfile(project.presh_file, update + '/pre.sh')
@@ -137,10 +137,10 @@ def gen_update_pkg(project, xml_filename, upd_filename,
     if cfg_dir:
         inlucdedir(update, 'conf', cfg_dir)
 
-    create_zip_archive(upd_filename, update, ".")
+    create_zip_archive(upd_filename, update, '.')
 
     if project.postbuild_file:
-        logging.info("Postbuild script")
+        logging.info('Postbuild script')
         cmd = (f' "{upd_filename} {project.xml.text("project/version")} '
                f'{project.xml.text("project/name")}"')
         do(project.postbuild_file + cmd, allow_fail=True)
diff --git a/elbepack/validate.py b/elbepack/validate.py
index 8baed754..32f2ab70 100644
--- a/elbepack/validate.py
+++ b/elbepack/validate.py
@@ -15,29 +15,29 @@ def error_log_to_strings(error_log):
     uses_norecommend = False
 
     for err in error_log:
-        errors.append(f"{err.filename}:{err.line} error {err.message}")
-        if "http://www.w3.org/2003/XInclude" in err.message:
+        errors.append(f'{err.filename}:{err.line} error {err.message}')
+        if 'http://www.w3.org/2003/XInclude' in err.message:
             uses_xinclude = True
-        if "norecommend" in err.message:
+        if 'norecommend' in err.message:
             uses_norecommend = True
 
     if uses_xinclude:
-        errors.append("\nThere are XIncludes in the XML file. "
+        errors.append('\nThere are XIncludes in the XML file. '
                       "Run 'elbe preprocess' first!\n")
     if uses_norecommend:
-        errors.append("\nThe XML file uses <norecommend />. "
-                      "This function was broken all the time and did the "
-                      "opposite. If you want to retain the original "
-                      "behaviour, please specify <install-recommends /> !\n")
+        errors.append('\nThe XML file uses <norecommend />. '
+                      'This function was broken all the time and did the '
+                      'opposite. If you want to retain the original '
+                      'behaviour, please specify <install-recommends /> !\n')
     return errors
 
 
 def validate_xml(fname):
     if os.path.getsize(fname) > (1 << 30):
-        return [f"{fname} is greater than 1 GiB. "
-                "Elbe does not support files of this size."]
+        return [f'{fname} is greater than 1 GiB. '
+                'Elbe does not support files of this size.']
 
-    schema_file = "https://www.linutronix.de/projects/Elbe/dbsfed.xsd"
+    schema_file = 'https://www.linutronix.de/projects/Elbe/dbsfed.xsd'
     parser = XMLParser(huge_tree=True)
     schema_tree = etree.parse(schema_file)
     schema = etree.XMLSchema(schema_tree)
@@ -48,9 +48,9 @@ def validate_xml(fname):
         if schema.validate(xml):
             return validate_xml_content(xml)
     except etree.XMLSyntaxError:
-        return ["XML Parse error\n" + str(sys.exc_info()[1])]
+        return ['XML Parse error\n' + str(sys.exc_info()[1])]
     except BaseException:
-        return ["Unknown Exception during validation\n" +
+        return ['Unknown Exception during validation\n' +
                 str(sys.exc_info()[1])]
 
     # We have errors, return them in string form...
@@ -64,28 +64,28 @@ def validate_xml_content(xml):
     #
     # If apt-transport-https or ca-certificates is included in bootstrap,
     # we are probably fine
-    bootstrap_include = xml.findtext("/target/debootstrap/include", "")
-    if ("apt-transport-https" not in bootstrap_include
-       and "ca-certificates" not in bootstrap_include):
+    bootstrap_include = xml.findtext('/target/debootstrap/include', '')
+    if ('apt-transport-https' not in bootstrap_include
+       and 'ca-certificates' not in bootstrap_include):
 
         # Check if primary mirror is using https
-        primary_proto = xml.findtext("/project/mirror/primary_proto", "")
-        is_primary_proto_https = (primary_proto.lower() == "https")
+        primary_proto = xml.findtext('/project/mirror/primary_proto', '')
+        is_primary_proto_https = (primary_proto.lower() == 'https')
 
         # Check if any additional mirror is using https
         has_https_urls = False
-        for url in xml.findall("/project/mirror/url-list/url"):
-            b = url.findtext("binary", "").lower()
-            s = url.findtext("source", "").lower()
-            if b.startswith("https") or s.startswith("https"):
+        for url in xml.findall('/project/mirror/url-list/url'):
+            b = url.findtext('binary', '').lower()
+            s = url.findtext('source', '').lower()
+            if b.startswith('https') or s.startswith('https'):
                 has_https_urls = True
                 break
 
         if is_primary_proto_https or has_https_urls:
-            errors.append("\nThe XML contains an HTTPS mirror. "
-                          "Use debootstrap/include "
-                          "to make apt-transport-https (stretch and older) "
-                          "or ca-certificates (buster and newer) available "
-                          "in debootstrap.\n")
+            errors.append('\nThe XML contains an HTTPS mirror. '
+                          'Use debootstrap/include '
+                          'to make apt-transport-https (stretch and older) '
+                          'or ca-certificates (buster and newer) available '
+                          'in debootstrap.\n')
 
     return errors
diff --git a/elbepack/version.py b/elbepack/version.py
index 5859adfb..2f84ef8e 100644
--- a/elbepack/version.py
+++ b/elbepack/version.py
@@ -5,7 +5,7 @@
 
 from elbepack.directories import pack_dir
 
-elbe_version = "14.9.3"
+elbe_version = '14.9.3'
 
 elbe_initvm_packagelist = ['python3-elbe-buildenv',
                            'python3-elbe-soap',
diff --git a/elbepack/virtapt.py b/elbepack/virtapt.py
index c4deac8a..e6635807 100644
--- a/elbepack/virtapt.py
+++ b/elbepack/virtapt.py
@@ -21,7 +21,7 @@ from elbepack.rfs import create_apt_prefs
 
 
 def getdeps(pkg):
-    for dd in pkg.depends_list.get("Depends", []):
+    for dd in pkg.depends_list.get('Depends', []):
         for d in dd:
             yield d.target_pkg.name
 
@@ -46,7 +46,7 @@ def lookup_uri(v, d, target_pkg):
             for x in pkg.provides_list:
                 if target_pkg == x[0]:
                     return lookup_uri(v, d, x[2].parent_pkg.name)
-        return "", "", ""
+        return '', '', ''
 
     x = v.source.find_index(c.file_list[0][0])
 
@@ -55,7 +55,7 @@ def lookup_uri(v, d, target_pkg):
     uri = x.archive_uri(r.filename)
 
     if not x.is_trusted:
-        return target_pkg, uri, ""
+        return target_pkg, uri, ''
 
     hashval = str(r.hashes.find('SHA256')).split(':')[1]
 
@@ -67,8 +67,8 @@ class VirtApt:
 
         self.xml = xml
 
-        arch = xml.text("project/buildimage/arch", key="arch")
-        suite = xml.text("project/suite")
+        arch = xml.text('project/buildimage/arch', key='arch')
+        suite = xml.text('project/suite')
 
         self.basefs = TmpdirFilesystem()
         self.initialize_dirs()
@@ -77,37 +77,37 @@ class VirtApt:
 
         mirror = self.xml.create_apt_sources_list(build_sources=True,
                                                   initvm=False)
-        self.basefs.write_file("etc/apt/sources.list", 0o644, mirror)
+        self.basefs.write_file('etc/apt/sources.list', 0o644, mirror)
 
         self.setup_gpg()
         self.import_keys()
 
-        apt_pkg.config.set("APT::Architecture", arch)
-        apt_pkg.config.set("APT::Architectures", arch)
-        apt_pkg.config.set("Acquire::http::Proxy::127.0.0.1", "DIRECT")
-        apt_pkg.config.set("APT::Install-Recommends", "0")
-        apt_pkg.config.set("Dir::Etc", self.basefs.fname('/'))
-        apt_pkg.config.set("Dir::Etc::Trusted",
+        apt_pkg.config.set('APT::Architecture', arch)
+        apt_pkg.config.set('APT::Architectures', arch)
+        apt_pkg.config.set('Acquire::http::Proxy::127.0.0.1', 'DIRECT')
+        apt_pkg.config.set('APT::Install-Recommends', '0')
+        apt_pkg.config.set('Dir::Etc', self.basefs.fname('/'))
+        apt_pkg.config.set('Dir::Etc::Trusted',
                            self.basefs.fname('/etc/apt/trusted.gpg'))
-        apt_pkg.config.set("Dir::Etc::TrustedParts",
+        apt_pkg.config.set('Dir::Etc::TrustedParts',
                            self.basefs.fname('/etc/apt/trusted.gpg.d'))
-        apt_pkg.config.set("APT::Cache-Limit", "0")
-        apt_pkg.config.set("APT::Cache-Start", "32505856")
-        apt_pkg.config.set("APT::Cache-Grow", "2097152")
-        apt_pkg.config.set("Dir::State", self.basefs.fname("state"))
-        apt_pkg.config.set("Dir::State::status",
-                           self.basefs.fname("state/status"))
-        apt_pkg.config.set("Dir::Cache", self.basefs.fname("cache"))
-        apt_pkg.config.set("Dir::Cache::archives",
-                           self.basefs.fname("cache/archives"))
-        apt_pkg.config.set("Dir::Etc", self.basefs.fname("etc/apt"))
-        apt_pkg.config.set("Dir::Log", self.basefs.fname("log"))
+        apt_pkg.config.set('APT::Cache-Limit', '0')
+        apt_pkg.config.set('APT::Cache-Start', '32505856')
+        apt_pkg.config.set('APT::Cache-Grow', '2097152')
+        apt_pkg.config.set('Dir::State', self.basefs.fname('state'))
+        apt_pkg.config.set('Dir::State::status',
+                           self.basefs.fname('state/status'))
+        apt_pkg.config.set('Dir::Cache', self.basefs.fname('cache'))
+        apt_pkg.config.set('Dir::Cache::archives',
+                           self.basefs.fname('cache/archives'))
+        apt_pkg.config.set('Dir::Etc', self.basefs.fname('etc/apt'))
+        apt_pkg.config.set('Dir::Log', self.basefs.fname('log'))
         if self.xml.has('project/noauth'):
-            apt_pkg.config.set("APT::Get::AllowUnauthenticated", "1")
-            apt_pkg.config.set("Acquire::AllowInsecureRepositories", "1")
+            apt_pkg.config.set('APT::Get::AllowUnauthenticated', '1')
+            apt_pkg.config.set('Acquire::AllowInsecureRepositories', '1')
         else:
-            apt_pkg.config.set("APT::Get::AllowUnauthenticated", "0")
-            apt_pkg.config.set("Acquire::AllowInsecureRepositories", "0")
+            apt_pkg.config.set('APT::Get::AllowUnauthenticated', '0')
+            apt_pkg.config.set('Acquire::AllowInsecureRepositories', '0')
 
         apt_pkg.init_system()
 
@@ -119,7 +119,7 @@ class VirtApt:
         except BaseException as e:
             print(e)
 
-        apt_pkg.config.set("APT::Default-Release", suite)
+        apt_pkg.config.set('APT::Default-Release', suite)
 
         self.cache = apt_pkg.Cache()
         try:
@@ -129,7 +129,7 @@ class VirtApt:
 
         try:
             self.depcache = apt_pkg.DepCache(self.cache)
-            prefs_name = self.basefs.fname("/etc/apt/preferences")
+            prefs_name = self.basefs.fname('/etc/apt/preferences')
             self.depcache.read_pinfile(prefs_name)
         except BaseException as e:
             print(e)
@@ -142,7 +142,7 @@ class VirtApt:
         Adds the binary OpenPGP keyring 'key' as a trusted apt keyring
         with file name 'keyname'.
         """
-        with open(self.basefs.fname(f"/etc/apt/trusted.gpg.d/{keyname}"), "wb") as outfile:
+        with open(self.basefs.fname(f'/etc/apt/trusted.gpg.d/{keyname}'), 'wb') as outfile:
             outfile.write(key)
 
     def import_keys(self):
@@ -155,9 +155,9 @@ class VirtApt:
             # I could make a none global 'noauth' flag for mirrors
             for i, url in enumerate(self.xml.node('project/mirror/url-list')):
                 if url.has('raw-key'):
-                    key = "\n".join(line.strip(" \t")
+                    key = '\n'.join(line.strip(' \t')
                                     for line in url.text('raw-key').splitlines()[1:-1])
-                    self.add_key(unarmor_openpgp_keyring(key), f"elbe-virtapt-raw-key{i}.gpg")
+                    self.add_key(unarmor_openpgp_keyring(key), f'elbe-virtapt-raw-key{i}.gpg')
 
     def start(self):
         pass
@@ -170,27 +170,27 @@ class VirtApt:
         return True
 
     def initialize_dirs(self):
-        self.basefs.mkdir_p("cache/archives/partial")
-        self.basefs.mkdir_p("etc/apt/preferences.d")
-        self.basefs.mkdir_p("etc/apt/trusted.gpg.d")
-        self.basefs.mkdir_p("db")
-        self.basefs.mkdir_p("log")
-        self.basefs.mkdir_p("state/lists/partial")
-        self.basefs.mkdir_p("tmp")
-        self.basefs.touch_file("state/status")
+        self.basefs.mkdir_p('cache/archives/partial')
+        self.basefs.mkdir_p('etc/apt/preferences.d')
+        self.basefs.mkdir_p('etc/apt/trusted.gpg.d')
+        self.basefs.mkdir_p('db')
+        self.basefs.mkdir_p('log')
+        self.basefs.mkdir_p('state/lists/partial')
+        self.basefs.mkdir_p('tmp')
+        self.basefs.touch_file('state/status')
 
     def setup_gpg(self):
-        ring_path = self.basefs.fname("etc/apt/trusted.gpg")
-        if not os.path.isdir("/etc/apt/trusted.gpg.d"):
+        ring_path = self.basefs.fname('etc/apt/trusted.gpg')
+        if not os.path.isdir('/etc/apt/trusted.gpg.d'):
             print("/etc/apt/trusted.gpg.d doesn't exist")
-            print("apt-get install debian-archive-keyring may "
-                  "fix this problem")
+            print('apt-get install debian-archive-keyring may '
+                  'fix this problem')
             sys.exit(204)
 
-        if os.path.exists("/etc/apt/trusted.gpg"):
+        if os.path.exists('/etc/apt/trusted.gpg'):
             system(f'cp /etc/apt/trusted.gpg "{ring_path}"')
 
-        trustkeys = os.listdir("/etc/apt/trusted.gpg.d")
+        trustkeys = os.listdir('/etc/apt/trusted.gpg.d')
         for key in trustkeys:
             system(f'cp "/etc/apt/trusted.gpg.d/{key}" "{ring_path}.d"')
 
@@ -271,7 +271,7 @@ class VirtApt:
                             pkg = self.cache[x[2].parent_pkg.name]
                             c = d.get_candidate_ver(pkg)
             if not c:
-                print(f"couldnt get candidate: {pkg}")
+                print(f'couldnt get candidate: {pkg}')
             else:
                 for p in getdeps(c):
                     if [y for y in deps if y[0] == p]:
diff --git a/elbepack/xmldefaults.py b/elbepack/xmldefaults.py
index c8c03e06..39353573 100644
--- a/elbepack/xmldefaults.py
+++ b/elbepack/xmldefaults.py
@@ -5,192 +5,192 @@
 import random
 
 armel_defaults = {
-    "arch": "armel",
-    "interpreter": "qemu-system-arm",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb",
-    "nicmodel": "smc91c111",
-    "triplet": "arm-linux-gnueabi",
-    "sdkgccpkg": "g++-arm-linux-gnueabi",
-    "elfcode": "ARM",
+    'arch': 'armel',
+    'interpreter': 'qemu-system-arm',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb',
+    'nicmodel': 'smc91c111',
+    'triplet': 'arm-linux-gnueabi',
+    'sdkgccpkg': 'g++-arm-linux-gnueabi',
+    'elfcode': 'ARM',
 }
 
 armel_linaro48_defaults = {
-    "arch": "armel",
-    "interpreter": "qemu-system-arm",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb",
-    "nicmodel": "smc91c111",
-    "triplet": "arm-linux-gnueabi",
-    "sdkgccpkg": "g++-arm-linux-gnueabi",
-    "toolchaintype": "linaro_armel",
-    "toolchainver": "4.8.3",
-    "elfcode": "ARM",
+    'arch': 'armel',
+    'interpreter': 'qemu-system-arm',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb',
+    'nicmodel': 'smc91c111',
+    'triplet': 'arm-linux-gnueabi',
+    'sdkgccpkg': 'g++-arm-linux-gnueabi',
+    'toolchaintype': 'linaro_armel',
+    'toolchainver': '4.8.3',
+    'elfcode': 'ARM',
 }
 
 armel_virtio_defaults = {
-    "arch": "armel",
-    "interpreter": "qemu-system-arm-virtio",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb",
-    "nicmodel": "smc91c111",
-    "triplet": "arm-linux-gnueabi",
-    "sdkgccpkg": "g++-arm-linux-gnueabi",
-    "elfcode": "ARM",
+    'arch': 'armel',
+    'interpreter': 'qemu-system-arm-virtio',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb',
+    'nicmodel': 'smc91c111',
+    'triplet': 'arm-linux-gnueabi',
+    'sdkgccpkg': 'g++-arm-linux-gnueabi',
+    'elfcode': 'ARM',
 }
 
 armhf_defaults = {
-    "arch": "armhf",
-    "interpreter": "qemu-system-arm",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb -cpu cortex-a9",
-    "nicmodel": "smc91c111",
-    "triplet": "arm-linux-gnueabihf",
-    "sdkgccpkg": "g++-arm-linux-gnueabihf",
-    "elfcode": "ARM",
+    'arch': 'armhf',
+    'interpreter': 'qemu-system-arm',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb -cpu cortex-a9',
+    'nicmodel': 'smc91c111',
+    'triplet': 'arm-linux-gnueabihf',
+    'sdkgccpkg': 'g++-arm-linux-gnueabihf',
+    'elfcode': 'ARM',
 }
 
 armhf_linaro48_defaults = {
-    "arch": "armhf",
-    "interpreter": "qemu-system-arm",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb -cpu cortex-a9",
-    "nicmodel": "smc91c111",
-    "triplet": "arm-linux-gnueabihf",
-    "toolchaintype": "linaro",
-    "toolchainver": "4.8.3",
-    "elfcode": "ARM",
+    'arch': 'armhf',
+    'interpreter': 'qemu-system-arm',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb -cpu cortex-a9',
+    'nicmodel': 'smc91c111',
+    'triplet': 'arm-linux-gnueabihf',
+    'toolchaintype': 'linaro',
+    'toolchainver': '4.8.3',
+    'elfcode': 'ARM',
 }
 
 armhf_virtio_defaults = {
-    "arch": "armhf",
-    "interpreter": "qemu-system-arm-virtio",
-    "userinterpr": "qemu-arm-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "versatilepb -cpu cortex-a9",
-    "nicmodel": "virtio",
-    "triplet": "arm-linux-gnueabihf",
-    "sdkgccpkg": "g++-arm-linux-gnueabihf",
-    "elfcode": "ARM"
+    'arch': 'armhf',
+    'interpreter': 'qemu-system-arm-virtio',
+    'userinterpr': 'qemu-arm-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'versatilepb -cpu cortex-a9',
+    'nicmodel': 'virtio',
+    'triplet': 'arm-linux-gnueabihf',
+    'sdkgccpkg': 'g++-arm-linux-gnueabihf',
+    'elfcode': 'ARM'
 }
 
 aarch64_defaults = {
-    "arch": "arm64",
-    "interpreter": "qemu-system-aarch64",
-    "userinterpr": "qemu-aarch64-static",
-    "console": "ttyAMA0,115200n1",
-    "machine": "virt -cpu cortex-a57",
-    "nicmodel": "virtio",
-    "triplet": "aarch64-linux-gnu",
-    "sdkgccpkg": "g++-aarch64-linux-gnu",
-    "elfcode": "ARM aarch64",
+    'arch': 'arm64',
+    'interpreter': 'qemu-system-aarch64',
+    'userinterpr': 'qemu-aarch64-static',
+    'console': 'ttyAMA0,115200n1',
+    'machine': 'virt -cpu cortex-a57',
+    'nicmodel': 'virtio',
+    'triplet': 'aarch64-linux-gnu',
+    'sdkgccpkg': 'g++-aarch64-linux-gnu',
+    'elfcode': 'ARM aarch64',
 }
 
 ppc_defaults = {
-    "arch": "powerpc",
-    "interpreter": "qemu-system-ppc",
-    "userinterpr": "qemu-ppc-static",
-    "console": "ttyPZ0,115200n1",
-    "machine": "mac99",
-    "nicmodel": "rtl8139",
-    "triplet": "powerpc-linux-gnu",
-    "sdkgccpkg": "g++-powerpc-linux-gnu",
-    "elfcode": "PowerPC or cisco 4500",
+    'arch': 'powerpc',
+    'interpreter': 'qemu-system-ppc',
+    'userinterpr': 'qemu-ppc-static',
+    'console': 'ttyPZ0,115200n1',
+    'machine': 'mac99',
+    'nicmodel': 'rtl8139',
+    'triplet': 'powerpc-linux-gnu',
+    'sdkgccpkg': 'g++-powerpc-linux-gnu',
+    'elfcode': 'PowerPC or cisco 4500',
 }
 
 ppcspe_defaults = {
-    "arch": "powerpcspe",
-    "interpreter": "qemu-system-ppc",
-    "userinterpr": "qemu-ppc-static",
-    "console": "ttyS0,115200n1",
-    "machine": "mpc8544ds",
-    "nicmodel": "rtl8139",
-    "triplet": "powerpc-linux-gnuspe",
+    'arch': 'powerpcspe',
+    'interpreter': 'qemu-system-ppc',
+    'userinterpr': 'qemu-ppc-static',
+    'console': 'ttyS0,115200n1',
+    'machine': 'mpc8544ds',
+    'nicmodel': 'rtl8139',
+    'triplet': 'powerpc-linux-gnuspe',
 }
 
 ppc64el_defaults = {
-    "arch": "ppc64el",
-    "interpreter": "qemu-system-ppc64",
-    "userinterpr": "qemu-ppc64le-static",
-    "console": "ttyS0,115200n1",
-    "machine": "none",
-    "nicmodel": "virtio",
-    "triplet": "powerpc64le-linux-gnu",
-    "sdkgccpkg": "g++-powerpc64le-linux-gnu",
-    "elfcode": "64-bit PowerPC or cisco 7500",
+    'arch': 'ppc64el',
+    'interpreter': 'qemu-system-ppc64',
+    'userinterpr': 'qemu-ppc64le-static',
+    'console': 'ttyS0,115200n1',
+    'machine': 'none',
+    'nicmodel': 'virtio',
+    'triplet': 'powerpc64le-linux-gnu',
+    'sdkgccpkg': 'g++-powerpc64le-linux-gnu',
+    'elfcode': '64-bit PowerPC or cisco 7500',
 }
 
 amd64_defaults = {
-    "arch": "amd64",
-    "interpreter": "qemu-system-x86_64",
-    "interpreter-args": ["-accel", "kvm"],
-    "console": "ttyS0,115200n1",
-    "machine": "pc",
-    "nicmodel": "virtio",
-    "triplet": "x86_64-linux-gnu",
-    "sdkgccpkg": "g++",
-    "elfcode": "x86-64",
+    'arch': 'amd64',
+    'interpreter': 'qemu-system-x86_64',
+    'interpreter-args': ['-accel', 'kvm'],
+    'console': 'ttyS0,115200n1',
+    'machine': 'pc',
+    'nicmodel': 'virtio',
+    'triplet': 'x86_64-linux-gnu',
+    'sdkgccpkg': 'g++',
+    'elfcode': 'x86-64',
 }
 
 i386_defaults = {
-    "arch": "i386",
-    "interpreter": "kvm",
-    "console": "ttyS0,115200n1",
-    "machine": "pc",
-    "nicmodel": "virtio",
-    "triplet": "i686-linux-gnu",
-    "sdkgccpkg": "g++-i686-linux-gnu",
-    "elfcode": "Intel 80386",
+    'arch': 'i386',
+    'interpreter': 'kvm',
+    'console': 'ttyS0,115200n1',
+    'machine': 'pc',
+    'nicmodel': 'virtio',
+    'triplet': 'i686-linux-gnu',
+    'sdkgccpkg': 'g++-i686-linux-gnu',
+    'elfcode': 'Intel 80386',
 }
 
 riscv64_defaults = {
-    "arch": "riscv64",
-    "interpreter": "qemu-system-riscv64",
-    "userinterpr": "qemu-riscv64-static",
-    "console": "ttyS0,115200n1",
-    "machine": "sifive_u",
-    "nicmodel": "virtio",
-    "triplet": "riscv64-linux-gnu",
-    "sdkgccpkg": "g++-riscv64-linux-gnu",
-    "elfcode": "RISC-V 64 bit",
+    'arch': 'riscv64',
+    'interpreter': 'qemu-system-riscv64',
+    'userinterpr': 'qemu-riscv64-static',
+    'console': 'ttyS0,115200n1',
+    'machine': 'sifive_u',
+    'nicmodel': 'virtio',
+    'triplet': 'riscv64-linux-gnu',
+    'sdkgccpkg': 'g++-riscv64-linux-gnu',
+    'elfcode': 'RISC-V 64 bit',
 }
 
 archindep_defaults = {
-    "name": "elbe-buildenv",
-    "size": "20G",
-    "img": "qcow2",
-    "mem": "1GiB",
-    "swap-size": "0",
-    "max-cpus": "8",
-    "sdkarch": "amd64",
+    'name': 'elbe-buildenv',
+    'size': '20G',
+    'img': 'qcow2',
+    'mem': '1GiB',
+    'swap-size': '0',
+    'max-cpus': '8',
+    'sdkarch': 'amd64',
 }
 
-defaults = {"armel": armel_defaults,
-            "armel-linaro48": armel_linaro48_defaults,
-            "armel-virtio": armel_virtio_defaults,
-            "armhf": armhf_defaults,
-            "armhf-linaro48": armhf_linaro48_defaults,
-            "armhf-virtio": armhf_virtio_defaults,
-            "aarch64": aarch64_defaults,
-            "ppc": ppc_defaults,
-            "ppcspe": ppcspe_defaults,
-            "ppc64el": ppc64el_defaults,
-            "amd64": amd64_defaults,
-            "i386": i386_defaults,
-            "riscv64": riscv64_defaults,
-            "nodefaults": {}}
+defaults = {'armel': armel_defaults,
+            'armel-linaro48': armel_linaro48_defaults,
+            'armel-virtio': armel_virtio_defaults,
+            'armhf': armhf_defaults,
+            'armhf-linaro48': armhf_linaro48_defaults,
+            'armhf-virtio': armhf_virtio_defaults,
+            'aarch64': aarch64_defaults,
+            'ppc': ppc_defaults,
+            'ppcspe': ppcspe_defaults,
+            'ppc64el': ppc64el_defaults,
+            'amd64': amd64_defaults,
+            'i386': i386_defaults,
+            'riscv64': riscv64_defaults,
+            'nodefaults': {}}
 
 
 def get_random_mac():
     binaddr = [random.randint(0, 255) for _ in range(6)]
     binaddr[0] &= 0xfe
     binaddr[0] |= 0x02
-    s = [f"{x:02x}" for x in binaddr]
+    s = [f'{x:02x}' for x in binaddr]
 
     return ':'.join(s)
 
@@ -199,12 +199,12 @@ class ElbeDefaults:
 
     def __init__(self, build_type):
 
-        assert build_type in defaults, ("Invalid buildtype %s\n"
-                                        "Valid buildtypes are:\n  - %s" %
-                                        (build_type, "\n  - ".join(defaults.keys())))
+        assert build_type in defaults, ('Invalid buildtype %s\n'
+                                        'Valid buildtypes are:\n  - %s' %
+                                        (build_type, '\n  - '.join(defaults.keys())))
 
         self.defaults = defaults[build_type]
-        self.defaults["nicmac"] = get_random_mac()
+        self.defaults['nicmac'] = get_random_mac()
 
         self.generic_defaults = archindep_defaults
 
diff --git a/elbepack/xmlpreprocess.py b/elbepack/xmlpreprocess.py
index 804c23bf..3118dba2 100644
--- a/elbepack/xmlpreprocess.py
+++ b/elbepack/xmlpreprocess.py
@@ -39,39 +39,39 @@ class XMLPreprocessError(Exception):
 def preprocess_pgp_key(xml):
 
     for key in xml.iterfind('.//mirror/url-list/url/key'):
-        print(f"[WARN] <key>{key.text}</key> is deprecated. "
-              "You should use raw-key instead.")
+        print(f'[WARN] <key>{key.text}</key> is deprecated. '
+              'You should use raw-key instead.')
         try:
             keyurl = key.text.strip().replace('LOCALMACHINE', 'localhost')
             myKey = urlopen(keyurl).read().decode('ascii')
-            key.tag = "raw-key"
-            key.text = f"\n{myKey}\n"
+            key.tag = 'raw-key'
+            key.text = f'\n{myKey}\n'
         except HTTPError:
             raise XMLPreprocessError(
-                f"Invalid PGP Key URL in <key> tag: {keyurl}")
+                f'Invalid PGP Key URL in <key> tag: {keyurl}')
         except URLError:
             raise XMLPreprocessError(
-                f"Problem with PGP Key URL in <key> tag: {keyurl}")
+                f'Problem with PGP Key URL in <key> tag: {keyurl}')
 
 
 def preprocess_bootstrap(xml):
-    "Replaces a maybe existing debootstrapvariant element with debootstrap"
+    """Replaces a maybe existing debootstrapvariant element with debootstrap"""
 
-    old_node = xml.find(".//debootstrapvariant")
+    old_node = xml.find('.//debootstrapvariant')
     if old_node is None:
         return
 
-    print("[WARN] <debootstrapvariant> is deprecated. Use <debootstrap> instead.")
+    print('[WARN] <debootstrapvariant> is deprecated. Use <debootstrap> instead.')
 
-    bootstrap = Element("debootstrap")
+    bootstrap = Element('debootstrap')
 
-    bootstrap_variant = Element("variant")
+    bootstrap_variant = Element('variant')
     bootstrap_variant.text = old_node.text
     bootstrap.append(bootstrap_variant)
 
-    old_includepkgs = old_node.get("includepkgs")
+    old_includepkgs = old_node.get('includepkgs')
     if old_includepkgs:
-        bootstrap_include = Element("include")
+        bootstrap_include = Element('include')
         bootstrap_include.text = old_includepkgs
         bootstrap.append(bootstrap_include)
 
@@ -79,33 +79,33 @@ def preprocess_bootstrap(xml):
 
 
 def preprocess_tune2fs(xml):
-    "Replaces all maybe existing tune2fs elements with fs-finetuning command"
+    """Replaces all maybe existing tune2fs elements with fs-finetuning command"""
 
-    old_nodes = xml.findall(".//tune2fs")
+    old_nodes = xml.findall('.//tune2fs')
     for old_node in old_nodes:
-        print("[WARN] <tune2fs> is deprecated. Use <fs-finetuning> instead.")
+        print('[WARN] <tune2fs> is deprecated. Use <fs-finetuning> instead.')
 
         fs_node = old_node.getparent()
-        finetuning_node = fs_node.find("fs-finetuning")
+        finetuning_node = fs_node.find('fs-finetuning')
         if finetuning_node is None:
-            finetuning_node = SubElement(fs_node, "fs-finetuning")
+            finetuning_node = SubElement(fs_node, 'fs-finetuning')
 
-        command = SubElement(finetuning_node, "device-command")
-        command.text = f"tune2fs {old_node.text} {{device}}"
+        command = SubElement(finetuning_node, 'device-command')
+        command.text = f'tune2fs {old_node.text} {{device}}'
 
         fs_node.remove(old_node)
 
 
 def preprocess_iso_option(xml):
 
-    src_opts = xml.find(".//src-cdrom/src-opts")
+    src_opts = xml.find('.//src-cdrom/src-opts')
     if src_opts is None:
         return
 
-    strict = ("strict" in src_opts.attrib
-              and src_opts.attrib["strict"] == "true")
+    strict = ('strict' in src_opts.attrib
+              and src_opts.attrib['strict'] == 'true')
 
-    for opt in src_opts.iterfind("./*"):
+    for opt in src_opts.iterfind('./*'):
         valid = iso_option_valid(opt.tag, opt.text)
         if valid is True:
             continue
@@ -113,21 +113,21 @@ def preprocess_iso_option(xml):
         tag = f'<{opt.tag}>{opt.text}</{opt.tag}>'
 
         if valid is False:
-            violation = f"Invalid ISO option {tag}"
+            violation = f'Invalid ISO option {tag}'
         elif isinstance(valid, int):
             violation = (
-                f"Option {tag} will be truncated by {valid} characters")
+                f'Option {tag} will be truncated by {valid} characters')
         elif isinstance(valid, str):
             violation = (
                 f"Character '{valid}' ({ord(valid[0])}) in ISO option {tag} "
-                "violated ISO-9660")
+                'violated ISO-9660')
         if strict:
             raise XMLPreprocessError(violation)
-        print(f"[WARN] {violation}")
+        print(f'[WARN] {violation}')
 
 
 def preprocess_initvm_ports(xml):
-    "Filters out the default port forwardings to prevent qemu conflict"
+    """Filters out the default port forwardings to prevent qemu conflict"""
 
     for forward in xml.iterfind('initvm/portforwarding/forward'):
         prot = forward.find('proto')
@@ -145,15 +145,15 @@ def preprocess_proxy_add(xml, opt_proxy=None):
     """Add proxy to mirrors from CLI arguments or environment variable"""
 
     # Add proxy from CLI or env?
-    set_proxy = opt_proxy or os.getenv("http_proxy")
+    set_proxy = opt_proxy or os.getenv('http_proxy')
 
     if set_proxy is None:
         return
 
-    proxy_tag = "primary_proxy"
+    proxy_tag = 'primary_proxy'
 
     # For all mirrors
-    for mirror in xml.iterfind(".//mirror"):
+    for mirror in xml.iterfind('.//mirror'):
 
         current_proxy = mirror.find(proxy_tag)
 
@@ -177,7 +177,7 @@ def preprocess_mirror_replacement(xml):
 
     ms = cfg['mirrorsed'].split()
     if (len(ms) % 2) == 1:
-        raise XMLPreprocessError("Uneven number of (search, replace) Values !")
+        raise XMLPreprocessError('Uneven number of (search, replace) Values !')
 
     # now zip even and uneven elements of mirrorsed.split()
     replacements = list(zip(islice(ms, 0, None, 2), islice(ms, 1, None, 2)))
@@ -208,31 +208,31 @@ def preprocess_mirrors(xml):
     """
 
     # global noauth
-    for node in xml.iterfind(".//noauth"):
-        print("[WARN] <noauth> is deprecated. "
-              "Use <option>trusted=yes</option> instead.")
+    for node in xml.iterfind('.//noauth'):
+        print('[WARN] <noauth> is deprecated. '
+              'Use <option>trusted=yes</option> instead.')
 
         parent = node.getparent()
 
         # Add trusted=yes to primary mirror
-        poptions = parent.find(".//mirror/options")
+        poptions = parent.find('.//mirror/options')
         if poptions is None:
-            poptions = etree.Element("options")
-            parent.find(".//mirror").append(poptions)
+            poptions = etree.Element('options')
+            parent.find('.//mirror').append(poptions)
 
-        ptrusted = etree.Element("option")
-        ptrusted.text = "trusted=yes"
+        ptrusted = etree.Element('option')
+        ptrusted.text = 'trusted=yes'
         poptions.append(ptrusted)
 
         # Add trusted=yes to all secondary mirrors
-        for url in parent.iterfind(".//mirror/url-list/url"):
-            options = url.find("options")
+        for url in parent.iterfind('.//mirror/url-list/url'):
+            options = url.find('options')
             if options is None:
-                options = etree.Element("options")
+                options = etree.Element('options')
                 url.append(options)
 
-            trusted = etree.Element("option")
-            trusted.text = "trusted=yes"
+            trusted = etree.Element('option')
+            trusted.text = 'trusted=yes'
             options.append(trusted)
 
         # TODO:old - Uncomment the following whenever there's no more
@@ -241,11 +241,11 @@ def preprocess_mirrors(xml):
         #
         # parent.remove(node)
 
-    preg = re.compile(r".*\[(.*)\](.*)", re.DOTALL)
+    preg = re.compile(r'.*\[(.*)\](.*)', re.DOTALL)
 
     # binary's and source's options
-    for path in (".//mirror/url-list/url/binary",
-                 ".//mirror/url-list/url/source"):
+    for path in ('.//mirror/url-list/url/binary',
+                 './/mirror/url-list/url/source'):
 
         for node in xml.iterfind(path):
 
@@ -263,13 +263,13 @@ def preprocess_mirrors(xml):
 
             # No <options>? Create it
             parent = node.getparent()
-            options = parent.find("options")
+            options = parent.find('options')
             if options is None:
-                options = etree.Element("options")
+                options = etree.Element('options')
                 parent.append(options)
 
             # Adding subelement <option>
-            option = etree.Element("option")
+            option = etree.Element('option')
             option.text = opt
             options.append(option)
 
@@ -280,25 +280,25 @@ def preprocess_passwd(xml):
     """
 
     # migrate root password
-    for passwd in xml.iterfind(".//target/passwd"):
+    for passwd in xml.iterfind('.//target/passwd'):
         # legacy support: move plain-text password to login action
-        if xml.find(".//action/login") is not None:
-            xml.find(".//action/login").text = passwd.text
+        if xml.find('.//action/login') is not None:
+            xml.find('.//action/login').text = passwd.text
 
-        passwd.tag = "passwd_hashed"
+        passwd.tag = 'passwd_hashed'
         passwd.text = f'{sha512_crypt.hash(passwd.text, rounds=5000)}'
-        logging.warning("Please replace <passwd> with <passwd_hashed>. "
-                        "The generated sha512crypt hash only applies 5000 rounds for "
-                        "backwards compatibility reasons. This is considered insecure nowadays.")
+        logging.warning('Please replace <passwd> with <passwd_hashed>. '
+                        'The generated sha512crypt hash only applies 5000 rounds for '
+                        'backwards compatibility reasons. This is considered insecure nowadays.')
 
     # migrate user passwords
-    for adduser in xml.iterfind(".//target/finetuning/adduser[@passwd]"):
+    for adduser in xml.iterfind('.//target/finetuning/adduser[@passwd]'):
         passwd = adduser.attrib['passwd']
         adduser.attrib['passwd_hashed'] = sha512_crypt.hash(passwd, rounds=5000)
         del adduser.attrib['passwd']
         logging.warning("Please replace adduser's passwd attribute with passwd_hashed. "
-                        "The generated sha512crypt hash only applies 5000 rounds for "
-                        "backwards compatibility reasons. This is considered insecure nowadays.")
+                        'The generated sha512crypt hash only applies 5000 rounds for '
+                        'backwards compatibility reasons. This is considered insecure nowadays.')
 
 
 def xmlpreprocess(xml_input_file, xml_output_file, variants=None, proxy=None, gzip=9):
@@ -314,7 +314,7 @@ def xmlpreprocess(xml_input_file, xml_output_file, variants=None, proxy=None, gz
     else:
         variants = set(variants)
 
-    schema_file = "https://www.linutronix.de/projects/Elbe/dbsfed.xsd"
+    schema_file = 'https://www.linutronix.de/projects/Elbe/dbsfed.xsd'
     parser = XMLParser(huge_tree=True)
     schema_tree = etree.parse(schema_file)
     schema = etree.XMLSchema(schema_tree)
@@ -395,30 +395,30 @@ def xmlpreprocess(xml_input_file, xml_output_file, variants=None, proxy=None, gz
             # if validation succedes write xml file
             xml.write(
                 xml_output_file,
-                encoding="UTF-8",
+                encoding='UTF-8',
                 pretty_print=True,
                 compression=gzip)
             # the rest of the code is exception and error handling
             return
 
     except etree.XMLSyntaxError:
-        raise XMLPreprocessError("XML Parse error\n" + str(sys.exc_info()[1]))
+        raise XMLPreprocessError('XML Parse error\n' + str(sys.exc_info()[1]))
     except ArchivedirError:
-        raise XMLPreprocessError("<archivedir> handling failed\n" +
+        raise XMLPreprocessError('<archivedir> handling failed\n' +
                                  str(sys.exc_info()[1]))
     except BaseException:
         raise XMLPreprocessError(
-            "Unknown Exception during validation\n" + str(sys.exc_info()[1]))
+            'Unknown Exception during validation\n' + str(sys.exc_info()[1]))
 
     # We have errors, return them in string form...
-    raise XMLPreprocessError("\n".join(error_log_to_strings(schema.error_log)))
+    raise XMLPreprocessError('\n'.join(error_log_to_strings(schema.error_log)))
 
 
 class PreprocessWrapper:
     def __init__(self, xmlfile, opt):
         self.xmlfile = xmlfile
         self.outxml = None
-        self.options = ""
+        self.options = ''
 
         if opt.variant:
             self.options += f' --variants "{opt.variant}"'
@@ -431,7 +431,7 @@ class PreprocessWrapper:
                f'-o {self.outxml} {self.xmlfile}')
         ret, _, err = command_out_stderr(cmd)
         if ret != 0:
-            print("elbe preprocess failed.", file=sys.stderr)
+            print('elbe preprocess failed.', file=sys.stderr)
             print(err, file=sys.stderr)
             raise CommandError(cmd, ret)
 
diff --git a/elbepack/ziparchives.py b/elbepack/ziparchives.py
index e3efc25f..bbfb648f 100644
--- a/elbepack/ziparchives.py
+++ b/elbepack/ziparchives.py
@@ -7,7 +7,7 @@ import os
 
 
 def create_zip_archive(zipfilename, path, inarchpath):
-    with ZipFile(zipfilename, "w", ZIP_DEFLATED) as zf:
+    with ZipFile(zipfilename, 'w', ZIP_DEFLATED) as zf:
         for root, _, files in os.walk(path):
             archpath = os.path.join(inarchpath, os.path.relpath(root, path))
             zf.write(root, archpath)
-- 
2.43.0



More information about the elbe-devel mailing list