Frank Brehm's Git Trees - pixelpark/admin-tools.git/commitdiff
Moving pp_lib => lib/pp_lib
author Frank Brehm <frank@brehm-online.com>
Tue, 19 Jan 2021 22:18:34 +0000 (23:18 +0100)
committer Frank Brehm <frank@brehm-online.com>
Tue, 19 Jan 2021 22:18:34 +0000 (23:18 +0100)
66 files changed:
lib/pp_lib/__init__.py [new file with mode: 0644]
lib/pp_lib/app.py [new file with mode: 0644]
lib/pp_lib/barracuda_sync_app.py [new file with mode: 0644]
lib/pp_lib/cfg_app.py [new file with mode: 0644]
lib/pp_lib/check_puppet_env_app.py [new file with mode: 0644]
lib/pp_lib/colored.py [new file with mode: 0644]
lib/pp_lib/common.py [new file with mode: 0644]
lib/pp_lib/config_named_app.py [new file with mode: 0644]
lib/pp_lib/deploy_zones_from_pdns.py [new file with mode: 0644]
lib/pp_lib/differ.py [new file with mode: 0644]
lib/pp_lib/dnsui_users.py [new file with mode: 0644]
lib/pp_lib/du.py [new file with mode: 0644]
lib/pp_lib/errors.py [new file with mode: 0644]
lib/pp_lib/format_du.py [new file with mode: 0644]
lib/pp_lib/global_version.py [new file with mode: 0644]
lib/pp_lib/homes_admin.py [new file with mode: 0644]
lib/pp_lib/idna_xlate.py [new file with mode: 0644]
lib/pp_lib/import_pdnsdata.py [new file with mode: 0644]
lib/pp_lib/ldap_app.py [new file with mode: 0644]
lib/pp_lib/mailaddress.py [new file with mode: 0644]
lib/pp_lib/merge.py [new file with mode: 0644]
lib/pp_lib/mk_home_app.py [new file with mode: 0644]
lib/pp_lib/obj.py [new file with mode: 0644]
lib/pp_lib/pdns_app.py [new file with mode: 0644]
lib/pp_lib/pdns_list_zones.py [new file with mode: 0644]
lib/pp_lib/pdns_migrate_ns.py [new file with mode: 0644]
lib/pp_lib/pdns_record.py [new file with mode: 0644]
lib/pp_lib/pdns_show_zone.py [new file with mode: 0644]
lib/pp_lib/pdns_zone.py [new file with mode: 0644]
lib/pp_lib/pidfile.py [new file with mode: 0644]
lib/pp_lib/quota_check.py [new file with mode: 0644]
lib/pp_lib/rec_dict.py [new file with mode: 0644]
lib/pp_lib/test_home_app.py [new file with mode: 0644]
pp_lib/__init__.py [deleted file]
pp_lib/app.py [deleted file]
pp_lib/barracuda_sync_app.py [deleted file]
pp_lib/cfg_app.py [deleted file]
pp_lib/check_puppet_env_app.py [deleted file]
pp_lib/colored.py [deleted file]
pp_lib/common.py [deleted file]
pp_lib/config_named_app.py [deleted file]
pp_lib/deploy_zones_from_pdns.py [deleted file]
pp_lib/differ.py [deleted file]
pp_lib/dnsui_users.py [deleted file]
pp_lib/du.py [deleted file]
pp_lib/errors.py [deleted file]
pp_lib/format_du.py [deleted file]
pp_lib/global_version.py [deleted file]
pp_lib/homes_admin.py [deleted file]
pp_lib/idna_xlate.py [deleted file]
pp_lib/import_pdnsdata.py [deleted file]
pp_lib/ldap_app.py [deleted file]
pp_lib/mailaddress.py [deleted file]
pp_lib/merge.py [deleted file]
pp_lib/mk_home_app.py [deleted file]
pp_lib/obj.py [deleted file]
pp_lib/pdns_app.py [deleted file]
pp_lib/pdns_list_zones.py [deleted file]
pp_lib/pdns_migrate_ns.py [deleted file]
pp_lib/pdns_record.py [deleted file]
pp_lib/pdns_show_zone.py [deleted file]
pp_lib/pdns_zone.py [deleted file]
pp_lib/pidfile.py [deleted file]
pp_lib/quota_check.py [deleted file]
pp_lib/rec_dict.py [deleted file]
pp_lib/test_home_app.py [deleted file]
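
The commit message describes a pure relocation of the package from pp_lib/ to lib/pp_lib/. Scripts that previously imported pp_lib relative to the repository root will therefore need the new lib/ directory on sys.path. The following entry-point sketch is only an illustration of that adjustment under this assumption; none of the consuming scripts are part of this diff, and the path handling shown here is hypothetical.

# Hypothetical entry script at the repository root (not part of this commit).
# It puts the relocated lib/ directory in front of sys.path before importing
# anything from the moved package.
import os
import sys

base_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.join(base_dir, 'lib')
if os.path.isdir(lib_dir) and lib_dir not in sys.path:
    sys.path.insert(0, lib_dir)

from pp_lib.app import PpApplication     # noqa: E402 - resolves to lib/pp_lib/app.py now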

diff --git a/lib/pp_lib/__init__.py b/lib/pp_lib/__init__.py
new file mode 100644 (file)
index 0000000..99e1da9
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/env python3
+# -*- coding: utf-8 -*-
+
+__version__ = '0.1.5'
+
+# vim: ts=4 et list
diff --git a/lib/pp_lib/app.py b/lib/pp_lib/app.py
new file mode 100644 (file)
index 0000000..755c8dc
--- /dev/null
@@ -0,0 +1,809 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import sys
+import os
+import logging
+import re
+import traceback
+
+# Third party modules
+import argparse
+
+# Own modules
+from .errors import FunctionNotImplementedError, PpAppError
+
+from .common import terminal_can_colors
+from .common import caller_search_path
+
+from .colored import ColoredFormatter, colorstr
+
+from .obj import PpBaseObject
+
+__version__ = '0.3.6'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpApplication(PpBaseObject):
+    """
+    Class for the application objects.
+    """
+
+    re_prefix = re.compile(r'^[a-z0-9][a-z0-9_]*$', re.IGNORECASE)
+    re_anum = re.compile(r'[^A-Z0-9_]+', re.IGNORECASE)
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=False, usage=None, description=None,
+            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None):
+
+        self.arg_parser = None
+        """
+        @ivar: argparser object to parse commandline parameters
+        @type: argparse.ArgumentParser
+        """
+
+        self.args = None
+        """
+        @ivar: an object containing all commandline parameters
+               after parsing them
+        @type: Namespace
+        """
+
+        self._exit_value = 0
+        """
+        @ivar: return value of the application for exiting with sys.exit().
+        @type: int
+        """
+
+        self._usage = usage
+        """
+        @ivar: usage text used on argparse
+        @type: str
+        """
+
+        self._description = description
+        """
+        @ivar: a short text describing the application
+        @type: str
+        """
+
+        self._argparse_epilog = argparse_epilog
+        """
+        @ivar: an epilog displayed at the end of the argparse help screen
+        @type: str
+        """
+
+        self._argparse_prefix_chars = argparse_prefix_chars
+        """
+        @ivar: The set of characters that prefix optional arguments.
+        @type: str
+        """
+
+        self._terminal_has_colors = False
+        """
+        @ivar: flag indicating whether the current terminal understands ANSI color codes
+        @type: bool
+        """
+
+        self._quiet = False
+        self._force = False
+        self._simulate = False
+
+        self.env = {}
+        """
+        @ivar: a dictionary with all application specific environment variables;
+               they are detected by means of the env_prefix property of this
+               object, and their names are transformed by removing the
+               env_prefix before their values are stored in self.env.
+        @type: dict
+        """
+
+        self._env_prefix = None
+        """
+        @ivar: a prefix used to detect application specific environment variables
+               and to assign their transformed names and values to self.env
+        @type: str
+        """
+
+        super(PpApplication, self).__init__(
+            appname=appname,
+            verbose=verbose,
+            version=version,
+            base_dir=base_dir,
+            initialized=False,
+        )
+
+        if env_prefix:
+            ep = str(env_prefix).strip()
+            if not ep:
+                msg = "Invalid env_prefix {!r} given - it may not be empty.".format(env_prefix)
+                raise PpAppError(msg)
+            match = self.re_prefix.search(ep)
+            if not match:
+                msg = (
+                    "Invalid characters found in env_prefix {!r}, only "
+                    "alphanumeric characters and digits and underscore "
+                    "(this not as the first character) are allowed.").format(env_prefix)
+                raise PpAppError(msg)
+            self._env_prefix = ep
+        else:
+            ep = self.appname.upper() + '_'
+            self._env_prefix = self.re_anum.sub('_', ep)
+
+        self._init_arg_parser()
+        self._perform_arg_parser()
+
+        self._init_env()
+        self._perform_env()
+
+    # -----------------------------------------------------------
+    @property
+    def exit_value(self):
+        """The return value of the application for exiting with sys.exit()."""
+        return self._exit_value
+
+    @exit_value.setter
+    def exit_value(self, value):
+        v = int(value)
+        if v >= 0:
+            self._exit_value = v
+        else:
+            LOG.warn("Wrong exit_value {!r}, must be >= 0".format(value))
+
+    # -----------------------------------------------------------
+    @property
+    def exitvalue(self):
+        """The return value of the application for exiting with sys.exit()."""
+        return self._exit_value
+
+    @exitvalue.setter
+    def exitvalue(self, value):
+        self.exit_value = value
+
+    # -----------------------------------------------------------
+    @property
+    def usage(self):
+        """The usage text used on argparse."""
+        return self._usage
+
+    # -----------------------------------------------------------
+    @property
+    def description(self):
+        """A short text describing the application."""
+        return self._description
+
+    # -----------------------------------------------------------
+    @property
+    def argparse_epilog(self):
+        """An epilog displayed at the end of the argparse help screen."""
+        return self._argparse_epilog
+
+    # -----------------------------------------------------------
+    @property
+    def argparse_prefix_chars(self):
+        """The set of characters that prefix optional arguments."""
+        return self._argparse_prefix_chars
+
+    # -----------------------------------------------------------
+    @property
+    def terminal_has_colors(self):
+        """A flag, that the current terminal understands color ANSI codes."""
+        return self._terminal_has_colors
+
+    # -----------------------------------------------------------
+    @property
+    def env_prefix(self):
+        """A prefix for environment variables to detect them."""
+        return self._env_prefix
+
+    # -----------------------------------------------------------
+    @property
+    def usage_term(self):
+        """The localized version of 'usage: '"""
+        return 'Usage: '
+
+    # -----------------------------------------------------------
+    @property
+    def usage_term_len(self):
+        """The length of the localized version of 'usage: '"""
+        return len(self.usage_term)
+
+    # -----------------------------------------------------------
+    @property
+    def quiet(self):
+        """Quiet execution of the application,
+            only warnings and errors are emitted."""
+        return self._quiet
+
+    @quiet.setter
+    def quiet(self, value):
+        self._quiet = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def force(self):
+        """Forced execution of the application."""
+        return self._force
+
+    @force.setter
+    def force(self, value):
+        self._force = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def simulate(self):
+        """Simulation mode, nothing is really done."""
+        return self._simulate
+
+    @simulate.setter
+    def simulate(self, value):
+        self._simulate = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def show_force_opt(self):
+        """Flag, whether the command line option '--force' should be shown."""
+        return getattr(self, '_show_force_opt', False)
+
+    @show_force_opt.setter
+    def show_force_opt(self, value):
+        self._show_force_opt = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def show_simulate_opt(self):
+        """Flag, whether the command line option '--simulate' should be shown."""
+        return getattr(self, '_show_simulate_opt', False)
+
+    @show_simulate_opt.setter
+    def show_simulate_opt(self, value):
+        self._show_simulate_opt = bool(value)
+
+    # -------------------------------------------------------------------------
+    def exit(self, retval=-1, msg=None, trace=False):
+        """
+        Universal method to call sys.exit(). If fake_exit is set, a
+        FakeExitError exception is raised instead (useful for unittests.)
+
+        @param retval: the return value to give back to the operating system
+        @type retval: int
+        @param msg: a last message, which should be emitted before exit.
+        @type msg: str
+        @param trace: flag to output a stack trace before exiting
+        @type trace: bool
+
+        @return: None
+
+        """
+
+        retval = int(retval)
+        trace = bool(trace)
+
+        root_logger = logging.getLogger()
+        has_handlers = False
+        if root_logger.handlers:
+            has_handlers = True
+
+        if msg:
+            if has_handlers:
+                if retval:
+                    LOG.error(msg)
+                else:
+                    LOG.info(msg)
+            if not has_handlers:
+                if hasattr(sys.stderr, 'buffer'):
+                    sys.stderr.buffer.write(str(msg) + "\n")
+                else:
+                    sys.stderr.write(str(msg) + "\n")
+
+        if trace:
+            if has_handlers:
+                if retval:
+                    LOG.error(traceback.format_exc())
+                else:
+                    LOG.info(traceback.format_exc())
+            else:
+                traceback.print_exc()
+
+        sys.exit(retval)
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PpApplication, self).as_dict(short=short)
+        res['exit_value'] = self.exit_value
+        res['usage'] = self.usage
+        res['quiet'] = self.quiet
+        res['force'] = self.force
+        res['simulate'] = self.simulate
+        res['description'] = self.description
+        res['argparse_epilog'] = self.argparse_epilog
+        res['argparse_prefix_chars'] = self.argparse_prefix_chars
+        res['terminal_has_colors'] = self.terminal_has_colors
+        res['env_prefix'] = self.env_prefix
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_logging(self):
+        """
+        Initialize the logger object.
+        It creates a colored log handler with all output to STDERR.
+        May be overridden in descendant classes.
+
+        @return: None
+        """
+
+        log_level = logging.INFO
+        if self.verbose:
+            log_level = logging.DEBUG
+        elif self.quiet:
+            log_level = logging.WARNING
+
+        root_logger = logging.getLogger()
+        root_logger.setLevel(log_level)
+
+        # create formatter
+        format_str = ''
+        if self.verbose > 1:
+            format_str = '[%(asctime)s]: '
+        format_str += self.appname + ': '
+        if self.verbose:
+            if self.verbose > 1:
+                format_str += '%(name)s(%(lineno)d) %(funcName)s() '
+            else:
+                format_str += '%(name)s '
+        format_str += '%(levelname)s - %(message)s'
+        formatter = None
+        if self.terminal_has_colors:
+            formatter = ColoredFormatter(format_str)
+        else:
+            formatter = logging.Formatter(format_str)
+
+        # create log handler for console output
+        lh_console = logging.StreamHandler(sys.stderr)
+        lh_console.setLevel(log_level)
+        lh_console.setFormatter(formatter)
+
+        root_logger.addHandler(lh_console)
+
+        return
+
+    # -------------------------------------------------------------------------
+    def terminal_can_color(self):
+        """
+        Method to detect whether the current terminal (stdout and stderr)
+        is able to perform ANSI color sequences.
+
+        @return: both stdout and stderr can perform ANSI color sequences
+        @rtype: bool
+
+        """
+
+        term_debug = False
+        if self.verbose > 3:
+            term_debug = True
+        return terminal_can_colors(debug=term_debug)
+
+    # -------------------------------------------------------------------------
+    def post_init(self):
+        """
+        Method to execute before calling run(). Here some finishing actions
+        can be done after reading in command line parameters,
+        configuration and so on.
+
+        This method may be overridden by descendant classes; those
+        methods should always include a call to post_init() of the
+        parent class.
+
+        """
+
+        self.perform_arg_parser()
+        self.init_logging()
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        Could be overridden by descendant classes.
+
+        """
+
+        if self.simulate:
+            LOG.warn("Simulation mode - nothing is really done.")
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """
+        Dummy function as main routine.
+
+        MUST be overridden by descendant classes.
+
+        """
+
+        raise FunctionNotImplementedError('_run', self.__class__.__name__)
+
+    # -------------------------------------------------------------------------
+    def __call__(self):
+        """
+        Helper method to make the resulting object callable, e.g.::
+
+            app = PpApplication(...)
+            app()
+
+        @return: None
+
+        """
+
+        self.run()
+
+    # -------------------------------------------------------------------------
+    def run(self):
+        """
+        The visible start point of this object.
+
+        @return: None
+
+        """
+
+        LOG.debug("Executing {} ...".format(self.__class__.__name__))
+
+        if not self.initialized:
+            self.handle_error(
+                "The application is not completely initialized.", '', True)
+            self.exit(9)
+
+        try:
+            self.pre_run()
+        except Exception as e:
+            self.handle_error(str(e), e.__class__.__name__, True)
+            self.exit(98)
+
+        if not self.initialized:
+            raise PpAppError(
+                "Object {!r} seems not to be completely initialized.".format(
+                    self.__class__.__name__))
+
+        try:
+            self._run()
+        except Exception as e:
+            self.handle_error(str(e), e.__class__.__name__, True)
+            self.exit_value = 99
+
+        if self.verbose > 1:
+            LOG.info("Ending.")
+
+        try:
+            self.post_run()
+        except Exception as e:
+            self.handle_error(str(e), e.__class__.__name__, True)
+            self.exit_value = 97
+
+        self.exit(self.exit_value)
+
+    # -------------------------------------------------------------------------
+    def post_run(self):
+        """
+        Dummy function to run after the main routine.
+        Could be overridden by descendant classes.
+
+        """
+
+        if self.verbose > 1:
+            LOG.info("executing post_run() ...")
+
+    # -------------------------------------------------------------------------
+    def _init_arg_parser(self):
+        """
+        Locally called method to initiate the argument parser.
+
+        @raise PpAppError: on some errors
+
+        """
+
+        self.arg_parser = argparse.ArgumentParser(
+            prog=self.appname,
+            description=self.description,
+            usage=self.usage,
+            epilog=self.argparse_epilog,
+            prefix_chars=self.argparse_prefix_chars,
+            add_help=False,
+        )
+
+        self.init_arg_parser()
+
+        general_group = self.arg_parser.add_argument_group('General options')
+        general_group.add_argument(
+            '--color',
+            action="store",
+            dest='color',
+            const='yes',
+            default='auto',
+            nargs='?',
+            choices=['yes', 'no', 'auto'],
+            help="Use colored output for messages.",
+        )
+
+        verbose_group = general_group.add_mutually_exclusive_group()
+
+        verbose_group.add_argument(
+            "-v", "--verbose",
+            action="count",
+            dest='verbose',
+            help='Increase the verbosity level',
+        )
+
+        verbose_group.add_argument(
+            "-q", "--quiet",
+            action="store_true",
+            dest='quiet',
+            help='Silent execution, only warnings and errors are emitted.',
+        )
+
+        if self.show_force_opt:
+            general_group.add_argument(
+                "-f", "--force",
+                action="store_true", dest="force",
+                help="Forced execution of this application",
+            )
+
+        if self.show_simulate_opt:
+            help_msg = getattr(self, '_simulate_opt_help', None)
+            if not help_msg or str(help_msg) == '':
+                help_msg = "Simulation af all actions, nothing is really done."
+            general_group.add_argument(
+                "-s", "--simulate",
+                action="store_true", dest="simulate", help=help_msg,
+            )
+
+        general_group.add_argument(
+            "-h", "--help",
+            action='help',
+            dest='help',
+            help='Show this help message and exit'
+        )
+        general_group.add_argument(
+            "--usage",
+            action='store_true',
+            dest='usage',
+            help="Display brief usage message and exit"
+        )
+        general_group.add_argument(
+            "-V", '--version',
+            action='version',
+            version='Version of %(prog)s: {}'.format(self.version),
+            help="Show program's version number and exit"
+        )
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Publicly available method to initiate the argument parser.
+
+        Note::
+             avoid adding the general options '--verbose', '--help', '--usage'
+             and '--version'. These options are always added after executing
+             this method.
+
+        Descendant classes may override this method.
+
+        """
+
+        pass
+
+    # -------------------------------------------------------------------------
+    def _perform_arg_parser(self):
+        """
+        Underlying method for parsing arguments.
+        """
+
+        self.args = self.arg_parser.parse_args()
+
+        if self.args.usage:
+            self.arg_parser.print_usage(sys.stdout)
+            self.exit(0)
+
+        if self.args.verbose is not None and self.args.verbose > self.verbose:
+            self.verbose = self.args.verbose
+
+        if self.args.quiet:
+            self.quiet = self.args.quiet
+
+        if self.args.color == 'yes':
+            self._terminal_has_colors = True
+        elif self.args.color == 'no':
+            self._terminal_has_colors = False
+        else:
+            self._terminal_has_colors = self.terminal_can_color()
+
+        if getattr(self.args, 'force', False):
+            self.force = True
+
+        if getattr(self.args, 'simulate', False):
+            self.simulate = True
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+
+        Descendant classes may override this method.
+        """
+
+        pass
+
+    # -------------------------------------------------------------------------
+    def _init_env(self):
+        """
+        Initialization of self.env by application specific environment
+        variables.
+
+        It calls self.init_env() after it has done its job.
+
+        """
+
+        for (key, value) in list(os.environ.items()):
+
+            if not key.startswith(self.env_prefix):
+                continue
+
+            newkey = key.replace(self.env_prefix, '', 1)
+            self.env[newkey] = value
+
+        self.init_env()
+
+    # -------------------------------------------------------------------------
+    def init_env(self):
+        """
+        Publicly available method to initialize self.env in addition to the
+        implicit initialization done by this module.
+        It can be used to import environment variables whose names do not
+        start with self.env_prefix.
+
+        Currently a dummy method, which can be overridden by descendant classes.
+
+        """
+
+        pass
+
+    # -------------------------------------------------------------------------
+    def _perform_env(self):
+        """
+        Method to do some useful things with the found environment.
+
+        It calls self.perform_env() after it has done its job.
+
+        """
+
+        # try to detect verbosity level from environment
+        if 'VERBOSE' in self.env and self.env['VERBOSE']:
+            v = 0
+            try:
+                v = int(self.env['VERBOSE'])
+            except ValueError:
+                v = 1
+            if v > self.verbose:
+                self.verbose = v
+
+        self.perform_env()
+
+    # -------------------------------------------------------------------------
+    def perform_env(self):
+        """
+        Publicly available method to process the found environment variables
+        after the initialization of self.env.
+
+        Currently a dummy method, which can be overridden by descendant classes.
+
+        """
+
+        pass
+
+    # -------------------------------------------------------------------------
+    def colored(self, msg, color):
+        """
+        Wrapper function to colorize the message. Depending on whether the
+        current terminal can display ANSI colors, the message is colorized or not.
+
+        @param msg: The message to colorize
+        @type msg: str
+        @param color: The color to use, must be one of the keys of COLOR_CODE
+        @type color: str
+
+        @return: the colorized message
+        @rtype: str
+
+        """
+
+        if not self.terminal_has_colors:
+            return msg
+        return colorstr(msg, color)
+
+    # -------------------------------------------------------------------------
+    def get_command(self, cmd, quiet=False):
+        """
+        Searches the OS search path for the given command and returns the
+        normalized path of this command.
+        If the command is given as an absolute path, it checks the existence
+        of this command.
+
+        @param cmd: the command to search
+        @type cmd: str
+        @param quiet: No warning message, if the command could not be found,
+                      only a debug message
+        @type quiet: bool
+
+        @return: normalized complete path of this command, or None,
+                 if not found
+        @rtype: str or None
+
+        """
+
+        if self.verbose > 2:
+            LOG.debug("Searching for command {!r} ...".format(cmd))
+
+        # Checking an absolute path
+        if os.path.isabs(cmd):
+            if not os.path.exists(cmd):
+                LOG.warning("Command {!r} doesn't exists.".format(cmd))
+                return None
+            if not os.access(cmd, os.X_OK):
+                msg = "Command {!r} is not executable.".format(cmd)
+                LOG.warning(msg)
+                return None
+            return os.path.normpath(cmd)
+
+        # Checking a relative path
+        for d in caller_search_path():
+            if self.verbose > 3:
+                LOG.debug("Searching command in {!r} ...".format(d))
+            p = os.path.join(d, cmd)
+            if os.path.exists(p):
+                if self.verbose > 2:
+                    LOG.debug("Found {!r} ...".format(p))
+                if os.access(p, os.X_OK):
+                    return os.path.normpath(p)
+                else:
+                    LOG.debug("Command {!r} is not executable.".format(p))
+
+        # command not found, sorry
+        if quiet:
+            if self.verbose > 2:
+                LOG.debug("Command {!r} not found.".format(cmd))
+        else:
+            LOG.warning("Command {!r} not found.".format(cmd))
+
+        return None
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
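
PpApplication is designed as an abstract base: _run() raises FunctionNotImplementedError unless a descendant overrides it, init_arg_parser() can add application-specific options before the general ones are appended, post_init() wires up logging and marks the object as initialized, and __call__() delegates to run(). As a reading aid only, a minimal hypothetical descendant (class name and option are invented, not part of this commit) might look like this:

# Minimal sketch of a PpApplication descendant (assumption, not from the repository).
from pp_lib.app import PpApplication


class HelloApp(PpApplication):

    def init_arg_parser(self):
        # Application specific options; --verbose, --quiet, --help, --usage and
        # --version are appended afterwards by _init_arg_parser() of the base class.
        self.arg_parser.add_argument(
            '--name', dest='name', default='world',
            help="Whom to greet (default: %(default)r).")

    def _run(self):
        # The mandatory main routine; run() of the base class wraps it with
        # pre_run(), post_run(), error handling and the final sys.exit().
        print("Hello, {}!".format(self.args.name))


if __name__ == '__main__':
    app = HelloApp(appname='hello', description="Greeting demo")
    app.post_init()   # performs perform_arg_parser(), init_logging() and sets initialized
    app()             # __call__() delegates to run()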
diff --git a/lib/pp_lib/barracuda_sync_app.py b/lib/pp_lib/barracuda_sync_app.py
new file mode 100644 (file)
index 0000000..6ed67f1
--- /dev/null
@@ -0,0 +1,498 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the barracuda-sync application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import re
+import textwrap
+import copy
+import shlex
+
+# Third party modules
+import six
+
+from ldap3 import ObjectDef
+from ldap3 import BASE, LEVEL, SUBTREE                              # noqa
+
+# Own modules
+from .common import pp
+
+from .ldap_app import PpLdapAppError, PpLdapApplication
+
+from .mailaddress import MailAddress
+
+__version__ = '0.4.4'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpBarracudaSyncError(PpLdapAppError):
+    pass
+
+
+# =============================================================================
+class PpBarracudaSyncApp(PpLdapApplication):
+    """Class for the 'barracuda-sync' application to ensure a synchronisation
+        of all existing aliases and virtual aliases in Postfix with the
+        LDAP entries used by Barracuda to ensure the existence of aliases.
+    """
+
+    default_barracuda_base_dn = 'ou=barracuda,ou=Applications,o=Pixelpark,o=isp'
+    postfix_config_dir = os.sep + os.path.join('etc', 'postfix')
+    postfix_maps_dir = os.path.join(postfix_config_dir, 'maps')
+
+    default_virtaliases_files = [
+        os.path.join(postfix_maps_dir, 'virtual-aliases'),
+    ]
+
+    default_ignore_aliases = [
+        'root',
+    ]
+
+    default_origin = 'pixelpark.com'
+
+    re_virtaliases_line = re.compile(r'^([^#\s:]+)\s', re.MULTILINE)
+
+    open_args = {}
+    if six.PY3:
+        open_args = {
+            'encoding': 'utf-8',
+            'errors': 'surrogateescape',
+        }
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.barracuda_base_dn = self.default_barracuda_base_dn
+        self.virtaliases_files = copy.copy(self.default_virtaliases_files)
+        self.origin = self.default_origin
+        self.ignore_aliases = copy.copy(self.default_ignore_aliases)
+
+        self.existing_aliases = []
+        self.ldap_aliases = []
+        self.aliases_to_create = []
+        self.aliases_to_remove = []
+        self.ignore_aliases_res = []
+
+        self._show_simulate_opt = True
+
+        description = textwrap.dedent('''\
+            Synchronization of existing virtual aliases
+            with alias definitions in LDAP for Barracuda.
+            ''').strip()
+
+        super(PpBarracudaSyncApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='barracuda-sync'
+        )
+
+        self._check_virtaliases_files()
+        self._init_ignore_aliases_res()
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PpBarracudaSyncApp, self).as_dict(short=short)
+        res['default_barracuda_base_dn'] = self.default_barracuda_base_dn
+        res['postfix_config_dir'] = self.postfix_config_dir
+        res['postfix_maps_dir'] = self.postfix_maps_dir
+        res['default_virtaliases_files'] = self.default_virtaliases_files
+        res['default_origin'] = self.default_origin
+        res['open_args'] = self.open_args
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        self.arg_parser.add_argument(
+            '-P', '--postfix-dir',
+            metavar="DIR", dest='postfix_dir',
+            help="Configuration directory for Postfix (default: {!r}).".format(
+                self.postfix_config_dir)
+        )
+
+        super(PpBarracudaSyncApp, self).init_arg_parser()
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpBarracudaSyncApp, self).perform_config()
+
+        virtaliases_files = None
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 2:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            if section_name.lower() not in ('barracuda-sync', 'barracuda_sync', 'barracudasync'):
+                continue
+
+            section = self.cfg[section_name]
+            if self.verbose > 2:
+                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                    n=section_name, s=pp(section)))
+
+            if 'postfix_dir' in section:
+                self._init_postfix_dir(section['postfix_dir'])
+
+            if 'virtaliases_files' in section:
+                virtaliases_files = self._cfg_virtaliases_files(
+                    section['virtaliases_files'], virtaliases_files)
+
+            if 'ignore_aliases' in section:
+                self._cfg_ignore_aliases(section['ignore_aliases'])
+
+            if 'base_dn' in section:
+                v = section['base_dn'].strip()
+                if v:
+                    self.barracuda_base_dn = v
+
+            if 'origin' in section:
+                v = section['origin'].strip()
+                if v:
+                    self.origin = v
+
+        if hasattr(self.args, 'postfix_dir') and self.args.postfix_dir:
+            self._init_postfix_dir(self.args.postfix_dir)
+
+        if not os.path.isdir(self.postfix_config_dir):
+            LOG.error("Postfix directory {!r} does not exists or is not a directory.".format(
+                self.postfix_config_dir))
+            self.exit(1)
+
+        if not os.path.isdir(self.postfix_maps_dir):
+            LOG.error("Postfix maps directory {!r} does not exists or is not a directory.".format(
+                self.postfix_maps_dir))
+            self.exit(1)
+
+        self._init_virtaliases_files(virtaliases_files)
+
+    # -------------------------------------------------------------------------
+    def _cfg_virtaliases_files(self, value, virtaliases_files):
+
+        ret = None
+        if virtaliases_files is not None:
+            ret = copy.copy(virtaliases_files)
+
+        files = shlex.split(value)
+        if files:
+            if ret is None:
+                ret = []
+            for f in files:
+                if f not in ret:
+                    ret.append(f)
+
+        return ret
+
+    # -------------------------------------------------------------------------
+    def _cfg_ignore_aliases(self, value):
+
+        aliases = shlex.split(value)
+        if aliases:
+            for alias in aliases:
+                if alias.startswith('-'):
+                    alias = alias[1:]
+                    if alias == '':
+                        continue
+                    if alias in self.ignore_aliases:
+                        self.ignore_aliases.remove(alias)
+                elif alias not in self.ignore_aliases:
+                    self.ignore_aliases.append(alias)
+
+    # -------------------------------------------------------------------------
+    def _init_virtaliases_files(self, virtaliases_files):
+
+        self.virtaliases_files = copy.copy(self.default_virtaliases_files)
+        if virtaliases_files is None:
+            return
+
+        self.virtaliases_files = []
+        for afile in virtaliases_files:
+            if not os.path.isabs(afile):
+                afile = os.path.join(self.postfix_config_dir, afile)
+            afile = os.path.normpath(afile)
+            if afile not in self.virtaliases_files:
+                self.virtaliases_files.append(afile)
+
+    # -------------------------------------------------------------------------
+    def _check_virtaliases_files(self):
+
+        ok = True
+        for afile in self.virtaliases_files:
+
+            if not os.path.exists(afile):
+                LOG.error("Virtual aliases file {!r} does not exists.".format(afile))
+                ok = False
+                continue
+
+            if not os.path.isfile(afile):
+                LOG.error("Virtual aliases file {!r} is not a regular file.".format(afile))
+                ok = False
+                continue
+
+            if not os.access(afile, os.R_OK):
+                LOG.error("No read access to virtual aliases file {!r}.".format(afile))
+                ok = False
+                continue
+
+        if not ok:
+            self.exit(1)
+
+    # -------------------------------------------------------------------------
+    def _init_postfix_dir(self, value):
+
+        if os.path.isdir(value):
+            d = os.path.abspath(value)
+            self.postfix_config_dir = d
+            self.postfix_maps_dir = os.path.join(d, 'maps')
+            self.default_aliases_files = [
+                os.path.join(self.postfix_maps_dir, 'aliases'),
+            ]
+            self.default_virtaliases_files = [
+                os.path.join(self.postfix_maps_dir, 'virtual-aliases'),
+            ]
+        else:
+            LOG.warn("Postfix directory {!r} does not exists or is not a directory.".format(
+                value))
+
+    # -------------------------------------------------------------------------
+    def _init_ignore_aliases_res(self):
+
+        LOG.debug("Preparing regexes for aliases to ignore ...")
+        self.ignore_aliases_res = []
+
+        for alias in self.ignore_aliases:
+
+            a = alias.strip()
+            if a == '':
+                continue
+            pattern = r'^' + alias
+            if not MailAddress.valid_address(alias):
+                pattern += r'(?:@(?:.*\.)?' + re.escape(self.origin) + r')?'
+            pattern += r'\s*$'
+            regex = re.compile(pattern, re.IGNORECASE)
+            self.ignore_aliases_res.append(regex)
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        Could be overridden by descendant classes.
+
+        """
+
+        super(PpBarracudaSyncApp, self).pre_run()
+
+        self._check_ldap_barracuda_container()
+
+    # -------------------------------------------------------------------------
+    def _check_ldap_barracuda_container(self):
+
+        LOG.debug("Checking existence of Baracuda LDAP container {!r}.".format(
+            self.barracuda_base_dn))
+        query = '(objectclass=organizationalunit)'
+
+        self.ldap_connection.search(
+            search_base=self.barracuda_base_dn, search_filter=query,
+            search_scope=BASE, attributes='*')
+
+        LOG.debug("Found {} entries.".format(len(self.ldap_connection.response)))
+        if len(self.ldap_connection.response) < 1:
+            LOG.error((
+                "Did not found LDAP container {!r} for "
+                "Barracuda alias definitions.").format(
+                self.barracuda_base_dn))
+            self.exit(5)
+
+        entry = self.ldap_connection.response[0]
+        if self.verbose > 1:
+            LOG.debug("Container entry - class {cl!r}, content:\n{co}".format(
+                cl=entry.__class__.__name__, co=pp(entry)))
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        LOG.info("Starting ...")
+
+        self.read_virtaliases_files()
+        self.existing_aliases.sort(key=str.lower)
+        LOG.info("Found {} existing aliases.".format(len(self.existing_aliases)))
+        if self.verbose > 1:
+            LOG.debug("Existing aliases:\n{}".format(pp(self.existing_aliases)))
+
+        self.read_ldap_aliases()
+        self.eval_diffs()
+
+        self.add_failing_ldap_entries()
+        self.remove_unnecessary_aliases()
+
+        LOG.info("Fertsch!")
+
+    # -------------------------------------------------------------------------
+    def read_virtaliases_files(self):
+
+        LOG.info("Reading all virtual aliases files ...")
+        for afile in self.virtaliases_files:
+            if self.verbose > 1:
+                LOG.debug("Checking for virtaliases file {!r} ...".format(afile))
+            if not os.path.isfile(afile):
+                continue
+            content = ''
+            LOG.debug("Reading virtaliases file {!r} ...".format(afile))
+            with open(afile, 'r', **self.open_args) as fh:
+                content = fh.read()
+            aliases = self.re_virtaliases_line.findall(content)
+            for alias in aliases:
+                do_add = True
+                for regex in self.ignore_aliases_res:
+                    if regex.search(alias):
+                        do_add = False
+                        break
+                if not do_add:
+                    continue
+                if alias not in self.existing_aliases:
+                    if self.verbose > 2:
+                        LOG.debug("Registring existing alias {!r}.".format(alias))
+                    self.existing_aliases.append(alias)
+
+    # -------------------------------------------------------------------------
+    def read_ldap_aliases(self):
+
+        LOG.info("Reading all aliases from LDAP ...")
+
+        alias = ObjectDef(['mailRecipient'])
+        alias += ['cn', 'mail']
+
+        query_filter = '(&(objectclass=mailRecipient)(mail=*))'
+
+        entries = self.ldap_search_subtree(alias, query_filter, base=self.barracuda_base_dn)
+
+        for entry in entries:
+            dn = entry.entry_dn
+            cn = entry['cn'][0]
+            mail = entry['mail'][0]
+            if self.verbose > 3:
+                LOG.debug("Found LDAP alias, DN: {dn!r}, CN: {cn!r}, Mail: {m!r}.".format(
+                    dn=dn, cn=cn, m=mail))
+
+            if not cn:
+                continue
+            if cn not in self.ldap_aliases:
+                if self.verbose > 2:
+                    LOG.debug("Registring LDAP alias {!r}.".format(cn))
+                self.ldap_aliases.append(cn)
+
+        self.ldap_aliases.sort(key=str.lower)
+        LOG.info("Found {} LDAP aliases.".format(len(self.ldap_aliases)))
+        if self.verbose > 1:
+            LOG.debug("LDAP aliases:\n{}".format(pp(self.ldap_aliases)))
+
+    # -------------------------------------------------------------------------
+    def eval_diffs(self):
+
+        LOG.info("Evaluating differences ...")
+
+        self.aliases_to_create = []
+        self.aliases_to_remove = []
+
+        for alias in self.existing_aliases:
+            if alias not in self.ldap_aliases and alias not in self.aliases_to_create:
+                self.aliases_to_create.append(alias)
+
+        for alias in self.ldap_aliases:
+            if alias not in self.existing_aliases and alias not in self.aliases_to_remove:
+                self.aliases_to_remove.append(alias)
+
+        LOG.info("Aliases to create in LDAP:\n{}".format(pp(self.aliases_to_create)))
+        LOG.info("Aliases to remove from LDAP:\n{}".format(pp(self.aliases_to_remove)))
+
+    # -------------------------------------------------------------------------
+    def add_failing_ldap_entries(self):
+
+        LOG.info("Adding failing LDAP aliases ...")
+
+        for alias in self.aliases_to_create:
+
+            mail = alias
+            if not MailAddress.valid_address(alias):
+                mail += '@' + self.origin
+
+            dn = 'cn=' + alias + ',' + self.barracuda_base_dn
+            object_class = ["top", "mailRecipient"]
+            attributes = {
+                'mail': mail,
+            }
+            LOG.info("Creating LDAP alias {a!r} => {dn!r}.".format(a=alias, dn=dn))
+            LOG.debug("Object-Classes: {}".format(pp(object_class)))
+            LOG.debug("Attributes: {}".format(pp(attributes)))
+            if not self.simulate:
+                self.ldap_connection.add(dn, object_class, attributes)
+                LOG.debug("Result: {}".format(self.ldap_connection.result))
+
+    # -------------------------------------------------------------------------
+    def remove_unnecessary_aliases(self):
+
+        LOG.info("Removing unnecessary LDAP aliases ...")
+
+        attributes = ['cn', 'mail']
+
+        for alias in self.aliases_to_remove:
+
+            query = '(&(objectclass=mailRecipient)(cn=' + alias + '))'
+            LOG.debug("Searching for entry with CN {!r}.".format(alias))
+            self.ldap_connection.search(
+                search_base=self.barracuda_base_dn,
+                search_filter=query,
+                search_scope=LEVEL,
+                attributes=attributes)
+
+            LOG.debug("Found {} entries.".format(len(self.ldap_connection.response)))
+
+            if not self.ldap_connection.response:
+                LOG.error("No LDAP entry found for CN {!r}.".format(alias))
+                continue
+            entry = self.ldap_connection.response[0]
+            dn = entry['dn']
+
+            LOG.info("Removing LDAP entry {!r} ...".format(dn))
+            if not self.simulate:
+                self.ldap_connection.delete(dn)
+                LOG.debug("Result: {}".format(self.ldap_connection.result))
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
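
The alias names that barracuda_sync_app.py later compares against LDAP are taken from the left-hand side of Postfix virtual-aliases maps via the class attribute re_virtaliases_line. A small self-contained sketch (the sample map content below is invented for illustration) shows what that regular expression extracts:

# Standalone illustration of re_virtaliases_line from PpBarracudaSyncApp.
import re

re_virtaliases_line = re.compile(r'^([^#\s:]+)\s', re.MULTILINE)

content = (
    "# local address          target\n"
    "webmaster@example.com     hostmaster@example.com\n"
    "info                      frank.brehm@pixelpark.com\n"
)

# Comment lines are skipped; only the alias on the left-hand side is captured.
print(re_virtaliases_line.findall(content))
# -> ['webmaster@example.com', 'info']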
diff --git a/lib/pp_lib/cfg_app.py b/lib/pp_lib/cfg_app.py
new file mode 100644 (file)
index 0000000..6efbe10
--- /dev/null
@@ -0,0 +1,939 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the application object with support
+          for configuration files.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import re
+import copy
+import json
+import socket
+import pwd
+import pipes
+import codecs
+import ipaddress
+
+from subprocess import Popen, PIPE
+
+from email.mime.text import MIMEText
+from email import charset
+
+import smtplib
+
+# Third party modules
+import six
+
+from six import StringIO
+from six.moves import configparser
+
+from configparser import Error as ConfigParseError
+
+# Own modules
+from .global_version import __version__ as __global_version__
+
+from .errors import PpAppError
+
+from .common import pp, to_bool, RE_DOT_AT_END
+
+from .merge import merge_structure
+
+from .mailaddress import MailAddress
+
+from .app import PpApplication
+
+__version__ = '0.7.1'
+LOG = logging.getLogger(__name__)
+
+VALID_MAIL_METHODS = ('smtp', 'sendmail')
+
+
+# =============================================================================
+class PpCfgAppError(PpAppError):
+    """Base error class for all exceptions happened during
+    execution this configured application"""
+
+    pass
+
+
+# =============================================================================
+class PpConfigApplication(PpApplication):
+    """
+    Class for configured application objects.
+    """
+
+    default_mail_recipients = [
+        'frank.brehm@pixelpark.com'
+    ]
+    default_mail_cc = [
+        'thomas.kotschok@pixelpark.com',
+    ]
+
+    default_reply_to = 'frank.brehm@pixelpark.com'
+
+    default_mail_server = 'mx.pixelpark.net'
+
+    current_user_name = pwd.getpwuid(os.getuid()).pw_name
+    current_user_gecos = pwd.getpwuid(os.getuid()).pw_gecos
+    default_mail_from = MailAddress(current_user_name, socket.getfqdn())
+
+    whitespace_re = re.compile(r'(?:[,;]+|\s*[,;]*\s+)+')
+
+    charset.add_charset('utf-8', charset.SHORTEST, charset.QP)
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=None, usage=None, description=None,
+            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
+            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False):
+
+        self.cfg_encoding = cfg_encoding
+        self._need_config_file = bool(need_config_file)
+
+        self.cfg = {}
+
+        self._cfg_dir = None
+        self.cfg_stems = []
+        self.cfg_files = []
+        self.log_cfg_files = []
+
+        self.mail_recipients = copy.copy(self.default_mail_recipients)
+        self.mail_from = '{n} <{m}>'.format(
+            n=self.current_user_gecos, m=self.default_mail_from)
+        self.mail_cc = copy.copy(self.default_mail_cc)
+        self.reply_to = self.default_reply_to
+        self.mail_method = 'smtp'
+        self.mail_server = self.default_mail_server
+        self.smtp_port = 25
+        self._config_has_errors = None
+
+        super(PpConfigApplication, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
+            initialized=False, usage=usage, description=description,
+            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
+            env_prefix=env_prefix,
+        )
+
+        if cfg_dir is None:
+            self._cfg_dir = 'pixelpark'
+        else:
+            d = str(cfg_dir).strip()
+            if d == '':
+                self._cfg_dir = None
+            else:
+                self._cfg_dir = d
+
+        if cfg_stems:
+            if isinstance(cfg_stems, list):
+                for stem in cfg_stems:
+                    s = str(stem).strip()
+                    if not s:
+                        msg = "Invalid configuration stem {!r} given.".format(stem)
+                        raise PpCfgAppError(msg)
+                    self.cfg_stems.append(s)
+            else:
+                s = str(cfg_stems).strip()
+                if not s:
+                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
+                    raise PpCfgAppError(msg)
+                self.cfg_stems.append(s)
+        else:
+            self.cfg_stems = [self.appname]
+
+        self._init_cfgfiles()
+
+        enc = getattr(self.args, 'cfg_encoding', None)
+        if enc:
+            self.cfg_encoding = enc
+
+        self.perform_arg_parser()
+        self.init_logging()
+
+        self._read_config()
+        self._perform_config()
+
+        self._init_log_cfgfiles()
+        self.reinit_logging()
+
+    # -----------------------------------------------------------
+    @property
+    def need_config_file(self):
+        """
+        hide command line parameter --default-config and
+        don't execute generation of default config
+        """
+        return getattr(self, '_need_config_file', False)
+
+    # -----------------------------------------------------------
+    @property
+    def cfg_encoding(self):
+        """The encoding character set of the configuration files."""
+        return self._cfg_encoding
+
+    @cfg_encoding.setter
+    def cfg_encoding(self, value):
+        try:
+            codec = codecs.lookup(value)
+        except Exception as e:
+            msg = "{c} on setting encoding {v!r}: {e}".format(
+                c=e.__class__.__name__, v=value, e=e)
+            LOG.error(msg)
+        else:
+            self._cfg_encoding = codec.name
+
+    # -----------------------------------------------------------
+    @property
+    def config_has_errors(self):
+        """A flag, showing, that there are errors in configuration."""
+        return self._config_has_errors
+
+    @config_has_errors.setter
+    def config_has_errors(self, value):
+        if value is None:
+            self._config_has_errors = None
+        else:
+            self._config_has_errors = to_bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def cfg_dir(self):
+        """The directory containing the configuration files."""
+        return self._cfg_dir
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PpConfigApplication, self).as_dict(short=short)
+        res['need_config_file'] = self.need_config_file
+        res['cfg_encoding'] = self.cfg_encoding
+        res['cfg_dir'] = self.cfg_dir
+        res['config_has_errors'] = self.config_has_errors
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        mail_group = self.arg_parser.add_argument_group('Mailing options')
+
+        mail_group.add_argument(
+            '--recipients', '--mail-recipients',
+            metavar="ADDRESS", nargs='+', dest="mail_recipients",
+            help="Mail addresses of all recipients for mails generated by this script."
+        )
+
+        mail_group.add_argument(
+            '--cc', '--mail-cc',
+            metavar="ADDRESS", nargs='*', dest="mail_cc",
+            help="Mail addresses of all CC recipients for mails generated by this script."
+        )
+
+        mail_group.add_argument(
+            '--reply-to', '--mail-reply-to',
+            metavar="ADDRESS", dest="mail_reply_to",
+            help="Reply mail address for mails generated by this script."
+        )
+
+        mail_group.add_argument(
+            '--mail-method',
+            metavar="METHOD", choices=VALID_MAIL_METHODS, dest="mail_method",
+            help=(
+                "Method for sending the mails generated by this script. "
+                "Valid values: {v}, default: {d!r}.".format(
+                    v=', '.join(map(lambda x: repr(x), VALID_MAIL_METHODS)),
+                    d=self.mail_method))
+        )
+
+        mail_group.add_argument(
+            '--mail-server',
+            metavar="SERVER", dest="mail_server",
+            help=(
+                "Mail server for submitting generated by this script if "
+                "the mail method of this script is 'smtp'. Default: {!r}.").format(
+                self.mail_server)
+        )
+
+        mail_group.add_argument(
+            '--smtp-port',
+            metavar="PORT", type=int, dest='smtp_port',
+            help=(
+                "The port to use for submitting generated by this script if "
+                "the mail method of this script is 'smtp'. Default: {}.".format(self.smtp_port))
+        )
+
+        cfgfile_group = self.arg_parser.add_argument_group('Config file options')
+
+        cfgfile_group.add_argument(
+            "-C", "--cfgfile", "--cfg-file", "--config",
+            metavar="FILE", nargs='+', dest="cfg_file",
+            help="Configuration files to use additional to the standard configuration files.",
+        )
+
+        cfgfile_group.add_argument(
+            "--log-cfgfile",
+            metavar="FILE", dest="log_cfgfile",
+            help=(
+                "Configuration file for logging in JSON format. "
+                "See https://docs.python.org/3/library/logging.config.html"
+                "#logging-config-dictschema how the structures has to be defined.")
+        )
+
+        cfgfile_group.add_argument(
+            "--cfg-encoding",
+            metavar="ENCODING", dest="cfg_encoding", default=self.cfg_encoding,
+            help=(
+                "The encoding character set of the configuration files "
+                "(default: %(default)r)."),
+        )
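+        # For illustration only: a call of a descendant script using these
+        # options could look like this (script name, addresses and server
+        # are invented examples):
+        #
+        #   ./some-pp-script.py -C /tmp/extra.ini --cfg-encoding utf-8 \
+        #       --recipients alice@example.com bob@example.com \
+        #       --mail-method smtp --mail-server mail.example.com --smtp-port 587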
+
+    # -------------------------------------------------------------------------
+    def _init_cfgfiles(self):
+        """Method to generate the self.cfg_files list."""
+
+        self.cfg_files = []
+
+        cfg_basenames = []
+        for stem in self.cfg_stems:
+            cfg_basename = '{}.ini'.format(stem)
+            cfg_basenames.append(cfg_basename)
+
+        # add /etc/app/app.ini or $VIRTUAL_ENV/etc/app/app.ini
+        etc_dir = os.sep + 'etc'
+        if 'VIRTUAL_ENV' in os.environ:
+            etc_dir = os.path.join(os.environ['VIRTUAL_ENV'], 'etc')
+        for cfg_basename in cfg_basenames:
+            syscfg_fn = None
+            if self.cfg_dir:
+                syscfg_fn = os.path.join(etc_dir, self.cfg_dir, cfg_basename)
+            else:
+                syscfg_fn = os.path.join(etc_dir, cfg_basename)
+            self.cfg_files.append(syscfg_fn)
+
+        # add <WORKDIR>/etc/app.ini
+        mod_dir = os.path.dirname(__file__)
+        work_dir = os.path.abspath(os.path.join(mod_dir, '..'))
+        work_etc_dir = os.path.join(work_dir, 'etc')
+        if self.verbose > 1:
+            LOG.debug("Searching for {!r} ...".format(work_etc_dir))
+        for cfg_basename in cfg_basenames:
+            self.cfg_files.append(os.path.join(work_etc_dir, cfg_basename))
+
+        # add $HOME/.config/app.ini
+        usercfg_fn = None
+        user_cfg_dir = os.path.expanduser('~/.config')
+        if user_cfg_dir:
+            if self.cfg_dir:
+                user_cfg_dir = os.path.join(user_cfg_dir, self.cfg_dir)
+            if self.verbose > 1:
+                LOG.debug("user_cfg_dir: {!r}".format(user_cfg_dir))
+            for cfg_basename in cfg_basenames:
+                usercfg_fn = os.path.join(user_cfg_dir, cfg_basename)
+                self.cfg_files.append(usercfg_fn)
+
+        # add config files given on the command line with --cfg-file
+        cmdline_cfg = getattr(self.args, 'cfg_file', None)
+        if cmdline_cfg:
+            for usercfg_fn in cmdline_cfg:
+                self.cfg_files.append(usercfg_fn)
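+        # For a hypothetical cfg_dir 'myapp' and cfg_stems ['myapp'], the list
+        # built above would contain, in this order:
+        #
+        #   /etc/myapp/myapp.ini  (or $VIRTUAL_ENV/etc/myapp/myapp.ini)
+        #   <WORKDIR>/etc/myapp.ini
+        #   ~/.config/myapp/myapp.ini
+        #   any files given with --cfg-file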
+
+    # -------------------------------------------------------------------------
+    def _init_log_cfgfiles(self):
+        """Method to generate the self.log_cfg_files list."""
+
+        self.log_cfg_files = []
+
+        cfg_basename = 'logging.json'
+
+        # add /etc/app/logging.json or $VIRTUAL_ENV/etc/app/logging.json
+        etc_dir = os.sep + 'etc'
+        if 'VIRTUAL_ENV' in os.environ:
+            etc_dir = os.path.join(os.environ['VIRTUAL_ENV'], 'etc')
+        syscfg_fn = None
+        if self.cfg_dir:
+            syscfg_fn = os.path.join(etc_dir, self.cfg_dir, cfg_basename)
+        else:
+            syscfg_fn = os.path.join(etc_dir, cfg_basename)
+        self.log_cfg_files.append(syscfg_fn)
+
+        # add <WORKDIR>/etc/logging.json
+        mod_dir = os.path.dirname(__file__)
+        work_dir = os.path.abspath(os.path.join(mod_dir, '..'))
+        work_etc_dir = os.path.join(work_dir, 'etc')
+        if self.verbose > 1:
+            LOG.debug("Searching for {!r} ...".format(work_etc_dir))
+        self.log_cfg_files.append(os.path.join(work_etc_dir, cfg_basename))
+
+        # add $HOME/.config/logging.json
+        usercfg_fn = None
+        user_cfg_dir = os.path.expanduser('~/.config')
+        if user_cfg_dir:
+            if self.cfg_dir:
+                user_cfg_dir = os.path.join(user_cfg_dir, self.cfg_dir)
+            if self.verbose > 1:
+                LOG.debug("user_cfg_dir: {!r}".format(user_cfg_dir))
+            usercfg_fn = os.path.join(user_cfg_dir, cfg_basename)
+            self.log_cfg_files.append(usercfg_fn)
+
+        # add a config file given on the command line with --log-cfgfile
+        cmdline_cfg = getattr(self.args, 'log_cfgfile', None)
+        if cmdline_cfg:
+            self.log_cfg_files.append(cmdline_cfg)
+
+        if self.verbose > 1:
+            LOG.debug("Log config files:\n{}".format(pp(self.log_cfg_files)))
+
+    # -------------------------------------------------------------------------
+    def _init_logging_from_jsonfile(self):
+
+        open_opts = {}
+        if six.PY3:
+            open_opts['encoding'] = 'utf-8'
+            open_opts['errors'] = 'surrogateescape'
+
+        found = False
+        for cfg_file in reversed(self.log_cfg_files):
+
+            if self.verbose > 1:
+                LOG.debug("Searching for {!r} ...".format(cfg_file))
+
+            if not os.path.exists(cfg_file):
+                continue
+            if not os.path.isfile(cfg_file):
+                continue
+            if not os.access(cfg_file, os.R_OK):
+                msg = "No read access to {!r}.".format(cfg_file)
+                self.handle_error(msg, "File error")
+                continue
+
+            log_cfg = None
+            if self.verbose > 1:
+                LOG.debug("Reading and evaluating {!r} ...".format(cfg_file))
+            with open(cfg_file, 'r', **open_opts) as fh:
+                try:
+                    log_cfg = json.load(fh)
+                except (ValueError, TypeError) as e:
+                    msg = "Wrong file {!r} - ".format(cfg_file) + str(e)
+                    self.handle_error(msg, e.__class__.__name__)
+                    continue
+            if self.verbose:
+                if 'root' in log_cfg:
+                    log_cfg['root']['level'] = 'DEBUG'
+                if 'handlers' in log_cfg:
+                    for handler_name in log_cfg['handlers'].keys():
+                        handler = log_cfg['handlers'][handler_name]
+                        handler['level'] = 'DEBUG'
+            if self.verbose > 1:
+                LOG.debug("Evaluated configuration from JSON:\n{} ...".format(pp(log_cfg)))
+            try:
+                logging.config.dictConfig(log_cfg)
+            except Exception as e:
+                msg = "Wrong file {!r} - ".format(cfg_file) + str(e)
+                self.handle_error(msg, e.__class__.__name__)
+                continue
+            found = True
+            break
+
+        return found
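+        # A minimal logging.json accepted by the loop above could look like
+        # this (handler and formatter names are invented examples):
+        #
+        #   {
+        #     "version": 1,
+        #     "formatters": {"simple": {"format": "%(levelname)s - %(message)s"}},
+        #     "handlers": {"console": {"class": "logging.StreamHandler",
+        #                              "formatter": "simple", "level": "INFO"}},
+        #     "root": {"level": "INFO", "handlers": ["console"]}
+        #   }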
+
+    # -------------------------------------------------------------------------
+    def reinit_logging(self):
+        """
+        Re-initialize the logger object.
+        It creates a colored loghandler with all output to STDERR.
+        May be overridden in descendant classes.
+
+        @return: None
+        """
+
+        root_logger = logging.getLogger()
+
+        if self._init_logging_from_jsonfile():
+            if self.verbose:
+                root_logger.setLevel(logging.DEBUG)
+            return
+
+        return
+
+    # -------------------------------------------------------------------------
+    def _read_config(self):
+
+        if self.verbose > 2:
+            LOG.debug("Reading config files with character set {!r} ...".format(
+                self.cfg_encoding))
+        self._config_has_errors = None
+
+        open_opts = {}
+        if six.PY3 and self.cfg_encoding:
+            open_opts['encoding'] = self.cfg_encoding
+            open_opts['errors'] = 'surrogateescape'
+
+        for cfg_file in self.cfg_files:
+            if self.verbose > 2:
+                LOG.debug("Searching for {!r} ...".format(cfg_file))
+            if not os.path.isfile(cfg_file):
+                if self.verbose > 3:
+                    LOG.debug("Config file {!r} not found.".format(cfg_file))
+                continue
+            if self.verbose > 1:
+                LOG.debug("Reading {!r} ...".format(cfg_file))
+
+            config = configparser.ConfigParser()
+            try:
+                with open(cfg_file, 'r', **open_opts) as fh:
+                    stream = StringIO("[default]\n" + fh.read())
+                    if six.PY2:
+                        config.readfp(stream)
+                    else:
+                        config.read_file(stream)
+            except ConfigParseError as e:
+                msg = "Wrong configuration in {!r} found: ".format(cfg_file)
+                msg += str(e)
+                self.handle_error(msg, "Configuration error")
+                continue
+
+            cfg = {}
+            for section in config.sections():
+                if section not in cfg:
+                    cfg[section] = {}
+                for (key, value) in config.items(section):
+                    k = key.lower()
+                    cfg[section][k] = value
+            if self.verbose > 2:
+                LOG.debug("Evaluated config from {f!r}:\n{c}".format(
+                    f=cfg_file, c=pp(cfg)))
+            self.cfg = merge_structure(self.cfg, cfg)
+
+        if self.verbose > 1:
+            LOG.debug("Evaluated config total:\n{}".format(pp(self.cfg)))
+
+    # -------------------------------------------------------------------------
+    def _perform_config(self):
+        """Execute some actions after reading the configuration."""
+
+        for section_name in self.cfg.keys():
+
+            section = self.cfg[section_name]
+
+            if section_name.lower() == 'general':
+                self._perform_config_general(section, section_name)
+                continue
+
+            if section_name.lower() == 'mail':
+                self._perform_config_mail(section, section_name)
+                continue
+
+        self.perform_config()
+
+        self._perform_mail_cmdline_options()
+
+        if self.config_has_errors:
+            LOG.error("There are errors in configuration.")
+            self.exit(1)
+        else:
+            LOG.debug("There are no errors in configuration.")
+            self.config_has_errors = False
+
+    # -------------------------------------------------------------------------
+    def _perform_config_general(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'verbose' in section:
+            v = section['verbose']
+            if to_bool(v):
+                try:
+                    v = int(v)
+                except (ValueError, TypeError):
+                    v = 1
+                if v > self.verbose:
+                    self.verbose = v
+                root_logger = logging.getLogger()
+                root_logger.setLevel(logging.DEBUG)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        self._perform_config_mail_rcpt(section, section_name)
+        self._perform_config_mail_cc(section, section_name)
+        self._perform_config_mail_reply_to(section, section_name)
+        self._perform_config_mail_method(section, section_name)
+        self._perform_config_mail_server(section, section_name)
+        self._perform_config_smtp_port(section, section_name)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail_rcpt(self, section, section_name):
+
+        if 'mail_recipients' not in section:
+            return
+
+        v = section['mail_recipients'].strip()
+        self.mail_recipients = []
+        if v:
+            tokens = self.whitespace_re.split(v)
+            for token in tokens:
+                if MailAddress.valid_address(token):
+                    if token not in self.mail_recipients:
+                        self.mail_recipients.append(token)
+                else:
+                    msg = (
+                        "Found invalid recipient mail address {!r} "
+                        "in configuration.").format(
+                        token)
+                    LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail_cc(self, section, section_name):
+
+        if 'mail_cc' not in section:
+            return
+
+        v = section['mail_cc'].strip()
+        self.mail_cc = []
+        if v:
+            tokens = self.whitespace_re.split(v)
+            if self.verbose > 1:
+                LOG.debug("CC addresses:\n{}".format(pp(tokens)))
+            for token in tokens:
+                if MailAddress.valid_address(token):
+                    if token not in self.mail_cc:
+                        self.mail_cc.append(token)
+                else:
+                    msg = "Found invalid cc mail address {!r} in configuration.".format(
+                        token)
+                    LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail_reply_to(self, section, section_name):
+
+        if 'reply_to' not in section:
+            return
+
+        v = section['reply_to'].strip()
+        self.reply_to = None
+        if v:
+            tokens = self.whitespace_re.split(v)
+            if len(tokens):
+                if MailAddress.valid_address(tokens[0]):
+                    self.reply_to = tokens[0]
+                else:
+                    msg = "Found invalid reply mail address {!r} in configuration.".format(
+                        tokens[0])
+                    LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail_method(self, section, section_name):
+
+        if 'mail_method' not in section:
+            return
+
+        v = section['mail_method'].strip().lower()
+        if v:
+            if v in VALID_MAIL_METHODS:
+                self.mail_method = v
+            else:
+                msg = "Found invalid mail method {!r} in configuration.".format(
+                    section['mail_method'])
+                LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_config_mail_server(self, section, section_name):
+
+        if 'mail_server' not in section:
+            return
+
+        v = section['mail_server'].strip()
+        if v:
+            self.mail_server = v
+
+    # -------------------------------------------------------------------------
+    def _perform_config_smtp_port(self, section, section_name):
+
+        if 'smtp_port' not in section:
+            return
+
+        v = section['smtp_port']
+        port = self.smtp_port
+        try:
+            port = int(v)
+        except (ValueError, TypeError):
+            msg = "Found invalid SMTP port number {!r} in configuration.".format(v)
+            LOG.error(msg)
+        else:
+            if port <= 0:
+                msg = "Found invalid SMTP port number {!r} in configuration.".format(port)
+                LOG.error(msg)
+            else:
+                self.smtp_port = port
+
+    # -------------------------------------------------------------------------
+    def _perform_mail_cmdline_options(self):
+
+        self._perform_cmdline_mail_rcpt()
+        self._perform_cmdline_mail_cc()
+        self._perform_cmdline_reply_to()
+
+        v = getattr(self.args, 'mail_method', None)
+        if v:
+            self.mail_method = v
+
+        v = getattr(self.args, 'mail_server', None)
+        if v:
+            self.mail_server = v
+
+        v = getattr(self.args, 'smtp_port', None)
+        if v is not None:
+            if v <= 0:
+                msg = "Got invalid SMTP port number {!r}.".format(v)
+                LOG.error(msg)
+            else:
+                self.smtp_port = v
+
+    # -------------------------------------------------------------------------
+    def _perform_cmdline_mail_rcpt(self):
+
+        v = getattr(self.args, 'mail_recipients', None)
+        if v is not None:
+            self.mail_recipients = []
+            for addr in v:
+                tokens = self.whitespace_re.split(addr)
+                for token in tokens:
+                    if MailAddress.valid_address(token):
+                        if token not in self.mail_recipients:
+                            self.mail_recipients.append(token)
+                    else:
+                        msg = "Got invalid recipient mail address {!r}.".format(token)
+                        LOG.error(msg)
+        if not self.mail_recipients:
+            msg = "Did not found any valid recipient mail addresses."
+            LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_cmdline_mail_cc(self):
+
+        v = getattr(self.args, 'mail_cc', None)
+        if v is None:
+            return
+
+        self.mail_cc = []
+        for addr in v:
+            tokens = self.whitespace_re.split(addr)
+            for token in tokens:
+                if MailAddress.valid_address(token):
+                    if token not in self.mail_cc:
+                        self.mail_cc.append(token)
+                else:
+                    msg = "Got invalid CC mail address {!r}.".format(token)
+                    LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def _perform_cmdline_reply_to(self):
+
+        v = getattr(self.args, 'mail_reply_to', None)
+        if not v:
+            return
+
+        tokens = self.whitespace_re.split(v)
+        if len(tokens):
+            if MailAddress.valid_address(tokens[0]):
+                self.reply_to = tokens[0]
+            else:
+                msg = "Got invalid reply mail address {!r}.".format(
+                    tokens[0])
+                LOG.error(msg)
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+        """
+        Execute some actions after reading the configuration.
+
+        This method should be explicitly called by all perform_config()
+        methods in descendant classes.
+        """
+
+        pass
+
+    # -------------------------------------------------------------------------
+    def send_mail(self, subject, body):
+
+        xmailer = "{a} (Admin Tools version {v})".format(
+            a=self.appname, v=__global_version__)
+
+        mail = MIMEText(body, 'plain', 'utf-8')
+        mail['Subject'] = subject
+        mail['From'] = self.mail_from
+        mail['To'] = ', '.join(self.mail_recipients)
+        mail['Reply-To'] = self.reply_to
+        mail['X-Mailer'] = xmailer
+        if self.mail_cc:
+            mail['Cc'] = ', '.join(self.mail_cc)
+
+        if self.verbose > 1:
+            LOG.debug("Mail to send:\n{}".format(mail.as_string(unixfrom=True)))
+
+        if self.mail_method == 'smtp':
+            self._send_mail_smtp(mail)
+        else:
+            self._send_mail_sendmail(mail)
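+        # A hypothetical call from a descendant application class:
+        #
+        #   self.send_mail('Quota report', 'All quotas are within their limits.')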
+
+    # -------------------------------------------------------------------------
+    def _send_mail_smtp(self, mail):
+
+        with smtplib.SMTP(self.mail_server, self.smtp_port) as smtp:
+            if self.verbose > 2:
+                smtp.set_debuglevel(2)
+            elif self.verbose > 1:
+                smtp.set_debuglevel(1)
+
+            smtp.send_message(mail)
+
+    # -------------------------------------------------------------------------
+    def _send_mail_sendmail(self, mail):
+
+        # Searching for the location of sendmail ...
+        paths = (
+            '/usr/sbin/sendmail',
+            '/usr/lib/sendmail',
+        )
+        sendmail = None
+        for path in paths:
+            if os.path.isfile(path) and os.access(path, os.X_OK):
+                sendmail = path
+                break
+
+        if not sendmail:
+            msg = "Did not found sendmail executable."
+            LOG.error(msg)
+            return
+
+        cmd = [sendmail, "-t", "-oi"]
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        p = Popen(cmd, stdin=PIPE, universal_newlines=True)
+        p.communicate(mail.as_string())
+
+    # -------------------------------------------------------------------------
+    def post_init(self):
+        """
+        Method to execute before calling run(). Here some finishing actions
+        can be done after reading in command line parameters, the
+        configuration and so on.
+
+        This method may be overridden by descendant classes; these
+        methods should always include a call to post_init() of the
+        parent class.
+
+        """
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def is_local_domain(self, domain):
+
+        zone_name = RE_DOT_AT_END.sub('', domain)
+
+        if self.verbose > 1:
+            LOG.debug("Checking, whether {!r} is a local zone.".format(zone_name))
+
+        tld = zone_name.split('.')[-1]
+        if tld in ('intern', 'internal', 'local', 'localdomain', 'lokal'):
+            LOG.debug("Zone {!r} has a local TLD {!r}.".format(zone_name, tld))
+            return True
+
+        zone_base = zone_name.split('.')[0]
+        if zone_base in ('intern', 'internal', 'local', 'localdomain', 'lokal'):
+            LOG.debug("Zone {!r} has a local base {!r}.".format(zone_name, tld))
+            return True
+
+        if tld != 'arpa':
+            if self.verbose > 2:
+                LOG.debug("Zone {!r} has a public TLD {!r}.".format(zone_name, tld))
+            return False
+
+        if zone_name.endswith('.in-addr.arpa'):
+            tupels = []
+            for tupel in reversed(zone_name.replace('.in-addr.arpa', '').split('.')):
+                tupels.append(tupel)
+            if self.verbose > 2:
+                LOG.debug("Got IPv4 tupels from zone {!r}: {}".format(zone_name, pp(tupels)))
+            bitmask = None
+            if len(tupels) == 1:
+                bitmask = 8
+                tupels.append('0')
+                tupels.append('0')
+                tupels.append('0')
+            elif len(tupels) == 2:
+                tupels.append('0')
+                tupels.append('0')
+                bitmask = 16
+            elif len(tupels) == 3:
+                bitmask = 24
+                tupels.append('0')
+            else:
+                LOG.warn("Could not interprete reverse IPv4 zone {!r}.".format(zone_name))
+                return False
+            net_address = '.'.join(tupels) + '/{}'.format(bitmask)
+            if self.verbose > 2:
+                LOG.debug(
+                    "Got IPv4 network address of zone {!r}: {!r}.".format(
+                        zone_name, net_address))
+            network = ipaddress.ip_network(net_address)
+            if network.is_global:
+                if self.verbose > 1:
+                    LOG.debug(
+                        "The network {!r} of zone {!r} is allocated for public networks.".format(
+                            net_address, zone_name))
+                return False
+            LOG.debug("The network {!r} of zone {!r} is allocated for local networks.".format(
+                net_address, zone_name))
+            return True
+
+        if self.verbose > 2:
+            LOG.debug(
+                "Zone {!r} seems to be a reverse zone for a public network.".format(zone_name))
+        return False
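+        # Illustrative behaviour (the zone names are invented examples):
+        #
+        #   self.is_local_domain('intranet.local.')      -> True   (local TLD)
+        #   self.is_local_domain('10.10.in-addr.arpa.')  -> True   (RFC 1918 network)
+        #   self.is_local_domain('example.com.')         -> False  (public TLD)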
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/check_puppet_env_app.py b/lib/pp_lib/check_puppet_env_app.py
new file mode 100644 (file)
index 0000000..8025c20
--- /dev/null
@@ -0,0 +1,769 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the check-puppet-env application
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import re
+import copy
+import json
+import socket
+import pwd
+import sys
+import glob
+import datetime
+import warnings
+
+# Third party modules
+import six
+import yaml
+import requests
+
+from six import StringIO
+from six.moves import configparser
+
+from configparser import Error as ConfigParseError
+
+# Own modules
+from .global_version import __version__ as __global_version__
+
+from .errors import PpAppError
+
+from .common import pp, to_bool, RE_DOT_AT_END
+
+from .merge import merge_structure
+
+from .app import PpApplication
+
+__version__ = '0.6.2'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class CheckPuppetEnvError(PpAppError):
+    """Base error class for all exceptions happened during
+    execution this application"""
+
+    pass
+
+
+# =============================================================================
+class CheckPuppetEnvApp(PpApplication):
+    """
+    Class for the check-puppet-env application objects.
+    """
+
+    default_puppet_root_env_dir = os.sep + os.path.join('etc', 'puppetlabs', 'code', 'environments')
+
+    open_args = {}
+    if six.PY3:
+        open_args = {
+            'encoding': 'utf-8',
+            'errors': 'surrogateescape',
+        }
+
+    dev_null = os.sep + os.path.join('dev', 'null')
+
+    default_forge_uri = 'https://forgeapi.puppet.com/v3/modules'
+    default_http_timeout = 30
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=None, usage=None, description=None,
+            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
+            puppet_root_env_dir=None, out_dir=None,
+            ):
+
+        self.puppet_root_env_dir = puppet_root_env_dir
+        if not self.puppet_root_env_dir:
+            self.puppet_root_env_dir = self.default_puppet_root_env_dir
+        self.out_dir = None
+        self.environments = []
+        self.env_name = None
+        self.env_dir = None
+        self.modules_root_dir = None
+        self.modules = {}
+        self.dependencies = []
+        self.rev_dep = {}
+        self.no_write = False
+        self.forge_uri = self.default_forge_uri
+        self.http_timeout = self.default_http_timeout
+
+        super(CheckPuppetEnvApp, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
+            initialized=False, usage=usage, description=description,
+            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
+            env_prefix=env_prefix,
+        )
+
+        self.initialized = False
+
+        if out_dir:
+            self.out_dir = out_dir
+        else:
+            self.out_dir = os.path.join(self.base_dir, 'tmp')
+
+        self.post_init()
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(CheckPuppetEnvApp, self).as_dict(short=short)
+        res['dev_null'] = self.dev_null
+        res['open_args'] = self.open_args
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+
+        This method should be explicitely called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        self.arg_parser.add_argument(
+            '-D', '--env-dir', metavar="DIRECTORY", dest="env_dir",
+            help="Parent directory of all puppet environments, default: {!r}".format(
+                self.puppet_root_env_dir)
+        )
+
+        self.arg_parser.add_argument(
+            '-E', '--env', '--environment',
+            dest="env", required=True, metavar="ENVIRONMENT",
+            help="The Puppet environment to analyze."
+        )
+
+        self.arg_parser.add_argument(
+            '-O', '--out', '--output-dir',
+            metavar="DIRECTORY", dest="out_dir",
+            help="Output directory of all analyzing results, default: {!r}".format(
+                os.path.join(self.base_dir, 'tmp'))
+        )
+
+        self.arg_parser.add_argument(
+            '-N', '--no-write', action="store_true", dest="no_write",
+            help="Do not generate output files.",
+        )
+
+        self.arg_parser.add_argument(
+            '--forge-uri', metavar="URL", dest='forge_uri',
+            help="URL of the Puppetforge-API-Server, default: {!r}".format(
+                self.default_forge_uri)
+        )
+
+        self.arg_parser.add_argument(
+            '-T', '--timeout', '--http-timeout',
+            metavar="SECS", dest='http_timeout', type=int,
+            help=(
+                "Timeout for requesting current version of a module from Puppetforge, "
+                "default: {} seconds.").format(self.default_http_timeout)
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+
+        if self.args.no_write:
+            self.no_write = True
+
+        puppet_root_env_dir = self.puppet_root_env_dir
+        retval = 5
+
+        if self.args.env_dir:
+            puppet_root_env_dir = self.args.env_dir
+            retval = 0
+
+        if not os.path.exists(puppet_root_env_dir):
+            msg = (
+                self.appname + ': ' +
+                "Puppet environment directory {!r} does not exists.".format(puppet_root_env_dir))
+            sys.stderr.write(msg + '\n\n')
+            self.exit(retval)
+
+        if not os.path.isdir(puppet_root_env_dir):
+            msg = (
+                self.appname + ': ' +
+                "Path for Puppet environment directory {!r} is not a directory.".format(
+                    puppet_root_env_dir))
+            sys.stderr.write(msg + '\n\n')
+            self.exit(retval)
+
+        self.puppet_root_env_dir = puppet_root_env_dir
+
+        if self.args.forge_uri:
+            self.forge_uri = self.args.forge_uri
+        if self.args.http_timeout:
+            self.http_timeout = self.args.http_timeout
+
+        self._init_puppet_environments()
+        self.env_name = self.args.env
+        self.env_dir = os.path.join(self.puppet_root_env_dir, self.env_name)
+
+        if not os.path.exists(self.env_dir):
+            msg = (
+                self.appname + ': ' +
+                "Invalid Puppet environment {e!r} - directory {d!r} does not exists.".format(
+                    e=self.env_name, d=self.env_dir))
+            sys.stderr.write(msg + '\n\n')
+            msg = "Valid environments are:\n"
+            for env in self.environments:
+                msg += "  * {}\n".format(env)
+            sys.stderr.write(msg + '\n')
+            self.arg_parser.print_usage(sys.stdout)
+            self.exit(0)
+
+        if not os.path.isdir(self.env_dir):
+            msg = (
+                self.appname + ': ' +
+                "Invalid Puppet environment {e!r} - path {d!r} is not a directory.".format(
+                    e=self.env_name, d=self.env_dir))
+            sys.stderr.write(msg + '\n\n')
+            msg = "Valid environments are:\n"
+            for env in self.environments:
+                msg += "  * {}\n".format(env)
+            sys.stderr.write(msg + '\n')
+            self.exit(retval)
+
+        out_dir = self.out_dir
+        retval = 6
+
+        if self.args.out_dir:
+            out_dir = self.args.out_dir
+            retval = 0
+
+        if not os.path.exists(out_dir):
+            msg = (
+                self.appname + ': ' +
+                "Output directory {!r} does not exists.".format(out_dir))
+            sys.stderr.write(msg + '\n\n')
+            self.exit(retval)
+
+        if not os.path.isdir(out_dir):
+            msg = (
+                self.appname + ': ' +
+                "Path for Output directory {!r} is not a directory.".format(
+                    out_dir))
+            sys.stderr.write(msg + '\n\n')
+            self.exit(retval)
+
+        self.out_dir = out_dir
+
+    # -------------------------------------------------------------------------
+    def _init_puppet_environments(self):
+
+        pattern = os.path.join(self.puppet_root_env_dir, '*')
+        dirs = glob.glob(pattern)
+
+        self.environments = []
+        for path in dirs:
+            if os.path.isdir(path):
+                env = os.path.basename(path)
+                self.environments.append(env)
+
+        self.environments.sort(key=str.lower)
+
+    # -------------------------------------------------------------------------
+    def post_init(self):
+        """
+        Method to execute before calling run(). Here some finishing actions
+        can be done after reading in command line parameters, the
+        configuration and so on.
+
+        This method may be overridden by descendant classes; these
+        methods should always include a call to post_init() of the
+        parent class.
+
+        """
+
+        self.perform_arg_parser()
+        self.init_logging()
+        self.modules_root_dir = os.path.join(self.env_dir, 'modules')
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """
+        Main application routine.
+        """
+
+        self.collect_modules()
+        self.print_modules()
+        self.verify_dependencies()
+        self.write_modinfo_yaml()
+        self.print_not_depended()
+        self.write_dependencies()
+
+    # -------------------------------------------------------------------------
+    def write_modinfo_yaml(self):
+
+        outfile_base = 'modules-info.{e}.{d}.yaml'.format(
+            e=self.env_name, d=datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S'))
+        out_file = os.path.join(self.out_dir, outfile_base)
+
+        LOG.info("Writing information about modules in {!r}...".format(out_file))
+        if self.no_write:
+            LOG.debug("Don't writing into {!r}...".format(out_file))
+            if self.verbose:
+                print()
+                print("Module information:")
+                print()
+                print('---')
+                print(yaml.dump(self.modules, width=240))
+                print()
+                return
+
+        with open(out_file, 'w', **self.open_args) as fh:
+            fh.write('---\n')
+            fh.write(yaml.dump(self.modules, width=240))
+
+    # -------------------------------------------------------------------------
+    def print_not_depended(self):
+
+        print()
+        print("Module, von denen keine anderen Module abhängen:")
+        print("================================================")
+        print()
+
+        len_base = 1
+        for b_name in self.modules.keys():
+            base_name = str(b_name)
+            if len(base_name) > len_base:
+                len_base = len(base_name)
+
+        template = '  * {{b:<{}}} -> {{n}}'.format(len_base)
+
+        for b_name in sorted(self.modules.keys(), key=str.lower):
+
+            module_info = self.modules[b_name]
+            base_name = str(b_name)
+
+            if base_name not in self.rev_dep or not self.rev_dep[base_name]:
+                print(template.format(b=base_name, n=module_info['name']))
+
+        print()
+
+    # -------------------------------------------------------------------------
+    def verify_dependencies(self):
+
+        LOG.info("Verifying dependencies ...")
+
+        mods = {}
+        self.dependencies = []
+        self.rev_dep = {}
+        re_name_split = re.compile(r'([^/_-]+)[/_-](.*)')
+        connectors = ('-', '_', '/')
+
+        for b_name in self.modules.keys():
+            module_info = self.modules[b_name]
+            base_name = str(b_name)
+            if module_info['name']:
+                mod_name = module_info['name']
+                mods[module_info['name']] = base_name
+            else:
+                LOG.warn("Did not found complete name of module {!r}.".format(base_name))
+
+        for b_name in self.modules.keys():
+            module_info = self.modules[b_name]
+            base_name = str(b_name)
+            if base_name not in self.rev_dep:
+                self.rev_dep[base_name] = []
+            if not module_info['dependencies']:
+                continue
+            if not module_info['name']:
+                LOG.debug("Did not found complete name of module {!r}.".format(base_name))
+                continue
+            if not module_info['vendor']:
+                LOG.warn("Did not found vendor of module {!r}.".format(base_name))
+            mod_name = module_info['name']
+            if self.verbose > 1:
+                LOG.debug("Checking dependencies of module {!r}...".format(mod_name))
+
+            for dep_key in module_info['dependencies'].keys():
+                dep_mod = str(dep_key)
+                if dep_mod in mods:
+                    dep = (dep_mod, mod_name)
+                    self.dependencies.append(dep)
+                    if mods[dep_mod] not in self.rev_dep:
+                        self.rev_dep[mods[dep_mod]] = []
+                    if base_name not in self.rev_dep[mods[dep_mod]]:
+                        self.rev_dep[mods[dep_mod]].append(base_name)
+                    module_info['dependencies'][dep_key]['module'] = mods[dep_mod]
+                    continue
+                if self.verbose > 2:
+                    LOG.debug("Dependency to {d!r} of module {m!r} wrong formatted.".format(
+                        d=dep_mod, m=mod_name))
+                match = re_name_split.match(dep_mod)
+                found = False
+                if match:
+                    dep_mod_vendor = match.group(1)
+                    dep_mod_base = match.group(2)
+                    for connector in connectors:
+                        dep_mod_name = dep_mod_vendor + connector + dep_mod_base
+                        if dep_mod_name in mods:
+                            dep = (dep_mod_name, mod_name)
+                            self.dependencies.append(dep)
+                            if mods[dep_mod_name] not in self.rev_dep:
+                                self.rev_dep[mods[dep_mod_name]] = []
+                            if base_name not in self.rev_dep[mods[dep_mod_name]]:
+                                self.rev_dep[mods[dep_mod_name]].append(base_name)
+                            module_info['dependencies'][dep_key]['module'] = mods[dep_mod_name]
+                            found = True
+                            break
+                if found:
+                    continue
+                LOG.warn("Did not found dependency to {d!r} of module {m!r}.".format(
+                    d=dep_mod, m=mod_name))
+
+        if self.verbose > 2:
+            LOG.debug("Found dependencies:\n{}".format(pp(self.dependencies)))
+            LOG.debug("Reverse dependencies:\n{}".format(pp(self.rev_dep)))
+
+    # -------------------------------------------------------------------------
+    def write_dependencies(self):
+
+        outfile_base = 'modules-deps.{e}.{d}.dot'.format(
+            e=self.env_name, d=datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S'))
+        out_file = os.path.join(self.out_dir, outfile_base)
+
+        LOG.info("Writing graphviz dot file about module dependecies in {!r}...".format(out_file))
+
+        header_lines = (
+            'digraph Dependencies {',
+            '',
+            '\t// Graph attributes',
+            '\tnodesep=0.7;',
+        )
+
+        def printout(fh, line):
+            if self.verbose:
+                print(line)
+            fh.write(line + '\n')
+
+        if self.no_write:
+            LOG.debug("Don't writing into {!r}...".format(out_file))
+            out_file = self.dev_null
+
+        with open(out_file, 'w', **self.open_args) as fh:
+
+            # File header
+            for line in header_lines:
+                printout(fh, line)
+
+            # Print nodes
+            line = '\n\t// Modules as nodes'
+            printout(fh, line)
+
+            for b_name in sorted(self.modules.keys(), key=str.lower):
+
+                module_info = self.modules[b_name]
+                base_name = str(b_name)
+
+                mod_name = base_name
+                if module_info['name']:
+                    mod_name = module_info['name']
+                tgt_dot_id = module_info['dot_id']
+
+                line = '\t{};'.format(tgt_dot_id)
+                printout(fh, line)
+
+            line = '\n\t// #############################\n\t// Dependencies'
+            printout(fh, line)
+
+            # Print dependencies as edges
+            for b_name in sorted(self.modules.keys(), key=str.lower):
+
+                module_info = self.modules[b_name]
+                base_name = str(b_name)
+
+                mod_name = base_name
+                if module_info['name']:
+                    mod_name = module_info['name']
+                tgt_dot_id = module_info['dot_id']
+
+                line = '\n\t// {i} ({n})'.format(i=tgt_dot_id, n=mod_name)
+                printout(fh, line)
+
+                for dep_key in module_info['dependencies'].keys():
+                    dep_mod = str(dep_key)
+                    src_module = module_info['dependencies'][dep_key]['module']
+                    if src_module in self.modules:
+                        src_dot_id = self.modules[src_module]['dot_id']
+                        line = '\t{src} -> {tgt};'.format(
+                            src=src_dot_id, tgt=tgt_dot_id)
+                        printout(fh, line)
+
+            # File footer
+            printout(fh, '\n}\n')
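+        # The resulting dot file has roughly this shape (module names are
+        # invented examples):
+        #
+        #   digraph Dependencies {
+        #
+        #       // Graph attributes
+        #       nodesep=0.7;
+        #
+        #       // Modules as nodes
+        #       puppetlabs_stdlib;
+        #       vendor_mymodule;
+        #
+        #       // Dependencies
+        #       puppetlabs_stdlib -> vendor_mymodule;
+        #   }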
+
+    # -------------------------------------------------------------------------
+    def print_modules(self):
+
+        title_base = 'Module'
+        title_name = 'Full name'
+        title_vendor = 'Vendor'
+        title_uversion = "Upstream Version"
+        title_version = 'Version'
+
+        len_base = len(title_base)
+        len_name = len(title_name)
+        len_vendor = len(title_vendor)
+        len_uversion = len(title_uversion)
+        len_version = len(title_version)
+
+        for b_name in self.modules.keys():
+            module_info = self.modules[b_name]
+            base_name = str(b_name)
+            if len(base_name) > len_base:
+                len_base = len(base_name)
+            if module_info['name']:
+                if len(module_info['name']) > len_name:
+                    len_name = len(module_info['name'])
+            if module_info['vendor']:
+                if len(module_info['vendor']) > len_vendor:
+                    len_vendor = len(module_info['vendor'])
+            if module_info['upstream_version']:
+                if len(module_info['upstream_version']) > len_uversion:
+                    len_uversion = len(module_info['upstream_version'])
+            if module_info['version']:
+                if len(module_info['version']) > len_version:
+                    len_version = len(module_info['version'])
+
+        template = ((
+            '{{base:<{lb}}}  {{name:<{ln}}}  {{vendor:<{lven}}}    '
+            '{{uversion:<{luver}}} {{version:<{lver}}}').format(
+                lb=len_base, ln=len_name, lven=len_vendor,
+                luver=len_uversion, lver=len_version))
+        len_total = len_base + len_name + len_vendor + len_uversion + len_version + 12
+        if self.verbose > 1:
+            LOG.debug("Module line template: {!r}".format(template))
+        print()
+        print(template.format(
+            base=title_base, name=title_name, vendor=title_vendor,
+            uversion=title_uversion, version=title_version))
+        print('=' * len_total)
+
+        for b_name in sorted(self.modules.keys(), key=str.lower):
+
+            module_info = self.modules[b_name]
+            base_name = str(b_name)
+
+            mod_name = '~'
+            if module_info['name']:
+                mod_name = module_info['name']
+
+            vendor_name = '~'
+            if module_info['vendor']:
+                vendor_name = module_info['vendor']
+
+            uver = '~'
+            if module_info['upstream_version']:
+                uver = module_info['upstream_version']
+
+            version = '~'
+            if module_info['version']:
+                version = module_info['version']
+
+            print(template.format(
+                base=base_name, name=mod_name, vendor=vendor_name,
+                uversion=uver, version=version))
+
+        print()
+
+    # -------------------------------------------------------------------------
+    def collect_modules(self):
+
+        LOG.info("Collecting all modules from {!r} ...".format(self.modules_root_dir))
+        self.modules = {}
+
+        if not os.path.exists(self.modules_root_dir):
+            LOG.error("Directory {!r} does not exists.".format(self.modules_root_dir))
+            self.exit(7)
+
+        if not os.path.isdir(self.modules_root_dir):
+            LOG.error("Path {!r} is not a directory".format(self.modules_root_dir))
+            self.exit(7)
+
+        pattern = os.path.join(self.modules_root_dir, '*')
+        if self.verbose > 2:
+            LOG.debug("Globbing pattern for module directories: {!r}".format(pattern))
+        for module_dir in glob.glob(pattern):
+            module_info = self.get_module_info(module_dir)
+            if module_info:
+                base_name = module_info['base_name']
+                self.modules[base_name] = module_info
+                upstream_version = self.get_upstream_version(module_info)
+                self.modules[base_name]['upstream_version'] = upstream_version
+                if not self.verbose:
+                    if upstream_version:
+                        print('.', end='', flush=True)
+                    else:
+                        print('~', end='', flush=True)
+                else:
+                    print('!', end='', flush=True)
+        if not self.verbose:
+            print()
+
+        if self.verbose > 2:
+            LOG.debug("Found module information:\n{}".format(pp(self.modules)))
+
+    # -------------------------------------------------------------------------
+    def get_upstream_version(self, module_info):
+
+        version = None
+
+        url = "{url}/{user}-{module}".format(
+            url=self.forge_uri, user=module_info['vendor'], module=module_info['base_name'])
+
+        LOG.debug((
+            "Trying to get current version of module {user}-{module} from Puppet forge.").format(
+                user=module_info['vendor'], module=module_info['base_name']))
+        if self.verbose > 2:
+            LOG.debug("URL to request: {}".format(url))
+
+        session = requests.Session()
+        with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
+            response = session.request('GET', url, timeout=self.http_timeout)
+            if w:
+                warn_class = w[-1].category.__name__
+                warn_msg = '{}: {}'.format(
+                    warn_class, w[-1].message)
+                if warn_class == 'SubjectAltNameWarning':
+                    LOG.debug(warn_msg)
+                else:
+                    LOG.warn(warn_msg)
+
+        LOG.debug("Got status code: {}.".format(response.status_code))
+        if not response.ok:
+            LOG.debug("Did not found module {user}-{module} on Puppet forge.".format(
+                user=module_info['vendor'], module=module_info['base_name']))
+            return None
+
+        if not response.text:
+            LOG.warn("No output for URL {!r}".format(url))
+            return None
+        if self.verbose > 2:
+            msg = "Output:\n{}".format(response.text)
+            LOG.debug(msg)
+
+        js_info = response.json()
+        if 'current_release' in js_info:
+            if 'version' in js_info['current_release']:
+                version = js_info['current_release']['version']
+            else:
+                msg = "Did not found version of current_release of module {user}-{module}.".format(
+                    user=module_info['vendor'], module=module_info['base_name'])
+                LOG.warn(msg)
+        else:
+            msg = "Did not found current_release of module {user}-{module}.".format(
+                user=module_info['vendor'], module=module_info['base_name'])
+            LOG.warn(msg)
+
+        LOG.debug("Vurrent version of module {user}-{module} is {version}.".format(
+            user=module_info['vendor'], module=module_info['base_name'], version=version))
+
+        return version
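+        # The evaluated part of the Forge API answer has roughly this shape
+        # (the version number is an invented example):
+        #
+        #   {"current_release": {"version": "4.25.1", ...}, ...}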
+
+    # -------------------------------------------------------------------------
+    def get_module_info(self, module_dir):
+
+        if self.verbose > 2:
+            LOG.debug("Get module information from {!r}.".format(module_dir))
+
+        if not os.path.exists(module_dir):
+            LOG.warn("Directory {!r} does not exists.".format(module_dir))
+            return None
+
+        if not os.path.isdir(module_dir):
+            LOG.warn("Path {!r} is not a directory".format(module_dir))
+            return None
+
+        re_dot_id = re.compile(r'[/-]+')
+
+        module_info = {}
+        module_info['base_name'] = os.path.basename(module_dir)
+        metadata_file = os.path.join(module_dir, 'metadata.json')
+        if not os.path.exists(metadata_file):
+            LOG.warn("Metadatafile {!r} does not exists.".format(metadata_file))
+            return None
+        if not os.path.isfile(metadata_file):
+            LOG.warn("Metadatafile {!r} is not a regular file.".format(metadata_file))
+            return None
+        if not os.access(metadata_file, os.R_OK):
+            LOG.warn("Metadatafile {!r} is readable.".format(metadata_file))
+            return None
+        if self.verbose > 2:
+            LOG.debug("Reading and evaluating {!r}.".format(metadata_file))
+        meta_info = {}
+
+        try:
+            with open(metadata_file, 'r', **self.open_args) as fh:
+                meta_info = json.load(fh)
+        except json.JSONDecodeError as e:
+            LOG.warn((
+                "Could not interprete {f!r} (line {l}, column {c}) "
+                "as a regular JSON file: {e}").format(
+                f=metadata_file, l=e.lineno, c=e.colno, e=e.msg))
+            return None
+
+        module_info['name'] = None
+        module_info['dot_id'] = None
+        module_info['vendor'] = None
+        module_info['version'] = None
+        module_info['dependencies'] = {}
+        if 'name' in meta_info:
+            module_info['name'] = meta_info['name']
+            pat_vendor = r'^(\S+)[-_/]' + re.escape(module_info['base_name']) + r'$'
+            match = re.match(pat_vendor, module_info['name'])
+            if match:
+                module_info['vendor'] = match.group(1)
+            module_info['dot_id'] = re_dot_id.sub('_', module_info['name'])
+        else:
+            module_info['dot_id'] = re_dot_id.sub('_', module_info['base_name'])
+
+        if 'version' in meta_info:
+            module_info['version'] = meta_info['version']
+
+        if 'dependencies' in meta_info:
+            for dep in meta_info['dependencies']:
+                if 'name' in dep:
+                    dep_info = {
+                        'name': dep['name'],
+                        'version': None,
+                        'module': None,
+                    }
+                    if 'version_requirement' in dep:
+                        dep_info['version'] = dep['version_requirement']
+                    module_info['dependencies'][dep['name']] = dep_info
+
+        return module_info
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/colored.py b/lib/pp_lib/colored.py
new file mode 100644 (file)
index 0000000..12264f9
--- /dev/null
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@summary: additional logging formatter for colored output via console
+"""
+
+# Standard modules
+import logging
+# import os.path
+# import sys
+import copy
+
+# Third party modules
+
+# Own modules
+
+# import pb_provisioning.common
+
+# from pb_provisioning.common import to_unicode_or_bust, to_utf8_or_bust
+
+__version__ = '0.1.4'
+
+# =============================================================================
+# Color coding module variables and helper functions
+
+COLOR_CODE = {
+    'ENDC': 0,  # RESET COLOR
+    'BOLD': 1,
+    'UNDERLINE': 4,
+    'BLINK': 5,
+    'INVERT': 7,
+    'CONCEALD': 8,
+    'STRIKE': 9,
+    'GREY30': 90,
+    'GREY40': 2,
+    'GREY65': 37,
+    'GREY70': 97,
+    'GREY20_BG': 40,
+    'GREY33_BG': 100,
+    'GREY80_BG': 47,
+    'GREY93_BG': 107,
+    'DARK_RED': 31,
+    'RED': 91,
+    'RED_BG': 41,
+    'LIGHT_RED_BG': 101,
+    'DARK_YELLOW': 33,
+    'YELLOW': 93,
+    'YELLOW_BG': 43,
+    'LIGHT_YELLOW_BG': 103,
+    'DARK_BLUE': 34,
+    'BLUE': 94,
+    'BLUE_BG': 44,
+    'LIGHT_BLUE_BG': 104,
+    'DARK_MAGENTA': 35,
+    'PURPLE': 95,
+    'MAGENTA_BG': 45,
+    'LIGHT_PURPLE_BG': 105,
+    'DARK_CYAN': 36,
+    'AUQA': 96,
+    'AQUA': 96,
+    'CYAN_BG': 46,
+    'LIGHT_AUQA_BG': 106,
+    'LIGHT_AQUA_BG': 106,
+    'DARK_GREEN': 32,
+    'GREEN': 92,
+    'GREEN_BG': 42,
+    'LIGHT_GREEN_BG': 102,
+    'BLACK': 30,
+}
+
+
+# -----------------------------------------------------------------------------
+def termcode(num):
+    """
+    Output of an ANSI terminal code.
+    """
+
+    return('\033[%sm' % (num))
+
+
+# -----------------------------------------------------------------------------
+def colorstr(message, color):
+    """
+    Wrapper function to colorize the message.
+
+    @param message: The message to colorize
+    @type message: str
+    @param color: The color to use, must be one of the keys of COLOR_CODE
+    @type color: str
+
+    @return: the colorized message
+    @rtype: str
+
+    """
+
+    tcode = ''
+    if isinstance(color, (list, tuple)):
+        for clr in color:
+            tcode += termcode(COLOR_CODE[clr])
+    else:
+        tcode = termcode(COLOR_CODE[color])
+
+    return tcode + message + termcode(COLOR_CODE['ENDC'])
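+# Example usage (the messages are invented; colors must be keys of COLOR_CODE):
+#
+#   print(colorstr('Everything is fine.', 'GREEN'))
+#   print(colorstr('Something went wrong!', ('BOLD', 'RED')))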
+
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class ColoredFormatter(logging.Formatter):
+    """
+    A variant of code found at:
+    http://stackoverflow.com/questions/384076/how-can-i-make-the-python-logging-output-to-be-colored
+    """
+
+    LEVEL_COLOR = {
+        'DEBUG': None,
+        'INFO': 'GREEN',
+        'WARNING': 'YELLOW',
+        'ERROR': ('BOLD', 'RED'),
+        'CRITICAL': 'RED_BG',
+    }
+
+    # -------------------------------------------------------------------------
+    def __init__(self, fmt=None, datefmt=None):
+        """
+        Initialize the formatter with specified format strings.
+
+        Initialize the formatter either with the specified format string, or a
+        default. Allow for specialized date formatting with the optional
+        datefmt argument (if omitted, you get the ISO8601 format).
+        """
+
+        logging.Formatter.__init__(self, fmt, datefmt)
+
+    # -----------------------------------------------------------
+    @property
+    def color_debug(self):
+        """The color used to output debug messages."""
+        return self.LEVEL_COLOR['DEBUG']
+
+    @color_debug.setter
+    def color_debug(self, value):
+        self.LEVEL_COLOR['DEBUG'] = value
+
+    # -----------------------------------------------------------
+    @property
+    def color_info(self):
+        """The color used to output info messages."""
+        return self.LEVEL_COLOR['INFO']
+
+    @color_info.setter
+    def color_info(self, value):
+        self.LEVEL_COLOR['INFO'] = value
+
+    # -----------------------------------------------------------
+    @property
+    def color_warning(self):
+        """The color used to output warning messages."""
+        return self.LEVEL_COLOR['WARNING']
+
+    @color_warning.setter
+    def color_warning(self, value):
+        self.LEVEL_COLOR['WARNING'] = value
+
+    # -----------------------------------------------------------
+    @property
+    def color_error(self):
+        """The color used to output error messages."""
+        return self.LEVEL_COLOR['ERROR']
+
+    @color_error.setter
+    def color_error(self, value):
+        self.LEVEL_COLOR['ERROR'] = value
+
+    # -----------------------------------------------------------
+    @property
+    def color_critical(self):
+        """The color used to output critical messages."""
+        return self.LEVEL_COLOR['CRITICAL']
+
+    @color_critical.setter
+    def color_critical(self, value):
+        self.LEVEL_COLOR['CRITICAL'] = value
+
+    # -------------------------------------------------------------------------
+    def format(self, record):
+        """
+        Format the specified record as text.
+        """
+
+        record = copy.copy(record)
+        levelname = record.levelname
+
+        if levelname in self.LEVEL_COLOR:
+
+            record.name = colorstr(record.name, 'BOLD')
+            record.filename = colorstr(record.filename, 'BOLD')
+            record.module = colorstr(record.module, 'BOLD')
+            record.funcName = colorstr(record.funcName, 'BOLD')
+            record.pathname = colorstr(record.pathname, 'BOLD')
+            record.processName = colorstr(record.processName, 'BOLD')
+            record.threadName = colorstr(record.threadName, 'BOLD')
+
+            if self.LEVEL_COLOR[levelname] is not None:
+                record.levelname = colorstr(
+                    levelname, self.LEVEL_COLOR[levelname])
+                record.msg = colorstr(record.msg, self.LEVEL_COLOR[levelname])
+
+        return logging.Formatter.format(self, record)
+
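+# Illustrative wiring (a sketch, not part of this module): attach the formatter
+# to a stream handler so that level names and messages get colorized.
+#
+#   handler = logging.StreamHandler()
+#   handler.setFormatter(ColoredFormatter('%(levelname)s - %(message)s'))
+#   LOG.addHandler(handler)
+#   LOG.error("something failed")     # rendered in bold red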
+
+# =============================================================================
+
+if __name__ == "__main__":
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/common.py b/lib/pp_lib/common.py
new file mode 100644 (file)
index 0000000..e0d7729
--- /dev/null
@@ -0,0 +1,387 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for commonly used functions.
+"""
+
+# Standard modules
+import sys
+import os
+import logging
+import re
+import pprint
+import platform
+import locale
+
+# Third party modules
+import six
+
+# Own modules
+
+__version__ = '0.5.3'
+
+LOG = logging.getLogger(__name__)
+
+RE_YES = re.compile(r'^\s*(?:y(?:es)?|true)\s*$', re.IGNORECASE)
+RE_NO = re.compile(r'^\s*(?:no?|false|off)\s*$', re.IGNORECASE)
+PAT_TO_BOOL_TRUE = locale.nl_langinfo(locale.YESEXPR)
+RE_TO_BOOL_TRUE = re.compile(PAT_TO_BOOL_TRUE)
+PAT_TO_BOOL_FALSE = locale.nl_langinfo(locale.NOEXPR)
+RE_TO_BOOL_FALSE = re.compile(PAT_TO_BOOL_FALSE)
+
+RE_DOT = re.compile(r'\.')
+RE_DOT_AT_END = re.compile(r'(\.)*$')
+RE_DECIMAL = re.compile(r'^\d+$')
+RE_IPV4_PTR = re.compile(r'\.in-addr\.arpa\.$', re.IGNORECASE)
+RE_IPV6_PTR = re.compile(r'\.ip6\.arpa\.$', re.IGNORECASE)
+
+
+# =============================================================================
+def pp(value, indent=4, width=99, depth=None):
+    """
+    Returns a pretty print string of the given value.
+
+    @return: pretty print string
+    @rtype: str
+    """
+
+    pretty_printer = pprint.PrettyPrinter(
+        indent=indent, width=width, depth=depth)
+    return pretty_printer.pformat(value)
+
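+# For example (illustrative): pp({'b': 2, 'a': 1}) -> "{'a': 1, 'b': 2}"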
+
+# =============================================================================
+def terminal_can_colors(debug=False):
+    """
+    Method to detect whether the current terminal (stdout and stderr)
+    is able to perform ANSI color sequences.
+
+    @return: both stdout and stderr can perform ANSI color sequences
+    @rtype: bool
+
+    """
+
+    cur_term = ''
+    if 'TERM' in os.environ:
+        cur_term = os.environ['TERM'].lower().strip()
+
+    colored_term_list = (
+        r'ansi',
+        r'linux.*',
+        r'screen.*',
+        r'[xeak]term.*',
+        r'gnome.*',
+        r'rxvt.*',
+        r'interix',
+    )
+    term_pattern = r'^(?:' + r'|'.join(colored_term_list) + r')$'
+    re_term = re.compile(term_pattern)
+
+    ansi_term = False
+    env_term_has_colors = False
+
+    if cur_term:
+        if cur_term == 'ansi':
+            env_term_has_colors = True
+            ansi_term = True
+        elif re_term.search(cur_term):
+            env_term_has_colors = True
+    if debug:
+        sys.stderr.write(
+            "ansi_term: %r, env_term_has_colors: %r\n" % (
+                ansi_term, env_term_has_colors))
+
+    has_colors = False
+    if env_term_has_colors:
+        has_colors = True
+    for handle in [sys.stdout, sys.stderr]:
+        if (hasattr(handle, "isatty") and handle.isatty()):
+            if debug:
+                sys.stderr.write("%s is a tty.\n" % (handle.name))
+            if (platform.system() == 'Windows' and not ansi_term):
+                if debug:
+                    sys.stderr.write("platform is Windows and not ansi_term.\n")
+                has_colors = False
+        else:
+            if debug:
+                sys.stderr.write("%s is not a tty.\n" % (handle.name))
+            if ansi_term:
+                pass
+            else:
+                has_colors = False
+
+    return has_colors
+
+
+# =============================================================================
+def to_bool(value):
+    """
+    Converter from string to boolean values (e.g. from configuration files).
+    """
+
+    if not value:
+        return False
+
+    try:
+        v_int = int(value)
+    except ValueError:
+        pass
+    except TypeError:
+        pass
+    else:
+        if v_int == 0:
+            return False
+        else:
+            return True
+
+    global PAT_TO_BOOL_TRUE
+    global RE_TO_BOOL_TRUE
+    global PAT_TO_BOOL_FALSE
+    global RE_TO_BOOL_FALSE
+
+    c_yes_expr = locale.nl_langinfo(locale.YESEXPR)
+    if c_yes_expr != PAT_TO_BOOL_TRUE:
+        PAT_TO_BOOL_TRUE = c_yes_expr
+        RE_TO_BOOL_TRUE = re.compile(PAT_TO_BOOL_TRUE)
+    # LOG.debug("Current pattern for 'yes': %r.", c_yes_expr)
+
+    c_no_expr = locale.nl_langinfo(locale.NOEXPR)
+    if c_no_expr != PAT_TO_BOOL_FALSE:
+        PAT_TO_BOOL_FALSE = c_no_expr
+        RE_TO_BOOL_FALSE = re.compile(PAT_TO_BOOL_FALSE)
+    # LOG.debug("Current pattern for 'no': %r.", c_no_expr)
+
+    v_str = ''
+    if isinstance(value, str):
+        v_str = value
+        if six.PY2:
+            if isinstance(value, unicode):                      # noqa
+                v_str = value.encode('utf-8')
+    elif six.PY3 and isinstance(value, bytes):
+        v_str = value.decode('utf-8')
+    else:
+        v_str = str(value)
+
+    match = RE_YES.search(v_str)
+    if match:
+        return True
+    match = RE_TO_BOOL_TRUE.search(v_str)
+    if match:
+        return True
+
+    match = RE_NO.search(v_str)
+    if match:
+        return False
+    match = RE_TO_BOOL_FALSE.search(v_str)
+    if match:
+        return False
+
+    return bool(value)
+
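+# Illustrative results (an English locale is assumed for the locale-based patterns):
+#   to_bool('yes') -> True    to_bool('true') -> True    to_bool('1') -> True
+#   to_bool('no')  -> False   to_bool('off')  -> False   to_bool('0') -> False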
+
+# =============================================================================
+def to_unicode(obj, encoding='utf-8'):
+
+    do_decode = False
+    if six.PY2:
+        if isinstance(obj, str):
+            do_decode = True
+    else:
+        if isinstance(obj, bytes):
+            do_decode = True
+
+    if do_decode:
+        obj = obj.decode(encoding)
+
+    return obj
+
+
+# =============================================================================
+def to_utf8(obj):
+
+    return encode_or_bust(obj, 'utf-8')
+
+
+# =============================================================================
+def encode_or_bust(obj, encoding='utf-8'):
+
+    do_encode = False
+    if six.PY2:
+        if isinstance(obj, unicode):                            # noqa
+            do_encode = True
+    else:
+        if isinstance(obj, str):
+            do_encode = True
+
+    if do_encode:
+        obj = obj.encode(encoding)
+
+    return obj
+
+
+# =============================================================================
+def to_bytes(obj, encoding='utf-8'):
+    "Wrapper for encode_or_bust()"
+
+    return encode_or_bust(obj, encoding)
+
+
+# =============================================================================
+def to_str(obj, encoding='utf-8'):
+    """
+    Transforms the given string-like object into the str type according
+    to the current Python version.
+    """
+
+    if six.PY2:
+        return encode_or_bust(obj, encoding)
+    else:
+        return to_unicode(obj, encoding)
+
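+# Illustrative round trip under Python 3:
+#   to_bytes('ä')           -> b'\xc3\xa4'
+#   to_unicode(b'\xc3\xa4') -> 'ä'
+#   to_str(b'\xc3\xa4')     -> 'ä'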
+
+# =============================================================================
+def caller_search_path():
+    """
+    Builds a search path for executables from environment $PATH
+    including some standard paths.
+
+    @return: all existing search paths
+    @rtype: list
+    """
+
+    path_list = []
+    search_path = os.environ.get('PATH', '')
+    if not search_path:
+        search_path = os.defpath
+
+    search_path_list = [
+        '/opt/PPlocal/bin',
+    ]
+
+    for d in search_path.split(os.pathsep):
+        search_path_list.append(d)
+
+    default_path = [
+        '/bin',
+        '/usr/bin',
+        '/usr/local/bin',
+        '/sbin',
+        '/usr/sbin',
+        '/usr/local/sbin',
+        '/usr/ucb',
+        '/usr/sfw/bin',
+        '/opt/csw/bin',
+        '/usr/openwin/bin',
+        '/usr/ccs/bin',
+    ]
+
+    for d in default_path:
+        search_path_list.append(d)
+
+    for d in search_path_list:
+        if not os.path.exists(d):
+            continue
+        if not os.path.isdir(d):
+            continue
+        d_abs = os.path.realpath(d)
+        if d_abs not in path_list:
+            path_list.append(d_abs)
+
+    return path_list
+
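+# Illustrative result (depends on $PATH and on which directories actually exist):
+#   caller_search_path()
+#   -> ['/opt/PPlocal/bin', '/usr/local/bin', '/usr/bin', '/bin', ...]
+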
+# =============================================================================
+def compare_fqdn(x, y):
+
+    # LOG.debug("Comparing {!r} <=> {!r}.".format(x, y))
+
+    # First check for None values
+    if x is None and y is None:
+        return 0
+    if x is None:
+        return -1
+    if y is None:
+        return 1
+
+    # Check for empty FQDNs
+    xs = str(x).strip().lower()
+    ys = str(y).strip().lower()
+
+    if xs == '' and ys == '':
+        return 0
+    if xs == '':
+        return -1
+    if ys == '':
+        return 1
+
+    # Ensure a dot at end
+    xs = RE_DOT_AT_END.sub('.', xs)
+    ys = RE_DOT_AT_END.sub('.', ys)
+
+    if xs == ys:
+        return 0
+
+    # Reverse IPv4 zones first, then reverse IPv6 zones
+    if RE_IPV4_PTR.search(xs):
+        if not RE_IPV4_PTR.search(ys):
+            return -1
+    elif RE_IPV4_PTR.search(ys):
+        if not RE_IPV4_PTR.search(xs):
+            return 1
+    elif RE_IPV6_PTR.search(xs):
+        if not RE_IPV6_PTR.search(ys):
+            return -1
+    elif RE_IPV6_PTR.search(ys):
+        if not RE_IPV6_PTR.search(xs):
+            return 1
+
+    return compare_fqdn_tokens(xs, ys)
+
+# =============================================================================
+def compare_fqdn_tokens(xs, ys):
+
+    xa = RE_DOT.split(xs)
+    xa.reverse()
+    xa.pop(0)
+
+    ya = RE_DOT.split(ys)
+    ya.reverse()
+    ya.pop(0)
+
+    # Compare token from the last to the first
+    nr_tokens = min(len(xa), len(ya))
+    while nr_tokens > 0:
+        token_x = xa.pop(0)
+        token_y = ya.pop(0)
+        if RE_DECIMAL.match(token_x) and RE_DECIMAL.match(token_y):
+            num_x = int(token_x)
+            num_y = int(token_y)
+            if num_x < num_y:
+                return -1
+            elif num_x > num_y:
+                return 1
+        else:
+            if token_x < token_y:
+                return -1
+            elif token_x > token_y:
+                return 1
+        nr_tokens -= 1
+
+    if len(xa):
+        return 1
+    if len(ya):
+        return -1
+
+    return 0
+
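+# Illustrative usage (a sketch): sort zone names with this old-style comparator.
+#   import functools
+#   zones = ['b.example.com.', '10.in-addr.arpa.', 'a.example.com.']
+#   sorted(zones, key=functools.cmp_to_key(compare_fqdn))
+#   -> ['10.in-addr.arpa.', 'a.example.com.', 'b.example.com.']
+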
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/config_named_app.py b/lib/pp_lib/config_named_app.py
new file mode 100644 (file)
index 0000000..9ec3133
--- /dev/null
@@ -0,0 +1,1900 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: A module for the application class for configuring named
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import re
+import pwd
+import copy
+import textwrap
+import socket
+import grp
+import tempfile
+import time
+import datetime
+import ipaddress
+import stat
+import shutil
+import shlex
+
+from subprocess import Popen, TimeoutExpired, PIPE
+
+# Third party modules
+import six
+import requests
+
+from six.moves.urllib.parse import urlunsplit
+
+# Own modules
+from .common import pp, to_bool, to_str
+
+from .cfg_app import PpCfgAppError, PpConfigApplication
+
+from .pidfile import PidFileError, PidFile
+
+__version__ = '0.7.4'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpConfigNamedError(PpCfgAppError):
+    pass
+
+
+# =============================================================================
+class PpConfigNamedApp(PpConfigApplication):
+    """
+    Class for the application 'config-named' for configuring the BIND named daemon.
+    """
+
+    default_pidfile = '/run/dns-deploy-zones.pid'
+
+    default_pdns_api_host = 'systemshare.pixelpark.com'
+    default_pdns_api_port = 8081
+    default_pdns_api_root_path = '/api/v1'
+    default_pdns_api_server_id = 'localhost'
+
+    default_named_conf_dir = '/etc'
+    default_named_conf = 'named.conf'
+    default_named_bindkeys_file = 'named.iscdlv.key'
+    default_named_rootkeys_file = 'named.root.key'
+    default_named_def_zones_file = 'named.rfc1912.zones'
+    default_named_acl_cfg_file = 'named.acl.conf'
+    default_named_log_cfg_file = 'named.log.conf'
+    default_named_zones_cfg_file = 'named.zones.conf'
+
+    default_named_basedir = '/var/named'
+    default_named_datadir = 'data'
+    default_named_slavedir = 'slaves'
+    default_named_managed_keysdir = 'dynamic'
+    default_named_root_zone_file = 'named.ca'
+
+    default_named_rundir = '/run/named'
+    default_named_pidfile = 'named.pid'
+    default_named_session_keyfile = 'session.key'
+
+    default_named_log_dir = '/var/log/named'
+
+    default_named_version2show = 'none'
+
+    default_zone_masters = [
+        '217.66.53.86',
+    ]
+
+    default_cmd_checkconf = '/usr/sbin/named-checkconf'
+    default_cmd_reload = '/usr/sbin/rndc reload'
+    default_cmd_status = '/usr/bin/systemctl status named.service'
+    default_cmd_start = '/usr/bin/systemctl start named.service'
+    default_cmd_restart = '/usr/bin/systemctl restart named.service'
+
+    re_split_addresses = re.compile(r'[,;\s]+')
+    re_integer = re.compile(r'^\s*(\d+)\s*$')
+
+    re_ipv4_zone = re.compile(r'^((?:\d+\.)+)in-addr\.arpa\.$')
+    re_ipv6_zone = re.compile(r'^((?:[\da-f]\.)+)ip6\.arpa\.$')
+
+    re_block_comment = re.compile(r'/\*.*?\*/', re.MULTILINE | re.DOTALL)
+    re_line_comment = re.compile(r'(?://|#).*$', re.MULTILINE)
+
+    open_args = {}
+    if six.PY3:
+        open_args = {
+            'encoding': 'utf-8',
+            'errors': 'surrogateescape',
+        }
+
+    log_channels = {
+        'named': {
+            'type': 'file',
+            'print-time': True,
+            'print-category': True,
+            'print-severity': True,
+        },
+        'syslog': {
+            'type': 'syslog',
+            'facility': 'daemon',
+            'print-category': True,
+        },
+        'security': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'config': {
+            'type': 'file',
+            'severity': 'debug',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'dnssec': {
+            'type': 'file',
+            'severity': 'dynamic',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'ratelimit': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'query': {
+            'type': 'file',
+            'severity': 'debug',
+            'print-time': True,
+        },
+        'query-error': {
+            'type': 'file',
+            'severity': 'notice',
+            'print-time': True,
+        },
+        'resolver': {
+            'type': 'file',
+            'severity': 'dynamic',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'xfer-in': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'xfer-out': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'update': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'notify': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'unmatched': {
+            'type': 'file',
+            'print-time': True,
+            'print-category': True,
+            'print-severity': True,
+        },
+        'network': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+        'lame-servers': {
+            'type': 'file',
+            'print-time': True,
+            'print-severity': True,
+        },
+    }
+
+    log_categories = {
+        'client': ['null'],
+        'config': ['syslog', 'named', 'config'],
+        'database': ['syslog', 'named'],
+        'default': ['syslog', 'named'],
+        'delegation-only': ['syslog', 'named'],
+        'dispatch': ['syslog', 'named'],
+        'dnssec': ['syslog', 'named', 'dnssec'],
+        'general': ['syslog', 'named'],
+        'lame-servers': ['lame-servers'],
+        'network': ['syslog', 'named', 'network'],
+        'notify': ['syslog', 'named', 'notify'],
+        'queries': ['query', 'query-error'],
+        'resolver': ['syslog', 'named', 'resolver'],
+        'rpz': ['syslog', 'named'],
+        'rate-limit': ['syslog', 'named', 'ratelimit'],
+        'security': ['syslog', 'named', 'security'],
+        'unmatched': ['syslog', 'named', 'unmatched'],
+        'update': ['syslog', 'named', 'update'],
+        'update-security': ['syslog', 'named', 'update', 'security'],
+        'xfer-in': ['syslog', 'named', 'xfer-in'],
+        'xfer-out': ['syslog', 'named', 'xfer-out'],
+    }
+
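+    # Illustrative effect (a sketch): generate_log_cfg_file() renders the dicts
+    # above roughly as BIND logging statements, e.g. for the 'query' channel:
+    #   channel query {
+    #       file "/var/log/named/query.log";
+    #       severity debug;
+    #       print-time yes;
+    #   };
+    #   category queries { query; query-error; };
+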
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self._show_simulate_opt = True
+
+        self.pidfile_name = self.default_pidfile
+
+        self.pdns_api_host = self.default_pdns_api_host
+        self.pdns_api_port = self.default_pdns_api_port
+        self.pdns_api_root_path = self.default_pdns_api_root_path
+        self.pdns_api_server_id = self.default_pdns_api_server_id
+        self.pdns_api_key = None
+
+        self.is_internal = False
+        self.named_listen_on_v6 = False
+
+        # Configuration files and directories
+        self.named_conf_dir = self.default_named_conf_dir
+        self._named_conf = self.default_named_conf
+        self._named_bindkeys_file = self.default_named_bindkeys_file
+        self._named_rootkeys_file = self.default_named_rootkeys_file
+        self._named_def_zones_file = self.default_named_def_zones_file
+        self._named_acl_cfg_file = self.default_named_acl_cfg_file
+        self._named_log_cfg_file = self.default_named_log_cfg_file
+        self._named_zones_cfg_file = self.default_named_zones_cfg_file
+
+        # Variable status directories and files
+        self.named_basedir = self.default_named_basedir
+        self._named_datadir = self.default_named_datadir
+        self._named_slavedir = self.default_named_slavedir
+        self._named_managed_keysdir = self.default_named_managed_keysdir
+        self._named_root_zone_file = self.default_named_root_zone_file
+
+        # Runtime volatile directories and files
+        self.named_rundir = self.default_named_rundir
+        self._named_pidfile = self.default_named_pidfile
+        self._named_session_keyfile = self.default_named_session_keyfile
+
+        # Runtime user and group
+        self.named_user = 'named'
+        self.named_uid = None
+        self.named_group = 'named'
+        self.named_gid = None
+
+        self.named_dnssec = False
+
+        # Logging configuration
+        self.named_logdir = '/var/log/named'
+        self.query_log = False
+
+        self.named_show_bind_version = False
+        self.named_version2show = self.default_named_version2show
+
+        self.zone_masters = copy.copy(self.default_zone_masters)
+
+        self.zones = []
+        self.pidfile = None
+
+        self.tempdir = None
+        self.temp_named_conf = None
+        self.temp_acl_cfg_file = None
+        self.temp_log_cfg_file = None
+        self.temp_zones_cfg_file = None
+        self.keep_tempdir = False
+
+        self.backup_suffix = (
+            '.' + datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S') + '.bak')
+
+        self.reload_necessary = False
+        self.restart_necessary = False
+
+        self.cmd_checkconf = self.default_cmd_checkconf
+        self.cmd_reload = self.default_cmd_reload
+        self.cmd_status = self.default_cmd_status
+        self.cmd_start = self.default_cmd_start
+        self.cmd_restart = self.default_cmd_restart
+
+        self.files2replace = {}
+        self.moved_files = {}
+
+        self.acls = {
+            'allow-notify': ['dnsmaster.pixelpark.com'],
+            'allow-transfer': ['dnsmaster.pixelpark.com'],
+        }
+
+        description = textwrap.dedent('''\
+            Generation of the configuration for named (the BIND 9 name daemon).
+            ''').strip()
+
+        super(PpConfigNamedApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='dns-deploy-zones',
+        )
+
+        self.post_init()
+
+    # -------------------------------------------
+    @property
+    def named_conf(self):
+        """The named.conf as an absolute pathname."""
+        return os.path.join(self.named_conf_dir, self._named_conf)
+
+    # -------------------------------------------
+    @property
+    def named_acl_cfg_file(self):
+        """The config file for ACLs as an absolute pathname."""
+        return os.path.join(self.named_conf_dir, self._named_acl_cfg_file)
+
+    # -------------------------------------------
+    @property
+    def named_bindkeys_file(self):
+        """The file for bind keys."""
+        return os.path.join(self.named_conf_dir, self._named_bindkeys_file)
+
+    # -------------------------------------------
+    @property
+    def named_rootkeys_file(self):
+        """The file for root keys."""
+        return os.path.join(self.named_conf_dir, self._named_rootkeys_file)
+
+    # -------------------------------------------
+    @property
+    def named_def_zones_file(self):
+        """The file for default zones."""
+        return os.path.join(self.named_conf_dir, self._named_def_zones_file)
+
+    # -------------------------------------------
+    @property
+    def named_log_cfg_file(self):
+        """The file for logging configuration."""
+        return os.path.join(self.named_conf_dir, self._named_log_cfg_file)
+
+    # -------------------------------------------
+    @property
+    def named_zones_cfg_file(self):
+        """The file for configuration of all own zones."""
+        return os.path.join(self.named_conf_dir, self._named_zones_cfg_file)
+
+    # -------------------------------------------
+    @property
+    def rndc_config_file(self):
+        """The config file for RNDC (included in named.conf)"""
+        return os.path.join(self.named_conf_dir, 'rndc.key')
+
+    # -------------------------------------------
+    @property
+    def named_pidfile(self):
+        """The PID file for the named daemon."""
+        return os.path.join(self.named_rundir, self._named_pidfile)
+
+    # -------------------------------------------
+    @property
+    def named_session_keyfile(self):
+        """The file for the named session key."""
+        return os.path.join(self.named_rundir, self._named_session_keyfile)
+
+    # -------------------------------------------
+    @property
+    def named_datadir_abs(self):
+        """The directory for additional data of named."""
+        return os.path.join(self.named_basedir, self._named_datadir)
+
+    # -------------------------------------------
+    @property
+    def named_datadir_rel(self):
+        """The directory for additional data of named."""
+        return self._named_datadir
+
+    # -------------------------------------------
+    @property
+    def named_dump_dir(self):
+        """Directory name of the named dump file."""
+        return os.path.join(self.named_basedir, 'dump')
+
+    # -------------------------------------------
+    @property
+    def named_dump_file(self):
+        """File name of the named dump file."""
+        return os.path.join(self.named_dump_dir, 'named_dump.db')
+
+    # -------------------------------------------
+    @property
+    def named_stats_dir(self):
+        """Directory name of the named statistics."""
+        return os.path.join(self.named_basedir, 'stats')
+
+    # -------------------------------------------
+    @property
+    def named_stats_file(self):
+        """File name of the named statistics file."""
+        return os.path.join(self.named_stats_dir, 'named.stats')
+
+    # -------------------------------------------
+    @property
+    def named_slavedir_rel(self):
+        """The directory for zone files of slave zones."""
+        return self._named_slavedir
+
+    # -------------------------------------------
+    @property
+    def named_slavedir_abs(self):
+        """The directory for zone files of slave zones."""
+        return os.path.join(self.named_basedir, self._named_slavedir)
+
+    # -------------------------------------------
+    @property
+    def named_root_zone_file_rel(self):
+        """The filename of the root zone."""
+        return self._named_root_zone_file
+
+    # -------------------------------------------
+    @property
+    def named_root_zone_file_abs(self):
+        """The filename of the root zone."""
+        return os.path.join(self.named_basedir, self._named_root_zone_file)
+
+    # -------------------------------------------
+    @property
+    def named_managed_keysdir(self):
+        """The directory for managed session keys."""
+        return os.path.join(self.named_basedir, self._named_managed_keysdir)
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PpConfigNamedApp, self).as_dict(short=short)
+        res['named_conf'] = self.named_conf
+        res['named_acl_cfg_file'] = self.named_acl_cfg_file
+        res['named_pidfile'] = self.named_pidfile
+        res['named_session_keyfile'] = self.named_session_keyfile
+        res['named_bindkeys_file'] = self.named_bindkeys_file
+        res['named_rootkeys_file'] = self.named_rootkeys_file
+        res['named_def_zones_file'] = self.named_def_zones_file
+        res['named_log_cfg_file'] = self.named_log_cfg_file
+        res['named_zones_cfg_file'] = self.named_zones_cfg_file
+        res['rndc_config_file'] = self.rndc_config_file
+        res['named_dump_dir'] = self.named_dump_dir
+        res['named_dump_file'] = self.named_dump_file
+        res['named_stats_dir'] = self.named_stats_dir
+        res['named_stats_file'] = self.named_stats_file
+        res['named_datadir_abs'] = self.named_datadir_abs
+        res['named_datadir_rel'] = self.named_datadir_rel
+        res['named_slavedir_abs'] = self.named_slavedir_abs
+        res['named_slavedir_rel'] = self.named_slavedir_rel
+        res['named_managed_keysdir'] = self.named_managed_keysdir
+        res['named_root_zone_file_rel'] = self.named_root_zone_file_rel
+        res['named_root_zone_file_abs'] = self.named_root_zone_file_abs
+        res['default_pidfile'] = self.default_pidfile
+        res['default_pdns_api_host'] = self.default_pdns_api_host
+        res['default_pdns_api_port'] = self.default_pdns_api_port
+        res['default_pdns_api_root_path'] = self.default_pdns_api_root_path
+        res['default_pdns_api_server_id'] = self.default_pdns_api_server_id
+        res['default_named_conf_dir'] = self.default_named_conf_dir
+        res['default_named_conf'] = self.default_named_conf
+        res['default_named_bindkeys_file'] = self.default_named_bindkeys_file
+        res['default_named_rootkeys_file'] = self.default_named_rootkeys_file
+        res['default_named_def_zones_file'] = self.default_named_def_zones_file
+        res['default_named_acl_cfg_file'] = self.default_named_acl_cfg_file
+        res['default_named_log_cfg_file'] = self.default_named_log_cfg_file
+        res['default_named_zones_cfg_file'] = self.default_named_zones_cfg_file
+        res['default_named_basedir'] = self.default_named_basedir
+        res['default_named_datadir'] = self.default_named_datadir
+        res['default_named_slavedir'] = self.default_named_slavedir
+        res['default_named_managed_keysdir'] = self.default_named_managed_keysdir
+        res['default_named_root_zone_file'] = self.default_named_root_zone_file
+        res['default_named_rundir'] = self.default_named_rundir
+        res['default_named_pidfile'] = self.default_named_pidfile
+        res['default_named_session_keyfile'] = self.default_named_session_keyfile
+        res['default_named_log_dir'] = self.default_named_log_dir
+        res['default_named_version2show'] = self.default_named_version2show
+        res['default_zone_masters'] = copy.copy(self.default_zone_masters)
+        res['default_cmd_checkconf'] = copy.copy(self.default_cmd_checkconf)
+        res['default_cmd_reload'] = copy.copy(self.default_cmd_reload)
+        res['default_cmd_start'] = copy.copy(self.default_cmd_start)
+        res['default_cmd_status'] = copy.copy(self.default_cmd_status)
+        res['default_cmd_restart'] = copy.copy(self.default_cmd_restart)
+        res['re_split_addresses'] = self.re_split_addresses
+        res['re_integer'] = self.re_integer
+        res['re_ipv4_zone'] = self.re_ipv4_zone
+        res['re_ipv6_zone'] = self.re_ipv6_zone
+        res['open_args'] = self.open_args
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+
+        is_internal_group = self.arg_parser.add_mutually_exclusive_group()
+
+        is_internal_group.add_argument(
+            '--internal', '--not-public', dest='internal', action='store_true',
+            help="Creating a named configuration for a internal name server.",
+        )
+
+        is_internal_group.add_argument(
+            '--public', '--not-internal', dest='public', action='store_true',
+            help="Creating a named configuration for a public name server.",
+        )
+
+        query_log_group = self.arg_parser.add_mutually_exclusive_group()
+
+        query_log_group.add_argument(
+            '--querylog', dest='querylog', action='store_true',
+            help="Enabling query logging in the named configuration.",
+        )
+
+        query_log_group.add_argument(
+            '--no-querylog', dest='no_querylog', action='store_true',
+            help="Disabling query logging in the named configuration.",
+        )
+
+        self.arg_parser.add_argument(
+            '-K', '--keep-tempdir', dest='keep_tempdir', action='store_true',
+            help=(
+                "Keeping the temporary directory instead of removing it at the end "
+                "(e.g. for debugging purposes)"),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpConfigNamedApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 3:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            section = self.cfg[section_name]
+
+            if section_name.lower() == 'app':
+                self._check_path_config(section, section_name, 'pidfile', 'pidfile_name', True)
+
+            if section_name.lower() in (
+                    'powerdns-api', 'powerdns_api', 'powerdnsapi',
+                    'pdns-api', 'pdns_api', 'pdnsapi'):
+                self.set_api_options(section, section_name)
+
+            if section_name.lower() == 'named':
+                self.set_named_options(section, section_name)
+
+            if section_name.lower() == 'acl':
+                self.read_acl_lists(section, section_name)
+
+        self._perform_cmdline_opts()
+
+    # -------------------------------------------------------------------------
+    def _perform_cmdline_opts(self):
+
+        if hasattr(self.args, 'internal') and self.args.internal:
+            self.is_internal = True
+        elif hasattr(self.args, 'public') and self.args.public:
+            self.is_internal = False
+
+        if hasattr(self.args, 'querylog') and self.args.querylog:
+            self.query_log = True
+        elif hasattr(self.args, 'no_querylog') and self.args.no_querylog:
+            self.query_log = False
+
+        self.keep_tempdir = getattr(self.args, 'keep_tempdir', False)
+
+    # -------------------------------------------------------------------------
+    def set_api_options(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'host' in section:
+            v = section['host']
+            host = v.lower().strip()
+            if host:
+                self.pdns_api_host = host
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+                if port <= 0 or port >= 2**16:
+                    raise ValueError(
+                        "a port must be greater than 0 and less than {}.".format(2**16))
+            except (TypeError, ValueError) as e:
+                LOG.error(
+                    "Wrong port number {!r} in configuration section {!r}: {}".format(
+                        section['port'], section_name, e))
+                self.config_has_errors = True
+            else:
+                self.pdns_api_port = port
+
+        self._check_path_config(
+            section, section_name, 'root_path',
+            'pdns_api_root_path', True, 'root path of the PowerDNS')
+
+        if 'server_id' in section and section['server_id'].strip():
+            self.pdns_api_server_id = section['server_id'].strip().lower()
+
+        if 'key' in section:
+            key = section['key'].strip()
+            self.pdns_api_key = key
+
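+    # An illustrative config section consumed by set_api_options() (a sketch,
+    # option names follow the checks above):
+    #   [pdns-api]
+    #   host = systemshare.pixelpark.com
+    #   port = 8081
+    #   key = <API key>
+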
+    # -------------------------------------------------------------------------
+    def set_named_options(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'is_internal' in section:
+            if section['is_internal'] is None:
+                self.is_internal = False
+            else:
+                self.is_internal = to_bool(section['is_internal'])
+
+        if 'listen_on_v6' in section and section['listen_on_v6'] is not None:
+            self.named_listen_on_v6 = to_bool(section['listen_on_v6'])
+
+        if 'dnssec' in section and section['dnssec'] is not None:
+            self.named_dnssec = to_bool(section['dnssec'])
+
+        # Configuration files and directories
+        self._check_path_config(section, section_name, 'config_dir', 'named_conf_dir', True)
+        self._check_path_config(section, section_name, 'named_conf', '_named_conf', False)
+        self._check_path_config(
+            section, section_name, 'bindkeys_file', '_named_bindkeys_file', False)
+        self._check_path_config(
+            section, section_name, 'rootkeys_file', '_named_rootkeys_file', False)
+        self._check_path_config(
+            section, section_name, 'default_zones_file', '_named_def_zones_file', False)
+        self._check_path_config(
+            section, section_name, 'acl_cfg_file', '_named_acl_cfg_file', False)
+        self._check_path_config(
+            section, section_name, 'log_cfg_file', '_named_log_cfg_file', False)
+        self._check_path_config(
+            section, section_name, 'zones_cfg_file', '_named_zones_cfg_file', False)
+
+        # Variable status directories and files
+        self._check_path_config(section, section_name, 'base_dir', 'named_basedir', True)
+        self._check_path_config(section, section_name, 'data_dir', '_named_datadir', False)
+        self._check_path_config(section, section_name, 'slave_dir', '_named_slavedir', False)
+        self._check_path_config(
+            section, section_name, 'managed_keys_dir', '_named_managed_keysdir', False)
+        self._check_path_config(
+            section, section_name, 'root_zone_file', '_named_root_zone_file', False)
+
+        # Runtime volatile directories and files
+        self._check_path_config(section, section_name, 'run_dir', 'named_rundir', True)
+        self._check_path_config(section, section_name, 'pidfile', '_named_pidfile', False)
+        self._check_path_config(
+            section, section_name, 'session_keyfile', '_named_session_keyfile', False)
+
+        # Logging configuration
+        self._check_path_config(section, section_name, 'log_dir', 'named_logdir', True)
+        if 'query_log' in section:
+            self.query_log = to_bool(section['query_log'])
+
+        if 'show_bind_version' in section and section['show_bind_version'] is not None:
+            self.named_show_bind_version = to_bool(section['show_bind_version'])
+
+        if 'version_to_show' in section and section['version_to_show'] is not None:
+            self.named_version2show = section['version_to_show'].strip()
+
+        # Runtime user and group
+        if 'named_user' in section and section['named_user'] is not None:
+            self.named_user = section['named_user'].strip()
+        if 'named_group' in section and section['named_group'] is not None:
+            self.named_group = section['named_group'].strip()
+
+        if 'masters' in section:
+            self._get_masters_from_cfg(section['masters'], section_name)
+
+        for item in ('cmd_checkconf', 'cmd_reload', 'cmd_status', 'cmd_start', 'cmd_restart'):
+            if item in section and section[item].strip():
+                setattr(self, item, section[item].strip())
+
+    # -------------------------------------------------------------------------
+    def _check_path_config(self, section, section_name, key, class_prop, absolute=True, desc=None):
+
+        if key not in section:
+            return
+
+        d = ''
+        if desc:
+            d = ' ' + str(desc).strip()
+
+        path = section[key].strip()
+        if not path:
+            msg = "No path given for{} [{}]/{} in configuration.".format(
+                d, section_name, key)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        if absolute and not os.path.isabs(path):
+            msg = "Path {!r} for{} [{}]/{} in configuration must be an absolute path.".format(
+                path, d, section_name, key)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        setattr(self, class_prop, path)
+
+    # -------------------------------------------------------------------------
+    def _get_masters_from_cfg(self, value, section_name):
+
+        value = value.strip()
+        if not value:
+            msg = "No masters given in [{}]/masters.".format(section_name)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        masters = []
+
+        for m in self.re_split_addresses.split(value):
+            if m:
+                m = m.strip().lower()
+                try:
+                    addr_info = socket.getaddrinfo(                                         # noqa
+                        m, 53, proto=socket.IPPROTO_TCP, family=socket.AF_INET)             # noqa
+                except socket.gaierror as e:
+                    msg = (
+                        "Invalid hostname or address {!r} found in "
+                        "[{}]/masters: {}").format(m, section_name, e)
+                    LOG.error(msg)
+                    self.config_has_errors = True
+                    m = None
+            if m:
+                masters.append(m)
+        if masters:
+            self.zone_masters = masters
+
+    # -------------------------------------------------------------------------
+    def read_acl_lists(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        for acl_name in section.keys():
+
+            entries_str = section[acl_name].strip()
+            entries = self.re_split_addresses.split(entries_str)
+            self.acls[acl_name] = entries
+
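+    # An illustrative [acl] config section (a sketch): every key becomes an ACL
+    # name, its value is split on commas, semicolons and whitespace:
+    #   [acl]
+    #   allow-transfer = dnsmaster.pixelpark.com, 217.66.53.86
+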
+    # -------------------------------------------------------------------------
+    def post_init(self):
+
+        super(PpConfigNamedApp, self).post_init()
+        self.initialized = False
+
+        cred_ok = True
+        LOG.debug("Checking named user {!r} and group {!r} ...".format(
+            self.named_user, self.named_group))
+
+        match = self.re_integer.search(self.named_user)
+        if match:
+            self.named_uid = int(match.group(1))
+        else:
+            try:
+                uid = pwd.getpwnam(self.named_user).pw_uid
+            except KeyError:
+                msg = "Username {!r} not found.".format(self.named_user)
+                LOG.error(msg)
+                cred_ok = False
+            else:
+                self.named_uid = uid
+
+        match = self.re_integer.search(self.named_group)
+        if match:
+            self.named_gid = int(match.group(1))
+        else:
+            try:
+                gid = grp.getgrnam(self.named_group).gr_gid
+            except KeyError:
+                msg = "Group {!r} not found.".format(self.named_group)
+                LOG.error(msg)
+                cred_ok = False
+            else:
+                self.named_gid = gid
+
+        if not cred_ok:
+            self.exit(1)
+
+        self.pidfile = PidFile(
+            filename=self.pidfile_name, appname=self.appname, verbose=self.verbose,
+            base_dir=self.base_dir, simulate=self.simulate)
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        my_uid = os.geteuid()
+        if my_uid:
+            if self.simulate:
+                LOG.warn("You must be root to execute this script.")
+                group_ids = {}
+                for group in grp.getgrall():
+                    for g_username in group.gr_mem:
+                        g_uid = pwd.getpwnam(g_username).pw_uid
+                        if g_uid == my_uid:
+                            group_ids[group.gr_name] = group.gr_gid
+                if self.verbose > 2:
+                    LOG.debug("You are a member of the groups:\n{}".format(group_ids))
+                if self.named_group in group_ids:
+                    LOG.warning((
+                        "But hey - this is simulation mode, and so it's "
+                        "sufficient to be a member of group {!r} ...").format(
+                        self.named_group))
+                else:
+                    LOG.error((
+                        "But even in simulation mode you have to be a member "
+                        "of group {!r}!").format(self.named_group))
+                    self.exit(1)
+                time.sleep(1)
+            else:
+                LOG.error("You must be root to execute this script.")
+                self.exit(1)
+
+        try:
+            self.pidfile.create()
+        except PidFileError as e:
+            LOG.error("Could not occupy pidfile: {}".format(e))
+            self.exit(7)
+            return
+
+        try:
+
+            self.get_api_zones()
+            self.init_temp_objects()
+            self.create_temp_files()
+            self.compare_files()
+            self.check_directories()
+
+            try:
+                self.replace_configfiles()
+                if not self.check_namedconf():
+                    self.restore_configfiles()
+                    self.exit(99)
+                self.apply_config()
+            except Exception:
+                self.restore_configfiles()
+                raise
+
+        finally:
+            self.cleanup()
+            self.pidfile = None
+
+    # -------------------------------------------------------------------------
+    def create_temp_files(self):
+
+        LOG.info("Generating all config files in a temporary directory ...")
+
+        self.generate_acl_file()
+        self.generate_named_conf()
+        self.generate_log_cfg_file()
+        self.generate_slave_cfg_file()
+
+    # -------------------------------------------------------------------------
+    def init_temp_objects(self):
+        """Init temporary objects and properties."""
+
+        self.tempdir = tempfile.mkdtemp(
+            prefix=(self.appname + '.'), suffix='.tmp.d'
+        )
+        LOG.debug("Temporary directory: {!r}.".format(self.tempdir))
+
+        self.temp_named_conf = os.path.join(
+            self.tempdir, self.default_named_conf)
+        self.temp_acl_cfg_file = os.path.join(
+            self.tempdir, self.default_named_acl_cfg_file)
+        self.temp_log_cfg_file = os.path.join(
+            self.tempdir, self.default_named_log_cfg_file)
+        self.temp_zones_cfg_file = os.path.join(
+            self.tempdir, self.default_named_zones_cfg_file)
+
+        if self.verbose > 1:
+            LOG.debug("Temporary named.conf: {!r}".format(self.temp_named_conf))
+            LOG.debug("Temporary ACL conf: {!r}".format(self.temp_acl_cfg_file))
+            LOG.debug("Temporary LOG conf: {!r}".format(self.temp_log_cfg_file))
+            LOG.debug("Temporary zones conf: {!r}".format(self.temp_zones_cfg_file))
+
+    # -------------------------------------------------------------------------
+    def generate_acl_file(self):
+
+        LOG.info("Generating {} ...".format(self.default_named_acl_cfg_file))
+
+        cur_date = datetime.datetime.now().isoformat(' ')
+
+        lines = []
+        lines.append('###############################################################')
+        lines.append('')
+        lines.append(' Bind9 configuration file for ACLs')
+        lines.append(' {}'.format(self.named_acl_cfg_file))
+        lines.append('')
+        lines.append(' Generated at: {}'.format(cur_date))
+        lines.append('')
+        lines.append('###############################################################')
+        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
+
+        content = header
+
+        for acl_name in sorted(self.acls.keys()):
+
+            lines = []
+            lines.append('')
+            lines.append('// ---------------------------------------------------------------')
+            lines.append('acl {} {{'.format(acl_name))
+            if acl_name in ('allow-notify', 'allow-transfer'):
+                lines.append('\t// Localhost')
+                lines.append('\t127.0.0.1;')
+                lines.append('\t::1;')
+
+            ips_done = []
+
+            for entry in self.acls[acl_name]:
+
+                hostname = entry
+                ip = None
+                ips = []
+                try:
+                    ip = ipaddress.ip_address(entry)
+                    ips.append(entry)
+                    hostname = socket.getfqdn(entry)
+                except ValueError:
+                    for info in socket.getaddrinfo(entry, 53):
+                        if info[0] not in (socket.AF_INET, socket.AF_INET6):
+                            continue
+                        if info[0] == socket.AF_INET:
+                            ips.append(info[4][0])
+                        elif self.named_listen_on_v6:
+                            ips.append(info[4][0])
+
+                if ips and hostname:
+                    lines.append('\t// {}'.format(hostname))
+                    for ip in sorted(ips):
+                        if ip not in ips_done:
+                            lines.append('\t{};'.format(ip))
+                            ips_done.append(ip)
+                else:
+                    msg = "Did not found IP address of {!r} for ACL {!r}.".format(
+                        entry, acl_name)
+                    LOG.error(msg)
+
+            lines.append('};')
+
+            content += '\n'.join(lines) + '\n'
+
+        content += '\n// vim: ts=8 filetype=named noet noai\n'
+
+        with open(self.temp_acl_cfg_file, 'w', **self.open_args) as fh:
+            fh.write(content)
+
+        if self.verbose > 2:
+            LOG.debug("Generated {!r}:\n{}".format(self.temp_acl_cfg_file, content.strip()))
+
+    # -------------------------------------------------------------------------
+    def generate_named_conf(self):                                                          # noqa
+
+        LOG.info("Generating {} ...".format(self.default_named_conf))
+
+        cur_date = datetime.datetime.now().isoformat(' ')
+
+        lines = []
+        lines.append('###############################################################')
+        lines.append('')
+        lines.append(' Main Bind9 configuration file')
+        lines.append(' {}'.format(self.named_conf))
+        lines.append('')
+        lines.append(' Provided by Red Hat bind package to configure the ISC BIND named(8) DNS')
+        lines.append('')
+        lines.append(' See /usr/share/doc/bind*/sample/ for example named configuration files.')
+        lines.append('')
+        lines.append(' See the BIND Administrator\'s Reference Manual (ARM) for details about the')
+        lines.append(' configuration located in /usr/share/doc/bind-{version}/Bv9ARM.html')
+        lines.append('')
+        lines.append(' Generated at: {}'.format(cur_date))
+        lines.append('')
+        lines.append('###############################################################')
+        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
+
+        content = header
+
+        content += '\n// access control lists\n'
+        content += 'include "{}";\n'.format(self.named_acl_cfg_file)
+
+        option_lines = []
+        option_lines.append('options {')
+        option_lines.append('\tlisten-on { any; };')
+        if self.named_listen_on_v6:
+            option_lines.append('\tlisten-on-v6 { any; };')
+        else:
+            option_lines.append('\tlisten-on-v6 { ::1; };')
+        option_lines.append('')
+        option_lines.append('\trecursion no;')
+        option_lines.append('')
+        option_lines.append('\tdirectory "{}";'.format(self.named_basedir))
+        option_lines.append('\tpid-file "{}";'.format(self.named_pidfile))
+        option_lines.append('\tdump-file "{}";'.format(self.named_dump_file))
+        option_lines.append('\tstatistics-file "{}";'.format(self.named_stats_file))
+        option_lines.append('\tsession-keyfile "{}";'.format(self.named_session_keyfile))
+
+        option_lines.append('')
+        option_lines.append('\t// DNSSEC')
+        option_lines.append('\tdnssec-enable yes;')
+        option_lines.append('\tdnssec-validation yes;')
+
+        option_lines.append('')
+        option_lines.append('\t// Path to ISC DLV key')
+        option_lines.append('\tbindkeys-file "{}";'.format(self.named_bindkeys_file))
+
+        option_lines.append('')
+        option_lines.append('\tmanaged-keys-directory "{}";'.format(self.named_managed_keysdir))
+
+        option_lines.append('')
+        option_lines.append('\tallow-transfer {')
+        option_lines.append('\t\tallow-transfer;')
+        option_lines.append('\t};')
+
+        option_lines.append('')
+        option_lines.append('\tallow-notify {')
+        option_lines.append('\t\tallow-notify;')
+        option_lines.append('\t};')
+
+        if not self.named_show_bind_version:
+            option_lines.append('')
+            option_lines.append('\tversion "{}";'.format(self.named_version2show))
+
+        option_lines.append('')
+        option_lines.append('};')
+        content += '\n' + '\n'.join(option_lines) + '\n'
+
+        if not os.path.exists(self.rndc_config_file):
+            LOG.error("File {!r} does not exists, please generate it with `rndc-confgen`.".format(
+                self.rndc_config_file))
+            if not self.simulate:
+                self.exit(8)
+        elif not os.path.isfile(self.rndc_config_file):
+            LOG.error("File {!r} is not a regular file.".format(self.rndc_config_file))
+            if not self.simulate:
+                self.exit(8)
+        content += '\n// Managed Keys of RNDC\n'
+        content += 'include "{}";\n'.format(self.rndc_config_file)
+        content += '\ncontrols {\n'
+        content += '\tinet 127.0.0.1 port 953 allow {\n'
+        content += '\t\t127.0.0.1;\n'
+        content += '\t\t::1/128;\n'
+        content += '\t} keys {\n'
+        content += '\t\t"rndc-key";\n'
+        content += '\t};\n'
+        content += '};\n'
+
+        content += '\n// logging configuration\n'
+        content += 'include "{}";\n'.format(self.named_log_cfg_file)
+
+        # Defining root zone file
+        if not os.path.exists(self.named_root_zone_file_abs):
+            LOG.error("File {!r} does not exists.".format(self.named_root_zone_file_abs))
+            if not self.simulate:
+                self.exit(8)
+        elif not os.path.isfile(self.named_root_zone_file_abs):
+            LOG.error("File {!r} is not a regular file.".format(self.named_root_zone_file_abs))
+            if not self.simulate:
+                self.exit(8)
+        fname = self.named_root_zone_file_rel
+        if os.path.isabs(fname):
+            fname_rel = os.path.relpath(fname, self.named_basedir)
+            if not fname_rel.startswith('../'):
+                fname = fname_rel
+        content += '\nzone "." {\n'
+        content += '\ttype hint;\n'
+        content += '\tfile "{}";\n'.format(fname)
+        content += '};\n'
+
+        # Including zone definitions for default zones
+        if not os.path.exists(self.named_def_zones_file):
+            LOG.error("File {!r} does not exists.".format(self.named_def_zones_file))
+            if not self.simulate:
+                self.exit(8)
+        elif not os.path.isfile(self.named_def_zones_file):
+            LOG.error("File {!r} is not a regular file.".format(self.named_def_zones_file))
+            if not self.simulate:
+                self.exit(8)
+        content += '\n// Default zones per RFC 1912\n'
+        content += 'include "{}";\n'.format(self.named_def_zones_file)
+
+        # Including root keys for DNSSEC
+        if not os.path.exists(self.named_rootkeys_file):
+            LOG.error("File {!r} does not exists.".format(self.named_rootkeys_file))
+            if not self.simulate:
+                self.exit(8)
+        elif not os.path.isfile(self.named_rootkeys_file):
+            LOG.error("File {!r} is not a regular file.".format(self.named_rootkeys_file))
+            if not self.simulate:
+                self.exit(8)
+        content += '\n// Including root keys for DNSSEC\n'
+        content += 'include "{}";\n'.format(self.named_rootkeys_file)
+
+        content += '\n// Including definitions of all slave zones\n'
+        content += 'include "{}";\n'.format(self.named_zones_cfg_file)
+
+        content += '\n// vim: ts=8 filetype=named noet noai\n'
+
+        with open(self.temp_named_conf, 'w', **self.open_args) as fh:
+            fh.write(content)
+
+        if self.verbose > 2:
+            LOG.debug("Generated {!r}:\n{}".format(self.temp_named_conf, content.strip()))
+
+    # -------------------------------------------------------------------------
+    def generate_log_cfg_file(self):                                                        # noqa
+
+        LOG.info("Generating {} ...".format(self.default_named_log_cfg_file))
+
+        cur_date = datetime.datetime.now().isoformat(' ')
+
+        lines = []
+        lines.append('###############################################################')
+        lines.append('')
+        lines.append(' Bind9 configuration for logging')
+        lines.append(' {}'.format(self.named_log_cfg_file))
+        lines.append('')
+        lines.append(' Generated at: {}'.format(cur_date))
+        lines.append('')
+        lines.append('###############################################################')
+        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
+
+        content = header
+
+        content += '\nlogging {\n'
+
+        content += '\n\t// -----------------------------------\n'
+        content += '\t// Channels\n'
+        for channel in sorted(self.log_channels.keys()):
+            channel_def = self.log_channels[channel]
+            lines = []
+            lines.append('')
+            lines.append('\tchannel {} {{'.format(channel))
+            ctype = 'file'
+            if 'type' in channel_def:
+                if channel_def['type'].lower() in ('file', 'syslog', 'stderr', 'null'):
+                    ctype = channel_def['type'].lower()
+                else:
+                    LOG.error("Wrong type {!r} for logging channel {!r}.".format(
+                        channel_def['type'], channel))
+                    continue
+            if ctype == 'file':
+                filename = os.path.join(self.named_logdir, channel + '.log')
+                lines.append('\t\tfile "{}";'.format(filename))
+            elif ctype == 'syslog':
+                fac = 'daemon'
+                if 'facility' in channel_def and channel_def['facility'].strip():
+                    fac = channel_def['facility'].strip().lower()
+                lines.append('\t\tsyslog {};'.format(fac))
+            else:
+                lines.append('\t\t{};'.format(ctype))
+
+            if 'severity' in channel_def and channel_def['severity'].strip():
+                lines.append('\t\tseverity {};'.format(channel_def['severity'].strip().lower()))
+
+            if 'print-category' in channel_def:
+                if to_bool(channel_def['print-category']):
+                    lines.append('\t\tprint-category yes;')
+
+            if 'print-severity' in channel_def:
+                if to_bool(channel_def['print-severity']):
+                    lines.append('\t\tprint-severity yes;')
+
+            if 'print-time' in channel_def:
+                if to_bool(channel_def['print-time']):
+                    lines.append('\t\tprint-time yes;')
+
+            lines.append('\t};')
+
+            content += '\n'.join(lines) + '\n'
+
+        content += '\n\t// -----------------------------------\n'
+        content += '\t// Categories\n'
+        for cat_name in sorted(self.log_categories.keys()):
+            lines = []
+            lines.append('')
+            channels = self.log_categories[cat_name]
+            lines.append('\tcategory {} {{'.format(cat_name))
+
+            if not channels:
+                channels = ['null']
+            if cat_name == 'queries':
+                if self.query_log:
+                    if 'query' not in channels:
+                        channels.append('query')
+                else:
+                    if 'query' in channels:
+                        channels.remove('query')
+
+            for channel in channels:
+                lines.append('\t\t{};'.format(channel))
+
+            lines.append('\t};')
+
+            content += '\n'.join(lines) + '\n'
+
+        content += '\n};\n'
+        content += '\n// vim: ts=8 filetype=named noet noai\n'
+
+        with open(self.temp_log_cfg_file, 'w', **self.open_args) as fh:
+            fh.write(content)
+
+        if self.verbose > 2:
+            LOG.debug("Generated {!r}:\n{}".format(self.temp_log_cfg_file, content.strip()))
+
+    # -------------------------------------------------------------------------
+    def generate_slave_cfg_file(self):
+
+        LOG.info("Generating {} ...".format(self.default_named_zones_cfg_file))
+
+        cur_date = datetime.datetime.now().isoformat(' ')
+        re_rev = re.compile(r'^rev\.', re.IGNORECASE)
+        re_trail_dot = re.compile(r'\.+$')
+
+        lines = []
+        lines.append('###############################################################')
+        lines.append('')
+        lines.append(' Bind9 configuration file for slave zones')
+        lines.append(' {}'.format(self.named_zones_cfg_file))
+        lines.append('')
+        lines.append(' Generated at: {}'.format(cur_date))
+        lines.append('')
+        lines.append('###############################################################')
+        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
+
+        content = header
+
+        for zone in self.zones:
+
+            account = str(zone['account']).lower().strip()
+
+            zname = re_trail_dot.sub('', zone['name'])
+            show_name = zone['canonical_name']
+            show_name = re_rev.sub('Reverse ', show_name)
+            show_name = re_trail_dot.sub('', show_name)
+            if account.startswith('intern') or account.startswith('local'):
+                if not self.is_internal:
+                    LOG.debug("Ignoring zone {!r}, because it's an internal zone.".format(zname))
+                    continue
+            else:
+                if self.is_internal:
+                    LOG.debug("Ignoring zone {!r}, because it's a public zone.".format(zname))
+                    continue
+
+            zfile = os.path.join(
+                self.named_slavedir_rel, re_trail_dot.sub('', zone['canonical_name']) + '.zone')
+
+            lines = []
+            lines.append('')
+            lines.append('// {}'.format(show_name))
+            lines.append('zone "{}" in {{'.format(zname))
+            lines.append('\tmasters {')
+            for master in self.zone_masters:
+                lines.append('\t\t{};'.format(master))
+            lines.append('\t};')
+            lines.append('\ttype slave;')
+            lines.append('\tfile "{}";'.format(zfile))
+            lines.append('};')
+
+            content += '\n'.join(lines) + '\n'
+
+        content += '\n// vim: ts=8 filetype=named noet noai\n'
+
+        with open(self.temp_zones_cfg_file, 'w', **self.open_args) as fh:
+            fh.write(content)
+
+        if self.verbose > 2:
+            LOG.debug("Generated {!r}:\n{}".format(self.temp_zones_cfg_file, content.strip()))
+
+    # -------------------------------------------------------------------------
+    def get_api_zones(self):
+
+        LOG.info("Trying to get all zones from PDNS API ...")
+
+        headers = {}
+        if self.pdns_api_key:
+            headers['X-API-Key'] = self.pdns_api_key
+
+        path = os.path.join(
+            self.pdns_api_root_path, 'servers', self.pdns_api_server_id, 'zones')
+        server = self.pdns_api_host
+        if self.pdns_api_port != 80:
+            server = '{}:{}'.format(server, self.pdns_api_port)
+        url = urlunsplit(('http', server, path, None, None))
+        LOG.debug("URL to send API call: {!r}.".format(url))
+        if self.verbose > 1:
+            LOG.debug("Headers:\n%s", pp(headers))
+        session = requests.Session()
+        response = session.request(
+            'GET', url, headers=headers, timeout=10)
+        if self.verbose > 1:
+            LOG.debug("Response status code: {}".format(response.status_code))
+        if not response.ok:
+            try:
+                err = response.json()
+                code = err['httpStatus']
+                msg = err['messages']
+                LOG.error("Got an error from API ({}) with status {}: {}".format(
+                    url, code, msg))
+                self.exit(6)
+            except ValueError:
+                msg = 'Failed to parse the response from {!r}: {}'.format(
+                    url, response.text)
+                LOG.error(msg)
+                self.exit(6)
+
+        json_response = response.json()
+        if self.verbose > 3:
+            LOG.debug("Got a response:\n{}".format(pp(json_response)))
+
+        for entry in json_response:
+
+            # {   'account': '',
+            #     'dnssec': False,
+            #     'id': '56.66.217.in-addr.arpa.',
+            #     'kind': 'Master',
+            #     'last_check': 0,
+            #     'masters': [],
+            #     'name': '56.66.217.in-addr.arpa.',
+            #     'notified_serial': 2018080202,
+            #     'serial': 2018080202,
+            #     'url': 'api/v1/servers/localhost/zones/56.66.217.in-addr.arpa.'},
+
+            zone_name = entry['name']
+            zone = {
+                'name': zone_name,
+                'account': entry['account'],
+                'kind': entry['kind'],
+                'serial': entry['serial'],
+            }
+
+            if entry['dnssec']:
+                self.named_dnssec = True
+            if self.verbose > 1:
+                LOG.debug("Found zone {!r}.".format(zone_name))
+
+            uni_name = None
+            match = self.re_ipv4_zone.search(zone_name)
+            if match:
+                prefix = self._get_ipv4_prefix(match.group(1))
+                if prefix:
+                    if prefix == '127.0.0':
+                        LOG.debug("Pure local zone {!r} will not be considered.".format(prefix))
+                        continue
+                    uni_name = 'rev.' + prefix
+
+            if not uni_name:
+                match = self.re_ipv6_zone.search(zone_name)
+                if match:
+                    prefix = self._get_ipv6_prefix(match.group(1))
+                    if prefix:
+                        uni_name = 'rev.' + prefix
+
+            if not uni_name:
+                uni_name = zone_name.encode('utf-8').decode('idna')
+
+            zone['canonical_name'] = uni_name
+
+            self.zones.append(zone)
+
+        self.zones.sort(key=lambda x: x['canonical_name'])
+
+        if self.verbose > 2:
+            LOG.debug("Got zones:\n{}".format(pp(self.zones)))
+
+    # -------------------------------------------------------------------------
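+    # Note (illustrative example, derived from the regexes above): for a
+    # reverse IPv4 zone such as '56.66.217.in-addr.arpa.' the captured label
+    # part '56.66.217.' is reversed by _get_ipv4_prefix() to '217.66.56',
+    # giving the canonical name 'rev.217.66.56'. _get_ipv6_prefix() does the
+    # same for ip6.arpa zones, grouping the reversed nibbles into hextets and
+    # stripping leading zeros, e.g. '8.b.d.0.1.0.0.2.' -> '2001:db8'.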
+    def _get_ipv4_prefix(self, match):
+
+        tuples = []
+        for t in match.split('.'):
+            if t:
+                tuples.insert(0, t)
+        if self.verbose > 2:
+            LOG.debug("Got IPv4 tuples: {}".format(pp(tuples)))
+        return '.'.join(tuples)
+
+    # -------------------------------------------------------------------------
+    def _get_ipv6_prefix(self, match):
+
+        tuples = []
+        for t in match.split('.'):
+            if t:
+                tuples.insert(0, t)
+
+        tokens = []
+        while len(tuples):
+            token = ''.join(tuples[0:4]).ljust(4, '0')
+            if token.startswith('000'):
+                token = token[3:]
+            elif token.startswith('00'):
+                token = token[2:]
+            elif token.startswith('0'):
+                token = token[1:]
+            tokens.append(token)
+            del tuples[0:4]
+
+        if self.verbose > 2:
+            LOG.debug("Got IPv6 tokens: {}".format(pp(tokens)))
+
+        return ':'.join(tokens)
+
+    # -------------------------------------------------------------------------
+    def compare_files(self):
+
+        LOG.info("Comparing generated files with existing ones.")
+
+        if not self.files_equal_content(self.temp_named_conf, self.named_conf):
+            self.reload_necessary = True
+            self.restart_necessary = True
+            self.files2replace[self.named_conf] = self.temp_named_conf
+
+        if not self.files_equal_content(self.temp_acl_cfg_file, self.named_acl_cfg_file):
+            self.reload_necessary = True
+            self.files2replace[self.named_acl_cfg_file] = self.temp_acl_cfg_file
+
+        if not self.files_equal_content(self.temp_log_cfg_file, self.named_log_cfg_file):
+            self.reload_necessary = True
+            self.restart_necessary = True
+            self.files2replace[self.named_log_cfg_file] = self.temp_log_cfg_file
+
+        if not self.files_equal_content(self.temp_zones_cfg_file, self.named_zones_cfg_file):
+            self.reload_necessary = True
+            self.files2replace[self.named_zones_cfg_file] = self.temp_zones_cfg_file
+
+        if self.verbose > 1:
+            LOG.debug("Files to replace:\n{}".format(pp(self.files2replace)))
+
+    # -------------------------------------------------------------------------
+    def files_equal_content(self, file_src, file_tgt):
+
+        LOG.debug("Comparing {!r} with {!r} ...".format(file_src, file_tgt))
+
+        if not file_src:
+            raise PpConfigNamedError("Source file not defined.")
+        if not file_tgt:
+            raise PpConfigNamedError("Target file not defined.")
+
+        if not os.path.exists(file_src):
+            raise PpConfigNamedError("Source file {!r} does not exists.".format(file_src))
+        if not os.path.isfile(file_src):
+            raise PpConfigNamedError("Source file {!r} is not a regular file.".format(file_src))
+
+        if not os.path.exists(file_tgt):
+            LOG.debug("Target file {!r} does not exists.".format(file_tgt))
+            return False
+        if not os.path.isfile(file_tgt):
+            raise PpConfigNamedError("Target file {!r} is not a regular file.".format(file_tgt))
+
+        content_src = ''
+        if self.verbose > 2:
+            LOG.debug("Reading {!r} ...".format(file_src))
+        with open(file_src, 'r', **self.open_args) as fh:
+            content_src = fh.read()
+        lines_str_src = self.re_block_comment.sub('', content_src)
+        lines_str_src = self.re_line_comment.sub('', lines_str_src)
+        lines_src = []
+        for line in lines_str_src.splitlines():
+            line = line.strip()
+            if line:
+                lines_src.append(line)
+        if self.verbose > 3:
+            LOG.debug("Cleaned version of {!r}:\n{}".format(
+                file_src, '\n'.join(lines_src)))
+
+        content_tgt = ''
+        if self.verbose > 2:
+            LOG.debug("Reading {!r} ...".format(file_tgt))
+        with open(file_tgt, 'r', **self.open_args) as fh:
+            content_tgt = fh.read()
+        lines_str_tgt = self.re_block_comment.sub('', content_tgt)
+        lines_str_tgt = self.re_line_comment.sub('', lines_str_tgt)
+        lines_tgt = []
+        for line in lines_str_tgt.splitlines():
+            line = line.strip()
+            if line:
+                lines_tgt.append(line)
+        if self.verbose > 3:
+            LOG.debug("Cleaned version of {!r}:\n{}".format(
+                file_tgt, '\n'.join(lines_tgt)))
+
+        if len(lines_src) != len(lines_tgt):
+            LOG.debug((
+                "Source file {!r} has different number essential lines ({}) than "
+                "the target file {!r} ({} lines).").format(
+                file_src, len(lines_src), file_tgt, len(lines_tgt)))
+            return False
+
+        i = 0
+        while i < len(lines_src):
+            if lines_src[i] != lines_tgt[i]:
+                LOG.debug((
+                    "Source file {!r} has a different content than "
+                    "the target file {!r}.").format(file_src, lines_tgt))
+                return False
+            i += 1
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def check_directories(self):
+
+        LOG.info("Checking all necessary directories for existence and ownership.")
+
+        self.check_directory(self.named_conf_dir)
+        self.check_directory(self.named_basedir, None, self.named_gid, 0o750)
+        self.check_directory(self.named_datadir_abs, self.named_uid, self.named_gid, 0o770)
+        self.check_directory(self.named_dump_dir, self.named_uid, self.named_gid, 0o770)
+        self.check_directory(self.named_stats_dir, self.named_uid, self.named_gid, 0o770)
+        self.check_directory(self.named_slavedir_abs, self.named_uid, self.named_gid, 0o770)
+        self.check_directory(self.named_managed_keysdir, self.named_uid, self.named_gid, 0o770)
+        self.check_directory(self.named_logdir, self.named_uid, self.named_gid, 0o755)
+
+    # -------------------------------------------------------------------------
+    def check_directory(self, dirname, owner_id=None, group_id=None, mode=None):            # noqa
+
+        LOG.debug("Checking directory {!r} ...".format(dirname))
+
+        if not os.path.exists(dirname):
+            LOG.info("Creating directory {!r} ...".format(dirname))
+            if not self.simulate:
+                os.makedirs(dirname, mode=0o755)
+        elif not os.path.isdir(dirname):
+            LOG.error("Path {!r} exists, but is not a directory.".format(dirname))
+            return False
+        else:
+            LOG.debug("Directory {!r} already exists.".format(dirname))
+
+        fstat = None
+        if os.path.exists(dirname):
+            fstat = os.lstat(dirname)
+        else:
+            fstat = os.lstat('/etc')
+
+        uid_set = -1
+        gid_set = -1
+        if owner_id is not None:
+            if fstat.st_uid != owner_id:
+                uid_set = owner_id
+        if group_id is not None:
+            if fstat.st_gid != group_id:
+                gid_set = group_id
+
+        if owner_id is not None and group_id is not None:
+            cur_user = fstat.st_uid
+            cur_group = fstat.st_gid
+            try:
+                cur_user = '{!r}'.format(pwd.getpwuid(fstat.st_uid).pw_name)
+            except KeyError as e:
+                LOG.warn("User id {} not known: {}".format(fstat.st_uid, e))
+            try:
+                cur_group = '{!r}'.format(grp.getgrgid(fstat.st_gid).gr_name)
+            except KeyError as e:
+                LOG.warn("Group id {} not known: {}".format(fstat.st_gid, e))
+            LOG.debug("Current owners of {!r} are {}:{} ({}:{}).".format(
+                dirname, fstat.st_uid, fstat.st_gid, cur_user, cur_group))
+
+        if uid_set != -1 or gid_set != -1:
+            LOG.info("Setting ownership of {!r} to {}:{} ...".format(
+                dirname, uid_set, gid_set))
+            if not self.simulate:
+                os.chown(dirname, uid_set, gid_set)
+
+        if mode is not None:
+            current_permissions = stat.S_IMODE(fstat.st_mode)
+            LOG.debug("Current permissions of {!r} are {:04o}.".format(
+                dirname, current_permissions))
+            new_mode = mode
+
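+            # Widen the requested mode so the directory stays usable: write
+            # permission implies read, and read permission on a directory
+            # implies execute (search) permission. A requested mode of e.g.
+            # 0o640 therefore ends up as 0o750.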
+            if new_mode & stat.S_IWUSR:
+                new_mode |= stat.S_IRUSR
+            if new_mode & stat.S_IRUSR:
+                new_mode |= stat.S_IXUSR
+
+            if new_mode & stat.S_IWGRP:
+                new_mode |= stat.S_IRGRP
+            if new_mode & stat.S_IRGRP:
+                new_mode |= stat.S_IXGRP
+
+            if new_mode & stat.S_IWOTH:
+                new_mode |= stat.S_IROTH
+            if new_mode & stat.S_IROTH:
+                new_mode |= stat.S_IXOTH
+
+            if new_mode != current_permissions:
+                LOG.info("Setting permissions of {!r} to {:04o} ...".format(
+                    dirname, new_mode))
+                if not self.simulate:
+                    os.chmod(dirname, new_mode)
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def replace_configfiles(self):
+
+        if not self.files2replace:
+            LOG.debug("No replacement of any config files necessary.")
+            return
+
+        LOG.debug("Start replacing of config files ...")
+
+        for tgt_file in self.files2replace.keys():
+
+            backup_file = tgt_file + self.backup_suffix
+
+            if os.path.exists(tgt_file):
+                self.moved_files[tgt_file] = backup_file
+                LOG.info("Copying {!r} => {!r} ...".format(tgt_file, backup_file))
+                if not self.simulate:
+                    shutil.copy2(tgt_file, backup_file)
+
+        if self.verbose > 1:
+            LOG.debug("All backuped config files:\n{}".format(pp(self.moved_files)))
+
+        for tgt_file in self.files2replace.keys():
+            src_file = self.files2replace[tgt_file]
+            LOG.info("Copying {!r} => {!r} ...".format(src_file, tgt_file))
+            if not self.simulate:
+                shutil.copy2(src_file, tgt_file)
+
+    # -------------------------------------------------------------------------
+    def restore_configfiles(self):
+
+        LOG.error("Restoring of original config files because of an exception.")
+
+        for tgt_file in self.moved_files.keys():
+            backup_file = self.moved_files[tgt_file]
+            LOG.info("Moving {!r} => {!r} ...".format(backup_file, tgt_file))
+            if not self.simulate:
+                if os.path.exists(backup_file):
+                    os.rename(backup_file, tgt_file)
+                else:
+                    LOG.error("Could not find backup file {!r}.".format(backup_file))
+
+    # -------------------------------------------------------------------------
+    def cleanup(self):
+
+        LOG.info("Cleaning up ...")
+
+        for tgt_file in self.moved_files.keys():
+            backup_file = self.moved_files[tgt_file]
+            LOG.debug("Searching for {!r}.".format(backup_file))
+            if os.path.exists(backup_file):
+                LOG.info("Removing {!r} ...".format(backup_file))
+                if not self.simulate:
+                    os.remove(backup_file)
+
+        # -----------------------
+        def emit_rm_err(function, path, excinfo):
+            LOG.error("Error removing {!r} - {}: {}".format(
+                path, excinfo[1].__class__.__name__, excinfo[1]))
+
+        if self.tempdir:
+            if self.keep_tempdir:
+                msg = (
+                    "Temporary directory {!r} will not be removed. "
+                    "It's on yours to remove it manually.").format(self.tempdir)
+                LOG.warn(msg)
+            else:
+                LOG.debug("Destroying temporary directory {!r} ...".format(self.tempdir))
+                shutil.rmtree(self.tempdir, False, emit_rm_err)
+                self.tempdir = None
+
+    # -------------------------------------------------------------------------
+    def check_namedconf(self):
+
+        LOG.info("Checking syntax correctness of named.conf ...")
+        cmd = shlex.split(self.cmd_checkconf)
+        LOG.debug("Executing: {}".format(' '.join(cmd)))
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
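+        # named-checkconf is run with a timeout: if it does not finish in
+        # time it is killed and its remaining output collected; a non-zero
+        # exit status is reported as a failed syntax check.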
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=10)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.warn("Output on STDOUT: {}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.warn("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def apply_config(self):
+
+        if not self.reload_necessary and not self.restart_necessary:
+            LOG.info("Reload or restart of named is not necessary.")
+            return
+
+        running = self.named_running()
+        if not running:
+            LOG.warn("Named is not running, please start it manually.")
+            return
+
+        if self.restart_necessary:
+            self.restart_named()
+        else:
+            self.reload_named()
+
+    # -------------------------------------------------------------------------
+    def named_running(self):
+
+        LOG.debug("Checking, whether named is running ...")
+
+        cmd = shlex.split(self.cmd_status)
+        LOG.debug("Executing: {}".format(' '.join(cmd)))
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=10)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.warn("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def start_named(self):
+
+        LOG.info("Starting named ...")
+
+        cmd = shlex.split(self.cmd_start)
+        LOG.debug("Executing: {}".format(' '.join(cmd)))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def restart_named(self):
+
+        LOG.info("Restarting named ...")
+
+        cmd = shlex.split(self.cmd_restart)
+        LOG.debug("Executing: {}".format(' '.join(cmd)))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def reload_named(self):
+
+        LOG.info("Reloading named ...")
+
+        cmd = shlex.split(self.cmd_reload)
+        LOG.debug("Executing: {}".format(' '.join(cmd)))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/deploy_zones_from_pdns.py b/lib/pp_lib/deploy_zones_from_pdns.py
new file mode 100644 (file)
index 0000000..4c404d0
--- /dev/null
@@ -0,0 +1,884 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: A module for the application class for deploying slave zones from PowerDNS
+"""
+from __future__ import absolute_import
+
+import os
+import logging
+import logging.config
+import textwrap
+import re
+import shlex
+import copy
+import datetime
+import socket
+import tempfile
+import time
+import shutil
+import pipes
+
+from subprocess import Popen, TimeoutExpired, PIPE
+
+from functools import cmp_to_key
+
+# Third party modules
+import six
+from pytz import timezone, UnknownTimeZoneError
+
+# Own modules
+from .common import pp, compare_fqdn, to_str, to_bool
+
+from .pdns_app import PpPDNSAppError, PpPDNSApplication
+
+from .pidfile import PidFileError, PidFile
+
+__version__ = '0.5.4'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpDeployZonesError(PpPDNSAppError):
+    pass
+
+
+# =============================================================================
+class PpDeployZonesApp(PpPDNSApplication):
+    """
+    Class for an application 'dns-deploy-zones' for configuring slaves
+    of the BIND named daemon.
+    """
+
+    default_pidfile = '/run/dns-deploy-zones.pid'
+
+    default_named_conf_dir = '/etc'
+    default_named_zones_cfg_file = 'named.zones.conf'
+    default_named_basedir = '/var/named'
+    default_named_slavedir = 'slaves'
+
+    zone_masters_local = [
+        '217.66.53.87',
+    ]
+
+    zone_masters_public = [
+        '217.66.53.97',
+    ]
+
+    default_cmd_checkconf = '/usr/sbin/named-checkconf'
+    default_cmd_reload = '/usr/sbin/rndc reload'
+    default_cmd_status = '/usr/bin/systemctl status named.service'
+    default_cmd_start = '/usr/bin/systemctl start named.service'
+    default_cmd_restart = '/usr/bin/systemctl restart named.service'
+
+    re_ipv4_zone = re.compile(r'^((?:\d+\.)+)in-addr\.arpa\.$')
+    re_ipv6_zone = re.compile(r'^((?:[\da-f]\.)+)ip6\.arpa\.$')
+
+    re_block_comment = re.compile(r'/\*.*?\*/', re.MULTILINE | re.DOTALL)
+    re_line_comment = re.compile(r'(?://|#).*$', re.MULTILINE)
+
+    re_split_addresses = re.compile(r'[,;\s]+')
+    re_integer = re.compile(r'^\s*(\d+)\s*$')
+
+    open_args = {}
+    if six.PY3:
+        open_args = {
+            'encoding': 'utf-8',
+            'errors': 'surrogateescape',
+        }
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, base_dir=None, version=__version__):
+
+        self.zones = []
+        self.pidfile = None
+
+        self._show_simulate_opt = True
+
+        self.is_internal = False
+        self.named_listen_on_v6 = False
+        self.pidfile_name = self.default_pidfile
+
+        # Configuration files and directories
+        self.named_conf_dir = self.default_named_conf_dir
+        self._named_zones_cfg_file = self.default_named_zones_cfg_file
+        self.named_basedir = self.default_named_basedir
+        self._named_slavedir = self.default_named_slavedir
+
+        self.zone_masters = copy.copy(self.zone_masters_public)
+        self.masters_configured = False
+
+        self.tempdir = None
+        self.temp_zones_cfg_file = None
+        self.keep_tempdir = False
+        self.keep_backup = False
+
+        self.backup_suffix = (
+            '.' + datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S') + '.bak')
+
+        self.reload_necessary = False
+        self.restart_necessary = False
+
+        self.cmd_checkconf = self.default_cmd_checkconf
+        self.cmd_reload = self.default_cmd_reload
+        self.cmd_status = self.default_cmd_status
+        self.cmd_start = self.default_cmd_start
+        self.cmd_restart = self.default_cmd_restart
+
+        self.files2replace = {}
+        self.moved_files = {}
+
+        description = textwrap.dedent('''\
+            Generation of the BIND9 configuration file for slave zones.
+            ''')
+
+        super(PpDeployZonesApp, self).__init__(
+            appname=appname, version=version, description=description,
+            base_dir=base_dir, cfg_stems='dns-deploy-zones', environment="public",
+        )
+
+        self.post_init()
+
+    # -------------------------------------------
+    @property
+    def named_zones_cfg_file(self):
+        """The file for configuration of all own zones."""
+        return os.path.join(self.named_conf_dir, self._named_zones_cfg_file)
+
+    # -------------------------------------------
+    @property
+    def named_slavedir_rel(self):
+        """The directory for zone files of slave zones."""
+        return self._named_slavedir
+
+    # -------------------------------------------
+    @property
+    def named_slavedir_abs(self):
+        """The directory for zone files of slave zones."""
+        return os.path.join(self.named_basedir, self._named_slavedir)
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+
+        super(PpDeployZonesApp, self).init_arg_parser()
+
+        self.arg_parser.add_argument(
+            '-B', '--backup', dest="keep_backup", action='store_true',
+            help=("Keep a backup file for each changed configuration file."),
+        )
+
+        self.arg_parser.add_argument(
+            '-K', '--keep-tempdir', dest='keep_tempdir', action='store_true',
+            help=(
+                "Keeping the temporary directory instead of removing it at the end "
+                "(e.g. for debugging purposes)"),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+        """
+
+        super(PpDeployZonesApp, self).perform_arg_parser()
+
+        if self.args.keep_tempdir:
+            self.keep_tempdir = True
+
+        if self.args.keep_backup:
+            self.keep_backup = True
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpDeployZonesApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 3:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            section = self.cfg[section_name]
+
+            if section_name.lower() == 'app':
+                self._check_path_config(section, section_name, 'pidfile', 'pidfile_name', True)
+                if 'keep-backup' in section:
+                    self.keep_backup = to_bool(section['keep-backup'])
+                if 'keep_backup' in section:
+                    self.keep_backup = to_bool(section['keep_backup'])
+
+            if section_name.lower() == 'named':
+                self.set_named_options(section, section_name)
+
+        if not self.masters_configured:
+            if self.environment == 'local':
+                self.zone_masters = copy.copy(self.zone_masters_local)
+            else:
+                self.zone_masters = copy.copy(self.zone_masters_public)
+
+    # -------------------------------------------------------------------------
+    def set_named_options(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        # Configuration files and directories
+        self._check_path_config(
+            section, section_name, 'config_dir', 'named_conf_dir', True)
+        self._check_path_config(
+            section, section_name, 'zones_cfg_file', '_named_zones_cfg_file', False)
+        self._check_path_config(section, section_name, 'base_dir', 'named_basedir', True)
+        self._check_path_config(section, section_name, 'slave_dir', '_named_slavedir', False)
+
+        if 'listen_on_v6' in section and section['listen_on_v6'] is not None:
+            self.named_listen_on_v6 = to_bool(section['listen_on_v6'])
+
+        if 'masters' in section:
+            self._get_masters_from_cfg(section['masters'], section_name)
+
+        for item in ('cmd_checkconf', 'cmd_reload', 'cmd_status', 'cmd_start', 'cmd_restart'):
+            if item in section and section[item].strip():
+                setattr(self, item, section[item].strip())
+
+    # -------------------------------------------------------------------------
+    def _get_masters_from_cfg(self, value, section_name):
+
+        value = value.strip()
+        if not value:
+            msg = "No masters given in [{}]/masters.".format(section_name)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        masters = []
+
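+        # Each configured master may be a hostname or an IP address;
+        # getaddrinfo() resolves it to concrete addresses. IPv6 results are
+        # skipped unless listen_on_v6 is enabled, and duplicates are dropped.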
+        for m in self.re_split_addresses.split(value):
+            if m:
+                m = m.strip().lower()
+                LOG.debug("Checking given master address {!r} ...".format(m))
+                try:
+                    addr_infos = socket.getaddrinfo(
+                        m, 53, proto=socket.IPPROTO_TCP)
+                    for addr_info in addr_infos:
+                        addr = addr_info[4][0]
+                        if not self.named_listen_on_v6 and addr_info[0] == socket.AF_INET6:
+                            msg = (
+                                "Not using {!r} as a master IP address, because "
+                                "we are not using IPv6.").format(addr)
+                            LOG.debug(msg)
+                            continue
+                        if addr in masters:
+                            LOG.debug("Address {!r} already in masters yet.".format(addr))
+                        else:
+                            LOG.debug("Address {!r} not in masters yet.".format(addr))
+                            masters.append(addr)
+
+                except socket.gaierror as e:
+                    msg = (
+                        "Invalid hostname or address {!r} found in "
+                        "[{}]/masters: {}").format(m, section_name, e)
+                    LOG.error(msg)
+                    self.config_has_errors = True
+                    m = None
+        if masters:
+            if self.verbose > 2:
+                LOG.debug("Using configured masters: {}".format(pp(masters)))
+            self.zone_masters = masters
+            self.masters_configured = True
+        else:
+            LOG.warn("No valid masters found in configuration.")
+
+    # -------------------------------------------------------------------------
+    def post_init(self):
+
+        super(PpDeployZonesApp, self).post_init()
+        self.initialized = False
+
+        if not self.quiet:
+            print('')
+
+        LOG.debug("Post init phase.")
+
+        LOG.debug("Checking for masters, which are local addresses ...")
+        ext_masters = []
+        for addr in self.zone_masters:
+            if addr in self.local_addresses:
+                LOG.debug(
+                    "Address {!r} IS in list of local addresses.".format(addr))
+            else:
+                LOG.debug(
+                    "Address {!r} not in list of local addresses.".format(addr))
+                ext_masters.append(addr)
+        self.zone_masters = ext_masters
+        LOG.info("Using masters for slave zones: {}".format(
+            ', '.join(map(lambda x: '{!r}'.format(x), self.zone_masters))))
+
+        self.pidfile = PidFile(
+            filename=self.pidfile_name, appname=self.appname, verbose=self.verbose,
+            base_dir=self.base_dir, simulate=self.simulate)
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        May be overridden by descendant classes.
+
+        """
+
+        super(PpDeployZonesApp, self).pre_run()
+
+        if self.environment == 'global':
+            LOG.error(
+                "Using the global DNS master is not supported, "
+                "please use 'local' or 'public'")
+            self.exit(1)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        local_tz_name = 'Europe/Berlin'
+        if 'TZ' in os.environ and os.environ['TZ']:
+            local_tz_name = os.environ['TZ']
+        try:
+            local_tz = timezone(local_tz_name)
+        except UnknownTimeZoneError:
+            LOG.error("Unknown time zone: {!r}.".format(local_tz_name))
+            self.exit(6)
+
+        my_uid = os.geteuid()
+        if my_uid:
+            msg = "You must be root to execute this script."
+            if self.simulate:
+                LOG.warn(msg)
+                time.sleep(1)
+            else:
+                LOG.error(msg)
+                self.exit(1)
+
+        try:
+            self.pidfile.create()
+        except PidFileError as e:
+            LOG.error("Could not occupy pidfile: {}".format(e))
+            self.exit(7)
+            return
+
+        try:
+
+            LOG.info("Starting: {}".format(
+                datetime.datetime.now(local_tz).strftime('%Y-%m-%d %H:%M:%S %Z')))
+
+            self.zones = self.get_api_zones()
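+            # compare_fqdn() is a classic cmp-style comparator, so it is
+            # wrapped with functools.cmp_to_key() to obtain sortable key
+            # objects from the unicode zone names.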
+            self.zones.sort(key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode))
+
+            self.init_temp_objects()
+            self.generate_slave_cfg_file()
+            self.compare_files()
+
+            try:
+                self.replace_configfiles()
+                if not self.check_namedconf():
+                    self.restore_configfiles()
+                    self.exit(99)
+                self.apply_config()
+            except Exception:
+                self.restore_configfiles()
+                raise
+
+        finally:
+            self.cleanup()
+            self.pidfile = None
+            LOG.info("Ending: {}".format(
+                datetime.datetime.now(local_tz).strftime('%Y-%m-%d %H:%M:%S %Z')))
+
+    # -------------------------------------------------------------------------
+    def cleanup(self):
+
+        LOG.info("Cleaning up ...")
+
+        for tgt_file in self.moved_files.keys():
+            backup_file = self.moved_files[tgt_file]
+            LOG.debug("Searching for {!r}.".format(backup_file))
+            if os.path.exists(backup_file):
+                if self.keep_backup:
+                    LOG.info("Keep existing backup file {!r}.".format(backup_file))
+                else:
+                    LOG.info("Removing {!r} ...".format(backup_file))
+                    if not self.simulate:
+                        os.remove(backup_file)
+
+        # -----------------------
+        def emit_rm_err(function, path, excinfo):
+            LOG.error("Error removing {!r} - {}: {}".format(
+                path, excinfo[1].__class__.__name__, excinfo[1]))
+
+        if self.tempdir:
+            if self.keep_tempdir:
+                msg = (
+                    "Temporary directory {!r} will not be removed. "
+                    "It's on yours to remove it manually.").format(self.tempdir)
+                LOG.warn(msg)
+            else:
+                LOG.debug("Destroying temporary directory {!r} ...".format(self.tempdir))
+                shutil.rmtree(self.tempdir, False, emit_rm_err)
+                self.tempdir = None
+
+    # -------------------------------------------------------------------------
+    def init_temp_objects(self):
+        """Init temporary objects and properties."""
+
+        self.tempdir = tempfile.mkdtemp(
+            prefix=(self.appname + '.'), suffix='.tmp.d'
+        )
+        LOG.debug("Temporary directory: {!r}.".format(self.tempdir))
+
+        self.temp_zones_cfg_file = os.path.join(
+            self.tempdir, self.default_named_zones_cfg_file)
+
+        if self.verbose > 1:
+            LOG.debug("Temporary zones conf: {!r}".format(self.temp_zones_cfg_file))
+
+    # -------------------------------------------------------------------------
+    def generate_slave_cfg_file(self):
+
+        LOG.info("Generating {} ...".format(self.default_named_zones_cfg_file))
+
+        cur_date = datetime.datetime.now().isoformat(' ')
+        re_rev = re.compile(r'^rev\.', re.IGNORECASE)
+        re_trail_dot = re.compile(r'\.+$')
+
+        lines = []
+        lines.append('###############################################################')
+        lines.append('')
+        lines.append(' Bind9 configuration file for slave zones')
+        lines.append(' {}'.format(self.named_zones_cfg_file))
+        lines.append('')
+        lines.append(' Generated at: {}'.format(cur_date))
+        lines.append('')
+        lines.append('###############################################################')
+        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
+
+        content = header
+
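+        # Each zone below is rendered as a stanza roughly like this
+        # (illustrative only; 'example.com' is a placeholder, the master
+        # address and slave directory depend on the configuration):
+        #
+        #   // example.com
+        #   zone "example.com" in {
+        #           masters {
+        #                   217.66.53.97;
+        #           };
+        #           type slave;
+        #           file "slaves/example.com.zone";
+        #   };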
+        for zone in self.zones:
+
+            canonical_name = zone.name_unicode
+            match = self.re_ipv4_zone.search(zone.name)
+            if match:
+                prefix = self._get_ipv4_prefix(match.group(1))
+                if prefix:
+                    if prefix == '127.0.0':
+                        LOG.debug("Pure local zone {!r} will not be considered.".format(prefix))
+                        continue
+                    canonical_name = 'rev.' + prefix
+            else:
+                match = self.re_ipv6_zone.search(zone.name)
+                if match:
+                    prefix = self._get_ipv6_prefix(match.group(1))
+                    if prefix:
+                        canonical_name = 'rev.' + prefix
+
+            show_name = canonical_name
+            show_name = re_rev.sub('Reverse ', show_name)
+            show_name = re_trail_dot.sub('', show_name)
+            zname = re_trail_dot.sub('', zone.name)
+
+            zfile = os.path.join(
+                self.named_slavedir_rel, re_trail_dot.sub('', canonical_name) + '.zone')
+
+            lines = []
+            lines.append('')
+            lines.append('// {}'.format(show_name))
+            lines.append('zone "{}" in {{'.format(zname))
+            lines.append('\tmasters {')
+            for master in self.zone_masters:
+                lines.append('\t\t{};'.format(master))
+            lines.append('\t};')
+            lines.append('\ttype slave;')
+            lines.append('\tfile "{}";'.format(zfile))
+            lines.append('};')
+
+            content += '\n'.join(lines) + '\n'
+
+        content += '\n// vim: ts=8 filetype=named noet noai\n'
+
+        with open(self.temp_zones_cfg_file, 'w', **self.open_args) as fh:
+            fh.write(content)
+
+        if self.verbose > 2:
+            LOG.debug("Generated {!r}:\n{}".format(self.temp_zones_cfg_file, content.strip()))
+
+    # -------------------------------------------------------------------------
+    def _get_ipv4_prefix(self, match):
+
+        tuples = []
+        for t in match.split('.'):
+            if t:
+                tuples.insert(0, t)
+        if self.verbose > 2:
+            LOG.debug("Got IPv4 tuples: {}".format(pp(tuples)))
+        return '.'.join(tuples)
+
+    # -------------------------------------------------------------------------
+    def _get_ipv6_prefix(self, match):
+
+        tuples = []
+        for t in match.split('.'):
+            if t:
+                tuples.insert(0, t)
+
+        tokens = []
+        while len(tuples):
+            token = ''.join(tuples[0:4]).ljust(4, '0')
+            if token.startswith('000'):
+                token = token[3:]
+            elif token.startswith('00'):
+                token = token[2:]
+            elif token.startswith('0'):
+                token = token[1:]
+            tokens.append(token)
+            del tuples[0:4]
+
+        if self.verbose > 2:
+            LOG.debug("Got IPv6 tokens: {}".format(pp(tokens)))
+
+        return ':'.join(tokens)
+
+    # -------------------------------------------------------------------------
+    def compare_files(self):
+
+        LOG.info("Comparing generated files with existing ones.")
+
+        if not self.files_equal_content(self.temp_zones_cfg_file, self.named_zones_cfg_file):
+            self.reload_necessary = True
+            self.files2replace[self.named_zones_cfg_file] = self.temp_zones_cfg_file
+
+        if self.verbose > 1:
+            LOG.debug("Files to replace:\n{}".format(pp(self.files2replace)))
+
+    # -------------------------------------------------------------------------
+    def files_equal_content(self, file_src, file_tgt):
+
+        LOG.debug("Comparing {!r} with {!r} ...".format(file_src, file_tgt))
+
+        if not file_src:
+            raise PpDeployZonesError("Source file not defined.")
+        if not file_tgt:
+            raise PpDeployZonesError("Target file not defined.")
+
+        if not os.path.exists(file_src):
+            raise PpDeployZonesError("Source file {!r} does not exists.".format(file_src))
+        if not os.path.isfile(file_src):
+            raise PpDeployZonesError("Source file {!r} is not a regular file.".format(file_src))
+
+        if not os.path.exists(file_tgt):
+            LOG.debug("Target file {!r} does not exists.".format(file_tgt))
+            return False
+        if not os.path.isfile(file_tgt):
+            raise PpDeployZonesError("Target file {!r} is not a regular file.".format(file_tgt))
+
+        content_src = ''
+        if self.verbose > 2:
+            LOG.debug("Reading {!r} ...".format(file_src))
+        with open(file_src, 'r', **self.open_args) as fh:
+            content_src = fh.read()
+        lines_str_src = self.re_block_comment.sub('', content_src)
+        lines_str_src = self.re_line_comment.sub('', lines_str_src)
+        lines_src = []
+        for line in lines_str_src.splitlines():
+            line = line.strip()
+            if line:
+                lines_src.append(line)
+        if self.verbose > 3:
+            LOG.debug("Cleaned version of {!r}:\n{}".format(
+                file_src, '\n'.join(lines_src)))
+
+        content_tgt = ''
+        if self.verbose > 2:
+            LOG.debug("Reading {!r} ...".format(file_tgt))
+        with open(file_tgt, 'r', **self.open_args) as fh:
+            content_tgt = fh.read()
+        lines_str_tgt = self.re_block_comment.sub('', content_tgt)
+        lines_str_tgt = self.re_line_comment.sub('', lines_str_tgt)
+        lines_tgt = []
+        for line in lines_str_tgt.splitlines():
+            line = line.strip()
+            if line:
+                lines_tgt.append(line)
+        if self.verbose > 3:
+            LOG.debug("Cleaned version of {!r}:\n{}".format(
+                file_tgt, '\n'.join(lines_tgt)))
+
+        if len(lines_src) != len(lines_tgt):
+            LOG.debug((
+                "Source file {!r} has different number essential lines ({}) than "
+                "the target file {!r} ({} lines).").format(
+                file_src, len(lines_src), file_tgt, len(lines_tgt)))
+            return False
+
+        i = 0
+        while i < len(lines_src):
+            if lines_src[i] != lines_tgt[i]:
+                LOG.debug((
+                    "Source file {!r} has a different content than "
+                    "the target file {!r}.").format(file_src, lines_tgt))
+                return False
+            i += 1
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def replace_configfiles(self):
+
+        if not self.files2replace:
+            LOG.debug("No replacement of any config files necessary.")
+            return
+
+        LOG.debug("Start replacing of config files ...")
+
+        for tgt_file in self.files2replace.keys():
+
+            backup_file = tgt_file + self.backup_suffix
+
+            if os.path.exists(tgt_file):
+                self.moved_files[tgt_file] = backup_file
+                LOG.info("Copying {!r} => {!r} ...".format(tgt_file, backup_file))
+                if not self.simulate:
+                    shutil.copy2(tgt_file, backup_file)
+
+        if self.verbose > 1:
+            LOG.debug("All backuped config files:\n{}".format(pp(self.moved_files)))
+
+        for tgt_file in self.files2replace.keys():
+            src_file = self.files2replace[tgt_file]
+            LOG.info("Copying {!r} => {!r} ...".format(src_file, tgt_file))
+            if not self.simulate:
+                shutil.copy2(src_file, tgt_file)
+
+    # -------------------------------------------------------------------------
+    def restore_configfiles(self):
+
+        LOG.error("Restoring of original config files because of an exception.")
+
+        for tgt_file in self.moved_files.keys():
+            backup_file = self.moved_files[tgt_file]
+            LOG.info("Moving {!r} => {!r} ...".format(backup_file, tgt_file))
+            if not self.simulate:
+                if os.path.exists(backup_file):
+                    os.rename(backup_file, tgt_file)
+                else:
+                    LOG.error("Could not find backup file {!r}.".format(backup_file))
+
+    # -------------------------------------------------------------------------
+    def check_namedconf(self):
+
+        LOG.info("Checking syntax correctness of named.conf ...")
+        cmd = shlex.split(self.cmd_checkconf)
+        if 'named-checkconf' in self.cmd_checkconf and self.verbose > 2:
+            cmd.append('-p')
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=10)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.warn("Output on STDOUT: {}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.warn("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def apply_config(self):
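+        """Reload or restart named, if one of them was flagged as necessary
+        and the daemon is currently running."""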
+
+        if not self.reload_necessary and not self.restart_necessary:
+            LOG.info("Reload or restart of named is not necessary.")
+            return
+
+        running = self.named_running()
+        if not running:
+            LOG.warn("Named is not running, please start it manually.")
+            return
+
+        if self.restart_necessary:
+            self.restart_named()
+        else:
+            self.reload_named()
+
+    # -------------------------------------------------------------------------
+    def named_running(self):
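+        """Return True, if the configured status command reports a running named daemon."""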
+
+        LOG.debug("Checking, whether named is running ...")
+
+        cmd = shlex.split(self.cmd_status)
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=10)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.warn("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def start_named(self):
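+        """Start the named daemon with the configured start command
+        (not executed in simulation mode)."""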
+
+        LOG.info("Starting named ...")
+
+        cmd = shlex.split(self.cmd_start)
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def restart_named(self):
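+        """Restart the named daemon with the configured restart command
+        (not executed in simulation mode)."""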
+
+        LOG.info("Restarting named ...")
+
+        cmd = shlex.split(self.cmd_restart)
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def reload_named(self):
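+        """Reload the named daemon with the configured reload command
+        (not executed in simulation mode)."""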
+
+        LOG.info("Reloading named ...")
+
+        cmd = shlex.split(self.cmd_reload)
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        LOG.debug("Executing: {}".format(cmd_str))
+
+        if self.simulate:
+            return
+
+        std_out = None
+        std_err = None
+        ret_val = None
+
+        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
+            try:
+                std_out, std_err = proc.communicate(timeout=30)
+            except TimeoutExpired:
+                proc.kill()
+                std_out, std_err = proc.communicate()
+            ret_val = proc.wait()
+
+        LOG.debug("Return value: {!r}".format(ret_val))
+        if std_out and std_out.strip():
+            s = to_str(std_out.strip())
+            LOG.debug("Output on STDOUT:\n{}".format(s))
+        if std_err and std_err.strip():
+            s = to_str(std_err.strip())
+            LOG.error("Output on STDERR: {}".format(s))
+
+        if ret_val:
+            return False
+
+        return True
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/differ.py b/lib/pp_lib/differ.py
new file mode 100644 (file)
index 0000000..24b7d1d
--- /dev/null
@@ -0,0 +1,367 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@summary: The module for the ConfigDiffer and ConfigFileDiffer classes.
+"""
+
+# Standard modules
+import os
+import logging
+import re
+from datetime import datetime, timezone
+import difflib
+import pprint
+import copy
+
+from difflib import Differ, IS_LINE_JUNK, IS_CHARACTER_JUNK
+# from difflib import SequenceMatcher
+
+# Third party modules
+import six
+
+# Own modules
+
+__version__ = '0.2.4'
+LOG = logging.getLogger(__name__)
+
+DEFAULT_COMMENT_CHAR = '#'
+
+
+# =============================================================================
+def pp(value, indent=4, width=99, depth=None):
+
+    pretty_printer = pprint.PrettyPrinter(
+        indent=indent, width=width, depth=depth)
+    return pretty_printer.pformat(value)
+
+
+# =============================================================================
+class ConfigDiffer(Differ):
+    """
+    A class for comparing the contents of two configuration files
+    without consideration of comments and whitespaces.
+    """
+
+    pat_linejunk = r'^\s*(?:\#.*)?$'
+    re_linejunk = re.compile(pat_linejunk)
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def is_line_junk(cls, line):
+        return cls.re_linejunk.search(line) is not None
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, comment_chars=None, ignore_empty=False,
+            ignore_whitespace=False, ignore_comment=False, case_insensitive=False):
+
+        self.comment_chars = []
+        self.re_comment_list = []
+        self.re_token_list = []
+        self.ignore_empty = ignore_empty
+        self.ignore_whitespace = ignore_whitespace
+        self.ignore_comment = ignore_comment
+        self.case_insensitive = case_insensitive
+
+        if self.ignore_comment:
+            self.ignore_empty = True
+
+        if comment_chars:
+            if isinstance(comment_chars, (list, set, tuple)):
+                for char in comment_chars:
+                    if not char:
+                        continue
+                    self.comment_chars.append(str(char))
+            else:
+                self.comment_chars.append(str(comment_chars))
+        elif comment_chars is None:
+            self.comment_chars.append(DEFAULT_COMMENT_CHAR)
+
+        super(ConfigDiffer, self).__init__(
+            linejunk=IS_LINE_JUNK, charjunk=IS_CHARACTER_JUNK)
+
+        re_flags = re.MULTILINE
+        if six.PY3:
+            re_flags = re.MULTILINE | re.UNICODE
+
+        # a single quoted token
+        pat = r"^(\s*'(?:\\(?!')|\\'|(?:(?<!\\)[^']))*)(?#single-q-token)"
+        self.re_token_list.append(re.compile(pat, re_flags))
+
+        # a double quoted token
+        pat = r'^(\s*"(?:\\(?!")|\\"|(?:(?<!\\)[^"]))*")(?#double-q-token)'
+        self.re_token_list.append(re.compile(pat, re_flags))
+
+        # a token without quotings
+        pat = r'^(\s*(?:[^\s"' + r"'" + r']+|\\["' + r"'" + r'])+)(?#token-wo-quote)'
+        self.re_token_list.append(re.compile(pat, re_flags))
+
+        self.re_whitespace = re.compile(r'\s+', re_flags)
+
+        self.re_empty = re.compile(r'^(\s*)$')
+
+        if self.comment_chars:
+            i = 0
+            for char in self.comment_chars:
+
+                pat = r'^\s*' + re.escape(char) + r'.*$(?#sole-comment)'
+                self.re_comment_list.append(re.compile(pat, re_flags))
+
+                pat = (
+                    r'^(\s*"(?:[^"](?!' + re.escape(char) + r'))*)\s*' +
+                    re.escape(char) + r'.*$(?#comment-{}-wo-dq)'.format(i))
+                self.re_token_list.append(re.compile(pat, re_flags))
+
+                pat = (
+                    r"^(\s*'(?:[^'](?!" + re.escape(char) + r'))*)\s*' +
+                    re.escape(char) + r'.*$(?#comment-{}-wo-sq)'.format(i))
+                self.re_token_list.append(re.compile(pat, re_flags))
+
+                i += 1
+
+    # -------------------------------------------------------------------------
+    def remove_comments(self, line):
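+        """Strip comments from the given line, leaving comment characters
+        inside quoted tokens untouched."""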
+
+        if not self.re_comment_list:
+            # LOG.debug('line false     %r', line)
+            return line
+
+        if self.re_empty.match(line):
+            # LOG.debug('line empty     %r', line)
+            return line
+
+        old_line = line
+        new_line = ''
+
+        while True:
+
+            # LOG.debug('loop:          old_line: %r, new_line: %r', old_line, new_line)
+
+            for regex in self.re_comment_list:
+                if regex.search(old_line):
+                    new_line += regex.sub('', old_line)
+                    # LOG.debug(
+                    #     'found comment: old_line: %r, new_line: %r, pattern: %r',
+                    #     old_line, new_line, regex.pattern)
+                    return new_line
+
+            token_found = False
+            for regex in self.re_token_list:
+                match = regex.search(old_line)
+                if match:
+                    new_line += match.group(1)
+                    old_line = regex.sub('', old_line)
+                    # LOG.debug(
+                    #     'found token:   old_line: %r, new_line: %r, pattern: %r',
+                    #     old_line, new_line, regex.pattern)
+                    token_found = True
+                    break
+
+            match = self.re_empty.search(old_line)
+            if match:
+                # LOG.debug('old_line empty %r', old_line)
+                new_line += match.group(1)
+                return new_line
+
+            if token_found:
+                continue
+
+            return new_line + old_line
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting function for translating object structure
+        into a string
+
+        @return: structure as string
+        @rtype:  str
+        """
+
+        return pp(self.__dict__)
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("comment_chars=%r" % (self.comment_chars))
+        fields.append("ignore_empty=%r" % (self.ignore_empty))
+        fields.append("ignore_whitespace=%r" % (self.ignore_whitespace))
+        fields.append("ignore_comment=%r" % (self.ignore_comment))
+        fields.append("case_insensitive=%r" % (self.case_insensitive))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    def _mangle_lines(self, lines):
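+        """Normalize the given lines according to the flags ignore_empty,
+        ignore_whitespace, ignore_comment and case_insensitive."""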
+
+        if isinstance(lines, (list, set, tuple)):
+            line_list = copy.copy(lines)
+        else:
+            line_list = [str(lines)]
+
+        if (not self.ignore_empty and not self.ignore_whitespace and
+                not self.ignore_comment and not self.case_insensitive):
+            return line_list
+
+        result_list = []
+        for item in line_list:
+            if self.ignore_empty and self.re_empty.search(item):
+                continue
+            item_cp = str(item)
+            if self.ignore_whitespace:
+                item_cp = self.re_whitespace.sub(' ', item_cp)
+            if self.ignore_comment:
+                item_cp = self.remove_comments(item_cp)
+            if self.case_insensitive:
+                item_cp = item_cp.lower()
+            result_list.append(item_cp)
+
+        return result_list
+
+    # -------------------------------------------------------------------------
+    def compare(self, a, b):
+
+        list_a = self._mangle_lines(a)
+        list_b = self._mangle_lines(b)
+
+        return super(ConfigDiffer, self).compare(list_a, list_b)
+
+    # -------------------------------------------------------------------------
+    def unified_diff(self, a, b, n=3, lineterm='\n'):
+
+        list_a = self._mangle_lines(a)
+        list_b = self._mangle_lines(b)
+
+        return difflib.unified_diff(list_a, list_b, n=n, lineterm=lineterm)
+
+    # -------------------------------------------------------------------------
+    def is_equal(self, a, b):
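+        """Return True, if the mangled contents of a and b do not differ."""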
+
+        equal = True
+        for line in self.compare(a, b):
+            if not line.startswith(' '):
+                LOG.debug("Difference line: {}".format(line))
+                equal = False
+
+        return equal
+
+
+# =============================================================================
+class ConfigFileDiffer(ConfigDiffer):
+    """
+    A class for comparing the contents of two configuration files
+    without consideration of comments and whitespaces.
+    """
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def file_mtime(cls, path):
+
+        mtime = 0
+        if os.path.exists(path):
+            mtime = os.stat(path).st_mtime
+        t = datetime.fromtimestamp(mtime, timezone.utc)
+        return t.astimezone().isoformat(" ")
+
+    # -------------------------------------------------------------------------
+    def __init__(self):
+
+        super(ConfigFileDiffer, self).__init__()
+
+    # -------------------------------------------------------------------------
+    def compare(self, from_file, to_file):
+
+        from_content = []
+        to_content = []
+
+        open_args = {}
+        if six.PY3:
+            open_args = {
+                'encoding': 'utf-8',
+                'errors': 'surrogateescape',
+            }
+
+        if from_file:
+            if os.path.isfile(from_file):
+                LOG.debug("Reading {!r} ...".format(from_file))
+                with open(from_file, 'r', **open_args) as fh:
+                    from_content = fh.readlines()
+
+        if to_file:
+            if os.path.isfile(to_file):
+                LOG.debug("Reading {!r} ...".format(to_file))
+                with open(to_file, 'r', **open_args) as fh:
+                    to_content = fh.readlines()
+
+        return super(ConfigFileDiffer, self).compare(from_content, to_content)
+
+    # -------------------------------------------------------------------------
+    def is_equal(self, from_file, to_file):
+
+        equal = True
+        for line in self.compare(from_file, to_file):
+            if line.startswith('+') or line.startswith('-'):
+                subline = line[1:].rstrip()
+                if self.is_line_junk(subline):
+                    LOG.debug("Line {!r} is junk.".format(subline))
+                else:
+                    LOG.debug(line.rstrip())
+                    equal = False
+
+        return equal
+
+    # -------------------------------------------------------------------------
+    def unified_diff(self, from_file, to_file, n=3, lineterm='\n'):
+
+        from_content = []
+        to_content = []
+        null_time = datetime.fromtimestamp(0, timezone.utc).astimezone().isoformat(" ")
+        from_mtime = null_time
+        to_mtime = null_time
+
+        open_args = {}
+        if six.PY3:
+            open_args = {
+                'encoding': 'utf-8',
+                'errors': 'surrogateescape',
+            }
+
+        if from_file:
+            if os.path.isfile(from_file):
+                from_mtime = self.file_mtime(from_file)
+                with open(from_file, 'r', **open_args) as fh:
+                    from_content = fh.readlines()
+        else:
+            from_file = '<None>'
+
+        if to_file:
+            if os.path.isfile(to_file):
+                to_mtime = self.file_mtime(to_file)
+                with open(to_file, 'r', **open_args) as fh:
+                    to_content = fh.readlines()
+        else:
+            to_file = '<None>'
+
+        return difflib.unified_diff(
+            from_content, to_content,
+            fromfile=from_file, tofile=to_file,
+            fromfiledate=from_mtime, tofiledate=to_mtime,
+            n=n, lineterm=lineterm)
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/dnsui_users.py b/lib/pp_lib/dnsui_users.py
new file mode 100644 (file)
index 0000000..2f72638
--- /dev/null
@@ -0,0 +1,650 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the check-dnsui-users application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import textwrap
+import socket
+import re
+import traceback
+
+# Third party modules
+# from ldap3 import ObjectDef, AttrDef, Reader, Writer
+from ldap3 import ObjectDef
+import psycopg2
+
+# Own modules
+from .common import pp
+
+from .ldap_app import PpLdapAppError, PpLdapApplication
+
+__version__ = '0.4.5'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class DnsuiUsersError(PpLdapAppError):
+    pass
+
+# =============================================================================
+class DnsuiUsersApp(PpLdapApplication):
+    """Class for the 'check-dnsui-users' application to ensure:
+        * all users in DNSUI DB, which are not existing in LDAP, are disabled
+        * all users in LDAP, which are members of group 'Administratoren Pixelpark Berlin',
+          are existing and have administrator access.
+    """
+
+    default_admin_group = "cn=Administratoren Pixelpark Berlin"
+
+    # DB data
+    default_db_host = 'master.pp-dns.com'
+    default_db_port = 5432
+    default_db_db = 'dnsui'
+    default_db_user = 'pdnsadm'
+
+    re_ldap_node = re.compile(r'^\s*[a-z]+[a-z0-9]*\s*=\s*\S+', re.IGNORECASE)
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.admin_users = []
+        self.admin_user_dns = []
+        self.admin_group = self.default_admin_group
+
+        self.db_user_index = {}
+        self.ldap_user_index = {}
+        self.users_to_add = []
+        self.users_to_update = []
+        self.db_users_deactivate = []
+
+        self.db_host = self.default_db_host
+        self.db_port = self.default_db_port
+        self.db_db = self.default_db_db
+        self.db_user = self.default_db_user
+        self.db_pass = None
+
+        self.db_users = []
+
+        self.db_connection = None
+
+        self._show_simulate_opt = True
+
+        description = textwrap.dedent('''\
+            Checks that all users in the DNSUI database still exist in LDAP and
+            deactivates them otherwise. Checks that all members of the defined admin
+            group in LDAP exist in the database and are enabled administrators.
+            ''').strip()
+
+        super(DnsuiUsersApp, self).__init__(
+            appname=appname, version=version, description=description)
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+        """
+        Execute some actions after reading the configuration.
+
+        This method should be explicitly called by all perform_config()
+        methods in descendant classes.
+        """
+
+        super(DnsuiUsersApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 2:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+            section = self.cfg[section_name]
+
+            if section_name.lower() == 'ldap':
+                self.do_admin_group_config(section_name, section)
+
+            if section_name.lower() in ('db', 'database'):
+                self.do_db_cfg(section_name, section)
+
+    # -------------------------------------------------------------------------
+    def do_admin_group_config(self, section_name, section):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'admin_group' not in section:
+            return
+
+        admin_group = str(section['admin_group']).strip()
+        if not admin_group:
+            msg = "Empty value {v!r} for admin group in {s}/admin_group given.".format(
+                s=section_name, v=section['admin_group'])
+            raise DnsuiUsersError(msg)
+
+        if not self.re_ldap_node.match(admin_group):
+            msg = "Invalid value {v!r} for admin group in {s}/admin_group given.".format(
+                s=section_name, v=section['admin_group'])
+            raise DnsuiUsersError(msg)
+
+        self.admin_group = admin_group
+
+    # -------------------------------------------------------------------------
+    def do_db_cfg(self, section_name, section):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'host' in section:
+            host = section['host'].lower().strip()
+            if not host:
+                LOG.error('Invalid hostname {!r} found in configuration section {!r}.'.format(
+                    section['host'], section_name))
+            else:
+                try:
+                    _ = socket.getaddrinfo(host, 5432, proto=socket.IPPROTO_TCP)            # noqa
+                except socket.gaierror as e:
+                    msg = 'Invalid hostname {!r} in configuration section {!r}: {}'.format(
+                        section['host'], section_name, e)
+                    LOG.error(msg)
+                    self.config_has_errors = True
+                else:
+                    self.db_host = host
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+                if port <= 0:
+                    raise ValueError("port number may not be negative.")
+                elif port >= (2 ** 16):
+                    raise ValueError("port number must be less than {}".format((2 ** 16)))
+            except (ValueError, TypeError) as e:
+                msg = 'Invalid port number {!r} in configuration section {!r}: {}'.format(
+                    section['port'], section_name, e)
+                LOG.error(msg)
+                self.config_has_errors = True
+            else:
+                self.db_port = port
+
+        if 'db' in section:
+            db = section['db'].lower().strip()
+            if not db:
+                LOG.error('Invalid database name {!r} found in configuration section {!r}.'.format(
+                    section['db'], section_name))
+            else:
+                self.db_db = db
+
+        if 'user' in section:
+            user = section['user'].lower().strip()
+            if not user:
+                LOG.error('Invalid database user {!r} found in configuration section {!r}.'.format(
+                    section['user'], section_name))
+                self.config_has_errors = True
+            else:
+                self.db_user = user
+
+        if 'password' in section:
+            self.db_pass = section['password']
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+
+        super(DnsuiUsersApp, self).pre_run()
+        self.connect_db()
+
+    # -------------------------------------------------------------------------
+    def connect_db(self):
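+        """Connect to the PostgreSQL database and log the server version;
+        the application exits, if the connection fails."""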
+
+        result = None
+
+        LOG.debug("Connecting to PostgreSQL database on {}@{}:{}/{} ...".format(
+            self.db_user, self.db_host, self.db_port, self.db_db))
+        try:
+            self.db_connection = psycopg2.connect(
+                host=self.db_host,
+                port=self.db_port,
+                dbname=self.db_db,
+                user=self.db_user,
+                password=self.db_pass,
+            )
+
+            sql = 'SHOW server_version'
+            if self.verbose > 1:
+                LOG.debug("SQL: {}".format(sql))
+            with self.db_connection.cursor() as cursor:
+                cursor.execute(sql)
+                result = cursor.fetchone()
+            if self.verbose > 2:
+                LOG.debug("Got version info:\n{}".format(pp(result)))
+            LOG.info("Database is PostgreSQL version {!r}.".format(result[0]))
+
+        except psycopg2.OperationalError as e:
+            LOG.error("Could not connect to database ({}): {}".format(
+                e.__class__.__name__, e))
+            self.exit(7)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        try:
+
+            LOG.info("Starting user checks ...")
+
+            self.get_admin_user_dns()
+            self.get_admin_users()
+            self.get_db_users()
+
+            self.check_current_admin_users()
+            self.check_current_db_users()
+
+            self.insert_db_users()
+            self.change_db_users()
+            self.deactivate_db_users()
+
+        finally:
+            self._close_db()
+
+    # -------------------------------------------------------------------------
+    def get_admin_user_dns(self):
+
+        LOG.info("Getting list of admin users.")
+
+        self.admin_user_dns = []
+
+        query_filter = (
+            '(&(|(objectclass=groupOfUniqueNames)(objectclass=groupOfURLs))({}))').format(
+                self.admin_group)
+        LOG.debug("Query filter: {!r}".format(query_filter))
+
+        group = ObjectDef(['objectclass', 'groupOfURLs'])
+        group += ['cn', 'memberURL', 'uniqueMember']
+
+        group_entries = self.ldap_search_subtree(group, query_filter)
+        if self.verbose > 1:
+            LOG.debug("Found {} LDAP entries.".format(len(group_entries)))
+
+        if not group_entries:
+            LOG.warn("Did not found any admin groups.")
+            return
+
+        for entry in group_entries:
+            member_urls = []
+            member_dns = []
+            for url in entry['memberURL']:
+                member_urls.append(url)
+            for dn in entry['uniqueMember']:
+                member_dns.append(dn)
+
+            if self.verbose > 2:
+                LOG.debug("Found memberURL: {}".format(pp(member_urls)))
+                LOG.debug("Found unique members:\n{}".format(pp(member_dns)))
+
+            for dn in member_dns:
+                if 'servicedesk' in dn:
+                    continue
+                if dn not in self.admin_user_dns:
+                    self.admin_user_dns.append(dn)
+
+        self.admin_user_dns.sort()
+        LOG.debug("Found admin user dn's:\n{}".format(pp(self.admin_user_dns)))
+
+    # -------------------------------------------------------------------------
+    def get_admin_users(self):
+
+        if not self.admin_user_dns:
+            LOG.warn("Did not found any admin users.")
+            return
+
+        LOG.info("Getting data of admin users from LDAP.")
+
+        person = ObjectDef(['posixAccount', 'shadowAccount'])
+        person += ["uid", "givenName", "sn", "mail"]
+
+        index = 0
+        for dn in self.admin_user_dns:
+
+            if self.verbose > 1:
+                LOG.debug("Searching for admin user {!r}.".format(dn))
+            entries = self.ldap_search_object(person, dn)
+            if self.verbose >= 2:
+                LOG.debug("Found {} LDAP entries.".format(len(entries)))
+            if not entries:
+                LOG.error("No LDAP entry found for DN {!r}.".format(dn))
+                continue
+
+            entry = entries[0]
+            sn = entry['sn'][0].strip()
+            fn = None
+            if entry['givenName'] and entry['givenName'][0]:
+                fn = entry['givenName'][0].strip()
+                if fn == '':
+                    fn = None
+            mail = None
+            if entry['mail'] and entry['mail'][0]:
+                mail = entry['mail'][0].strip()
+                if mail == '':
+                    mail = None
+            name = sn
+            if fn:
+                name = fn + ' ' + sn
+            uid = entry['uid'][0]
+            user = {
+                'dn': dn,
+                'uid': uid,
+                'givenName': fn,
+                'sn': sn,
+                'mail': mail,
+                'name': name
+            }
+            self.admin_users.append(user)
+            self.ldap_user_index[uid] = index
+            index += 1
+
+        LOG.debug("Found admin users:\n{}".format(pp(self.admin_users)))
+
+    # -------------------------------------------------------------------------
+    def get_db_users(self):
+
+        LOG.info("Get list of current users in DB.")
+
+        self.db_users = []
+
+        sql = textwrap.dedent('''\
+            SELECT id, uid, name, email, active, admin, developer
+              FROM public.user
+             WHERE auth_realm = 'LDAP'
+             ORDER BY uid
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("SQL:\n{}".format(sql))
+
+        with self.db_connection.cursor() as db_cursor:
+
+            db_cursor.execute(sql)
+            results = db_cursor.fetchall()
+
+            if self.verbose > 2:
+                LOG.debug("Got users:\n{}".format(pp(results)))
+
+            index = 0
+            for result in results:
+                uid = result[1]
+                user = {
+                    'id': result[0],
+                    'uid': uid,
+                    'name': result[2],
+                    'email': result[3],
+                    'active': result[4],
+                    'admin': result[5],
+                    'developer': result[6],
+                }
+                self.db_users.append(user)
+                self.db_user_index[uid] = index
+                index += 1
+
+        if self.verbose > 1:
+            LOG.debug("Found database users:\n{}".format(pp(self.db_users)))
+            LOG.debug("Uid index:\n{}".format(pp(self.db_user_index)))
+
+    # -------------------------------------------------------------------------
+    def check_current_admin_users(self):
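+        """Check for every admin user from LDAP whether it exists in the DB
+        and collect the necessary insert and update operations."""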
+
+        LOG.info("Checking admin users from LDAP for existence in DB.")
+
+        for ldap_user in self.admin_users:
+
+            uid = ldap_user['uid']
+            if uid in self.db_user_index:
+
+                db_user = self.db_users[self.db_user_index[uid]]
+                change_data = {}
+                if db_user['name'] != ldap_user['name']:
+                    change_data['name'] = ldap_user['name']
+                if db_user['email'] != ldap_user['mail']:
+                    change_data['email'] = ldap_user['mail']
+                if db_user['active'] != 1:
+                    change_data['active'] = 1
+                if db_user['admin'] != 1:
+                    change_data['admin'] = 1
+                if db_user['developer'] != 1:
+                    change_data['developer'] = 1
+                if change_data.keys():
+                    change_data['id'] = db_user['id']
+                    self.users_to_update.append(change_data)
+
+            else:
+
+                db_user = {
+                    'uid': uid,
+                    'name': ldap_user['name'],
+                    'email': ldap_user['mail'],
+                    'active': 1,
+                    'admin': 1,
+                    'developer': 1,
+                }
+                self.users_to_add.append(db_user)
+
+    # -------------------------------------------------------------------------
+    def check_current_db_users(self):
+
+        LOG.info("Checking current users in DB for existence in LDAP.")
+
+        person = ObjectDef(['posixAccount', 'shadowAccount'])
+        person += ["uid", "givenName", "sn", "mail"]
+
+        for db_user in self.db_users:
+
+            uid = db_user['uid']
+            db_id = db_user['id']
+            LOG.debug("Checking DB user {n!r} ({u}) ...".format(n=db_user['name'], u=uid))
+
+            if uid in self.ldap_user_index:
+                if self.verbose > 1:
+                    LOG.debug("DB user {!r} is an active administrator.".format(uid))
+                continue
+
+            query_filter = (
+                '(&(objectclass=posixAccount)(objectclass=shadowAccount)'
+                '(inetuserstatus=active)(objectclass=pppixelaccount)'
+                '(!(ou=*Extern))(uid={}))').format(uid)
+            if self.verbose > 1:
+                LOG.debug("Query filter: {!r}".format(query_filter))
+
+            entries = self.ldap_search_subtree(person, query_filter)
+            if self.verbose > 1:
+                LOG.debug("Found {} LDAP entries.".format(len(entries)))
+            if entries:
+
+                entry = entries[0]
+                change_data = {}
+
+                if db_user['active'] != 1:
+                    change_data['active'] = 1
+
+                if db_user['admin'] != 0:
+                    change_data['admin'] = 0
+
+                sn = entry['sn'][0].strip()
+                fn = None
+                if entry['givenName'] and entry['givenName'][0]:
+                    fn = entry['givenName'][0].strip()
+                    if fn == '':
+                        fn = None
+                mail = None
+                if entry['mail'] and entry['mail'][0]:
+                    mail = entry['mail'][0].strip()
+                    if mail == '':
+                        mail = None
+                name = sn
+                if fn:
+                    name = fn + ' ' + sn
+
+                if db_user['name'] != name:
+                    change_data['name'] = name
+                if db_user['email'] != mail:
+                    change_data['email'] = mail
+                if db_user['developer'] != 1:
+                    change_data['developer'] = 1
+
+                if change_data.keys():
+                    change_data['id'] = db_id
+                    self.users_to_update.append(change_data)
+                else:
+                    LOG.debug("Data uf user {n!r} ({u}) are still correct.".format(
+                        n=db_user['name'], u=uid))
+            else:
+                if db_user['active'] != 0:
+                    LOG.warn(
+                        "DB user {n!r} ({u}) does not exists anymore, will be dectivated.".format(
+                            n=db_user['name'], u=uid))
+                    self.db_users_deactivate.append(db_id)
+                else:
+                    LOG.debug("User {n!r} ({u}) is already dectivated.".format(
+                        n=db_user['name'], u=uid))
+
+    # -------------------------------------------------------------------------
+    def insert_db_users(self):
+
+        if not self.users_to_add:
+            LOG.info("No user data to add to database.")
+            return
+
+        LOG.info("Adding new users to database.")
+        if self.verbose > 1:
+            LOG.debug("User data to insert:\n{}".format(pp(self.users_to_add)))
+
+        sql = textwrap.dedent('''\
+            INSERT INTO public.user (uid, name, email, auth_realm, active, admin, developer)
+                VALUES (%(uid)s, %(name)s, %(email)s, 'LDAP', %(active)s, %(admin)s, %(developer)s)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Insert SQL:\n{}".format(sql))
+
+        with self.db_connection.cursor() as db_cursor:
+
+            for db_user in self.users_to_add:
+
+                LOG.warn("Adding user {n!r} ({u}) ...".format(n=db_user['name'], u=db_user['uid']))
+
+                if self.verbose > 1:
+                    show_sql = db_cursor.mogrify(sql, db_user)
+                    LOG.debug("Executing:\n{}".format(show_sql))
+                if not self.simulate:
+                    db_cursor.execute(sql, db_user)
+
+        LOG.debug("Commiting changes ...")
+        self.db_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def change_db_users(self):
+
+        if not self.users_to_update:
+            LOG.info("No user data to update.")
+            return
+
+        LOG.info("Updating user data in database.")
+        if self.verbose > 1:
+            LOG.debug("User data to update:\n{}".format(pp(self.users_to_update)))
+
+        with self.db_connection.cursor() as db_cursor:
+
+            for db_user in self.users_to_update:
+
+                # LOG.warn(
+                #     "Updating user {n!r} ({u}) ...".format(
+                #         n=db_user['name'], u=db_user['uid']))
+                msg = "Udating user db id {}:".format(db_user['id'])
+
+                sql = 'UPDATE public.user SET'
+                updates = []
+                msg_list = []
+                if 'name' in db_user:
+                    updates.append(' name = %(name)s')
+                    msg_list.append("name = {!r}".format(db_user['name']))
+                if 'email' in db_user:
+                    updates.append(' email = %(email)s')
+                    msg_list.append("email = {!r}".format(db_user['email']))
+                if 'active' in db_user:
+                    updates.append(' active = %(active)s')
+                    msg_list.append("active = {!r}".format(db_user['active']))
+                if 'admin' in db_user:
+                    updates.append(' admin = %(admin)s')
+                    msg_list.append("admin = {!r}".format(db_user['admin']))
+                if 'developer' in db_user:
+                    updates.append(' developer = %(developer)s')
+                    msg_list.append("developer = {!r}".format(db_user['developer']))
+                sql += ', '.join(updates)
+                sql += ' WHERE id = %(id)s'
+                msg += ' ' + ', '.join(msg_list)
+
+                LOG.warn(msg)
+
+                if self.verbose > 1:
+                    show_sql = db_cursor.mogrify(sql, db_user)
+                    LOG.debug("Executing:\n{}".format(show_sql))
+                if not self.simulate:
+                    db_cursor.execute(sql, db_user)
+
+        LOG.debug("Commiting changes ...")
+        self.db_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def deactivate_db_users(self):
+
+        if not self.db_users_deactivate:
+            LOG.info("No user data to deactivate.")
+            return
+
+        LOG.info("Deactivating users in database.")
+        if self.verbose > 1:
+            LOG.debug("User Ids to deactivate:\n{}".format(pp(self.db_users_deactivate)))
+
+        sql = "UPDATE public.user SET active = 0 WHERE id = %s"
+
+        with self.db_connection.cursor() as db_cursor:
+
+            for db_id in self.db_users_deactivate:
+                if self.verbose > 1:
+                    show_sql = db_cursor.mogrify(sql, (db_id, ))
+                    LOG.debug("Executing:\n{}".format(show_sql))
+                if not self.simulate:
+                    db_cursor.execute(sql, (db_id, ))
+
+        LOG.debug("Commiting changes ...")
+        self.db_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def _close_db(self):
+
+        if self.db_connection:
+            LOG.debug("Closing database connection.")
+            try:
+                self.db_connection.close()
+            except Exception as e:
+                LOG.error("Could not close database connection ({}): {}".format(
+                    e.__class__.__name__, e))
+                traceback.print_exc()
+            self.db_connection = None
+
+    # -------------------------------------------------------------------------
+    def post_run(self):
+
+        if self.verbose > 1:
+            LOG.info("executing post_run() ...")
+        self._close_db()
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/du.py b/lib/pp_lib/du.py
new file mode 100644 (file)
index 0000000..8f731b1
--- /dev/null
@@ -0,0 +1,315 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import locale
+import re
+
+# Third party modules
+import six
+
+# Own modules
+from .common import pp, to_str
+
+from .obj import PpBaseObjectError, PpBaseObject
+
+
+__version__ = '0.4.2'
+
+LOG = logging.getLogger(__name__)
+
+DU_UNITS = ['K', 'k', 'M', 'm', 'G', 'g', 'T', 't', 'H', 'h']
+DU_UNIT_EXP = {
+    'K': 0,
+    'M': 1,
+    'G': 2,
+    'T': 3,
+}
+
+
+# =============================================================================
+class DuError(PpBaseObjectError):
+    pass
+
+
+# =============================================================================
+class DuParseError(DuError):
+
+    # -------------------------------------------------------------------------
+    def __init__(self, line):
+        self.line = line
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+
+        msg = "Could not parse line from DU output: {!r}".format(self.line)
+        return msg
+
+
+# =============================================================================
+class DuListError(DuError):
+    pass
+
+
+# =============================================================================
+class DuEntry(PpBaseObject):
+    """
+    Class encapsulating one DU entry.
+    """
+
+    kilo = 1024
+    if six.PY2:
+        kilo = long(1024)                                   # noqa
+
+    human_limit = 1.5
+
+    factor = {}
+    hlimits = {}
+    for unit in DU_UNIT_EXP.keys():
+        exp = DU_UNIT_EXP[unit]
+        factor[unit] = kilo ** exp
+        hlimits[unit] = human_limit * float(factor[unit])
+
+    locale_conv = locale.localeconv()
+    dp = '.'
+    ts = ','
+    if 'decimal_point' in locale_conv and locale_conv['decimal_point'] != '.':
+        dp = locale_conv['decimal_point']
+    if 'thousands_sep' in locale_conv:
+        ts = locale_conv['thousands_sep']
+
+    parse_pattern = r'^\s*(\d+(?:' + re.escape(dp) + r'\d*)?)([KMGT])?\s+(\S+.*)'
+    parse_re = re.compile(parse_pattern, re.IGNORECASE)
+
+    # -------------------------------------------------------------------------
+    def __init__(
+            self, size_kb, path, appname=None, verbose=0, base_dir=None):
+
+        self._size_kb = None
+        self._path = None
+
+        super(DuEntry, self).__init__(
+            appname=appname, verbose=verbose, version=__version__,
+            base_dir=base_dir, initialized=False)
+
+        self.size_kb = size_kb
+        self.path = path
+
+        self.initialized = True
+
+    # -----------------------------------------------------------
+    @property
+    def size_kb(self):
+        """The size of the entry in KiBytes."""
+        if not hasattr(self, '_size_kb'):
+            if six.PY2:
+                return long(0)                              # noqa
+            return 0
+        return getattr(self, '_size_kb', 0)
+
+    @size_kb.setter
+    def size_kb(self, value):
+        if six.PY2:
+            v = long(value)                                 # noqa
+        else:
+            v = int(value)
+        if v >= 0:
+            self._size_kb = v
+        else:
+            LOG.warn("Wrong size for DU entry{!r}, must be >= 0".format(value))
+
+    # -----------------------------------------------------------
+    @property
+    def path(self):
+        """The path name of the DU entry."""
+        return self._path
+
+    @path.setter
+    def path(self, value):
+        if value is not None:
+            self._path = str(to_str(value))
+        else:
+            self._path = None
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("size_kb={!r}".format(self.size_kb))
+        fields.append("path={!r}".format(self.path))
+        fields.append("appname={!r}".format(self.appname))
+        fields.append("verbose={!r}".format(self.verbose))
+        fields.append("base_dir={!r}".format(self.base_dir))
+        fields.append("initialized={!r}".format(self.initialized))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(DuEntry, self).as_dict(short=short)
+        res['size_kb'] = self.size_kb
+        res['path'] = self.path
+        res['dp'] = self.dp
+        res['ts'] = self.ts
+        res['parse_pattern'] = self.parse_pattern
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        return self.to_str()
+
+    # -------------------------------------------------------------------------
+    def size_str(self, unit='K', precision=0):
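+        """Return the size formatted in the given unit ('K', 'M', 'G', 'T' or
+        human readable), localized with decimal point and thousands separator."""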
+
+        u = unit.upper()
+        unit_show = u
+        if u in self.factor:
+            size_float = float(self.size_kb) / float(self.factor[u])
+            if u == 'K':
+                unit_show = ''
+                precision = 0
+        else:
+            # Human readable
+            unit_show = 'K'
+            size_float = float(self.size_kb)
+            precision = 0
+            if self.verbose > 2:
+                LOG.debug("Checking size_float {s} for factors\n{f}".format(
+                    s=size_float, f=pp(self.hlimits)))
+
+            if size_float > self.hlimits['T']:
+                unit_show = 'T'
+                size_float = size_float / float(self.factor['T'])
+                precision = 1
+            elif size_float > self.hlimits['G']:
+                unit_show = 'G'
+                size_float = size_float / float(self.factor['G'])
+                precision = 1
+            elif size_float > self.hlimits['M']:
+                unit_show = 'M'
+                size_float = size_float / float(self.factor['M'])
+                precision = 1
+            if self.verbose > 2:
+                LOG.debug("Computed size_float: {s} {u}".format(
+                    s=size_float, u=unit_show))
+
+        if unit_show != '':
+            unit_show = ' ' + unit_show
+        template = "{{:,.{:d}f}}".format(precision) + unit_show
+        size_show = template.format(size_float)
+
+        # Localisation
+        if self.dp != '.':
+            size_show = size_show.replace('.', ';').replace(',', self.ts).replace(';', self.dp)
+
+        return size_show
+
+    # -------------------------------------------------------------------------
+    def to_str(self, unit='K', precision=0, size_width=None):
+
+        width = 16
+        unit = unit.upper()
+        if size_width is None:
+            if unit == 'K':
+                width = 16
+            elif unit == 'M':
+                width = 13
+                if precision:
+                    width += 1 + precision
+            elif unit == 'G':
+                width = 9
+                if precision:
+                    width += 1 + precision
+            elif unit == 'T':
+                width = 5
+                if precision:
+                    width += 1 + precision
+            else:
+                # Human
+                width = 9
+        else:
+            width = size_width
+
+        size_show = self.size_str(unit=unit, precision=precision)
+        line = "{0:>{w}s}      {1}".format(size_show, self.path, w=width)
+
+        return line
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def init_locales(cls):
+
+        cls.locale_conv = locale.localeconv()
+        cls.dp = '.'
+        if 'decimal_point' in cls.locale_conv and cls.locale_conv['decimal_point'] != '.':
+            cls.dp = cls.locale_conv['decimal_point']
+        if 'thousands_sep' in cls.locale_conv:
+            cls.ts = cls.locale_conv['thousands_sep']
+
+        cls.parse_pattern = r'^\s*(\d+(?:' + re.escape(cls.dp) + r'\d*)?)([KMGT])?\s+(\S+.*)'
+        cls.parse_re = re.compile(cls.parse_pattern, re.IGNORECASE)
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def from_line(cls, line, appname=None, verbose=0, base_dir=None):
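+        """Create a DuEntry from one output line of 'du';
+        raise DuParseError, if the line cannot be parsed."""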
+
+        match = cls.parse_re.match(line)
+        if not match:
+            raise DuParseError(line)
+
+        if verbose > 3:
+            LOG.debug("Got matching groups: {}.".format(match.groups()))
+
+        sz = match.group(1)
+        if cls.ts:
+            sz = sz.replace(cls.ts, '')
+        if cls.dp != '.':
+            sz = sz.replace(cls.dp, '.')
+        if verbose > 2:
+            LOG.debug("De-localized size: {!r}.".format(sz))
+        size = float(sz)
+        unit = match.group(2)
+        path = match.group(3)
+
+        if unit is not None:
+            unit = unit.upper()
+            if unit in cls.factor:
+                size *= cls.factor[unit]
+
+        entry = cls(
+            size_kb=size, path=path, appname=appname, verbose=verbose, base_dir=base_dir)
+
+        return entry
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/errors.py b/lib/pp_lib/errors.py
new file mode 100644 (file)
index 0000000..2a566e7
--- /dev/null
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@summary: module for some common used error classes
+"""
+
+# Standard modules
+import errno
+
+
+__version__ = '0.4.1'
+
+# =============================================================================
+class PpError(Exception):
+    """
+    Base error class for all other self defined exceptions.
+    """
+
+    pass
+
+
+# =============================================================================
+class PpAppError(PpError):
+
+    pass
+
+
+# =============================================================================
+class InvalidMailAddressError(PpError):
+    """Class for a exception in case of a malformed mail address."""
+
+    # -------------------------------------------------------------------------
+    def __init__(self, address, msg=None):
+
+        self.address = address
+        self.msg = msg
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+
+        msg = "Wrong mail address {a!r} ({c})".format(
+            a=self.address, c=self.address.__class__.__name__)
+        if self.msg:
+            msg += ': ' + self.msg
+        else:
+            msg += '.'
+        return msg
+
+
+# =============================================================================
+class FunctionNotImplementedError(PpError, NotImplementedError):
+    """
+    Error class for not implemented functions.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, function_name, class_name):
+        """
+        Constructor.
+
+        @param function_name: the name of the not implemented function
+        @type function_name: str
+        @param class_name: the name of the class of the function
+        @type class_name: str
+
+        """
+
+        self.function_name = function_name
+        if not function_name:
+            self.function_name = '__unknown_function__'
+
+        self.class_name = class_name
+        if not class_name:
+            self.class_name = '__unknown_class__'
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting into a string for error output.
+        """
+
+        msg = "Function {func}() has to be overridden in class {cls!r}."
+        return msg.format(func=self.function_name, cls=self.class_name)
+
+# =============================================================================
+class IoTimeoutError(PpError, IOError):
+    """
+    Special error class indicating a timeout error on a read/write operation
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, strerror, timeout, filename=None):
+        """
+        Constructor.
+
+        @param strerror: the error message about the operation
+        @type strerror: str
+        @param timeout: the timeout in seconds leading to the error
+        @type timeout: float
+        @param filename: the filename leading to the error
+        @type filename: str
+
+        """
+
+        t_o = None
+        try:
+            t_o = float(timeout)
+        except ValueError:
+            pass
+        self.timeout = t_o
+
+        if t_o is not None:
+            strerror += " (timeout after {:0.1f} secs)".format(t_o)
+
+        if filename is None:
+            super(IoTimeoutError, self).__init__(errno.ETIMEDOUT, strerror)
+        else:
+            super(IoTimeoutError, self).__init__(
+                errno.ETIMEDOUT, strerror, filename)
+
+# =============================================================================
+class ReadTimeoutError(IoTimeoutError):
+    """
+    Special error class indicating a timeout error on reading a file.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, timeout, filename):
+        """
+        Constructor.
+
+        @param timeout: the timeout in seconds leading to the error
+        @type timeout: float
+        @param filename: the filename leading to the error
+        @type filename: str
+
+        """
+
+        strerror = "Timeout error on reading"
+        super(ReadTimeoutError, self).__init__(strerror, timeout, filename)
+
+
+# =============================================================================
+class WriteTimeoutError(IoTimeoutError):
+    """
+    Special error class indicating a timeout error on writing into a file.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, timeout, filename):
+        """
+        Constructor.
+
+        @param timeout: the timeout in seconds leading to the error
+        @type timeout: float
+        @param filename: the filename leading to the error
+        @type filename: str
+
+        """
+
+        strerror = "Timeout error on writing"
+        super(WriteTimeoutError, self).__init__(strerror, timeout, filename)
+
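+# Illustrative sketch (assumed usage, not taken from this code base): a read
+# loop guarded by a timeout might report failure as
+#
+#     raise ReadTimeoutError(30.0, '/var/log/messages')
+#
+# which yields an IOError with errno.ETIMEDOUT and the message
+# "Timeout error on reading (timeout after 30.0 secs)".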
+# =============================================================================
+class CouldntOccupyLockfileError(PpError):
+    """
+    Special error class indicating that a lockfile couldn't be occupied
+    within a defined time.
+    """
+
+    # -----------------------------------------------------
+    def __init__(self, lockfile, duration, tries):
+        """
+        Constructor.
+
+        @param lockfile: the lockfile which couldn't be occupied.
+        @type lockfile: str
+        @param duration: the duration in seconds which has led to this situation
+        @type duration: float
+        @param tries: the number of tries creating the lockfile
+        @type tries: int
+
+        """
+
+        self.lockfile = str(lockfile)
+        self.duration = float(duration)
+        self.tries = int(tries)
+
+    # -----------------------------------------------------
+    def __str__(self):
+
+        return "Couldn't occupy lockfile {!r} in {:0.1f} seconds with {} tries.".format(
+            self.lockfile, self.duration, self.tries)
+
+
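+# Illustrative sketch (assumed usage, not taken from this code base): a
+# pidfile helper that gives up after several attempts could raise
+#
+#     raise CouldntOccupyLockfileError('/run/myapp.pid', 4.5, 3)
+#
+# which renders as: "Couldn't occupy lockfile '/run/myapp.pid' in 4.5 seconds
+# with 3 tries."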
+# =============================================================================
+
+if __name__ == "__main__":
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/format_du.py b/lib/pp_lib/format_du.py
new file mode 100644 (file)
index 0000000..596f400
--- /dev/null
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the 'format-du' application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import textwrap
+import sys
+import copy
+
+# Third party modules
+import six
+
+# Own modules
+from .common import pp
+
+from .app import PpApplication
+
+from .du import DuParseError, DuEntry
+from .du import DU_UNITS, DU_UNIT_EXP
+
+try:
+    from .local_version import __version__ as my_version
+except ImportError:
+    from .global_version import __version__ as my_version
+
+__version__ = '0.4.4'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class FormatDuApp(PpApplication):
+    """
+    Application class for the format-du command
+    """
+
+    units = copy.copy(DU_UNITS)
+    unit_exp = copy.copy(DU_UNIT_EXP)
+
+    # -------------------------------------------------------------------------
+    def __init__(
+            self, appname=None, verbose=0, version=my_version, *arg, **kwargs):
+
+        indent = ' ' * self.usage_term_len
+
+        usage = textwrap.dedent("""\
+        %(prog)s [--color [{{yes,no,auto}}]] [-v] [Format options] [FILE]
+
+        {i}%(prog)s --usage
+        {i}%(prog)s -h|--help
+        {i}%(prog)s -V|--version
+        """).strip().format(i=indent)
+
+        desc = """Formats the output of 'du -k' for various modifiers."""
+
+        self.precision = 0
+        self.unit = 'K'
+        self.factor = 1
+        self.total = False
+
+        super(FormatDuApp, self).__init__(
+            usage=usage,
+            description=desc,
+            appname=appname,
+            verbose=verbose,
+            version=version,
+            *arg, **kwargs
+        )
+
+        self.post_init()
+        DuEntry.init_locales()
+        if self.verbose > 2:
+            LOG.debug("Locale conversions:\n{}".format(pp(DuEntry.locale_conv)))
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+        """
+
+        super(FormatDuApp, self).init_arg_parser()
+
+        format_options = self.arg_parser.add_argument_group('Format options')
+
+        format_options.add_argument(
+            '-c', '--total',
+            action='store_true', dest='total',
+            help="Produces a grand total",
+        )
+
+        format_options.add_argument(
+            '-u', '--unit',
+            dest='unit', metavar='UNIT',
+            choices=self.units,
+            help=(
+                "Unit for displaying the results. Valid units are: 'K' (KiBytes, the default), "
+                "'M' (MiBytes), 'G' (GiBytes) and 'H' (human readable, the most appropriate unit "
+                "will be used. In case of 'K', no unit will be displayed.")
+        )
+
+        format_options.add_argument(
+            '-p', '--precision',
+            type=int, default=0, metavar='DIGITS',
+            help="Number of digits for displaying the result (default: %(default)r).",
+        )
+
+        self.arg_parser.add_argument(
+            'file',
+            metavar='FILE', type=str, nargs='?',
+            help=(
+                'A file with the output of "du -k". If not given or "-", then '
+                'the standard input will be read.'),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Public available method to execute some actions after parsing
+        the command line parameters.
+
+        Descendant classes may override this method.
+        """
+
+        if self.args.total:
+            self.total = True
+
+        if self.args.unit:
+            self.unit = self.args.unit.upper()
+            if self.unit in self.unit_exp:
+                exp = self.unit_exp[self.unit]
+                self.factor = 1024 ** exp
+
+        if self.args.precision is not None:
+            if self.args.precision < 0:
+                p = self.colored('{!r}'.format(self.args.precision), 'RED')
+                LOG.error("Invalid precision {}, it must not be less than zero.".format(p))
+                sys.stderr.write('\n')
+                self.arg_parser.print_help(sys.stderr)
+                self.exit(1)
+            self.precision = self.args.precision
+
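+    # Worked example (assuming DU_UNIT_EXP maps 'M' to 1 and 'G' to 2; the
+    # mapping lives in the 'du' module and is not shown in this diff): with
+    # '-u M' the factor becomes 1024 ** 1 = 1024, i.e. the number of KiB per
+    # MiB, and with '-u G' it becomes 1024 ** 2 = 1048576.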
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """The underlaying startpoint of the application."""
+
+        fh = None
+        opened = False
+        open_args = {}
+        if six.PY3:
+            open_args['encoding'] = 'utf-8'
+            open_args['errors'] = 'surrogateescape'
+        filename = None
+
+        try:
+            if self.args.file and self.args.file != '-':
+                fh = open(self.args.file, 'r', **open_args)
+                opened = True
+                filename = '{!r}'.format(self.args.file)
+            else:
+                fh = sys.stdin
+                filename = '<standard input>'
+
+            LOG.debug("Reading DU info from {}.".format(filename))
+            self.read_file(fh)
+
+        finally:
+            if opened:
+                fh.close()
+
+    # -------------------------------------------------------------------------
+    def read_file(self, fh):
+
+        line = None
+        eof = False
+        lnr = 0
+
+        total = 0
+        if six.PY2:
+            total = long(0)                                             # noqa
+
+        while not eof:
+            lnr += 1
+            line = fh.readline()
+            if not line:
+                eof = True
+                break
+            line = line.strip()
+            if not line:
+                continue
+            entry = self.eval_line(line, lnr)
+            if entry:
+                total += entry.size_kb
+
+        if self.total:
+            total_entry = DuEntry(
+                size_kb=total, path='total', appname=self.appname,
+                verbose=self.verbose, base_dir=self.base_dir)
+            if self.verbose > 1:
+                LOG.debug("Total entry:\n{}".format(pp(total_entry.as_dict())))
+            print(total_entry.to_str(unit=self.unit, precision=self.precision))
+
+        if self.verbose > 1:
+            LOG.debug("Finished reading.")
+
+    # -------------------------------------------------------------------------
+    def eval_line(self, line, lnr):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating line {!r} ...".format(line))
+
+        try:
+            entry = DuEntry.from_line(
+                line=line, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
+        except DuParseError as e:
+            LOG.error("Could not parse line {lnr}: {e}".format(lnr=lnr, e=e))
+            LOG.debug("Parsing pattern: {!r}".format(DuEntry.parse_pattern))
+            return None
+
+        if self.verbose > 1:
+            LOG.debug("Entry:\n{}".format(pp(entry.as_dict())))
+        print(entry.to_str(unit=self.unit, precision=self.precision))
+
+        return entry
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/global_version.py b/lib/pp_lib/global_version.py
new file mode 100644 (file)
index 0000000..33a817e
--- /dev/null
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
+@summary: Modules global version number
+"""
+
+__author__ = 'Frank Brehm <frank.brehm@pixelpark.com>'
+__contact__ = 'frank.brehm@pixelpark.com'
+__version__ = '0.7.0'
+__license__ = 'LGPL3+'
+
+# vim: fileencoding=utf-8 filetype=python ts=4
diff --git a/lib/pp_lib/homes_admin.py b/lib/pp_lib/homes_admin.py
new file mode 100644 (file)
index 0000000..af5f03d
--- /dev/null
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The base module for all maintenance scripts for the home directories
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import re
+import pwd
+import glob
+
+# Third party modules
+import six
+
+# Own modules
+from .common import pp
+
+from .cfg_app import PpCfgAppError, PpConfigApplication
+
+__version__ = '0.1.3'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpHomesAdminError(PpCfgAppError):
+    pass
+
+
+# =============================================================================
+class PpHomesAdminApp(PpConfigApplication):
+    """
+    Base class for applications maintaining the global Home directories.
+    """
+
+    # /mnt/nfs
+    default_chroot_homedir = os.sep + os.path.join('mnt', 'nfs')
+    # /home
+    default_home_root = os.sep + 'home'
+
+    # /etc/pixelpark/exclude_homes
+    default_exclude_file = os.sep + os.path.join('etc', 'pixelpark', 'exclude_homes')
+
+    comment_re = re.compile(r'\s*#.*')
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, description=None,
+            cfg_stems='homes-admin', version=__version__):
+
+        self.default_mail_recipients = [
+            'admin.berlin@pixelpark.com'
+        ]
+        self.default_mail_cc = []
+
+        self.chroot_homedir = self.default_chroot_homedir
+        self.home_root_abs = self.default_home_root
+        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
+
+        self.exclude_file = self.default_exclude_file
+
+        self.exclude_dirs = []
+        self.passwd_home_dirs = []
+        self.unnecessary_dirs = []
+
+        super(PpHomesAdminApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems=cfg_stems,
+        )
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+
+        homes_group = self.arg_parser.add_argument_group('Homes administration options.')
+
+        homes_group.add_argument(
+            '-R', '--chroot-dir',
+            metavar='DIR', dest='chroot_homedir',
+            help=(
+                "Directory, where the {h!r} share is mounted from the "
+                "NFS server. Maybe '/', default: {d!r}.").format(
+                    h=self.default_home_root, d=self.default_chroot_homedir)
+        )
+
+        homes_group.add_argument(
+            '-H', '--homes',
+            metavar='DIR', dest='home_root',
+            help=(
+                "The shared directory on the NFS server for all home directories. "
+                "Default: {!r}.").format(self.default_home_root)
+        )
+
+        homes_group.add_argument(
+            '-E', '--exclude-file',
+            metavar='FILE', dest='exclude_file',
+            help=(
+                "The file containing all directories underneath {h!r}, which are  "
+                "excluded from all operations. Default: {f!r}.").format(
+                    h=self.default_home_root, f=self.default_exclude_file)
+        )
+
+        super(PpHomesAdminApp, self).init_arg_parser()
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpHomesAdminApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 3:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            section = self.cfg[section_name]
+
+            if section_name.lower() not in (
+                    'test-home', 'test_home', 'testhome', 'homes', 'admin'):
+                continue
+
+            if self.verbose > 2:
+                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                    n=section_name, s=pp(section)))
+
+            if section_name.lower() == 'homes':
+
+                if 'chroot_homedir' in section:
+                    v = section['chroot_homedir']
+                    if not os.path.isabs(v):
+                        msg = (
+                            "The chrooted path of the home directories must be an "
+                            "absolute pathname (found [{s}]/chroot_homedir "
+                            "=> {v!r} in configuration.").format(s=section_name, v=v)
+                        raise PpHomesAdminError(msg)
+                    self.chroot_homedir = v
+
+                if 'home_root' in section:
+                    v = section['home_root']
+                    if not os.path.isabs(v):
+                        msg = (
+                            "The root path of the home directories must be an "
+                            "absolute pathname (found [{s}]/home_root "
+                            "=> {v!r} in configuration.").format(s=section_name, v=v)
+                        raise PpHomesAdminError(msg)
+                    self.home_root_abs = v
+
+            elif section_name.lower() == 'admin':
+
+                if 'exclude_file' in section:
+                    v = section['exclude_file']
+                    if not os.path.isabs(v):
+                        msg = (
+                            "The path of file of excluded directories must be an "
+                            "absolute pathname (found [{s}]/exclude_file "
+                            "=> {v!r} in configuration.").format(s=section_name, v=v)
+                        raise PpHomesAdminError(msg)
+                    self.exclude_file = v
+
+        self._perform_home_cmdline_opts()
+
+        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
+        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
+
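+    # Worked example with the defaults above: chroot_homedir = '/mnt/nfs' and
+    # home_root_abs = '/home' give home_root_rel = 'home' and
+    # home_root_real = '/mnt/nfs/home', which is the directory tree actually
+    # scanned by check_homes().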
+    # -------------------------------------------------------------------------
+    def _perform_home_cmdline_opts(self):
+
+        if hasattr(self.args, 'chroot_homedir') and self.args.chroot_homedir:
+            v = self.args.chroot_homedir
+            if not os.path.isabs(v):
+                msg = (
+                    "The chrooted path of the home directories must be an "
+                    "absolute pathname (got {!r} as command line parameter).").format(v)
+                raise PpHomesAdminError(msg)
+            self.chroot_homedir = v
+
+        if hasattr(self.args, 'home_root') and self.args.home_root:
+            v = self.args.home_root
+            if not os.path.isabs(v):
+                msg = (
+                    "The root path of the home directories must be an "
+                    "absolute pathname (got {!r} as command line parameter).").format(v)
+                raise PpHomesAdminError(msg)
+            self.home_root_abs = v
+
+        if hasattr(self.args, 'exclude_file') and self.args.exclude_file:
+            v = self.args.exclude_file
+            if not os.path.isabs(v):
+                msg = (
+                    "The path of file of excluded directories must be an "
+                    "absolute pathname (got {!r} as command line parameter).").format(v)
+                raise PpHomesAdminError(msg)
+            self.exclude_file = v
+
+    # -------------------------------------------------------------------------
+    def read_exclude_dirs(self):
+
+        LOG.info("Reading exclude file {!r} ...".format(self.exclude_file))
+        upper_dir = os.pardir + os.sep
+
+        if not os.path.exists(self.exclude_file):
+            msg = "Exclude file {!r} does not exists.".format(self.exclude_file)
+            LOG.error(msg)
+            return
+
+        if not os.path.isfile(self.exclude_file):
+            msg = "Exclude file {!r} is not a regular file.".format(self.exclude_file)
+            LOG.error(msg)
+            return
+
+        if not os.access(self.exclude_file, os.R_OK):
+            msg = "No read access to exclude file {!r}.".format(self.exclude_file)
+            LOG.error(msg)
+            return
+
+        open_args = {}
+        if six.PY3:
+            open_args['encoding'] = 'utf-8'
+            open_args['errors'] = 'surrogateescape'
+
+        with open(self.exclude_file, 'r', **open_args) as fh:
+            lnr = 0
+            for line in fh.readlines():
+                lnr += 1
+                line = line.strip()
+                if not line:
+                    continue
+                line = self.comment_re.sub('', line)
+                if not line:
+                    continue
+                if self.verbose > 3:
+                    LOG.debug("Evaluating line {l!r} (file {f!r}, line {lnr}).".format(
+                        l=line, f=self.exclude_file, lnr=lnr))
+                tokens = self.whitespace_re.split(line)
+                for token in tokens:
+                    if not os.path.isabs(token):
+                        LOG.warn((
+                            "Entry {e!r} in file {f!r}, line {l}, "
+                            "is not an absolute path.").format(
+                            e=token, f=self.exclude_file, l=lnr))
+                        continue
+                    home_relative = os.path.relpath(token, self.home_root_abs)
+                    if token == os.sep or home_relative.startswith(upper_dir):
+                        LOG.warn((
+                            "Entry {e!r} in file {f!r}, line {l}, "
+                            "is outside home root {h!r}.").format(
+                            e=token, f=self.exclude_file, l=lnr, h=self.home_root_abs))
+                        continue
+                    if token not in self.exclude_dirs:
+                        self.exclude_dirs.append(token)
+
+        self.exclude_dirs.sort(key=str.lower)
+
+        LOG.debug("Found {} directories to exclude.".format(len(self.exclude_dirs)))
+        if self.verbose > 2:
+            LOG.debug("Found directories to exclude:\n{}".format(pp(self.exclude_dirs)))
+
+    # -------------------------------------------------------------------------
+    def read_passwd_homes(self):
+
+        LOG.info("Reading all home directories from 'getent passwd' ...")
+
+        upper_dir = os.pardir + os.sep
+        entries = pwd.getpwall()
+
+        for entry in entries:
+            home = entry.pw_dir
+            if not home:
+                continue
+            home_relative = os.path.relpath(home, self.home_root_abs)
+            if home == os.sep or home_relative.startswith(upper_dir):
+                if self.verbose > 1:
+                    LOG.debug((
+                        "Home directory {d!r} of user {u!r} "
+                        "is outside home root {h!r}.").format(
+                        d=home, u=entry.pw_name, h=self.home_root_abs))
+                continue
+            if home not in self.passwd_home_dirs:
+                self.passwd_home_dirs.append(home)
+
+        self.passwd_home_dirs.sort(key=str.lower)
+
+        LOG.debug("Found {} home directories in passwd.".format(len(self.passwd_home_dirs)))
+        if self.verbose > 2:
+            LOG.debug("Home directories in passwd:\n{}".format(pp(self.passwd_home_dirs)))
+
+    # -------------------------------------------------------------------------
+    def check_homes(self):
+
+        LOG.info("Checking for unnecessary home directories ...")
+
+        glob_pattern = os.path.join(self.home_root_real, '*')
+        all_home_entries = glob.glob(glob_pattern)
+
+        for path in all_home_entries:
+            if not os.path.isdir(path):
+                continue
+            home_rel = os.sep + os.path.relpath(path, self.chroot_homedir)
+            if self.verbose > 2:
+                LOG.debug("Checking {p!r} ({h!r}) ...".format(
+                    p=path, h=home_rel))
+            if home_rel in self.passwd_home_dirs:
+                continue
+            if home_rel in self.exclude_dirs:
+                continue
+            LOG.debug("Marking {!r} as unnecessary.".format(home_rel))
+            self.unnecessary_dirs.append(home_rel)
+
+        self.unnecessary_dirs.sort(key=str.lower)
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/idna_xlate.py b/lib/pp_lib/idna_xlate.py
new file mode 100644 (file)
index 0000000..86f612c
--- /dev/null
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the 'idna-xlate' application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import textwrap
+import sys
+import copy
+
+# Third party modules
+import six
+
+# Own modules
+from .common import pp, to_str, to_bytes
+
+from .app import PpApplication
+
+try:
+    from .local_version import __version__ as my_version
+except ImportError:
+    from .global_version import __version__ as my_version
+
+__version__ = '0.2.1'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class IdnaXlateApp(PpApplication):
+    """
+    Application class for the idna-xlate command.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=my_version, *arg, **kwargs):
+
+        self.items = []
+
+        indent = ' ' * self.usage_term_len
+
+        usage = textwrap.dedent("""\
+            %(prog)s [--color [{{yes,no,auto}}]] [-v | -q] ITEM [ITEM ...]
+
+            {i}%(prog)s --usage
+            {i}%(prog)s -h|--help
+            {i}%(prog)s -V|--version
+            """).strip().format(i=indent)
+
+        desc = "Formats the given items into IDNA formatted strings (Punycode)."
+
+        super(IdnaXlateApp, self).__init__(
+            usage=usage,
+            description=desc,
+            verbose=verbose,
+            version=version,
+            *arg, **kwargs
+        )
+
+        self.post_init()
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+        """
+
+        super(IdnaXlateApp, self).init_arg_parser()
+
+        self.arg_parser.add_argument(
+            'items',
+            metavar='ITEM', type=str, nargs='+',
+            help=(
+                'The items to translate into IDNA encoded strings.'),
+        )
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """The underlaying startpoint of the application."""
+
+        if self.verbose:
+            print("Items to translate:\n")
+
+        for item in self.args.items:
+
+            if item == 'xn--':
+                print(" * {}".format(self.colored(
+                    "Invalid item 'xn--'", ('BOLD', 'RED'))))
+                continue
+
+            item_idna = item
+            if 'xn--' in item:
+                item_idna = to_str(to_bytes(item).decode('idna'))
+            else:
+                item_idna = to_str(item.encode('idna'))
+
+            print(" * {i!r}: {p!r}".format(i=item, p=item_idna))
+
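+# Worked example (values as produced by Python's built-in 'idna' codec): an
+# item like 'müller.de' is printed as 'xn--mller-kva.de', while an item that
+# already contains 'xn--' is decoded back to its Unicode form instead.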
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/import_pdnsdata.py b/lib/pp_lib/import_pdnsdata.py
new file mode 100644 (file)
index 0000000..93caabe
--- /dev/null
@@ -0,0 +1,1141 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the 'import-pdnsdata' application
+          to import all data from the current PowerDNS database
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import re
+import textwrap
+import traceback
+import socket
+import datetime
+import time
+
+# Third party modules
+import psycopg2
+import pymysql
+
+# Own modules
+from .common import pp, to_str
+from .common import RE_DOT_AT_END
+
+from .cfg_app import PpCfgAppError, PpConfigApplication
+
+from .pdns_record import PdnsSoaData
+
+__version__ = '0.10.3'
+LOG = logging.getLogger(__name__)
+
+# =============================================================================
+class ImportPdnsdataError(PpCfgAppError):
+    pass
+
+# =============================================================================
+class ImportPdnsdataApp(PpConfigApplication):
+    """
+    Application class for the 'import-pdnsdata' application.
+    """
+
+    # Source DB data
+    default_src_db_host = 'mysql-pp06.pixelpark.com'
+    default_src_db_port = 3306
+    default_src_db_schema = 'pdns'
+    default_src_db_user = 'pdns'
+
+    # Target DB data
+    default_tgt_db_type = 'postgresql'
+    default_tgt_db_host = 'systemshare.pixelpark.com'
+    default_tgt_db_port_psql = 5432
+    default_tgt_db_port_mysql = 3306
+    default_tgt_db_schema = 'pdns'
+    default_tgt_db_user = 'pdns'
+
+    re_is_local_account = re.compile(r'(lo[ck]al|privat|intern)', re.IGNORECASE)
+
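+    # Example: account values such as 'local', 'lokal', 'privat-kunde' or
+    # 'intern' match this pattern and are therefore treated as already being
+    # marked local (see _import_domain below).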
+    sql_insert_domain = textwrap.dedent('''\
+        INSERT INTO domains (id, name, master, last_check, type, notified_serial, account)
+            VALUES (%(id)s, %(name)s, %(master)s, %(last_check)s,
+                %(type)s, %(notified_serial)s, %(account)s)
+        ''').strip()
+
+    sql_insert_dom_meta = textwrap.dedent('''\
+        INSERT INTO domainmetadata (domain_id, kind, content)
+            VALUES (%(domain_id)s, %(kind)s, %(content)s)
+        ''').strip()
+
+    sql_insert_record = textwrap.dedent('''\
+        INSERT INTO records (id, domain_id, name, type, content,
+                             ttl, prio, change_date, disabled,
+                             ordername, auth)
+             VALUES (%(id)s, %(domain_id)s, %(name)s, %(type)s, %(content)s,
+                     %(ttl)s, %(prio)s, %(change_date)s, %(disabled)s,
+                     %(ordername)s, %(auth)s)
+        ''').strip()
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        description = textwrap.dedent('''\
+        Importing the complete PowerDNS database from the old DB into the new one.
+        ''').strip()
+
+        self.default_mail_recipients = ['frank.brehm@pixelpark.com']
+
+        self.src_db_host = self.default_src_db_host
+        self.src_db_port = self.default_src_db_port
+        self.src_db_schema = self.default_src_db_schema
+        self.src_db_user = self.default_src_db_user
+        self.src_db_pass = None
+
+        self.tgt_db_type = self.default_tgt_db_type
+        self.tgt_db_host = self.default_tgt_db_host
+        self.tgt_db_port = self.default_tgt_db_port_psql
+        if self.tgt_db_type == 'mysql':
+            self.tgt_db_port = self.default_tgt_db_port_mysql
+        self.tgt_db_schema = self.default_tgt_db_schema
+        self.tgt_db_user = self.default_tgt_db_user
+        self.tgt_db_pass = None
+
+        self.src_connection = None
+        self.tgt_connection = None
+
+        self.domain_ids = {}
+
+        self._show_simulate_opt = True
+
+        super(ImportPdnsdataApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='import-pdnsdata'
+        )
+
+        self.nr = {
+            'cryptokeys': {
+                'has_domain': True,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+            'domainmetadata': {
+                'has_domain': True,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+            'domains': {
+                'has_domain': False,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+            'records': {
+                'has_domain': True,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+            'supermasters': {
+                'has_domain': False,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+            'tsigkeys': {
+                'has_domain': False,
+                'total': 0,
+                'valid': 0,
+                'invalid': 0,
+            },
+        }
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(ImportPdnsdataApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 2:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+            section = self.cfg[section_name]
+
+            if section_name.lower() in ('src-db', 'src_db', 'srcdb', 'source', 'src'):
+                self.do_src_db_cfg(section_name, section)
+
+            if section_name.lower() in ('tgt-db', 'tgt_db', 'tgtdb', 'target', 'tgt'):
+                self.do_tgt_db_cfg(section_name, section)
+
+    # -------------------------------------------------------------------------
+    def do_src_db_cfg(self, section_name, section):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'host' in section:
+            host = section['host'].lower().strip()
+            if not host:
+                LOG.error(
+                    'Invalid source hostname {!r} found in configuration section {!r}.'.format(
+                        section['host'], section_name))
+            else:
+                try:
+                    _ = socket.getaddrinfo(host, 3306, proto=socket.IPPROTO_TCP)            # noqa
+                except socket.gaierror as e:
+                    msg = 'Invalid source hostname {!r} in configuration section {!r}: {}'.format(
+                        section['host'], section_name, e)
+                    LOG.error(msg)
+                    self.config_has_errors = True
+                else:
+                    self.src_db_host = host
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+                if port <= 0:
+                    raise ValueError("port number may not be negative.")
+                elif port >= (2 ** 16):
+                    raise ValueError("port number must be less than {}".format((2 ** 16)))
+            except (ValueError, TypeError) as e:
+                msg = 'Invalid source port number {!r} in configuration section {!r}: {}'.format(
+                    section['port'], section_name, e)
+                LOG.error(msg)
+                self.config_has_errors = True
+            else:
+                self.src_db_port = port
+
+        if 'schema' in section:
+            schema = section['schema'].lower().strip()
+            if not schema:
+                LOG.error((
+                    'Invalid source database name {!r} '
+                    'found in configuration section {!r}.').format(
+                        section['schema'], section_name))
+            else:
+                self.src_db_schema = schema
+
+        if 'user' in section:
+            user = section['user'].lower().strip()
+            if not user:
+                LOG.error((
+                    'Invalid source database user {!r} '
+                    'found in configuration section {!r}.').format(
+                        section['user'], section_name))
+                self.config_has_errors = True
+            else:
+                self.src_db_user = user
+
+        if 'password' in section:
+            self.src_db_pass = section['password']
+
+    # -------------------------------------------------------------------------
+    def do_tgt_db_cfg(self, section_name, section):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'type' in section:
+            db_type = section['type'].lower().strip()
+            if db_type not in ('mysql', 'postgresql', 'postgres', 'psql'):
+                LOG.error('Invalid database type {!r} found in configuration section {!r}.'.format(
+                    section['type'], section_name))
+                self.config_has_errors = True
+            else:
+                if db_type == 'mysql':
+                    self.tgt_db_type = 'mysql'
+                    self.tgt_db_port = self.default_tgt_db_port_mysql
+                else:
+                    self.tgt_db_type = 'postgresql'
+                    self.tgt_db_port = self.default_tgt_db_port_psql
+
+        if 'host' in section:
+            host = section['host'].lower().strip()
+            if not host:
+                LOG.error(
+                    'Invalid target hostname {!r} found in configuration section {!r}.'.format(
+                        section['host'], section_name))
+                self.config_has_errors = True
+            else:
+                try:
+                    _ = socket.getaddrinfo(host, 3306, proto=socket.IPPROTO_TCP)            # noqa
+                except socket.gaierror as e:
+                    msg = 'Invalid target hostname {!r} in configuration section {!r}: {}'.format(
+                        section['host'], section_name, e)
+                    LOG.error(msg)
+                    self.config_has_errors = True
+                else:
+                    self.tgt_db_host = host
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+                if port <= 0:
+                    raise ValueError("port number may not be negative.")
+                elif port >= (2 ** 16):
+                    raise ValueError("port number must be less than {}".format((2 ** 16)))
+            except (ValueError, TypeError) as e:
+                msg = 'Invalid target port number {!r} in configuration section {!r}: {}'.format(
+                    section['port'], section_name, e)
+                LOG.error(msg)
+                self.config_has_errors = True
+            else:
+                self.tgt_db_port = port
+
+        if 'schema' in section:
+            schema = section['schema'].lower().strip()
+            if not schema:
+                LOG.error((
+                    'Invalid target database name {!r} '
+                    'found in configuration section {!r}.').format(
+                        section['schema'], section_name))
+            else:
+                self.tgt_db_schema = schema
+
+        if 'user' in section:
+            user = section['user'].lower().strip()
+            if not user:
+                LOG.error((
+                    'Invalid target database user {!r} '
+                    'found in configuration section {!r}.').format(
+                    section['user'], section_name))
+                self.config_has_errors = True
+            else:
+                self.tgt_db_user = user
+
+        if 'password' in section:
+            self.tgt_db_pass = section['password']
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+
+        self.connect_src_db()
+        self.connect_tgt_db()
+
+    # -------------------------------------------------------------------------
+    def connect_src_db(self):
+
+        result = None
+
+        LOG.debug("Connecting to source MySQL database on {}@{}:{}/{} ...".format(
+            self.src_db_user, self.src_db_host, self.src_db_port, self.src_db_schema))
+        try:
+            self.src_connection = pymysql.connect(
+                host=self.src_db_host,
+                port=self.src_db_port,
+                db=self.src_db_schema,
+                user=self.src_db_user,
+                password=self.src_db_pass,
+                charset='utf8',
+                cursorclass=pymysql.cursors.DictCursor
+            )
+
+            sql = 'SHOW VARIABLES LIKE "version"'
+            if self.verbose > 1:
+                LOG.debug("SQL: {}".format(sql))
+            with self.src_connection.cursor() as cursor:
+                cursor.execute(sql)
+                result = cursor.fetchone()
+            if self.verbose > 2:
+                LOG.debug("Got version info:\n{}".format(pp(result)))
+            LOG.info("Source database is MySQL version {!r}.".format(result['Value']))
+
+        except (pymysql.err.OperationalError) as e:
+            LOG.error("Could not connect to source database ({}): {}".format(
+                e.__class__.__name__, e))
+            self.exit(6)
+
+    # -------------------------------------------------------------------------
+    def connect_tgt_db(self):
+
+        if self.tgt_db_type == 'mysql':
+            self.connect_tgt_db_mysql()
+        else:
+            self.connect_tgt_db_psql()
+
+    # -------------------------------------------------------------------------
+    def connect_tgt_db_mysql(self):
+
+        result = None
+
+        LOG.debug("Connecting to target MySQL database on {}@{}:{}/{} ...".format(
+            self.tgt_db_user, self.tgt_db_host, self.tgt_db_port, self.tgt_db_schema))
+        try:
+            self.tgt_connection = pymysql.connect(
+                host=self.tgt_db_host,
+                port=self.tgt_db_port,
+                db=self.tgt_db_schema,
+                user=self.tgt_db_user,
+                password=self.tgt_db_pass,
+                charset='utf8',
+                cursorclass=pymysql.cursors.DictCursor
+            )
+
+            sql = 'SHOW VARIABLES LIKE "version"'
+            if self.verbose > 1:
+                LOG.debug("SQL: {}".format(sql))
+            with self.tgt_connection.cursor() as cursor:
+                cursor.execute(sql)
+                result = cursor.fetchone()
+            if self.verbose > 2:
+                LOG.debug("Got version info:\n{}".format(pp(result)))
+            LOG.info("Target database is MySQL version {!r}.".format(result['Value']))
+
+        except (pymysql.err.OperationalError) as e:
+            LOG.error("Could not connect to target database ({}): {}".format(
+                e.__class__.__name__, e))
+            self.exit(6)
+
+    # -------------------------------------------------------------------------
+    def connect_tgt_db_psql(self):
+
+        result = None
+
+        LOG.debug("Connecting to target PostgreSQL database on {}@{}:{}/{} ...".format(
+            self.tgt_db_user, self.tgt_db_host, self.tgt_db_port, self.tgt_db_schema))
+        try:
+            self.tgt_connection = psycopg2.connect(
+                host=self.tgt_db_host,
+                port=self.tgt_db_port,
+                dbname=self.tgt_db_schema,
+                user=self.tgt_db_user,
+                password=self.tgt_db_pass,
+            )
+
+            sql = 'SHOW server_version'
+            if self.verbose > 1:
+                LOG.debug("SQL: {}".format(sql))
+            with self.tgt_connection.cursor() as cursor:
+                cursor.execute(sql)
+                result = cursor.fetchone()
+            if self.verbose > 2:
+                LOG.debug("Got version info:\n{}".format(pp(result)))
+            LOG.info("Target database is PostgreSQL version {!r}.".format(result[0]))
+
+        except psycopg2.OperationalError as e:
+            LOG.error("Could not connect to target database ({}): {}".format(
+                e.__class__.__name__, e))
+            self.exit(7)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        try:
+            self.get_src_info()
+            self.clean_tgt_db()
+            self.import_supermasters()
+            self.import_domains()
+            self.import_cryptokeys()
+            self.import_domainmetadata()
+            self.import_records()
+            self.import_tsigkeys()
+#            self.create_ipv6_as_zone()
+        finally:
+            self._close_all()
+
+    # -------------------------------------------------------------------------
+    def get_src_info(self):
+
+        LOG.debug("Retreiving number of source datasets ...")
+
+        result = None
+
+        max_tblname_len = 1
+        for table in self.nr.keys():
+            if len(table) > max_tblname_len:
+                max_tblname_len = len(table)
+        max_tblname_len += 1
+        tpl = "Found {{:<{}}} {{:>8}}".format(max_tblname_len)
+        if self.verbose > 2:
+            LOG.debug("Output template: {!r}".format(tpl))
+
+        with self.src_connection.cursor() as cursor:
+
+            for table in sorted(self.nr.keys()):
+                has_domain = self.nr[table].get('has_domain', False)
+                count_total = 0
+                count_valid = 0
+                count_invalid = 0
+                sql = "SELECT COUNT(*) AS count_rows FROM {}".format(table)
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                cursor.execute(sql)
+                result = cursor.fetchone()
+                count_total = int(result['count_rows'])
+                self.nr[table]['total'] = count_total
+                self.nr[table]['valid'] = 0
+                self.nr[table]['invalid'] = 0
+
+                if count_total and has_domain:
+
+                    sql = textwrap.dedent('''\
+                        SELECT COUNT(*) AS count_rows
+                          FROM {}
+                         WHERE domain_id NOT IN (
+                                SELECT id FROM domains)
+                        ''').strip().format(table)
+
+                    if self.verbose > 1:
+                        LOG.debug("SQL: {}".format(sql))
+                    cursor.execute(sql)
+                    result = cursor.fetchone()
+                    count_invalid = int(result['count_rows'])
+                    if count_invalid:
+                        count_valid = count_total - count_invalid
+                    self.nr[table]['valid'] = count_valid
+                    self.nr[table]['invalid'] = count_invalid
+
+        title = "Number of rows in current PowerDNS database"
+
+        print()
+        print(title)
+        print(('=' * len(title)))
+
+        for table in sorted(self.nr.keys()):
+            has_domain = self.nr[table].get('has_domain', False)
+            msg = tpl.format(table, self.nr[table]['total'])
+            if has_domain:
+                if self.nr[table]['invalid']:
+                    msg += " ({} valid, {} invalid)".format(
+                        self.nr[table]['valid'], self.nr[table]['invalid'])
+                else:
+                    msg += " (all valid)"
+            print(msg)
+        print()
+
+    # -------------------------------------------------------------------------
+    def clean_tgt_db(self):
+
+        tables = [
+            'comments', 'cryptokeys', 'domainmetadata', 'records',
+            'supermasters', 'tsigkeys', 'domains',
+        ]
+
+        sequences = [
+            'comments_id_seq', 'cryptokeys_id_seq', 'domainmetadata_id_seq',
+            'domains_id_seq', 'records_id_seq', 'tsigkeys_id_seq',
+        ]
+
+        LOG.info("Truncating all tables in target database ...")
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+
+            for table in tables:
+
+                LOG.debug("Truncating table {!r} ...".format(table))
+                sql = 'DELETE FROM {}'.format(table)
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                if not self.simulate:
+                    tgt_cursor.execute(sql)
+
+            if self.tgt_db_type != 'mysql':
+
+                for sequence in sequences:
+
+                    LOG.debug("Resetting sequence {!r} ...".format(sequence))
+                    sql = "SELECT SETVAL('{}', 1)".format(sequence)
+                    if self.verbose > 1:
+                        LOG.debug("SQL: {}".format(sql))
+                    if not self.simulate:
+                        tgt_cursor.execute(sql)
+
+        LOG.debug("Commiting changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def _import_domain(self, dom_data, tgt_cursor):
+
+        dom_id = dom_data['id']
+        dom_name = dom_data['name']
+        self.domain_ids[dom_id] = dom_name
+
+        if self.is_local_domain(dom_name):
+            LOG.debug("Setting zone {!r} to a local only zone.".format(dom_name))
+            cur_account = dom_data['account']
+            if cur_account is None:
+                cur_account = ''
+            else:
+                cur_account = cur_account.strip()
+            if not self.re_is_local_account.search(cur_account):
+                if cur_account == '':
+                    cur_account = 'local'
+                else:
+                    cur_account += ', local'
+                if self.verbose > 1:
+                    LOG.debug(
+                        "Setting account information of zone {!r} to {!r}.".format(
+                            dom_name, cur_account))
+                dom_data['account'] = cur_account
+        if self.verbose > 1:
+            LOG.debug("SQL for insert domain:\n{}".format(
+                to_str(tgt_cursor.mogrify(self.sql_insert_domain, dom_data))))
+        if not self.simulate:
+            tgt_cursor.execute(self.sql_insert_domain, dom_data)
+
+        # Inserting domain metadata for SOA-EDIT-API
+        params = {
+            'domain_id': dom_id,
+            'kind': 'SOA-EDIT-API',
+            'content': 'INCEPTION-INCREMENT',
+        }
+        if self.verbose > 1:
+            LOG.debug("SQL for insert domain metadata:\n{}".format(
+                to_str(tgt_cursor.mogrify(self.sql_insert_dom_meta, params))))
+        if not self.simulate:
+            tgt_cursor.execute(self.sql_insert_dom_meta, params)
+
+    # -------------------------------------------------------------------------
+    def import_domains(self):
+
+        LOG.info("Importing all domains ...")
+
+        self.domain_ids = {}
+
+        src_sql = textwrap.dedent('''\
+            SELECT id, name, master, last_check, type, notified_serial, account
+              FROM domains
+            ORDER by name
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                i = 0
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got domains:\n{}".format(pp(results)))
+
+                for result in results:
+                    i += 1
+                    self._import_domain(result, tgt_cursor)
+
+                LOG.info("Imported {} domains.".format(i))
+
+            if self.tgt_db_type != 'mysql':
+                # Get current max domain Id
+                LOG.debug("Get max. Domain Id ...")
+                sql = "SELECT MAX(id) AS max_id FROM domains"
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                tgt_cursor.execute(sql)
+                result = tgt_cursor.fetchone()
+                if self.verbose > 2:
+                    LOG.debug("Got max domain Id:\n{}".format(pp(result)))
+                max_id = int(result[0])
+
+                # Setting this as new value for sequence
+                sql = "SELECT SETVAL('domains_id_seq', %s)"
+                LOG.debug("Setting curval of domains_id_seq to {} ...".format(max_id))
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(to_str(tgt_cursor.mogrify(sql, (max_id, )))))
+                if not self.simulate:
+                    tgt_cursor.execute(sql, (max_id, ))
+
+        LOG.debug("Commiting changes ...")
+        self.tgt_connection.commit()
+
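+    # Note: the rows above are inserted with their original ids, so on
+    # PostgreSQL the sequence behind the id column would otherwise lag behind
+    # and hand out duplicate keys; the SETVAL('domains_id_seq', MAX(id)) call
+    # bumps it past the highest imported id.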
+    # -------------------------------------------------------------------------
+    def import_cryptokeys(self):
+
+        LOG.info("Importing all cryptokeys ...")
+
+        src_sql = textwrap.dedent('''\
+            SELECT id, domain_id, flags, active, content
+              FROM cryptokeys
+             WHERE domain_id IN (
+                    SELECT id FROM domains)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        tgt_sql = textwrap.dedent('''\
+            INSERT INTO cryptokeys (id, domain_id, flags, active, content)
+                 VALUES (%(id)s, %(domain_id)s, %(flags)s, %(active)s, %(content)s)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Target SQL:\n{}".format(tgt_sql))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got cryptokeys:\n{}".format(pp(results)))
+
+                if not results:
+                    LOG.info("No cryptokeys in source database.")
+                    LOG.debug("Commiting changes ...")
+                    self.tgt_connection.commit()
+                    return
+
+                i = 0
+                for result in results:
+                    i += 1
+                    if self.tgt_db_type != 'mysql':
+                        if result['active']:
+                            result['active'] = True
+                        else:
+                            result['active'] = False
+                    if not self.simulate:
+                        tgt_cursor.execute(tgt_sql, result)
+                LOG.info("Imported {} cryptokeys.".format(i))
+
+            if self.tgt_db_type != 'mysql':
+                LOG.debug("Get max. CryptoKey Id ...")
+                sql = "SELECT MAX(id) AS max_id FROM cryptokeys"
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                tgt_cursor.execute(sql)
+                result = tgt_cursor.fetchone()
+                if self.verbose > 2:
+                    LOG.debug("Got max cryptokey Id:\n{}".format(pp(result)))
+                max_id = int(result[0])
+                sql = "SELECT SETVAL('cryptokeys_id_seq', %s)"
+                LOG.debug("Setting curval of cryptokeys_id_seq to {} ...".format(max_id))
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                if not self.simulate:
+                    tgt_cursor.execute(sql, (max_id, ))
+
+        LOG.debug("Commiting changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def import_domainmetadata(self):
+
+        LOG.info("Importing all domainmetadata ...")
+
+        src_sql = textwrap.dedent('''\
+            SELECT domain_id, kind, content
+              FROM domainmetadata
+             WHERE domain_id IN (
+                    SELECT id FROM domains)
+             ORDER BY domain_id, kind, content
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        tgt_sql = textwrap.dedent('''\
+            INSERT INTO domainmetadata (domain_id, kind, content)
+                 VALUES (%(domain_id)s, %(kind)s, %(content)s)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Target SQL:\n{}".format(tgt_sql))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                nr_total = 0
+                nr_imported = 0
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got domainmetadata:\n{}".format(pp(results)))
+
+                if not results:
+                    LOG.info("No domainmetadata in source database.")
+                    LOG.debug("Commiting changes ...")
+                    self.tgt_connection.commit()
+                    return
+
+                for result in results:
+                    nr_total += 1
+                    if result['kind'].lower() == 'also-notify':
+                        continue
+                    nr_imported += 1
+                    if not self.simulate:
+                        tgt_cursor.execute(tgt_sql, result)
+                LOG.info("Imported {i} and rejected {r} domainmetadata.".format(
+                    i=nr_imported, r=(nr_total - nr_imported)))
+
+        LOG.debug("Commiting changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def _import_record(self, record, tgt_cursor):
+
+        if self.tgt_db_type == 'mysql':
+            record['disabled'] = 0
+        else:
+            record['disabled'] = False
+            if record['auth'] is None:
+                record['auth'] = True
+            else:
+                if record['auth']:
+                    record['auth'] = True
+                else:
+                    record['auth'] = False
+            if record['ordername'] is None:
+                dom_id = record['domain_id']
+                if dom_id in self.domain_ids:
+                    dom_name = self.domain_ids[dom_id]
+                    if record['name'] == dom_name:
+                        record['ordername'] = ''
+                    else:
+                        idx = record['name'].rfind('.' + dom_name)
+                        if idx >= 0:
+                            record['ordername'] = record['name'][:idx]
+                        else:
+                            record['ordername'] = ''
+                else:
+                    record['ordername'] = ''
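+            # Strip the trailing root dot from NS/MX targets and from the SOA
+            # primary and email fields; PowerDNS stores these names without it.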
+            if record['type'] in ('NS', 'MX'):
+                record['content'] = RE_DOT_AT_END.sub('', record['content'])
+            elif record['type'] == 'SOA':
+                soa = PdnsSoaData.init_from_data(
+                    record['content'], appname=self.appname,
+                    verbose=self.verbose, base_dir=self.base_dir)
+                soa.primary = RE_DOT_AT_END.sub('', soa.primary)
+                soa.email = RE_DOT_AT_END.sub('', soa.email)
+                record['content'] = soa.data
+        if self.verbose > 3:
+            LOG.debug("SQL for insert record:\n{}".format(
+                to_str(tgt_cursor.mogrify(self.sql_insert_record, record))))
+        if not self.simulate:
+            tgt_cursor.execute(self.sql_insert_record, record)
+
+    # -------------------------------------------------------------------------
+    def import_records(self):
+
+        LOG.info("Importing all records ...")
+
+        src_sql = textwrap.dedent('''\
+            SELECT id, domain_id, name, type, content,
+                   ttl, prio, change_date, ordername, auth
+              FROM records
+             WHERE domain_id IN (
+                    SELECT id FROM domains)
+             ORDER BY name
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        if self.verbose > 1:
+            LOG.debug("Target SQL:\n{}".format(self.sql_insert_record))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                i = 0
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got records:\n{}".format(pp(results)))
+
+                if not results:
+                    LOG.info("No records in source database.")
+                    LOG.debug("Committing changes ...")
+                    self.tgt_connection.commit()
+                    return
+
+                for result in results:
+                    i += 1
+                    self._import_record(result, tgt_cursor)
+
+                LOG.info("Imported {} records.".format(i))
+
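+            # On a non-MySQL (i.e. PostgreSQL) target the rows were inserted with
+            # explicit ids, so the id sequence has to be advanced manually;
+            # otherwise the next nextval() would hand out already used ids.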
+            if self.tgt_db_type != 'mysql':
+                LOG.debug("Get max. records Id ...")
+                sql = "SELECT MAX(id) AS max_id FROM records"
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                tgt_cursor.execute(sql)
+                result = tgt_cursor.fetchone()
+                if self.verbose > 2:
+                    LOG.debug("Got max records Id:\n{}".format(pp(result)))
+                max_id = int(result[0])
+                sql = "SELECT SETVAL('records_id_seq', %s)"
+                LOG.debug("Setting the current value of records_id_seq to {} ...".format(max_id))
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(to_str(tgt_cursor.mogrify(sql, (max_id, )))))
+                if not self.simulate:
+                    tgt_cursor.execute(sql, (max_id, ))
+
+        LOG.debug("Committing changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def import_supermasters(self):
+
+        LOG.info("Importing all supermasters ...")
+
+        src_sql = textwrap.dedent('''\
+            SELECT ip, nameserver, account
+              FROM supermasters
+             ORDER BY nameserver
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        tgt_sql = textwrap.dedent('''\
+            INSERT INTO supermasters (ip, nameserver, account)
+                 VALUES (%(ip)s, %(nameserver)s, %(account)s)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Target SQL:\n{}".format(tgt_sql))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                i = 0
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got supermasters:\n{}".format(pp(results)))
+
+                if not results:
+                    LOG.info("No supermasters in source database.")
+                    LOG.debug("Committing changes ...")
+                    self.tgt_connection.commit()
+                    return
+
+                for result in results:
+                    i += 1
+                    if not self.simulate:
+                        tgt_cursor.execute(tgt_sql, result)
+                LOG.info("Imported {} supermasters.".format(i))
+
+        LOG.debug("Committing changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def import_tsigkeys(self):
+
+        LOG.info("Importing all tsigkeys ...")
+
+        src_sql = textwrap.dedent('''\
+            SELECT id, name, algorithm, secret
+              FROM tsigkeys
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Source SQL:\n{}".format(src_sql))
+
+        tgt_sql = textwrap.dedent('''\
+            INSERT INTO tsigkeys (id, name, algorithm, secret)
+                 VALUES (%(id)s, %(name)s, %(algorithm)s, %(secret)s)
+            ''').strip()
+        if self.verbose > 1:
+            LOG.debug("Target SQL:\n{}".format(tgt_sql))
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+            with self.src_connection.cursor() as src_cursor:
+
+                i = 0
+                src_cursor.execute(src_sql)
+                results = src_cursor.fetchall()
+
+                if self.verbose > 3:
+                    LOG.debug("Got tsigkeys:\n{}".format(pp(results)))
+
+                if not results:
+                    LOG.info("No tsigkeys in source database.")
+                    LOG.debug("Committing changes ...")
+                    self.tgt_connection.commit()
+                    return
+
+                for result in results:
+                    i += 1
+                    if not self.simulate:
+                        tgt_cursor.execute(tgt_sql, result)
+                LOG.info("Imported {} tsigkeys.".format(i))
+
+            if self.tgt_db_type != 'mysql':
+                LOG.debug("Get max. TsigKey Id ...")
+                sql = "SELECT MAX(id) AS max_id FROM tsigkeys"
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                tgt_cursor.execute(sql)
+                result = tgt_cursor.fetchone()
+                if self.verbose > 2:
+                    LOG.debug("Got max TsigKey Id:\n{}".format(pp(result)))
+                max_id = int(result[0])
+                sql = "SELECT SETVAL('tsigkeys_id_seq', %s)"
+                LOG.debug("Setting the current value of tsigkeys_id_seq to {} ...".format(max_id))
+                if self.verbose > 1:
+                    LOG.debug("SQL: {}".format(sql))
+                if not self.simulate:
+                    tgt_cursor.execute(sql, (max_id, ))
+
+        LOG.debug("Committing changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def create_ipv6_as_zone(self):
+
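+        # Reverse (ip6.arpa) zone covering the /48 prefix 2001:978:a700::/48.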
+        zone_name = '0.0.7.a.8.7.9.0.1.0.0.2.ip6.arpa'
+        net_addr = '2001:978:a700::'
+        nameservers = (
+            'ns1.pp-dns.com.',
+            'ns2.pp-dns.com.',
+            'ns3.pp-dns.com.',
+            'ns4.pp-dns.com.',
+        )
+        mail_addr = 'hostmaster.pixelpark.net'
+
+        LOG.info("Creating zone {z!r} for AS network {n!r} ...".format(
+            z=zone_name, n=net_addr))
+
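+        # SOA serial in the usual YYYYMMDDnn form, e.g. 2021011901 for 2021-01-19.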
+        today = datetime.date.today()
+        serial = 1000000 * today.year + 10000 * today.month + 100 * today.day + 1
+        domain_id = 9999
+        refresh = 10800
+        retry = 3600
+        expire = 604800
+        ttl = 3600
+        change_date = int(time.time())
+
+        with self.tgt_connection.cursor() as tgt_cursor:
+
+            LOG.debug("Inserting domain ...")
+            sql = textwrap.dedent('''\
+                INSERT INTO domains (name, master, type, notified_serial, account)
+                  VALUES (%(zone_name)s, '', 'MASTER', %(serial)s, 'public')
+                ''').strip()
+            data = {'zone_name': zone_name, 'serial': serial}
+            if self.verbose > 1:
+                LOG.debug("SQL for insert domain:\n{}".format(
+                    to_str(tgt_cursor.mogrify(sql, data))))
+            if not self.simulate:
+                tgt_cursor.execute(sql, data)
+
+            LOG.debug("Retrieving domain_id from DB ...")
+            sql = 'SELECT id FROM domains WHERE name = %s'
+            if self.verbose > 1:
+                LOG.debug("SQL for retrieving domain_id:\n{}".format(
+                    to_str(tgt_cursor.mogrify(sql, [zone_name]))))
+            if not self.simulate:
+                domain_id = None
+                tgt_cursor.execute(sql, [zone_name])
+                results = tgt_cursor.fetchall()
+                if self.verbose > 2:
+                    LOG.debug("Got results:\n{}".format(pp(results)))
+                for result in results:
+                    domain_id = result[0]
+                if domain_id is None:
+                    raise ImportPdnsdataError(
+                        "Did not find the domain Id of zone {!r}.".format(zone_name))
+            LOG.info("Using Id of zone {z!r}: {i}.".format(z=zone_name, i=domain_id))
+
+            ns_used = RE_DOT_AT_END.sub('', nameservers[0])
+            soa = PdnsSoaData(
+                primary=ns_used, email=mail_addr, serial=serial,
+                refresh=refresh, retry=retry, expire=expire, ttl=ttl,
+                appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
+            )
+
+            LOG.debug("Inserting SOA {!r} ...".format(soa.data))
+            sql = textwrap.dedent('''\
+                INSERT INTO records (
+                        domain_id, name, type, content, ttl, prio,
+                        change_date, disabled, ordername, auth)
+                    VALUES (
+                        %(domain_id)s, %(name)s, 'SOA', %(content)s, %(ttl)s, 0,
+                        %(change_date)s, %(disabled)s, '', %(auth)s)
+                ''').strip()
+            data = {
+                'domain_id': domain_id, 'name': zone_name, 'content': soa.data,
+                'ttl': ttl, 'change_date': change_date, 'disabled': False, 'auth': True,
+            }
+            if self.verbose > 1:
+                LOG.debug("SQL for insert SOA:\n{}".format(
+                    to_str(tgt_cursor.mogrify(sql, data))))
+            if not self.simulate:
+                tgt_cursor.execute(sql, data)
+
+            LOG.debug("Inserting nameservers ...")
+            sql = textwrap.dedent('''\
+                INSERT INTO records (
+                        domain_id, name, type, content, ttl, prio,
+                        change_date, disabled, ordername, auth)
+                    VALUES (
+                        %(domain_id)s, %(name)s, 'NS', %(content)s, %(ttl)s, 0,
+                        %(change_date)s, %(disabled)s, '', %(auth)s)
+                ''').strip()
+            for ns in nameservers:
+                ns_used = RE_DOT_AT_END.sub('', ns)
+                data = {
+                    'domain_id': domain_id, 'name': zone_name, 'content': ns_used,
+                    'ttl': ttl, 'change_date': change_date, 'disabled': False, 'auth': True,
+                }
+                if self.verbose > 1:
+                    LOG.debug("SQL for insert nameserver:\n{}".format(
+                        to_str(tgt_cursor.mogrify(sql, data))))
+                if not self.simulate:
+                    tgt_cursor.execute(sql, data)
+
+            LOG.debug("Inserting domain metadata ...")
+            sql = textwrap.dedent('''\
+                INSERT INTO domainmetadata (domain_id, kind, content)
+                    VALUES (%(domain_id)s, %(kind)s, %(content)s)
+                ''').strip()
+            data = {
+                'domain_id': domain_id,
+                'kind': 'SOA-EDIT-API',
+                'content': 'INCEPTION-INCREMENT',
+            }
+            if self.verbose > 1:
+                LOG.debug("SQL for insert domain metadata:\n{}".format(
+                    to_str(tgt_cursor.mogrify(sql, data))))
+            if not self.simulate:
+                tgt_cursor.execute(sql, data)
+
+        LOG.debug("Committing changes ...")
+        self.tgt_connection.commit()
+
+    # -------------------------------------------------------------------------
+    def _close_all(self):
+
+        if self.src_connection:
+            LOG.debug("Closing source database connection.")
+            try:
+                self.src_connection.close()
+            except Exception as e:
+                LOG.error("Could not close source database connection ({}): {}".format(
+                    e.__class__.__name__, e))
+                traceback.print_exc()
+            self.src_connection = None
+
+        if self.tgt_connection:
+            LOG.debug("Closing target database connection.")
+            try:
+                self.tgt_connection.close()
+            except Exception as e:
+                LOG.error("Could not close target database connection ({}): {}".format(
+                    e.__class__.__name__, e))
+                traceback.print_exc()
+            self.tgt_connection = None
+
+    # -------------------------------------------------------------------------
+    def post_run(self):
+
+        if self.verbose > 1:
+            LOG.info("Executing post_run() ...")
+        self._close_all()
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/ldap_app.py b/lib/pp_lib/ldap_app.py
new file mode 100644 (file)
index 0000000..5b84e3a
--- /dev/null
@@ -0,0 +1,411 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for a LDAP based application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import re
+import copy
+
+# Third party modules
+
+# ldap3 classes and objects
+from ldap3 import Server, ServerPool, Connection, Reader, Writer, ObjectDef
+# ldap3 constants
+from ldap3 import IP_V4_PREFERRED, ROUND_ROBIN, AUTO_BIND_NONE, ALL_ATTRIBUTES
+from ldap3 import SUBTREE
+
+from ldap3.core.exceptions import LDAPPasswordIsMandatoryError
+
+from ldap3.utils.log import set_library_log_detail_level, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED
+
+# Own modules
+from .common import pp, to_bool
+
+from .cfg_app import PpCfgAppError, PpConfigApplication
+
+__version__ = '0.4.9'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpLdapAppError(PpCfgAppError):
+    """Base error class for all exceptions happening during the
+    execution of this configured application."""
+
+    pass
+
+
+# =============================================================================
+class PpLdapApplication(PpConfigApplication):
+    """
+    Class for LDAP-based configured application objects.
+    """
+
+    default_ldap_hosts = [
+        'ldap.pixelpark.com'
+    ]
+
+    default_ldap_port = 389
+    default_ldap_port_ssl = 636
+    default_ldap_use_ssl = False
+
+    default_ldap_base_dn = 'o=isp'
+    default_ldap_bind_dn = 'uid=Solaris_NSS,ou=Unix NSS,ou=Applications,o=pixelpark,o=isp'
+    default_ldap_timeout = 30
+
+    fs_re = re.compile(r'(?:\s+|\s*[,;]\s*)')
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=None, usage=None, description=None,
+            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
+            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False):
+
+        self.ldap_hosts = copy.copy(self.default_ldap_hosts)
+        self.ldap_use_ssl = self.default_ldap_use_ssl
+        self.ldap_port = self.default_ldap_port
+        if self.ldap_use_ssl:
+            self.ldap_port = self.default_ldap_port_ssl
+
+        self.ldap_base_dn = self.default_ldap_base_dn
+        self.ldap_bind_dn = self.default_ldap_bind_dn
+        self.ldap_bind_pw = None
+        self.ldap_timeout = self.default_ldap_timeout
+
+        # Either a single Server object or a ServerPool object
+        self.ldap_server = None
+        self.ldap_connection = None
+
+        stems = []
+        if cfg_stems:
+            if isinstance(cfg_stems, list):
+                for stem in cfg_stems:
+                    s = str(stem).strip()
+                    if not s:
+                        msg = "Invalid configuration stem {!r} given.".format(stem)
+                        raise PpLdapAppError(msg)
+                    stems.append(s)
+            else:
+                s = str(cfg_stems).strip()
+                if not s:
+                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
+                    raise PpLdapAppError(msg)
+                stems.append(s)
+        else:
+            stems = [self.appname]
+        if 'ldap' not in stems:
+            stems.insert(0, 'ldap')
+
+        super(PpLdapApplication, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
+            initialized=False, usage=usage, description=description,
+            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
+            env_prefix=env_prefix, cfg_dir=cfg_dir, cfg_stems=stems,
+            cfg_encoding=cfg_encoding, need_config_file=need_config_file,
+        )
+
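+        # Map the application verbosity to the log detail level of the ldap3 library.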
+        if self.verbose > 5:
+            set_library_log_detail_level(EXTENDED)
+        elif self.verbose > 4:
+            set_library_log_detail_level(NETWORK)
+        elif self.verbose > 3:
+            set_library_log_detail_level(PROTOCOL)
+        elif self.verbose > 2:
+            set_library_log_detail_level(BASIC)
+        else:
+            set_library_log_detail_level(ERROR)
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+        """
+        Execute some actions after reading the configuration.
+
+        This method should be explicitly called by all perform_config()
+        methods in descendant classes.
+        """
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 2:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+            section = self.cfg[section_name]
+
+            if section_name.lower() == 'ldap':
+                self.do_ldap_cfg(section_name, section)
+
+    # -------------------------------------------------------------------------
+    def _ldap_cfg_host(self, section_name, section):
+
+        got_host = False
+
+        if 'host' in section:
+            hosts = self.fs_re.split(section['host'])
+            for host in hosts:
+                if not host:
+                    continue
+                if not got_host:
+                    self.ldap_hosts = []
+                    got_host = True
+                host = host.lower()
+                if host in self.ldap_hosts:
+                    continue
+                self.ldap_hosts.append(host)
+
+    # -------------------------------------------------------------------------
+    def _ldap_cfg_port(self, section_name, section):
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+            except (ValueError, TypeError):
+                msg = "Invalid LDAP port ({s}/port => {v!r}) found in configuration.".format(
+                    s=section_name, v=section['port'])
+                raise PpLdapAppError(msg)
+            if port <= 0 or port >= 2 ** 16:
+                msg = "Invalid LDAP port ({s}/port => {v!r}) found in configuration.".format(
+                    s=section_name, v=port)
+                raise PpLdapAppError(msg)
+            self.ldap_port = port
+
+    # -------------------------------------------------------------------------
+    def _ldap_cfg_other(self, section_name, section):
+
+        if 'ssl' in section:
+            self.ldap_use_ssl = to_bool(section['ssl'])
+
+        if 'tls' in section:
+            self.ldap_use_ssl = to_bool(section['tls'])
+
+        if 'base_dn' in section:
+            self.ldap_base_dn = section['base_dn'].strip()
+        if 'bind_dn' in section:
+            self.ldap_bind_dn = section['bind_dn'].strip()
+        if 'bind_pw' in section:
+            self.ldap_bind_pw = section['bind_pw']
+        if 'timeout' in section:
+            timeout = None
+            try:
+                timeout = int(section['timeout'])
+            except (ValueError, TypeError):
+                msg = (
+                    "Invalid LDAP timeout ({s}/timeout => {v!r}) found in configuration.").format(
+                    s=section_name, v=section['timeout'])
+                LOG.error(msg)
+            if timeout is not None and timeout > 0:
+                self.ldap_timeout = timeout
+
+    # -------------------------------------------------------------------------
+    def do_ldap_cfg(self, section_name, section):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        self._ldap_cfg_host(section_name, section)
+        self._ldap_cfg_port(section_name, section)
+        self._ldap_cfg_other(section_name, section)
+
+        # ----------------------
+        def _get_ldap_server(host):
+            return Server(
+                host, port=self.ldap_port, use_ssl=self.ldap_use_ssl,
+                mode=IP_V4_PREFERRED, connect_timeout=self.ldap_timeout)
+
+        # Init LDAP Server objects
+        if len(self.ldap_hosts):
+            self.ldap_server = ServerPool(None, ROUND_ROBIN)
+            for h in self.ldap_hosts:
+                server = _get_ldap_server(h)
+                self.ldap_server.add(server)
+        else:
+            msg = "No LDAP servers found in configuration."
+            raise PpLdapAppError(msg)
+
+        # Init LDAP connection object
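+        # The connection is created lazily and stays unbound here; the actual
+        # bind against the servers happens in pre_run().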
+        self.ldap_connection = Connection(
+            self.ldap_server, user=self.ldap_bind_dn, password=self.ldap_bind_pw,
+            auto_bind=AUTO_BIND_NONE, lazy=True, auto_range=True
+        )
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        Could be overridden by descendant classes.
+
+        """
+
+        if self.verbose > 1:
+            LOG.debug("Executing pre_run() ...")
+
+        super(PpLdapApplication, self).pre_run()
+
+        LOG.info("Binding local address for LDAP requests ...")
+        try:
+            self.ldap_connection.bind()
+        except LDAPPasswordIsMandatoryError as e:
+            msg = "Please configure [LDAP]/bind_pw in configuration - " + str(e)
+            self.handle_error(msg, e.__class__.__name__)
+            self.exit(1)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """
+        Dummy function as main routine.
+
+        MUST be overridden by descendant classes.
+
+        """
+        LOG.debug("Executing nothing ...")
+
+    # -------------------------------------------------------------------------
+    def ldap_search(self, query_filter, dn=None, attributes=ALL_ATTRIBUTES, scope=SUBTREE):
+
+        if self.verbose > 1:
+            LOG.debug("Query string: {q!r}, attributes: {a}".format(
+                q=query_filter, a=pp(attributes)))
+
+        if dn is None:
+            dn = self.ldap_base_dn
+
+        cursor = Reader(
+            self.ldap_connection,
+            query=query_filter, base=dn, attributes=attributes
+        )
+
+        try:
+            cursor.search()
+        except LDAPPasswordIsMandatoryError as e:
+            msg = "Please configure [LDAP]/bind_pw in configuration - " + str(e)
+            LOG.error(msg)
+            return []
+
+        return cursor.entries
+
+    # -------------------------------------------------------------------------
+    def ldap_search_subtree(self, obj_def, query=None, base=None):
+
+        if base is None:
+            base = self.ldap_base_dn
+
+        cursor = Reader(
+            self.ldap_connection,
+            object_def=obj_def, query=query, base=base)
+
+        if self.verbose > 1:
+            LOG.debug("LDAP-Reader:\n{}".format(cursor))
+
+        cursor.search()
+        return cursor.entries
+
+    # -------------------------------------------------------------------------
+    def ldap_search_object(self, obj_def, object_dn, base=None):
+
+        if base is None:
+            base = self.ldap_base_dn
+
+        cursor = Reader(
+            self.ldap_connection,
+            object_def=obj_def, base=base)
+
+        if self.verbose > 1:
+            LOG.debug("LDAP-Reader:\n{}".format(cursor))
+
+        cursor.search_object(entry_dn=object_dn)
+        return cursor.entries
+
+    # -------------------------------------------------------------------------
+    def get_numeric_uid(self, dn, base=None):
+
+        person = ObjectDef(['posixAccount', 'shadowAccount'])
+        person += ["uid", "uidNumber", "gidNumber"]
+
+        entries = self.ldap_search_object(person, dn, base=base)
+        LOG.debug("Found {} LDAP entries.".format(len(entries)))
+
+        if not entries:
+            LOG.error("No LDAP entry found for DN {!r}.".format(dn))
+            return None
+
+        entry = entries[0]
+
+        uid = entry['uidNumber'][0]
+        return uid
+
+    # -------------------------------------------------------------------------
+    def set_numeric_uid(self, dn, new_uid, simulate=False, base=None):
+
+        person = ObjectDef(['posixAccount', 'shadowAccount'])
+        person += ["uid", "uidNumber", "gidNumber", 'objectClass']
+
+        if base is None:
+            base = self.ldap_base_dn
+
+        read_cursor = Reader(
+            self.ldap_connection,
+            object_def=person, base=base)
+        read_cursor.search_object(entry_dn=dn)
+
+        if not read_cursor.entries:
+            msg = "Did not find LDAP entry {!r}.".format(dn)
+            raise PpLdapAppError(msg)
+
+        entry = read_cursor.entries[0]
+        if self.verbose:
+            LOG.debug("Found entry:\n{}".format(entry))
+
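+        # Create a Writer cursor from the Reader cursor and commit the changed
+        # uidNumber attribute back to the directory (ldap3 abstraction layer).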
+        writer_cursor = Writer.from_cursor(read_cursor)
+        entry = writer_cursor.entries[0]
+        entry.uidNumber = new_uid
+        if self.verbose > 1:
+            LOG.debug("Writer entry before commit:\n{}".format(entry))
+
+        LOG.info("Setting numeric user Id of {d!r} to {u} ...".format(
+            d=dn, u=new_uid))
+
+        if not simulate:
+            entry.entry_commit_changes()
+            if self.verbose:
+                LOG.debug("Writer entry after commit:\n{}".format(entry))
+
+    # -------------------------------------------------------------------------
+    def post_run(self):
+        """
+        Dummy function to run after the main routine.
+        Could be overridden by descendant classes.
+
+        """
+
+        if self.verbose > 1:
+            LOG.debug("Executing post_run() ...")
+
+        LOG.debug("Unbinding from the LDAP servers ...")
+        self.ldap_connection.unbind()
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/mailaddress.py b/lib/pp_lib/mailaddress.py
new file mode 100644 (file)
index 0000000..11c7f8f
--- /dev/null
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
+@summary: The module for the MailAddress object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import re
+
+# Own modules
+from .errors import InvalidMailAddressError
+
+from .common import to_str
+
+__version__ = '0.3.2'
+log = logging.getLogger(__name__)
+
+
+# =============================================================================
+class MailAddress(object):
+    """
+    Class for encapsulating a mail simple address.
+    Class for encapsulating a simple mail address.
+
+    pattern_valid_domain = r'@((?:[a-z0-9](?:[a-z0-9\-]*[a-z0-9])?\.)+[a-z][a-z]+)$'
+
+    pattern_valid_user = r'^([a-z0-9][a-z0-9_\-\.\+\&@]*[a-z0-9]'
+    pattern_valid_user += r'(?:\+[a-z0-9][a-z0-9_\-\.]*[a-z0-9])*)'
+
+    pattern_valid_address = pattern_valid_user + pattern_valid_domain
+
+    re_valid_user = re.compile(pattern_valid_user + r'$', re.IGNORECASE)
+    re_valid_domain = re.compile(r'^' + pattern_valid_domain, re.IGNORECASE)
+    re_valid_address = re.compile(pattern_valid_address, re.IGNORECASE)
+
+    verbose = 0
+
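+    # Illustrative usage of this class (example values only):
+    #   MailAddress.valid_address('frank.brehm@pixelpark.com')  -> True
+    #   MailAddress.valid_address('not a mail address')         -> False
+    #   str(MailAddress('frank.brehm@pixelpark.com'))           -> 'frank.brehm@pixelpark.com'
+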
+    # -------------------------------------------------------------------------
+    @classmethod
+    def valid_address(cls, address, raise_on_failure=False):
+
+        if not address:
+            e = InvalidMailAddressError(address, "Empty address.")
+            if raise_on_failure:
+                raise e
+            elif cls.verbose > 2:
+                log.debug(str(e))
+            return False
+
+        addr = to_str(address)
+        if not isinstance(addr, str):
+            e = InvalidMailAddressError(address, "Wrong type.")
+            if raise_on_failure:
+                raise e
+            elif cls.verbose > 2:
+                log.debug(str(e))
+            return False
+
+        if cls.re_valid_address.search(addr):
+            return True
+
+        e = InvalidMailAddressError(address, "Invalid address.")
+        if raise_on_failure:
+            raise e
+        elif cls.verbose > 2:
+            log.debug(str(e))
+        return False
+
+    # -------------------------------------------------------------------------
+    def __init__(self, user=None, domain=None):
+
+        self._user = ''
+        self._domain = ''
+
+        if not domain:
+            if user:
+                addr = to_str(user)
+                if self.valid_address(addr):
+                    match = self.re_valid_address.search(addr)
+                    self._user = match.group(1)
+                    self._domain = match.group(2)
+                    return
+                match = self.re_valid_domain.search(addr)
+                if match:
+                    self._domain = match.group(1)
+                    return
+                self._user = addr
+                return
+
+        self._user = to_str(user)
+        self._domain = to_str(domain)
+
+    # -----------------------------------------------------------
+    @property
+    def user(self):
+        """The user part of the address."""
+        if self._user is None:
+            return ''
+        return self._user
+
+    # -----------------------------------------------------------
+    @property
+    def domain(self):
+        """The domain part of the address."""
+        if self._domain is None:
+            return ''
+        return self._domain
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+
+        if not self.user and not self.domain:
+            return ''
+
+        if not self.domain:
+            return self.user
+
+        if not self.user:
+            return '@' + self.domain
+
+        return self.user + '@' + self.domain
+
+    # -------------------------------------------------------------------------
+    def str_for_access(self):
+
+        if not self.user and not self.domain:
+            return None
+
+        if not self.domain:
+            return self.user + '@'
+
+        if not self.user:
+            return self.domain
+
+        return self.user + '@' + self.domain
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("user={!r}".format(self.user))
+        fields.append("domain={!r}".format(self.domain))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    def __hash__(self):
+        return hash(str(self).lower())
+
+    # -------------------------------------------------------------------------
+    def __eq__(self, other):
+
+        if not isinstance(other, MailAddress):
+            if other is None:
+                return False
+            return str(self).lower() == str(other).lower()
+
+        if not self.user:
+            if other.user:
+                return False
+            if not self.domain:
+                if other.domain:
+                    return False
+                return True
+            if not other.domain:
+                return False
+            if self.domain.lower() == other.domain.lower():
+                return True
+            return False
+
+        if not self.domain:
+            if other.domain:
+                return False
+            if not other.user:
+                return False
+            if self.user.lower() == other.user.lower():
+                return True
+            return False
+
+        if not other.user:
+            return False
+        if not other.domain:
+            return False
+        if self.domain.lower() != other.domain.lower():
+            return False
+        if self.user.lower() != other.user.lower():
+            return False
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def __ne__(self, other):
+
+        if self == other:
+            return False
+        return True
+
+    # -------------------------------------------------------------------------
+    def __lt__(self, other):
+
+        if not isinstance(other, MailAddress):
+            if other is None:
+                return False
+            return str(self).lower() < str(other).lower()
+
+        if not self.user:
+            if not self.domain:
+                if other.domain:
+                    return False
+                return True
+            if not other.domain:
+                return False
+            if self.domain.lower() != other.domain.lower():
+                return self.domain.lower() < other.domain.lower()
+            if other.user:
+                return False
+            return True
+
+        if not self.domain:
+            if other.domain:
+                return True
+            if not other.user:
+                return False
+            if self.user.lower() != other.user.lower():
+                return self.user.lower() < other.user.lower()
+            return False
+
+        if not other.domain:
+            return False
+        if not other.user:
+            return False
+
+        if self.domain.lower() != other.domain.lower():
+            return self.domain.lower() < other.domain.lower()
+        if self.user.lower() != other.user.lower():
+            return self.user.lower() < other.user.lower()
+
+        return False
+
+    # -------------------------------------------------------------------------
+    def __gt__(self, other):
+
+        if not isinstance(other, MailAddress):
+            return NotImplemented
+
+        if self < other:
+            return False
+        return True
+
+    # -------------------------------------------------------------------------
+    def __copy__(self):
+        "Implementing a wrapper for copy.copy()."
+
+        addr = MailAddress()
+        addr._user = self.user
+        addr._domain = self.domain
+        return addr
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/merge.py b/lib/pp_lib/merge.py
new file mode 100644 (file)
index 0000000..7daadc7
--- /dev/null
@@ -0,0 +1,72 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+"""
+
+import itertools
+
+# =============================================================================
+class ZipExhausted(Exception):
+    pass
+
+
+# =============================================================================
+def izip_longest(*args, **kwds):
+    '''
+    The function izip_longest() does not exist anymore in the itertools module
+    of Python 3.
+    Taken from https://docs.python.org/2/library/itertools.html#itertools.izip_longest
+    '''
+    # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
+
+    fillvalue = kwds.get('fillvalue')
+    counter = [len(args) - 1]
+
+    # ------------------
+    def sentinel():
+        if not counter[0]:
+            raise ZipExhausted
+        counter[0] -= 1
+        yield fillvalue
+
+    # ------------------
+    fillers = itertools.repeat(fillvalue)
+    iterators = [itertools.chain(it, sentinel(), fillers) for it in args]
+    try:
+        while iterators:
+            yield tuple(map(next, iterators))
+    except ZipExhausted:
+        pass
+
+
+# =============================================================================
+def merge_structure(a, b):
+    '''
+    Taken from https://gist.github.com/saurabh-hirani/6f3f5d119076df70e0da
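+
+    Example (illustrative):
+        merge_structure({'a': [1], 'b': {'x': 1}}, {'a': [2], 'b': {'y': 2}})
+        returns {'a': [1, 2], 'b': {'x': 1, 'y': 2}}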
+    '''
+    if isinstance(a, dict) and isinstance(b, dict):
+        d = dict(a)
+        d.update({k: merge_structure(a.get(k, None), b[k]) for k in b})
+        return d
+
+    if isinstance(a, list) and isinstance(b, list):
+        is_a_nested = any(isinstance(x, (list, dict)) for x in a)
+        is_b_nested = any(isinstance(x, (list, dict)) for x in b)
+        if is_a_nested or is_b_nested:
+            return [merge_structure(x, y) for x, y in izip_longest(a, b)]
+        else:
+            return a + b
+
+    return a if b is None else b
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/mk_home_app.py b/lib/pp_lib/mk_home_app.py
new file mode 100644 (file)
index 0000000..ce8b05c
--- /dev/null
@@ -0,0 +1,380 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the mk-home application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import logging
+import logging.config
+import textwrap
+import shutil
+import stat
+
+# Third party modules
+# from ldap3 import ObjectDef, AttrDef, Reader, Writer
+from ldap3 import ObjectDef
+
+# from ldap3.core.exceptions import LDAPKeyError
+
+# Own modules
+from .common import pp
+
+from .ldap_app import PpLdapAppError, PpLdapApplication
+
+__version__ = '0.5.1'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpMkHomeError(PpLdapAppError):
+    pass
+
+
+# =============================================================================
+class PpMkHomeApp(PpLdapApplication):
+    """Class for the 'mk-home' application to ensure:
+        * existence of HOME directories for all users in LDAP
+        * all LDAP users having a valid numeric UID (different from 999999999)
+    """
+
+    default_initial_uid = 999999999
+    # /mnt/nfs
+    default_chroot_homedir = os.sep + os.path.join('mnt', 'nfs')
+    # /home
+    default_home_root = os.sep + 'home'
+    # /etc/skel
+    default_skel_dir = os.sep + os.path.join('etc', 'skel')
+    default_dn_counter = 'uid=uidNumber,ou=ldapTool,ou=Applications,o=Pixelpark,o=isp'
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.initial_uid = self.default_initial_uid
+        self.chroot_homedir = self.default_chroot_homedir
+        self.home_root_abs = self.default_home_root
+        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
+        self.show_simulate_opt = True
+        self.user_entries = []
+        self.users = {}
+        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
+        self.skel_dir = self.default_skel_dir
+        self.dn_counter = self.default_dn_counter
+        self.el_printed = False
+
+        description = textwrap.dedent('''\
+            Home directory and UIDNumber generation - this script searches for
+            Unix accounts in LDAP and generates the home directory for users if
+            it doesn't exist. It also looks for accounts with the special
+            UIDNumber {} and generates a new one for them.
+            ''').strip().format(self.default_initial_uid)
+
+        super(PpMkHomeApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='mk-home'
+        )
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpMkHomeApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 2:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            if section_name.lower() not in ('mk-home', 'mk_home', 'mkhome'):
+                continue
+
+            section = self.cfg[section_name]
+            if self.verbose > 2:
+                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                    n=section_name, s=pp(section)))
+
+            if 'initial_uid' in section:
+                v = section['initial_uid']
+                try:
+                    uid = int(v)
+                except (ValueError, TypeError):
+                    msg = (
+                        "Invalid initial numeric user Id ([{s}]/initial_uid "
+                        "=> {v!r}) found in configuration.").format(s=section_name, v=v)
+                    raise PpMkHomeError(msg)
+                if uid <= 0:
+                    msg = (
+                        "Invalid initial numeric user Id ([{s}]/initial_uid "
+                        "=> {v!r}) found in configuration.").format(s=section_name, v=v)
+                    raise PpMkHomeError(msg)
+                self.initial_uid = uid
+
+            if 'chroot_homedir' in section:
+                v = section['chroot_homedir']
+                if not os.path.isabs(v):
+                    msg = (
+                        "The chrooted path of the home directories must be an "
+                        "absolute pathname (found [{s}]/chroot_homedir "
+                        "=> {v!r} in configuration).").format(s=section_name, v=v)
+                    raise PpMkHomeError(msg)
+                self.chroot_homedir = v
+
+            if 'home_root' in section:
+                v = section['home_root']
+                if not os.path.isabs(v):
+                    msg = (
+                        "The root path of the home directories must be an "
+                        "absolute pathname (found [{s}]/home_root "
+                        "=> {v!r} in configuration).").format(s=section_name, v=v)
+                    raise PpMkHomeError(msg)
+                self.home_root_abs = v
+
+            if 'skel_dir' in section:
+                v = section['skel_dir']
+                if not os.path.isabs(v):
+                    msg = (
+                        "The skeleton directory must be an "
+                        "absolute pathname (found [{s}]/skel_dir "
+                        "=> {v!r} in configuration).").format(s=section_name, v=v)
+                    raise PpMkHomeError(msg)
+                self.skel_dir = v
+
+            if 'dn_counter' in section:
+                self.dn_counter = section['dn_counter'].strip()
+
+        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
+        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        Could be overwritten by descendant classes.
+        Could be overridden by descendant classes.
+        """
+
+        if os.geteuid():
+            msg = "Only root may execute this application."
+            LOG.error(msg)
+            self.exit(1)
+
+        if not os.path.exists(self.chroot_homedir):
+            msg = "The chrooted path of the home directories {!r} does not exist.".format(
+                self.chroot_homedir)
+            LOG.error(msg)
+            self.exit(1)
+
+        if not os.path.isdir(self.chroot_homedir):
+            msg = "The chrooted path of the home directories {!r} is not a directory.".format(
+                self.chroot_homedir)
+            LOG.error(msg)
+            self.exit(1)
+
+        if not os.path.isdir(self.skel_dir):
+            msg = "The skeleton directory {!r} does not exist or is not a directory.".format(
+                self.skel_dir)
+            LOG.error(msg)
+            self.exit(1)
+
+        super(PpMkHomeApp, self).pre_run()
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        self.get_all_user_entries()
+        self.check_numeric_uids()
+        self.check_home_dirs()
+
+    # -------------------------------------------------------------------------
+    def get_all_user_entries(self):
+
+        LOG.info("Reading Accounts from LDAP ...")
+
+        query_filter = '(&(objectclass=posixAccount)(objectclass=shadowAccount))'
+        # attributes = ["uid", "uidNumber", "homeDirectory", "gidNumber"]
+
+        person = ObjectDef(['posixAccount', 'shadowAccount'])
+        person += ["uid", "uidNumber", "homeDirectory", "gidNumber"]
+
+        # self.user_entries = self.ldap_search(query_filter, attributes=attributes)
+        self.user_entries = self.ldap_search_subtree(person, query_filter)
+        LOG.debug("Found {} LDAP entries.".format(len(self.user_entries)))
+
+        for entry in self.user_entries:
+            dn = entry.entry_dn
+            self.users[dn] = {
+                'uid': entry['uid'][0],
+                'uidNumber': entry['uidNumber'][0],
+                'gidNumber': entry['gidNumber'][0],
+                'homeDirectory': entry['homeDirectory'][0],
+            }
+
+        if self.verbose > 2:
+            LOG.debug("All found user entries from LDAP:\n{}".format(pp(self.users)))
+
+    # -------------------------------------------------------------------------
+    def set_new_counter(self, new_uid):
+
+        return self.set_numeric_uid(self.dn_counter, new_uid, simulate=self.simulate)
+
+    # -------------------------------------------------------------------------
+    def check_numeric_uids(self):
+
+        LOG.info("Checking UIDs for new users ...")
+
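+        # The counter entry in LDAP holds the last assigned numeric UID; each
+        # new assignment increments it and writes it back via set_new_counter().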
+        uid_counter = self.get_numeric_uid(self.dn_counter)
+        if uid_counter is None:
+            LOG.error("Did not find the current numeric UID of the counter.")
+            self.exit(5)
+        LOG.debug("Current UID counter: {}".format(uid_counter))
+
+        i = 0
+
+        for dn in self.users.keys():
+
+            user = self.users[dn]
+
+            uid = user['uidNumber']
+            # gid = user['gidNumber']
+            user_name = user['uid']
+            # home = user['homeDirectory']
+
+            if uid == self.initial_uid:
+
+                i += 1
+
+                # Re-read the counter to get its current value before assigning.
+                uid_counter = self.get_numeric_uid(self.dn_counter)
+                new_uid = uid_counter + 1
+                LOG.info("Setting numeric UID of user {n!r} to {u} ...".format(
+                    n=user_name, u=new_uid))
+                # Setting uid of user itself
+                self.set_numeric_uid(dn, new_uid, simulate=self.simulate)
+                # Setting uid of the counter
+                self.set_new_counter(new_uid)
+
+                user['uidNumber'] = new_uid
+
+        if self.verbose:
+            print('')
+        if i:
+            if i > 1:
+                LOG.info("Total {} numeric user Ids set.".format(i))
+            else:
+                LOG.info("Total one numeric user Id set.")
+        else:
+            LOG.info("No numeric user Ids set.")
+
+        if self.verbose:
+            print('')
+
+    # -------------------------------------------------------------------------
+    def _check_home_dir(self, dn, upper_dir, home_mode=stat.S_IRWXU):
+
+        user = self.users[dn]
+
+        uid = user['uidNumber']
+        gid = user['gidNumber']
+        user_name = user['uid']
+        home = user['homeDirectory']
+
+        LOG.debug("Checking home directory {h!r} of {d!r} ...".format(h=home, d=dn))
+        if not os.path.isabs(home):
+            LOG.warning("Home directory {h!r} of user {u!r} is not absolute.".format(
+                h=home, u=dn))
+            self.el_printed = False
+            return False
+
+        home_relative = os.path.relpath(home, self.home_root_abs)
+        if home_relative.startswith(upper_dir):
+            if self.verbose > 1:
+                LOG.debug("Home directory {h!r} outside {r!r} is not considered.".format(
+                    h=home, r=self.home_root_abs))
+            self.el_printed = False
+            return False
+
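+        # Map the home directory from LDAP into the chrooted tree, e.g.
+        # /home/foo becomes /mnt/nfs/home/foo with the default settings.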
+        chroot_dir = os.path.join(self.chroot_homedir, os.path.relpath(home, os.sep))
+        if self.verbose > 1:
+            LOG.debug("Checking existence of {!r} ...".format(chroot_dir))
+        if os.path.exists(chroot_dir):
+            if os.path.isdir(chroot_dir):
+                if self.verbose > 2:
+                    LOG.debug("Directory {!r} already exists.".format(chroot_dir))
+            else:
+                LOG.error("Directory {!r} exists, but is NOT a directory.".format(chroot_dir))
+            self.el_printed = False
+            return False
+
+        if not self.el_printed:
+            if self.verbose:
+                print("")
+            self.el_printed = True
+
+        LOG.info("Creating home directory {!r} ...".format(chroot_dir))
+        LOG.debug("Copying {s!r} recursively to {c!r} ...".format(s=self.skel_dir, c=chroot_dir))
+
+        if not self.simulate:
+            shutil.copytree(self.skel_dir, chroot_dir, symlinks=True)
+
+        LOG.debug("Chowning {c!r} recursively to {u}:{g} (user {n!r}) ...".format(
+            c=chroot_dir, u=uid, g=gid, n=user_name))
+
+        if not self.simulate:
+            for root, dirs, files in os.walk(chroot_dir):
+                if self.verbose > 1:
+                    LOG.debug("Chowning {!r} ...".format(root))
+                os.chown(root, uid, gid, follow_symlinks=False)
+                for file_name in files:
+                    fname_abs = os.path.join(root, file_name)
+                    if self.verbose > 1:
+                        LOG.debug("Chowning {!r} ...".format(fname_abs))
+                    os.chown(fname_abs, uid, gid, follow_symlinks=False)
+
+        LOG.debug("Setting permissions of {h!r} to {p:04o} ...".format(h=chroot_dir, p=home_mode))
+        if not self.simulate:
+            os.chmod(chroot_dir, home_mode)
+
+        if self.verbose:
+            print("")
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def check_home_dirs(self):
+
+        LOG.info("Checking home directories ...")
+        upper_dir = os.pardir + os.sep
+        home_mode = stat.S_IRWXU
+        self.el_printed = False
+
+        created = 0
+
+        for dn in sorted(self.users.keys(), key=str.lower):
+            if self._check_home_dir(dn, upper_dir, home_mode):
+                created += 1
+
+        if self.verbose:
+            print('')
+        if created:
+            if created > 1:
+                LOG.info("Total {} home directories created.".format(created))
+            else:
+                LOG.info("Total one home directory created.")
+        else:
+            LOG.info("No home directories created.")
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/obj.py b/lib/pp_lib/obj.py
new file mode 100644 (file)
index 0000000..8208c98
--- /dev/null
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import sys
+import os
+import logging
+import datetime
+import traceback
+
+# Third party modules
+
+# Own modules
+from .common import pp, to_bytes
+
+from .errors import PpError
+
+__version__ = '0.2.4'
+
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpBaseObjectError(PpError):
+    """
+    Base error class usable by all descendant objects.
+    """
+
+    pass
+
+
+# =============================================================================
+class PpBaseObject(object):
+    """
+    Base class for all objects.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=False):
+        """
+        Initialisation of the base object.
+
+        Raises an exception on an unrecoverable error.
+        """
+
+        self._appname = None
+        """
+        @ivar: name of the current running application
+        @type: str
+        """
+        if appname:
+            v = str(appname).strip()
+            if v:
+                self._appname = v
+        if not self._appname:
+            self._appname = os.path.basename(sys.argv[0])
+
+        self._version = version
+        """
+        @ivar: version string of the current object or application
+        @type: str
+        """
+
+        self._verbose = int(verbose)
+        """
+        @ivar: verbosity level (0 - 9)
+        @type: int
+        """
+        if self._verbose < 0:
+            msg = "Wrong verbose level {!r}, must be >= 0".format(verbose)
+            raise ValueError(msg)
+
+        self._initialized = False
+        """
+        @ivar: initialisation of this object is complete
+               after __init__() of this object
+        @type: bool
+        """
+
+        self._base_dir = base_dir
+        """
+        @ivar: base directory used for different purposes, must be an existing
+               directory. Defaults to the directory of the current script.
+        @type: str
+        """
+        if base_dir:
+            if not os.path.exists(base_dir):
+                msg = "Base directory {!r} does not exist.".format(base_dir)
+                self.handle_error(msg)
+                self._base_dir = None
+            elif not os.path.isdir(base_dir):
+                msg = "Base directory {!r} is not a directory.".format(base_dir)
+                self.handle_error(msg)
+                self._base_dir = None
+        if not self._base_dir:
+            self._base_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+
+        self._initialized = bool(initialized)
+
+    # -----------------------------------------------------------
+    @property
+    def appname(self):
+        """The name of the current running application."""
+        if hasattr(self, '_appname'):
+            return self._appname
+        return os.path.basename(sys.argv[0])
+
+    @appname.setter
+    def appname(self, value):
+        if value:
+            v = str(value).strip()
+            if v:
+                self._appname = v
+
+    # -----------------------------------------------------------
+    @property
+    def version(self):
+        """The version string of the current object or application."""
+        return getattr(self, '_version', __version__)
+
+    # -----------------------------------------------------------
+    @property
+    def verbose(self):
+        """The verbosity level."""
+        return getattr(self, '_verbose', 0)
+
+    @verbose.setter
+    def verbose(self, value):
+        v = int(value)
+        if v >= 0:
+            self._verbose = v
+        else:
+            LOG.warning("Wrong verbose level {!r}, must be >= 0".format(value))
+
+    # -----------------------------------------------------------
+    @property
+    def initialized(self):
+        """The initialisation of this object is complete."""
+        return getattr(self, '_initialized', False)
+
+    @initialized.setter
+    def initialized(self, value):
+        self._initialized = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def base_dir(self):
+        """The base directory used for different purposes."""
+        return self._base_dir
+
+    @base_dir.setter
+    def base_dir(self, value):
+        if value.startswith('~'):
+            value = os.path.expanduser(value)
+        if not os.path.exists(value):
+            msg = "Base directory {!r} does not exists.".format(value)
+            LOG.error(msg)
+        elif not os.path.isdir(value):
+            msg = "Base directory {!r} is not a directory.".format(value)
+            LOG.error(msg)
+        else:
+            self._base_dir = value
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting function for translating object structure
+        into a string
+
+        @return: structure as string
+        @rtype:  str
+        """
+
+        return pp(self.as_dict(short=True))
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("appname={!r}".format(self.appname))
+        fields.append("verbose={!r}".format(self.verbose))
+        fields.append("version={!r}".format(self.version))
+        fields.append("base_dir={!r}".format(self.base_dir))
+        fields.append("initialized={!r}".format(self.initialized))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = {}
+        for key in self.__dict__:
+            if short and key.startswith('_') and not key.startswith('__'):
+                continue
+            val = self.__dict__[key]
+            if isinstance(val, PpBaseObject):
+                res[key] = val.as_dict(short=short)
+            else:
+                res[key] = val
+        res['__class_name__'] = self.__class__.__name__
+        res['appname'] = self.appname
+        res['version'] = self.version
+        res['verbose'] = self.verbose
+        res['initialized'] = self.initialized
+        res['base_dir'] = self.base_dir
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def handle_error(
+            self, error_message=None, exception_name=None, do_traceback=False):
+        """
+        Handle an error gracefully.
+
+        Log the error and optionally a traceback, and continue.
+
+        @param error_message: the error message to display
+        @type error_message: str
+        @param exception_name: name of the exception class
+        @type exception_name: str
+        @param do_traceback: always show a traceback
+        @type do_traceback: bool
+
+        """
+
+        msg = 'Exception happened: '
+        if exception_name is not None:
+            exception_name = exception_name.strip()
+            if exception_name:
+                msg = exception_name + ': '
+            else:
+                msg = ''
+        if error_message:
+            msg += str(error_message)
+        else:
+            msg += 'undefined error.'
+
+        root_log = logging.getLogger()
+        has_handlers = False
+        if root_log.handlers:
+            has_handlers = True
+
+        if has_handlers:
+            LOG.error(msg)
+            if do_traceback:
+                LOG.error(traceback.format_exc())
+        else:
+            curdate = datetime.datetime.now()
+            curdate_str = "[" + curdate.isoformat(' ') + "]: "
+            msg = curdate_str + msg + "\n"
+            if hasattr(sys.stderr, 'buffer'):
+                sys.stderr.buffer.write(to_bytes(msg))
+            else:
+                sys.stderr.write(msg)
+            if do_traceback:
+                traceback.print_exc()
+
+        return
+
+    # -------------------------------------------------------------------------
+    def handle_info(self, message, info_name=None):
+        """
+        Shows an informational message, either via all initialized log
+        handlers or, if none are present, on STDERR.
+
+        @param message: the info message to display
+        @type message: str
+        @param info_name: Title of information
+        @type info_name: str
+
+        """
+
+        msg = ''
+        if info_name is not None:
+            info_name = info_name.strip()
+            if info_name:
+                msg = info_name + ': '
+        msg += str(message).strip()
+
+        root_log = logging.getLogger()
+        has_handlers = False
+        if root_log.handlers:
+            has_handlers = True
+
+        if has_handlers:
+            LOG.info(msg)
+        else:
+            curdate = datetime.datetime.now()
+            curdate_str = "[" + curdate.isoformat(' ') + "]: "
+            msg = curdate_str + msg + "\n"
+            if hasattr(sys.stderr, 'buffer'):
+                sys.stderr.buffer.write(to_bytes(msg))
+            else:
+                sys.stderr.write(msg)
+
+        return
+
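+# Usage sketch (illustrative only; the names and values below are hypothetical):
+#
+#     obj = PpBaseObject(appname='demo-app', verbose=2, base_dir='/tmp')
+#     obj.initialized = True
+#     print(obj.appname)              # -> 'demo-app'
+#     print(repr(obj))                # short reproduction string
+#     print(obj)                      # pretty-printed result of as_dict(short=True)
+#     obj.handle_error("something failed", exception_name="DemoError")
+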
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/pdns_app.py b/lib/pp_lib/pdns_app.py
new file mode 100644 (file)
index 0000000..1cda9a9
--- /dev/null
@@ -0,0 +1,855 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for an application object related to PowerDNS.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import re
+import copy
+import json
+import os
+import ipaddress
+import socket
+import getpass
+import time
+
+# Third party modules
+import requests
+import psutil
+
+# Own modules
+from .common import pp
+
+from .cfg_app import PpCfgAppError, PpConfigApplication
+from .pdns_zone import PdnsApiZone
+from .pdns_record import PdnsSoaData
+
+__version__ = '0.6.5'
+LOG = logging.getLogger(__name__)
+_LIBRARY_NAME = "pp-pdns-api-client"
+
+
+# =============================================================================
+class PpPDNSAppError(PpCfgAppError):
+    """Base error class for all exceptions happened during
+    execution this configured application"""
+    pass
+
+
+# =============================================================================
+class PDNSApiError(PpPDNSAppError):
+    """Base class for more complex exceptions"""
+    def __init__(self, resp, content, uri=None):
+        self.resp = resp
+        self.content = content
+        self.uri = uri
+
+
+# =============================================================================
+class PDNSApiNotAuthorizedError(PDNSApiError):
+    """The authorization information provided is not correct"""
+    pass
+
+
+# =============================================================================
+class PDNSApiNotFoundError(PDNSApiError):
+    """The ProfitBricks entity was not found"""
+    pass
+
+
+# =============================================================================
+class PDNSApiValidationError(PDNSApiError):
+    """The HTTP data provided is not valid"""
+    pass
+
+
+# =============================================================================
+class PDNSApiRateLimitExceededError(PDNSApiError):
+    """The number of requests sent have exceeded the allowed API rate limit"""
+    pass
+
+
+# =============================================================================
+class PDNSApiRequestError(PDNSApiError):
+    """Base error for request failures"""
+    pass
+
+
+# =============================================================================
+class PDNSApiTimeoutError(PDNSApiRequestError):
+    """Raised when a request does not finish in the given time span."""
+    pass
+
+
+# =============================================================================
+class PpPDNSApplication(PpConfigApplication):
+    """
+    Class for configured application objects related to PowerDNS.
+    """
+
+    api_keys = {
+        'global': "6d1b08e2-59c6-49e7-9e48-039ade102016",
+        'public': "cf0fb928-2a73-49ec-86c2-36e85c9672ff",
+        'local': "d94b183a-c50d-47f7-b338-496090af1577"
+    }
+
+    api_hosts = {
+        'global': "dnsmaster.pp-dns.com",
+        'public': "dnsmaster-public.pixelpark.com",
+        'local': "dnsmaster-local.pixelpark.com"
+    }
+
+    default_api_port = 8081
+    default_api_servername = "localhost"
+    default_timeout = 20
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None,
+            initialized=None, usage=None, description=None,
+            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
+            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False,
+            environment='global'):
+
+        self._api_key = self.api_keys['global']
+        self._api_host = self.api_hosts['global']
+        self._api_port = self.default_api_port
+        self._api_servername = self.default_api_servername
+        self._api_server_version = 'unknown'
+        self._user_agent = '{}/{}'.format(_LIBRARY_NAME, self.version)
+        self._timeout = self.default_timeout
+
+        self.local_addresses = []
+
+        self._environment = 'global'
+        if environment != 'global':
+            self.environment = environment
+
+        stems = []
+        if cfg_stems:
+            if isinstance(cfg_stems, list):
+                for stem in cfg_stems:
+                    s = str(stem).strip()
+                    if not s:
+                        msg = "Invalid configuration stem {!r} given.".format(stem)
+                        raise PpPDNSAppError(msg)
+                    stems.append(s)
+            else:
+                s = str(cfg_stems).strip()
+                if not s:
+                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
+                    raise PpPDNSAppError(msg)
+                stems.append(s)
+        else:
+            stems = [self.appname]
+        if 'pdns-api' not in stems:
+            stems.insert(0, 'pdns-api')
+
+        super(PpPDNSApplication, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
+            initialized=False, usage=usage, description=description,
+            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
+            env_prefix=env_prefix, cfg_dir=cfg_dir, cfg_stems=stems,
+            cfg_encoding=cfg_encoding, need_config_file=need_config_file,
+        )
+
+        for interface, snics in psutil.net_if_addrs().items():
+            for snic in snics:
+                if snic.family == socket.AF_INET or snic.family == socket.AF_INET6:
+                    addr = str(ipaddress.ip_address(re.sub(r'%.*', '', snic.address)))
+                    if addr not in self.local_addresses:
+                        self.local_addresses.append(addr)
+
+        self._user_agent = '{}/{}'.format(_LIBRARY_NAME, self.version)
+
+    # -----------------------------------------------------------
+    @property
+    def api_key(self):
+        "The API key to use the PowerDNS API"
+        return self._api_key
+
+    @api_key.setter
+    def api_key(self, value):
+        if value is None or str(value).strip() == '':
+            raise PpPDNSAppError("Invalid API key {!r} given.".format(value))
+        self._api_key = str(value).strip()
+
+    # -----------------------------------------------------------
+    @property
+    def api_host(self):
+        "The host name or address providing the PowerDNS API."
+        return self._api_host
+
+    @api_host.setter
+    def api_host(self, value):
+        if value is None or str(value).strip() == '':
+            raise PpPDNSAppError("Invalid API host {!r} given.".format(value))
+        self._api_host = str(value).strip().lower()
+
+    # -----------------------------------------------------------
+    @property
+    def api_port(self):
+        "The TCP port number of the PowerDNS API."
+        return self._api_port
+
+    @api_port.setter
+    def api_port(self, value):
+        v = int(value)
+        if v < 1:
+            raise PpPDNSAppError("Invalid API port {!r} given.".format(value))
+        self._api_port = v
+
+    # -----------------------------------------------------------
+    @property
+    def api_servername(self):
+        "The (virtual) name of the PowerDNS server used in API calls."
+        return self._api_servername
+
+    @api_servername.setter
+    def api_servername(self, value):
+        if value is None or str(value).strip() == '':
+            raise PpPDNSAppError("Invalid API server name {!r} given.".format(value))
+        self._api_servername = str(value).strip()
+
+    # -----------------------------------------------------------
+    @property
+    def api_server_version(self):
+        "The version of the PowerDNS server, how provided by API."
+        return self._api_server_version
+
+    # -----------------------------------------------------------
+    @property
+    def user_agent(self):
+        "The name of the user agent used in API calls."
+        return self._user_agent
+
+    @user_agent.setter
+    def user_agent(self, value):
+        if value is None or str(value).strip() == '':
+            raise PpPDNSAppError("Invalid user agent {!r} given.".format(value))
+        self._user_agent = str(value).strip()
+
+    # -----------------------------------------------------------
+    @property
+    def timeout(self):
+        "The timeout in seconds on requesting the PowerDNS API."
+        return self._timeout
+
+    @timeout.setter
+    def timeout(self, value):
+        v = int(value)
+        if v < 1:
+            raise PpPDNSAppError("Invalid timeout {!r} given.".format(value))
+        self._timeout = v
+
+    # -----------------------------------------------------------
+    @property
+    def environment(self):
+        "The name of the PowerDNS environment."
+        return self._environment
+
+    @environment.setter
+    def environment(self, value):
+        if value is None:
+            raise PpPDNSAppError("Invalid environment None given.")
+        v = str(value).strip().lower()
+        if v not in self.api_keys.keys():
+            raise PpPDNSAppError("Invalid environment {!r} given.".format(value))
+        self._environment = v
+        self._api_host = self.api_hosts[v]
+        self._api_key = self.api_keys[v]
+
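+    # Sketch of how the environment switch behaves (illustrative only; 'app'
+    # stands for an already constructed PpPDNSApplication instance):
+    #
+    #     app.environment = 'public'
+    #     app.api_host    # -> api_hosts['public'], i.e. 'dnsmaster-public.pixelpark.com'
+    #     app.api_key     # -> api_keys['public']
+    #     app.environment = 'somewhere'   # raises PpPDNSAppError
+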
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PpPDNSApplication, self).as_dict(short=short)
+        res['api_host'] = self.api_host
+        res['api_hosts'] = copy.copy(self.api_hosts)
+        res['api_key'] = self.api_key
+        res['api_keys'] = copy.copy(self.api_keys)
+        res['api_port'] = self.api_port
+        res['api_servername'] = self.api_servername
+        res['default_api_port'] = self.default_api_port
+        res['default_api_servername'] = self.default_api_servername
+        res['default_timeout'] = self.default_timeout
+        res['environment'] = self.environment
+        res['timeout'] = self.timeout
+        res['user_agent'] = self.user_agent
+        res['api_server_version'] = self.api_server_version
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initialize the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        super(PpPDNSApplication, self).init_arg_parser()
+
+        pdns_group = self.arg_parser.add_argument_group('PowerDNS API options')
+        env_group = pdns_group.add_mutually_exclusive_group()
+
+        envs = []
+        for env in self.api_keys.keys():
+            envs.append(str(env))
+        envs.sort()
+
+        env_group.add_argument(
+            '-E', '--env', '--environment',
+            metavar="ENVIRONMENT", choices=envs, dest="env",
+            help=(
+                "Select, which PowerDNS environment to use. "
+                "Valid values: {v}, default: {d!r}.".format(
+                    v=', '.join(map(lambda x: repr(x), envs)),
+                    d='global'))
+        )
+
+        env_group.add_argument(
+            '-G', '--global',
+            action='store_true', dest="env_global",
+            help=("Using the 'global' PowerDNS environment."),
+        )
+
+        env_group.add_argument(
+            '-L', '--local',
+            action='store_true', dest="env_local",
+            help=("Using the 'local' PowerDNS environment."),
+        )
+
+        env_group.add_argument(
+            '-P', '--public',
+            action='store_true', dest="env_public",
+            help=("Using the 'public' PowerDNS environment."),
+        )
+
+        pdns_group.add_argument(
+            '-p', '--port',
+            metavar="PORT", type=int, dest='api_port', default=self.default_api_port,
+            help=("Which port to connect to PowerDNS API, default: {}.".format(
+                self.default_api_port)),
+        )
+
+        pdns_group.add_argument(
+            '-t', '--timeout',
+            metavar="SECS", type=int, dest='timeout', default=self.default_timeout,
+            help=("The timeout in seconds to request the PowerDNS API, default: {}.".format(
+                self.default_timeout)),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+        """
+
+        if self.args.env:
+            self.environment = self.args.env
+        elif self.args.env_global:
+            self.environment = 'global'
+        elif self.args.env_local:
+            self.environment = 'local'
+        elif self.args.env_public:
+            self.environment = 'public'
+
+        if self.args.api_port:
+            self.api_port = self.args.api_port
+
+        if self.args.timeout:
+            self.timeout = self.args.timeout
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpPDNSApplication, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 3:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            section = self.cfg[section_name]
+
+            if section_name.lower() in (
+                    'powerdns-api', 'powerdns_api', 'powerdnsapi',
+                    'pdns-api', 'pdns_api', 'pdnsapi'):
+                self.set_cfg_api_options(section, section_name)
+
+    # -------------------------------------------------------------------------
+    def set_cfg_api_options(self, section, section_name):
+
+        if self.verbose > 2:
+            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                n=section_name, s=pp(section)))
+
+        if 'environment' in section:
+            v = section['environment'].strip().lower()
+            if v not in self.api_hosts:
+                LOG.error("Wrong environment {!r} found in configuration.".format(
+                    section['environment']))
+                self.config_has_errors = True
+            else:
+                self.environment = v
+
+        if 'host' in section:
+            v = section['host']
+            host = v.lower().strip()
+            if host:
+                self.api_host = host
+
+        if 'port' in section:
+            try:
+                port = int(section['port'])
+                if port <= 0 or port >= 2**16:
+                    raise ValueError(
+                        "a port must be greater than 0 and less than {}.".format(2**16))
+            except (TypeError, ValueError) as e:
+                LOG.error("Wrong port number {!r} in configuration section {!r}: {}".format(
+                    section['port'], section_name, e))
+                self.config_has_errors = True
+            else:
+                self.api_port = port
+
+        if 'server_id' in section and section['server_id'].strip():
+            self.api_servername = section['server_id'].strip().lower()
+
+        if 'key' in section:
+            key = section['key'].strip()
+            self.api_key = key
+
+    # -------------------------------------------------------------------------
+    def _check_path_config(self, section, section_name, key, class_prop, absolute=True, desc=None):
+
+        if key not in section:
+            return
+
+        d = ''
+        if desc:
+            d = ' ' + str(desc).strip()
+
+        path = section[key].strip()
+        if not path:
+            msg = "No path given for{} [{}]/{} in configuration.".format(
+                d, section_name, key)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        if absolute and not os.path.isabs(path):
+            msg = "Path {!r} for{} [{}]/{} in configuration must be an absolute path.".format(
+                path, d, section_name, key)
+            LOG.error(msg)
+            self.config_has_errors = True
+            return
+
+        setattr(self, class_prop, path)
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Dummy function to run before the main routine.
+        Could be overwritten by descendant classes.
+
+        """
+
+        if self.verbose > 1:
+            LOG.debug("executing pre_run() ...")
+
+        LOG.debug("Setting Loglevel of the requests module to WARNING")
+        logging.getLogger("requests").setLevel(logging.WARNING)
+
+        super(PpPDNSApplication, self).pre_run()
+        self.get_api_server_version()
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+        """
+        Dummy function as main routine.
+
+        MUST be overwritten by descendant classes.
+
+        """
+        LOG.debug("Executing nothing ...")
+
+    # -------------------------------------------------------------------------
+    def post_run(self):
+        """
+        Dummy function to run after the main routine.
+        Could be overwritten by descendant classes.
+
+        """
+
+        if self.verbose > 1:
+            LOG.debug("executing post_run() ...")
+
+    # -------------------------------------------------------------------------
+    def get_api_server_version(self):
+
+        path = "/servers/{}".format(self.api_servername)
+        try:
+            json_response = self.perform_request(path)
+        except (PDNSApiNotFoundError, PDNSApiValidationError):
+            LOG.error("Could not found server info.")
+            return None
+        if self.verbose > 2:
+            LOG.debug("Got a response:\n{}".format(pp(json_response)))
+
+        if 'version' in json_response:
+            self._api_server_version = json_response['version']
+            LOG.info("PowerDNS server version {!r}.".format(self.api_server_version))
+            return self.api_server_version
+        LOG.error("Did not found version info in server info:\n{}".format(pp(json_response)))
+        return None
+
+    # -------------------------------------------------------------------------
+    def _build_url(self, path):
+
+        url = 'http://{}'.format(self.api_host)
+        if self.api_port != 80:
+            url += ':{}'.format(self.api_port)
+
+        url += '/api/v1' + path
+        LOG.debug("Used URL: {!r}".format(url))
+        return url
+
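+    # Example (illustrative only, based on the defaults above): with api_host
+    # 'dnsmaster.pp-dns.com' and api_port 8081, _build_url('/servers/localhost')
+    # yields 'http://dnsmaster.pp-dns.com:8081/api/v1/servers/localhost'.
+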
+    # -------------------------------------------------------------------------
+    def perform_request(self, path, method='GET', data=None, headers=None, may_simulate=False):
+        """Performing the underlying API request."""
+
+        if headers is None:
+            headers = dict()
+        headers['X-API-Key'] = self.api_key
+
+        url = self._build_url(path)
+        if self.verbose > 1:
+            LOG.debug("Request method: {!r}".format(method))
+        if data and self.verbose > 2:
+            data_out = "{!r}".format(data)
+            try:
+                data_out = json.loads(data)
+            except ValueError:
+                pass
+            else:
+                data_out = pp(data_out)
+            LOG.debug("Data:\n{}".format(data_out))
+            LOG.debug("RAW data:\n{}".format(data))
+
+        headers.update({'User-Agent': self.user_agent})
+        headers.update({'Content-Type': 'application/json'})
+        if self.verbose > 1:
+            LOG.debug("Headers:\n%s", pp(headers))
+
+        if may_simulate and self.simulate:
+            LOG.debug("Simulation mode, Request will not be sent.")
+            return ''
+
+        session = requests.Session()
+        response = session.request(method, url, data=data, headers=headers, timeout=self.timeout)
+
+        try:
+            if not response.ok:
+                err = response.json()
+                code = response.status_code
+                msg = err['error']
+                if response.status_code == 401:
+                    raise PDNSApiNotAuthorizedError(code, msg, url)
+                if response.status_code == 404:
+                    raise PDNSApiNotFoundError(code, msg, url)
+                if response.status_code == 422:
+                    raise PDNSApiValidationError(code, msg, url)
+                if response.status_code == 429:
+                    raise PDNSApiRateLimitExceededError(code, msg, url)
+                else:
+                    raise PDNSApiError(code, msg, url)
+
+        except ValueError:
+            raise PpPDNSAppError('Failed to parse the response', response.text)
+
+        if self.verbose > 3:
+            LOG.debug("RAW response: {!r}.".format(response.text))
+        if not response.text:
+            return ''
+
+        json_response = response.json()
+
+        if 'location' in response.headers:
+            json_response['requestId'] = self._request_id(response.headers)
+
+        return json_response
+
+    # -------------------------------------------------------------------------
+    def get_api_zones(self):
+
+        LOG.debug("Trying to get all zones from PDNS API ...")
+
+        path = "/servers/{}/zones".format(self.api_servername)
+        json_response = self.perform_request(path)
+        if self.verbose > 3:
+            LOG.debug("Got a response:\n{}".format(pp(json_response)))
+
+        zone_list = []
+
+        for data in json_response:
+            zone = PdnsApiZone.init_from_dict(
+                data, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
+            zone_list.append(zone)
+            if self.verbose > 2:
+                print("{!r}".format(zone))
+
+        if self.verbose > 1:
+            LOG.debug("Found {} zones.".format(len(zone_list)))
+
+        return zone_list
+
+    # -------------------------------------------------------------------------
+    def get_api_zone(self, zone_name):
+
+        zone_unicode = zone_name
+        json_response = None
+        zout = "{!r}".format(zone_name)
+        if 'xn--' in zone_name:
+            zone_unicode = zone_name.encode('idna').decode('idna')
+            zout = "{!r} ({})".format(zone_name, zone_unicode)
+        LOG.debug("Trying to get complete information about zone {!r} ...".format(zone_name))
+
+        path = "/servers/{}/zones/{}".format(self.api_servername, zone_name)
+        try:
+            json_response = self.perform_request(path)
+        except (PDNSApiNotFoundError, PDNSApiValidationError):
+            LOG.error("The given zone {} was not found.".format(zout))
+            return None
+        if self.verbose > 2:
+            LOG.debug("Got a response:\n{}".format(pp(json_response)))
+
+        zone = PdnsApiZone.init_from_dict(
+            json_response, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
+        if self.verbose > 2:
+            LOG.debug("Zone object:\n{}".format(pp(zone.as_dict())))
+
+        return zone
+
+    # -------------------------------------------------------------------------
+    def patch_zone(self, zone, payload):
+
+        if self.verbose > 1:
+            LOG.debug("Patching zone {!r} ...".format(zone.name))
+
+        path = "/servers/{}/zones/{}".format(self.api_servername, zone.name)
+        return self.perform_request(path, 'PATCH', json.dumps(payload), may_simulate=True)
+
+    # -------------------------------------------------------------------------
+    def update_soa(self, zone, new_soa, comment=None, ttl=None):
+
+        if not isinstance(new_soa, PdnsSoaData):
+            msg = "New SOA must by of type PdnsSoaData, given {t}: {s!r}".format(
+                t=new_soa.__class__.__name__, s=new_soa)
+            raise TypeError(msg)
+
+        if ttl:
+            ttl = int(ttl)
+        else:
+            cur_soa_rrset = zone.get_soa_rrset()
+            ttl = cur_soa_rrset.ttl
+
+        if comment is not None:
+            comment = str(comment).strip()
+            if comment == '':
+                comment = None
+
+        rrset = {
+            'name': zone.name,
+            'type': 'SOA',
+            'ttl': ttl,
+            'changetype': 'REPLACE',
+            'records': [],
+            'comments': [],
+        }
+
+#        if comment:
+#            comment_rec = {
+#                'content': comment,
+#                'account': getpass.getuser(),
+#                'modified_at': int(time.time() + 0.5),
+#            }
+#            rrset['comments'] = [comment_rec]
+
+        record = {
+            'content': new_soa.data,
+            'disabled': False,
+            'name': zone.name,
+            'set-ptr': False,
+            'type': 'SOA',
+        }
+        rrset['records'].append(record)
+        payload = {"rrsets": [rrset]}
+
+        if self.verbose > 1:
+            LOG.debug("Setting new SOA {s!r} for zone {z!r}, TTL {t} ...".format(
+                s=new_soa.data, z=zone.name, t=ttl))
+
+        self.patch_zone(zone, payload)
+
+    # -------------------------------------------------------------------------
+    def increase_serial(self, zone_name, comment=None):
+
+        zone = self.get_api_zone(zone_name)
+        if not zone:
+            raise PpPDNSAppError("Did not found zone for {!r}.".format(zone_name))
+
+        LOG.info("Increasing serial in SOA of zone {!r} ....".format(zone_name))
+
+        api_host_address = None
+        for addr_info in socket.getaddrinfo(self.api_host, 53, family=socket.AF_INET):
+            api_host_address = addr_info[4][0]
+            break
+
+        api_soa = zone.get_soa()
+        if not api_soa:
+            raise PpPDNSAppError("Could not find SOA for zone {!r}.".format(zone_name))
+        if self.verbose > 2:
+            LOG.debug("Got SOA for zone {z!r} by API:\n{s}".format(
+                z=zone_name, s=api_soa))
+
+        dns_soa = zone.get_soa_by_dns(api_host_address)
+        if self.verbose > 2:
+            LOG.debug("Got SOA for zone {z!r} from DNS by {h!r}:\n{s}".format(
+                h=self.api_host, z=zone_name, s=dns_soa))
+
+        new_serial = zone.get_new_serial(dns_soa.serial)
+        LOG.debug("Got new serial number for zone {z!r}: {s}.".format(
+            z=zone_name, s=new_serial))
+
+        api_soa.serial = new_serial
+        return self.update_soa(zone, api_soa, comment)
+
+    # -------------------------------------------------------------------------
+    def set_nameservers(
+        self, zone, new_nameservers, for_zone=None, comment=None, new_ttl=None,
+            do_serial=True, do_notify=True):
+
+        current_nameservers = zone.get_zone_nameservers(for_zone=for_zone)
+        if for_zone:
+            LOG.debug("Current nameservers of {f!r} in zone {z!r}:\n{ns}".format(
+                f=for_zone, z=zone.name, ns=pp(current_nameservers)))
+        else:
+            LOG.debug("Current nameservers of zone {z!r}:\n{ns}".format(
+                z=zone.name, ns=pp(current_nameservers)))
+
+        ns2remove = []
+        ns2add = []
+
+        for ns in current_nameservers:
+            if ns not in new_nameservers:
+                ns2remove.append(ns)
+        for ns in new_nameservers:
+            if ns not in current_nameservers:
+                ns2add.append(ns)
+
+        if not ns2remove and not ns2add:
+            if for_zone:
+                msg = "Subzone {f!r} has already the expected nameservers in zone {z!r}."
+            else:
+                msg = "Zone {z!r} has already the expected nameservers."
+            LOG.info(msg.format(f=for_zone, z=zone.name))
+            return False
+
+        LOG.debug("Nameservers to remove from zone {z!r}:\n{ns}".format(
+            z=zone.name, ns=pp(ns2remove)))
+        LOG.debug("Nameservers to add to zone {z!r}:\n{ns}".format(
+            z=zone.name, ns=pp(ns2add)))
+
+        ns_ttl = None
+        if not new_ttl:
+            cur_rrset = zone.get_ns_rrset(for_zone=for_zone)
+            if cur_rrset:
+                ns_ttl = cur_rrset.ttl
+            else:
+                soa = zone.get_soa()
+                ns_ttl = soa.ttl
+                del soa
+        else:
+            ns_ttl = int(new_ttl)
+        if ns_ttl <= 0:
+            ns_ttl = 3600
+        LOG.debug("TTL for NS records: {}.".format(ns_ttl))
+
+        rrset_name = zone.name.lower()
+        if for_zone:
+            rrset_name = for_zone.lower()
+
+        records = []
+        for ns in new_nameservers:
+            record = {
+                "name": rrset_name,
+                "type": "NS",
+                "content": ns,
+                "disabled": False,
+                "set-ptr": False,
+            }
+            records.append(record)
+        rrset = {
+            "name": rrset_name,
+            "type": "NS",
+            "ttl": ns_ttl,
+            "changetype": "REPLACE",
+            "records": records,
+        }
+
+        if comment:
+            comment_rec = {
+                'content': comment,
+                'account': getpass.getuser(),
+                'modified_at': int(time.time() + 0.5),
+            }
+            rrset['comments'] = [comment_rec]
+
+        payload = {"rrsets": [rrset]}
+
+        self.patch_zone(zone, payload)
+
+        if do_serial:
+            self.increase_serial(zone.name)
+
+        if do_notify:
+            self.notify_zone(zone)
+
+        return True
+
+    # -------------------------------------------------------------------------
+    def notify_zone(self, zone):
+
+        LOG.info("Notifying slaves of zone {!r} ...".format(zone.name))
+
+        path = "/servers/{}/zones/{}/notify".format(self.api_servername, zone.name)
+        return self.perform_request(path, 'PUT', '', may_simulate=True)
+
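+# Usage sketch (illustrative only; it assumes the inherited application class
+# provides a run() entry point that calls pre_run(), _run() and post_run()):
+#
+#     class MyPdnsApp(PpPDNSApplication):
+#
+#         def _run(self):
+#             for zone in self.get_api_zones():
+#                 LOG.info("Found zone {!r}.".format(zone.name))
+#
+#     app = MyPdnsApp(appname='my-pdns-app', environment='public')
+#     app.run()
+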
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pdns_list_zones.py b/lib/pp_lib/pdns_list_zones.py
new file mode 100644 (file)
index 0000000..be91a52
--- /dev/null
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the pdns-list-zones application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import textwrap
+
+from functools import cmp_to_key
+
+# Own modules
+from .common import compare_fqdn
+
+from .pdns_app import PpPDNSAppError, PpPDNSApplication
+from .pdns_zone import PdnsApiZone
+
+__version__ = '0.5.1'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpPDNSListZonesError(PpPDNSAppError):
+    pass
+
+
+# =============================================================================
+class PpPDNSListZonesApp(PpPDNSApplication):
+    """Class for the 'pdns-list-zones' application get a list of all available
+       zones from PowerDNS
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.no_header = False
+        self.no_summary = False
+        self.show_numbers = False
+        self.minimal = False
+
+        description = textwrap.dedent('''\
+            Lists all available zones from the given PowerDNS API.
+            ''')
+
+        super(PpPDNSListZonesApp, self).__init__(
+            appname=appname, version=version, description=description,
+        )
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initialize the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        super(PpPDNSListZonesApp, self).init_arg_parser()
+
+        self.arg_parser.add_argument(
+            '-H', '--no-header', action='store_true', dest='no_header',
+            help="Don't show header lines at the beginning of the list."
+        )
+
+        self.arg_parser.add_argument(
+            '-n', '--no-summary', action='store_true', dest='no_summary',
+            help="Don't show summary at the end of the list."
+        )
+
+        col_group = self.arg_parser.add_mutually_exclusive_group()
+
+        col_group.add_argument(
+            '-M', '--minimal', action='store_true', dest='minimal',
+            help=(
+                "Minimal output, includes --no-header and --no-summary. "
+                "Mutually exclusive to --numbers.")
+        )
+
+        col_group.add_argument(
+            '-N', '--numbers', action='store_true', dest='show_numbers',
+            help="Show number of Ressource Record Sets and Records for each zone",
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+        """
+
+        super(PpPDNSListZonesApp, self).perform_arg_parser()
+
+        if self.args.no_header:
+            self.no_header = True
+
+        if self.args.no_summary:
+            self.no_summary = True
+
+        if self.args.show_numbers:
+            self.show_numbers = True
+
+        if self.args.minimal:
+            self.no_header = True
+            self.no_summary = True
+            self.minimal = True
+            self.show_numbers = False
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        LOG.info("Listing all available zones from PowerDNS environment {!r}.".format(
+            self.environment))
+
+        zone_list = self.get_api_zones()
+
+        len_zone = 10
+        for zone in zone_list:
+            if len(zone.name_unicode) > len_zone:
+                len_zone = len(zone.name_unicode)
+
+        if self.minimal:
+            tpl = PdnsApiZone.get_list_template(minimal=True)
+        elif self.show_numbers:
+            tpl = PdnsApiZone.get_list_template(show_numbers=True)
+        else:
+            tpl = PdnsApiZone.get_list_template(show_numbers=False)
+
+        if self.verbose > 2:
+            LOG.debug("Used template for line: {!r}".format(tpl))
+
+        if not self.no_header:
+            line = tpl.format(
+                name="Zone", len_zone=len_zone, kind="Type", serial="Serial",
+                dnssec="DNSSEC", nr_rrsets='RR Sets', nr_records='Records',
+                account="Account information")
+            print(line)
+            print('-' * len(line))
+
+        nr_zones = 0
+        nr_rrsets = 0
+        nr_records = 0
+        for zone in sorted(zone_list, key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode)):
+            if self.show_numbers:
+                nr_zones += 1
+                zone_complete = self.get_api_zone(zone.name)
+                for rrset in zone_complete.rrsets:
+                    nr_rrsets += 1
+                    for record in rrset.records:
+                        nr_records += 1
+                print(zone.get_line(len_zone, zone_complete.rrsets))
+            else:
+                print(zone.get_line(len_zone, minimal=self.minimal))
+
+        if not self.no_summary:
+            line = tpl.format(
+                name="Total:", len_zone=len_zone, kind="", serial=nr_zones,
+                dnssec="Zones", nr_rrsets=nr_rrsets, nr_records=nr_records, account="")
+            print('-' * len(line))
+            print(line)
+
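+# Invocation sketch (illustrative only; it assumes a wrapper script named
+# 'pdns-list-zones' that instantiates this class and runs it):
+#
+#     pdns-list-zones -E public -N     # list zones with RRset and record counts
+#     pdns-list-zones --minimal        # zone names only, no header and no summary
+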
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pdns_migrate_ns.py b/lib/pp_lib/pdns_migrate_ns.py
new file mode 100644 (file)
index 0000000..595d341
--- /dev/null
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the migration of nameservers.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import textwrap
+import copy
+import time
+
+from functools import cmp_to_key
+
+# Own modules
+from .common import pp, compare_fqdn, to_str
+from .common import RE_DOT_AT_END
+
+from .pdns_app import PpPDNSAppError, PpPDNSApplication
+
+__version__ = '0.2.9'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PDNSMigrateNsError(PpPDNSAppError):
+    pass
+
+
+# =============================================================================
+class PDNSMigrateNsApp(PpPDNSApplication):
+    """Class for the 'pdns-migrate-nameservers' application to migrate the nameservers
+       of all zones of PowerDNS from the old nameservers to the new ones.
+    """
+
+    new_public_nameservers = [
+        'ns1.pp-dns.com.',
+        'ns2.pp-dns.com.',
+        'ns3.pp-dns.com.',
+        'ns4.pp-dns.com.',
+    ]
+
+    new_local_nameservers = [
+        'ns1-local.pixelpark.com.',
+        'ns2-local.pixelpark.com.',
+        'ns3-local.pixelpark.com.',
+    ]
+
+    address_hostmaster_local = 'hostmaster.pixelpark.com.'
+    address_hostmaster_public = 'hostmaster.pixelpark.net.'
+
+    default_pause = 3
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.zones = []
+        self.zone_names = []
+        self.oneshot = False
+        self.pause = self.default_pause
+
+        description = textwrap.dedent('''\
+            Substituting NS records in all zones by the new ones.
+            ''')
+
+        self._show_simulate_opt = True
+
+        super(PDNSMigrateNsApp, self).__init__(
+            appname=appname, version=version, description=description,
+        )
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initialize the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        super(PDNSMigrateNsApp, self).init_arg_parser()
+
+        self.arg_parser.add_argument(
+            "-1", '--oneshot', action="store_true", dest="oneshot",
+            help="Stop execution after first successful migration."
+        )
+
+        self.arg_parser.add_argument(
+            '--pause', dest='pause', type=int,
+            default=self.default_pause, metavar='SECS',
+            help=(
+                "Pause in seconds between mangling the particular zones. "
+                "(Default: %(default)r)."),
+        )
+
+        self.arg_parser.add_argument(
+            'zones', metavar='ZONE', nargs='*',
+            help=(
+                "All zones, for which the migration should be executed. "
+                "If not given, the migration will be executed for all zones."),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+        """
+
+        super(PDNSMigrateNsApp, self).perform_arg_parser()
+
+        if self.args.oneshot:
+            self.oneshot = True
+
+        if self.args.pause and self.args.pause > 0:
+            self.pause = self.args.pause
+
+        for zone in self.args.zones:
+            zone_idna = zone
+            if 'xn--' not in zone:
+                zone_idna = to_str(zone.encode('idna'))
+            zone_idna = RE_DOT_AT_END.sub('.', zone_idna).lower()
+            self.zone_names.append(zone_idna)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        LOG.info("Substituting NS records in all zones by the new ones.")
+
+        self.zones = self.get_api_zones()
+
+        if not self.zone_names:
+            for zone in sorted(self.zones, key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode)):
+                self.zone_names.append(zone.name)
+
+        idx = 0
+        print('')
+        print('')
+        for zone_name in self.zone_names:
+            if idx:
+                print('')
+                print('')
+                print('Sleeping {} seconds...'.format(self.pause))
+                print('')
+                time.sleep(self.pause)
+            migrated = self.migrate_zone(zone_name)
+            if migrated:
+                idx += 1
+            if self.oneshot and migrated:
+                break
+
+    # -------------------------------------------------------------------------
+    def migrate_zone(self, zone_name):
+
+        LOG.info("Migrating zone {!r} ...".format(zone_name))
+        zone = self.get_api_zone(zone_name)
+        if not zone:
+            return False
+
+        new_nameservers = []
+        hm_address = self.address_hostmaster_public
+
+        is_local = self.is_local_domain(zone_name)
+
+        if is_local:
+            LOG.debug("Using local nameservers for substituting.")
+            new_nameservers = sorted(self.new_local_nameservers)
+            hm_address = self.address_hostmaster_local
+        else:
+            LOG.debug("Using public nameservers for substituting.")
+            new_nameservers = sorted(self.new_public_nameservers)
+        if self.verbose > 1:
+            LOG.debug("Expected nameservers of zone:\n{}".format(pp(new_nameservers)))
+
+        soa = zone.get_soa()
+        if not soa:
+            LOG.error("Could not find SOA for zone {!r}.".format(zone_name))
+            return False
+        if self.verbose > 2:
+            LOG.debug("Current SOA of zone {!r}:\n{}".format(zone_name, soa))
+
+        new_soa = copy.copy(soa)
+        new_soa.primary = new_nameservers[0]
+        new_soa.email = hm_address
+        if self.verbose > 2:
+            LOG.debug("New SOA of zone {!r}:\n{}".format(zone_name, new_soa))
+
+        if new_soa != soa:
+            LOG.info("Update SOA of zone {!r} ...".format(zone_name))
+            self.update_soa(zone, new_soa, "Update SOA on great NS- and SOA-Migration.")
+        else:
+            LOG.debug("Update SOA of zone is not necessary.".format(zone_name))
+
+        LOG.info("Setting nameservers for zone {!r} ...".format(zone_name))
+        if not self.set_nameservers(zone, new_nameservers, do_serial=False):
+            return False
+
+        zone_parts = zone_name.split('.')
+        top_zone_name = '.'.join(zone_parts[1:])
+        LOG.debug("Top zone of {z!r} is {t!r}.".format(z=zone_name, t=top_zone_name))
+
+        have_top_zone = False
+        for t_zone in self.zones:
+            if t_zone.name == top_zone_name:
+                have_top_zone = True
+                break
+
+        if have_top_zone:
+            LOG.info("Setting nameservers for zone {z!r} in zone {t!r}.".format(
+                z=zone_name, t=top_zone_name))
+            top_zone = self.get_api_zone(top_zone_name)
+            return self.set_nameservers(
+                top_zone, new_nameservers, zone_name, do_serial=False)
+        else:
+            LOG.debug("Top zone {!r} is not in our responsibility.".format(top_zone_name))
+        return True
+
+
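+# Invocation sketch (illustrative only; it assumes a wrapper script named
+# 'pdns-migrate-nameservers' that instantiates this class and runs it):
+#
+#     pdns-migrate-nameservers -E global --pause 5 example.com example.net
+#     pdns-migrate-nameservers -1      # stop after the first migrated zone
+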
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pdns_record.py b/lib/pp_lib/pdns_record.py
new file mode 100644 (file)
index 0000000..17a29fc
--- /dev/null
@@ -0,0 +1,614 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicies Pixelpark GmbH, Berlin
+@summary: An encapsulation class for a DNS record object of the PowerDNS API
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import copy
+import re
+import datetime
+
+# Third party modules
+
+# Own modules
+from .common import pp, compare_fqdn, to_utf8, to_str
+
+from .obj import PpBaseObjectError, PpBaseObject
+
+__version__ = '0.4.6'
+
+LOG = logging.getLogger(__name__)
+
+
+TYPE_ORDER = {
+    'SOA': 0,
+    'NS': 1,
+    'MX': 2,
+    'A': 3,
+    'AAAA': 4,
+    'CNAME': 5,
+    'SRV': 6,
+    'TXT': 7,
+    'SPF': 8,
+    'PTR': 9,
+}
+
+# =============================================================================
+class PdnsApiRrsetError(PpBaseObjectError):
+    pass
+
+
+# =============================================================================
+class PdnsWrongSoaDataError(PdnsApiRrsetError):
+
+    # -------------------------------------------------------------------------
+    def __init__(self, data):
+        self.data = str(data)
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+
+        msg = "Could not interprete SOA data: {!r}.".format(self.data)
+        return msg
+
+
+# =============================================================================
+def compare_rrsets(x, y):
+
+    if not isinstance(x, PdnsApiRrset):
+        raise TypeError("Argument x {!r} must be a PdnsApiRrset object.".format(x))
+
+    if not isinstance(y, PdnsApiRrset):
+        raise TypeError("Argument y {!r} must be a PdnsApiRrset object.".format(y))
+
+    ret = compare_fqdn(x.name, y.name)
+    if ret:
+        return ret
+
+    xt = 99
+    yt = 99
+    if x.type.upper() in TYPE_ORDER:
+        xt = TYPE_ORDER[x.type.upper()]
+    if y.type.upper() in TYPE_ORDER:
+        yt = TYPE_ORDER[y.type.upper()]
+
+    if xt < yt:
+        return -1
+    if xt > yt:
+        return 1
+    return 0
+
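+# Typical use of compare_rrsets() (illustrative only): sorting RRsets by owner
+# name and record type, e.g. with functools.cmp_to_key:
+#
+#     from functools import cmp_to_key
+#     rrsets.sort(key=cmp_to_key(compare_rrsets))
+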
+# =============================================================================
+class PdnsApiRecord(PpBaseObject):
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None, initialized=None,
+            content=None, disabled=False):
+
+        self._content = content
+        self._disabled = False
+        self.disabled = disabled
+
+        super(PdnsApiRecord, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
+
+        if initialized is not None:
+            self.initialized = initialized
+
+    # -----------------------------------------------------------
+    @property
+    def content(self):
+        "The underlying content of this record."
+        return self._content
+
+    # -----------------------------------------------------------
+    @property
+    def disabled(self):
+        "Flag, whether the record is disabled or not."
+        return self._disabled
+
+    @disabled.setter
+    def disabled(self, value):
+        self._disabled = bool(value)
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PdnsApiRecord, self).as_dict(short=short)
+        res['content'] = self.content
+        res['disabled'] = self.disabled
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def __copy__(self):
+
+        return PdnsApiRecord(
+            appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
+            initialized=self.initialized, content=self.content, disabled=self.disabled)
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting function for translating object structure
+        into a string
+
+        @return: structure as string
+        @rtype:  str
+        """
+
+        return pp(self.as_dict(short=True))
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("content={!r}".format(self.content))
+        fields.append("disabled={!r}".format(self.disabled))
+        fields.append("appname={!r}".format(self.appname))
+        fields.append("verbose={!r}".format(self.verbose))
+        fields.append("version={!r}".format(self.version))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+
+# =============================================================================
+class PdnsSoaData(PpBaseObject):
+
+    re_soa_data = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s*$')
+    re_ws = re.compile(r'\s+')
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, primary=None, email=None, serial=None, refresh=None, retry=None, expire=None,
+            ttl=None, appname=None, verbose=0, version=__version__,
+            base_dir=None):
+
+        self._primary = None
+        self._email = None
+        self._serial = None
+        self._refresh = None
+        self._retry = None
+        self._expire = None
+        self._ttl = None
+
+        super(PdnsSoaData, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
+            initialized=False)
+
+        self.primary = primary
+        self.email = email
+        self.serial = serial
+        self.refresh = refresh
+        self.retry = retry
+        self.expire = expire
+        self.ttl = ttl
+
+        if (self.primary and self.email and self.serial is not None and self.refresh and
+                self.retry and self.expire and self.ttl):
+            self.initialized = True
+        else:
+            self.initialized = False
+
+    # -----------------------------------------------------------
+    @property
+    def primary(self):
+        "The primary name server of this SOA"
+        return self._primary
+
+    @primary.setter
+    def primary(self, value):
+        if value is None:
+            self._primary = None
+            return
+        self._primary = str(value).strip()
+
+    # -----------------------------------------------------------
+    @property
+    def email(self):
+        "The E-Mail-address of the hostmaster of this zone."
+        return self._email
+
+    @email.setter
+    def email(self, value):
+        if value is None:
+            self._email = None
+            return
+        self._email = str(value).strip()
+
+    # -----------------------------------------------------------
+    @property
+    def serial(self):
+        "The serial number of this SOA."
+        return self._serial
+
+    @serial.setter
+    def serial(self, value):
+        if value is None:
+            self._serial = None
+            return
+        self._serial = int(value)
+
+    # -----------------------------------------------------------
+    @property
+    def refresh(self):
+        "The time in seconds when slaves should ask master for changes."
+        return self._refresh
+
+    @refresh.setter
+    def refresh(self, value):
+        if value is None:
+            self._refresh = None
+            return
+        self._refresh = int(value)
+
+    # -----------------------------------------------------------
+    @property
+    def retry(self):
+        """The time in seconds when slaves should retry getting changes from master,
+            if an attemt to get it was not successful."""
+        return self._retry
+
+    @retry.setter
+    def retry(self, value):
+        if value is None:
+            self._retry = None
+            return
+        self._retry = int(value)
+
+    # -----------------------------------------------------------
+    @property
+    def expire(self):
+        """The time in seconds when slaves should expiring the zone,
+            if an attemt to get it was not successful."""
+        return self._expire
+
+    @expire.setter
+    def expire(self, value):
+        if value is None:
+            self._expire = None
+            return
+        self._expire = int(value)
+
+    # -----------------------------------------------------------
+    @property
+    def ttl(self):
+        "The defaul TTL of this zone."
+        return self._ttl
+
+    @ttl.setter
+    def ttl(self, value):
+        if value is None:
+            self._ttl = None
+            return
+        self._ttl = int(value)
+
+    # -----------------------------------------------------------
+    @property
+    def data(self):
+        "String representation of SOA data."
+        if (self.primary and self.email and self.serial is not None and self.refresh and
+                self.retry and self.expire and self.ttl):
+            return "{_primary} {_email} {_serial} {_refresh} {_retry} {_expire} {_ttl}".format(
+                **self.__dict__)
+        else:
+            return None
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PdnsSoaData, self).as_dict(short=short)
+        res['primary'] = self.primary
+        res['email'] = self.email
+        res['serial'] = self.serial
+        res['refresh'] = self.refresh
+        res['retry'] = self.retry
+        res['expire'] = self.expire
+        res['ttl'] = self.ttl
+        res['data'] = self.data
+
+        return res
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def init_from_data(cls, data, appname=None, verbose=0, base_dir=None):
+
+        line = cls.re_ws.sub(' ', to_str(data))
+        match = cls.re_soa_data.match(line)
+        if not match:
+            raise PdnsWrongSoaDataError(data)
+
+        soa = cls(
+            primary=match.group(1), email=match.group(2), serial=match.group(3),
+            refresh=match.group(4), retry=match.group(5), expire=match.group(6),
+            ttl=match.group(7), appname=appname, verbose=verbose, base_dir=base_dir)
+        return soa
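+
+    # Illustrative example (values are made up):
+    #     soa = PdnsSoaData.init_from_data(
+    #         'ns1.example.com. hostmaster.example.com. 2018080401 10800 3600 604800 3600')
+    #     soa.serial == 2018080401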
+
+    # -------------------------------------------------------------------------
+    def __copy__(self):
+
+        if self.verbose > 4:
+            LOG.debug("Copying current {}-object in a new one.".format(self.__class__.__name__))
+
+        soa = PdnsSoaData(
+            primary=self.primary, email=self.email, serial=self.serial, refresh=self.refresh,
+            retry=self.retry, expire=self.expire, ttl=self.ttl, appname=self.appname,
+            version=self.version, base_dir=self.base_dir)
+        return soa
+
+    # -------------------------------------------------------------------------
+    def __eq__(self, other):
+
+        if self.verbose > 4:
+            LOG.debug("Comparing {}-objects ...".format(self.__class__.__name__))
+
+        if not isinstance(other, PdnsSoaData):
+            return False
+
+        if self.primary != other.primary:
+            return False
+        if self.email != other.email:
+            return False
+        if self.serial != other.serial:
+            return False
+        if self.refresh != other.refresh:
+            return False
+        if self.retry != other.retry:
+            return False
+        if self.expire != other.expire:
+            return False
+        if self.ttl != other.ttl:
+            return False
+
+        return True
+
+# =============================================================================
+class PdnsApiRrset(PpBaseObject):
+
+    default_ttl = 3600
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__,
+            base_dir=None, initialized=None):
+
+        # {   'comments': [],
+        #     'name': 'www.bmwi.tv.',
+        #     'records': [{'content': '77.74.236.5', 'disabled': False}],
+        #     'ttl': 3600,
+        #     'type': 'A'},
+
+        self.comments = []
+        self._name = None
+        self.ttl = self.default_ttl
+        self._type = None
+        self.records = []
+
+        super(PdnsApiRrset, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
+
+        if initialized is not None:
+            self.initialized = initialized
+
+    # -----------------------------------------------------------
+    @property
+    def name(self):
+        "The name of this record set."
+        return self._name
+
+    # -----------------------------------------------------------
+    @property
+    def name_unicode(self):
+        """The name of the resource record set in unicode, if it is an IDNA encoded zone."""
+        n = getattr(self, '_name', None)
+        if n is None:
+            return None
+        if 'xn--' in n:
+            return to_utf8(n).decode('idna')
+        return n
+
+    # -----------------------------------------------------------
+    @property
+    def type(self):
+        "The type of this record set."
+        return self._type
+
+    # -----------------------------------------------------------
+    @property
+    def ttl(self):
+        "The TTL of this record set."
+        return self._ttl
+
+    @ttl.setter
+    def ttl(self, value):
+        self._ttl = int(value)
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def init_from_dict(
+        cls, data, appname=None, verbose=0, version=__version__,
+            base_dir=None, initialized=None):
+
+        if not isinstance(data, dict):
+            raise PdnsApiRrsetError("Given data {!r} is not a dict object.".format(data))
+
+        params = {
+            'appname': appname,
+            'verbose': verbose,
+            'version': version,
+            'base_dir': base_dir
+        }
+        if initialized is not None:
+            params['initialized'] = initialized
+
+        rrset = cls(**params)
+
+        if 'comments' in data and data['comments']:
+            for comment in data['comments']:
+                rrset.comments.append(str(comment))
+
+        rrset._name = str(data['name'])
+        rrset._type = str(data['type']).upper()
+        if 'ttl' in data:
+            rrset._ttl = int(data['ttl'])
+
+        if 'records' in data:
+            for single_record in data['records']:
+                record = PdnsApiRecord(
+                    content=single_record['content'], disabled=single_record['disabled'],
+                    **params)
+                record.initialized = True
+                rrset.records.append(record)
+
+        rrset.initialized = True
+
+        return rrset
+
+    # -------------------------------------------------------------------------
+    def name_relative(self, reference):
+
+        # current name must be an absolute name
+        if not self.name.endswith('.'):
+            return self.name
+
+        # reference name must be an absolute name
+        if not reference.endswith('.'):
+            return self.name
+
+        ref_escaped = r'\.' + re.escape(reference) + r'$'
+        rel_name = re.sub(ref_escaped, '', self.name)
+        return rel_name
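+
+    # Example: for an rrset named 'www.example.com.' with reference 'example.com.'
+    # this returns 'www'; names without a trailing dot are returned unchanged.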
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PdnsApiRrset, self).as_dict(short=short)
+        res['name'] = self.name
+        res['type'] = self.type
+        res['ttl'] = self.ttl
+        res['name_unicode'] = self.name_unicode
+        res['comments'] = copy.copy(self.comments)
+        res['records'] = []
+
+        for record in self.records:
+            res['records'].append(record.as_dict(short))
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting function for translating object structure
+        into a string
+
+        @return: structure as string
+        @rtype:  str
+        """
+
+        return pp(self.as_dict(short=True))
+
+    # -------------------------------------------------------------------------
+    def __copy__(self):
+
+        rrset = PdnsApiRrset(
+            appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
+            initialized=self.initialized)
+
+        rrset._name = self.name
+        rrset._type = self.type
+        rrset._ttl = self.ttl
+        rrset.comments = copy.copy(self.comments)
+        rrset.records = copy.copy(self.records)
+
+        return rrset
+
+    # -------------------------------------------------------------------------
+    def get_zone_lines(self, rrname_len=12, reference=None, default_ttl=None):
+
+        lines = ''
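+
+        # The comments were stored as str() of the API comment dicts (see
+        # init_from_dict()), so eval() below turns them back into dicts with
+        # 'modified_at', 'account' and 'content' keys.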
+        for comment in self.comments:
+
+            if self.verbose > 3:
+                LOG.debug("Formatting comment: {}".format(comment))
+
+            try:
+                cmt = eval(comment)
+                mtime = datetime.datetime.utcfromtimestamp(cmt['modified_at'])
+                if cmt['content']:
+                    line = "; {} {}: {}\n".format(
+                        mtime.isoformat(' '), cmt['account'], cmt['content'])
+                else:
+                    line = "; {} {}\n".format(mtime.isoformat(' '), cmt['account'])
+            except Exception as e:
+                LOG.warning("Could not decode comment {!r}: {}".format(comment, e))
+                line = '; {}\n'.format(comment)
+
+            lines += line
+
+        i = 0
+        for record in self.records:
+            show_name = ''
+            if not i:
+                if reference:
+                    show_name = self.name_relative(reference)
+                else:
+                    show_name = self.name
+            i += 1
+            if record.disabled:
+                show_name = '; ' + show_name
+            ttl = self.ttl
+            if default_ttl and default_ttl == self.ttl:
+                ttl = ''
+            tpl = "{name:<{name_len}}  {ttl:>8}  {type:<6}  {content}\n"
+            line = tpl.format(
+                name=show_name, name_len=rrname_len, ttl=ttl,
+                type=self.type, content=record.content)
+            lines += line
+
+        return lines
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pdns_show_zone.py b/lib/pp_lib/pdns_show_zone.py
new file mode 100644 (file)
index 0000000..f859083
--- /dev/null
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the pdns-show-zone application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import textwrap
+
+from functools import cmp_to_key
+
+# Own modules
+from .common import to_str
+from .common import RE_DOT_AT_END
+
+from .pdns_app import PpPDNSAppError, PpPDNSApplication
+from .pdns_record import compare_rrsets
+
+__version__ = '0.4.3'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpPDNSShowZoneError(PpPDNSAppError):
+    pass
+
+
+# =============================================================================
+class PpPDNSShowZoneApp(PpPDNSApplication):
+    """Class for the 'pdns-show-zone' application to get all information about
+       a given zone from PowerDNS.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.zones = []
+
+        description = textwrap.dedent('''\
+            Shows all available information about the given zones from the PowerDNS API.
+            ''')
+
+        super(PpPDNSShowZoneApp, self).__init__(
+            appname=appname, version=version, description=description,
+        )
+
+        self.initialized = True
+        self.default_ttl = 3600
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initialize the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        super(PpPDNSShowZoneApp, self).init_arg_parser()
+
+        self.arg_parser.add_argument(
+            'zones', metavar='ZONE', nargs='+',
+            help="All zones, for which the complete information should shown",
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_arg_parser(self):
+        """
+        Publicly available method to execute some actions after parsing
+        the command line parameters.
+        """
+
+        super(PpPDNSShowZoneApp, self).perform_arg_parser()
+
+        for zone in self.args.zones:
+            zone_idna = zone
+            if 'xn--' not in zone:
+                zone_idna = to_str(zone.encode('idna'))
+            zone_idna = RE_DOT_AT_END.sub('.', zone_idna).lower()
+            self.zones.append(zone_idna)
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        success = True
+        self.get_default_ttl()
+
+        for zone in self.zones:
+            if not self.show_zone(zone):
+                success = False
+
+        if not success:
+            self.exit(1)
+
+    # -------------------------------------------------------------------------
+    def get_default_ttl(self):
+
+        LOG.debug("Retrieving defaul TTL from server ...")
+        path = "/servers/{}/config".format(self.api_servername)
+        json_response = self.perform_request(path)
+        ttl = None
+
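+        # The server config endpoint returns a list of {'name': ..., 'value': ...}
+        # entries; only the 'default-ttl' setting is of interest here.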
+        for cfg in json_response:
+            if cfg['name'] == 'default-ttl':
+                try:
+                    ttl = int(cfg['value'])
+                except ValueError as e:
+                    LOG.error("Found invalid TTL {!r} from server: {}".format(
+                        cfg['value'], e))
+                break
+        if ttl:
+            LOG.debug("Got a default TTL {} from server.".format(ttl))
+            self.default_ttl = ttl
+
+    # -------------------------------------------------------------------------
+    def show_zone(self, zone_name):
+
+        zone_unicode = zone_name
+        zout = "{!r}".format(zone_name)
+        if 'xn--' in zone_name:
+            zone_unicode = zone_name.encode('idna').decode('idna')
+            zout = "{!r} ({})".format(zone_name, zone_unicode)
+
+        LOG.info("Show all information about zone {} from PowerDNS environment {!r}.".format(
+            zout, self.environment))
+        zone = self.get_api_zone(zone_name)
+
+        msg = "All information about zone {}:".format(zout)
+        print("\n{}".format(msg))
+        print('-' * len(msg))
+
+        params = {
+            'name': zone.name,
+            'name_unicode': zone.name_unicode,
+            'kind': zone.kind,
+            'serial': zone.serial,
+            'soa_edit': zone.soa_edit,
+            'dnssec': 'no',
+            'account': zone.account,
+            'default_ttl': self.default_ttl,
+        }
+        if zone.dnssec:
+            params['dnssec'] = 'yes'
+
+        msg = textwrap.dedent("""\
+        Name (Punycode): {name}
+        Name (UTF-8):    {name_unicode}
+        Kind:            {kind}
+        Serial:          {serial}
+        SOA edit:        {soa_edit}
+        DNSSEC enabled:  {dnssec}
+        Default TTL:     {default_ttl}
+        Account info:    {account}
+        """).strip().format(**params)
+
+        if zone.masters:
+            i = 0
+            for master in zone.masters:
+                if i:
+                    msg += "\n                 {!r}".format(master)
+                else:
+                    msg += "\nMasters:         {!r}".format(master)
+                i += 1
+        print(msg)
+
+        enabled = 0
+        disabled = 0
+
+        msg = "All Resource Records:"
+        print("\n{}".format(msg))
+        print('-' * len(msg))
+
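+        # First pass over the rrsets: determine the widest relative name for
+        # column alignment and count enabled/disabled records.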
+        rrname_len = 1
+        for rrset in zone.rrsets:
+            name = rrset.name_relative(zone.name)
+            if len(name) > rrname_len:
+                rrname_len = len(name)
+            for record in rrset.records:
+                if record.disabled:
+                    disabled += 1
+                else:
+                    enabled += 1
+        rrname_len += 2
+        if self.verbose > 2:
+            LOG.debug("Length of longest rrset name: {}".format(rrname_len))
+
+        for rrset in sorted(zone.rrsets, key=cmp_to_key(compare_rrsets)):
+            msg = rrset.get_zone_lines(
+                rrname_len=rrname_len, reference=zone.name,
+                default_ttl=self.default_ttl).rstrip()
+            print(msg)
+
+        msg = "\nFound {} enabled and {} disabled records.".format(
+            enabled, disabled)
+        print(msg)
+
+        return True
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pdns_zone.py b/lib/pp_lib/pdns_zone.py
new file mode 100644 (file)
index 0000000..3188d1b
--- /dev/null
@@ -0,0 +1,559 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Publicies Pixelpark GmbH, Berlin
+@summary: An encapsulation class for zone objects of the PowerDNS API
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import copy
+import datetime
+
+# Third party modules
+from dns.resolver import Resolver, NoAnswer
+
+# Own modules
+from .common import pp, to_utf8, to_bool
+from .common import RE_DOT_AT_END
+
+from .obj import PpBaseObjectError, PpBaseObject
+from .pdns_record import PdnsApiRrset, PdnsSoaData
+
+__version__ = '0.5.6'
+
+LOG = logging.getLogger(__name__)
+
+# =============================================================================
+class PdnsApiZoneError(PpBaseObjectError):
+    pass
+
+
+# =============================================================================
+class PdnsApiZone(PpBaseObject):
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, appname=None, verbose=0, version=__version__, base_dir=None, initialized=None,
+            account=None, dnssec=False, id=None, kind=None, last_check=None,
+            masters=None, name=None, notified_serial=None, serial=None, url=None,
+            rrsets=None, soa_edit=None, soa_edit_api=None, nsec3narrow=None, nsec3param=None,
+            presigned=None, api_rectify=None):
+
+        # {   'account': 'local',
+        #     'dnssec': False,
+        #     'id': 'bla.ai.',
+        #     'kind': 'Master',
+        #     'last_check': 0,
+        #     'masters': [],
+        #     'name': 'bla.ai.',
+        #     'notified_serial': 2018080404,
+        #     'serial': 2018080404,
+        #     'url': 'api/v1/servers/localhost/zones/bla.ai.'},
+        self._account = account
+        self._dnssec = dnssec
+        self._id = id
+        self._kind = kind
+        self._last_check = last_check
+        self.masters = []
+        if masters:
+            self.masters = copy.copy(masters)
+        self._name = name
+        self._notified_serial = notified_serial
+        self._serial = serial
+        self._url = url
+        self._nsec3narrow = None
+        if nsec3narrow is not None:
+            self._nsec3narrow = to_bool(nsec3narrow)
+        self._nsec3param = None
+        if nsec3param is not None and str(nsec3param).strip() != '':
+            self._nsec3param = str(nsec3param).strip()
+        self._presigned = None
+        if presigned is not None:
+            self._presigned = to_bool(presigned)
+        self._api_rectify = None
+        if api_rectify is not None:
+            self._api_rectify = to_bool(api_rectify)
+
+        self.rrsets = []
+        self._soa_edit = soa_edit
+        self._soa_edit_api = soa_edit_api
+
+        super(PdnsApiZone, self).__init__(
+            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
+
+        if initialized is not None:
+            self.initialized = initialized
+
+    # -----------------------------------------------------------
+    @classmethod
+    def init_from_dict(
+        cls, data,
+            appname=None, verbose=0, version=__version__, base_dir=None, initialized=None):
+
+        if not isinstance(data, dict):
+            raise PdnsApiZoneError("Given data {!r} is not a dict object.".format(data))
+
+        params = {
+            'appname': appname,
+            'verbose': verbose,
+            'version': version,
+            'base_dir': base_dir
+        }
+        if initialized is not None:
+            params['initialized'] = initialized
+
+        rrsets = None
+        if 'rrsets' in data:
+            if data['rrsets']:
+                rrsets = data['rrsets']
+            data['rrsets'] = None
+
+        params.update(data)
+        zone = cls(**params)
+
+        if rrsets:
+            for single_rrset in rrsets:
+                rrset = PdnsApiRrset.init_from_dict(
+                    single_rrset, appname=appname, verbose=verbose, base_dir=base_dir)
+                zone.rrsets.append(rrset)
+
+        zone.initialized = True
+
+        return zone
+
+    # -----------------------------------------------------------
+    @property
+    def account(self):
+        """The name of the owning account of the zone, internal used
+            to differ local visible zones from all other zones."""
+        return getattr(self, '_account', None)
+
+    @account.setter
+    def account(self, value):
+        if value:
+            v = str(value).strip()
+            if v:
+                self._account = v
+            else:
+                self._account = None
+        else:
+            self._account = None
+
+    # -----------------------------------------------------------
+    @property
+    def dnssec(self):
+        """Is the zone under control of DNSSEC."""
+        return getattr(self, '_dnssec', False)
+
+    @dnssec.setter
+    def dnssec(self, value):
+        self._dnssec = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def id(self):
+        """The unique idendity of the zone."""
+        return getattr(self, '_id', None)
+
+    @id.setter
+    def id(self, value):
+        if value:
+            v = str(value).strip()
+            if v:
+                self._id = v
+            else:
+                self._id = None
+        else:
+            self._id = None
+
+    # -----------------------------------------------------------
+    @property
+    def kind(self):
+        """The kind or type of the zone."""
+        return getattr(self, '_kind', None)
+
+    @kind.setter
+    def kind(self, value):
+        if value:
+            v = str(value).strip()
+            if v:
+                self._kind = v
+            else:
+                self._kind = None
+        else:
+            self._kind = None
+
+    # -----------------------------------------------------------
+    @property
+    def last_check(self):
+        """The timestamp of the last check of the zone"""
+        return getattr(self, '_last_check', None)
+
+    # -----------------------------------------------------------
+    @property
+    def name(self):
+        """The name of the zone."""
+        return getattr(self, '_name', None)
+
+    @name.setter
+    def name(self, value):
+        if value:
+            v = str(value).strip()
+            if v:
+                self._name = v
+            else:
+                self._name = None
+        else:
+            self._name = None
+
+    # -----------------------------------------------------------
+    @property
+    def name_unicode(self):
+        """The name of the zone in unicode, if it is an IDNA encoded zone."""
+        n = getattr(self, '_name', None)
+        if n is None:
+            return None
+        if 'xn--' in n:
+            return to_utf8(n).decode('idna')
+        return n
+
+    # -----------------------------------------------------------
+    @property
+    def notified_serial(self):
+        """The notified serial number of the zone"""
+        return getattr(self, '_notified_serial', None)
+
+    # -----------------------------------------------------------
+    @property
+    def serial(self):
+        """The serial number of the zone"""
+        return getattr(self, '_serial', None)
+
+    # -----------------------------------------------------------
+    @property
+    def url(self):
+        """The URL in the API to get the zone object."""
+        return getattr(self, '_url', None)
+
+    # -----------------------------------------------------------
+    @property
+    def soa_edit(self):
+        """The SOA edit property of the zone object."""
+        return getattr(self, '_soa_edit', None)
+
+    # -----------------------------------------------------------
+    @property
+    def soa_edit_api(self):
+        """The SOA edit property (API) of the zone object."""
+        return getattr(self, '_soa_edit_api', None)
+
+    # -----------------------------------------------------------
+    @property
+    def nsec3narrow(self):
+        """Some stuff belonging to DNSSEC."""
+        return getattr(self, '_nsec3narrow', None)
+
+    # -----------------------------------------------------------
+    @property
+    def nsec3param(self):
+        """Some stuff belonging to DNSSEC."""
+        return getattr(self, '_nsec3param', None)
+
+    # -----------------------------------------------------------
+    @property
+    def presigned(self):
+        """Some stuff belonging to PowerDNS >= 4.1."""
+        return getattr(self, '_presigned', None)
+
+    # -----------------------------------------------------------
+    @property
+    def api_rectify(self):
+        """Some stuff belonging to PowerDNS >= 4.1."""
+        return getattr(self, '_api_rectify', None)
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PdnsApiZone, self).as_dict(short=short)
+        res['account'] = self.account
+        res['dnssec'] = copy.copy(self.dnssec)
+        res['id'] = self.id
+        res['kind'] = self.kind
+        res['last_check'] = self.last_check
+        res['masters'] = copy.copy(self.masters)
+        res['name'] = self.name
+        res['name_unicode'] = self.name_unicode
+        res['notified_serial'] = self.notified_serial
+        res['serial'] = self.serial
+        res['url'] = self.url
+        res['rrsets'] = []
+        res['soa_edit'] = self.soa_edit
+        res['soa_edit_api'] = self.soa_edit_api
+        res['nsec3narrow'] = self.nsec3narrow
+        res['nsec3param'] = self.nsec3param
+        res['presigned'] = self.presigned
+        res['api_rectify'] = self.api_rectify
+
+        for rrset in self.rrsets:
+            if isinstance(rrset, PpBaseObject):
+                res['rrsets'].append(rrset.as_dict(short))
+            else:
+                res['rrsets'].append(rrset)
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """
+        Typecasting function for translating object structure
+        into a string
+
+        @return: structure as string
+        @rtype:  str
+        """
+
+        return pp(self.as_dict(short=True))
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("name={!r}".format(self.name))
+        fields.append("kind={!r}".format(self.kind))
+        fields.append("serial={!r}".format(self.serial))
+        fields.append("dnssec={!r}".format(self.dnssec))
+        fields.append("account={!r}".format(self.account))
+        fields.append("appname={!r}".format(self.appname))
+        fields.append("verbose={!r}".format(self.verbose))
+        fields.append("version={!r}".format(self.version))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    @classmethod
+    def get_list_template(cls, show_numbers=False, minimal=False):
+
+        if minimal:
+            return "{name}"
+
+        tpl = "{name:<{len_zone}}  {kind:<8} {serial:>10}  {dnssec:<6}"
+        if show_numbers:
+            tpl += '  {nr_rrsets:>8}  {nr_records:>8} '
+        tpl += ' {account}'
+        return tpl
+
+    # -------------------------------------------------------------------------
+    def get_line(self, len_zone=20, rrsets=None, minimal=False):
+
+        if minimal:
+            tpl = self.get_list_template(minimal=True)
+        elif rrsets:
+            tpl = self.get_list_template(show_numbers=True)
+        else:
+            tpl = self.get_list_template(show_numbers=False)
+
+        params = {
+            'name': self.name_unicode,
+            'len_zone': len_zone,
+            'kind': self.kind,
+            'serial': self.serial,
+            'dnssec': 'no',
+            'account': '',
+            'nr_rrsets': '',
+            'nr_records': '',
+        }
+        if self.dnssec:
+            params['dnssec'] = 'yes'
+        if self.account:
+            params['account'] = self.account
+
+        if rrsets:
+            params['nr_rrsets'] = 0
+            params['nr_records'] = 0
+            for rrset in rrsets:
+                params['nr_rrsets'] += 1
+                for record in rrset.records:
+                    params['nr_records'] += 1
+
+        return tpl.format(**params)
+
+    # -------------------------------------------------------------------------
+    def get_soa_rrset(self):
+
+        for rrset in self.rrsets:
+            if rrset.type == 'SOA':
+                return rrset
+        return None
+
+    # -------------------------------------------------------------------------
+    def get_soa_record(self):
+
+        rrset = self.get_soa_rrset()
+        if self.verbose > 3:
+            LOG.debug("Got SOA RRset:\n{}".format(rrset))
+        if not rrset:
+            return None
+        if not rrset.records:
+            return None
+        return rrset.records[0]
+
+    # -------------------------------------------------------------------------
+    def get_soa(self):
+
+        soa = None
+        record = self.get_soa_record()
+        if self.verbose > 3:
+            LOG.debug("Got SOA record:\n{}".format(record))
+        if not record:
+            return None
+
+        soa = PdnsSoaData.init_from_data(
+            data=record.content, appname=self.appname,
+            verbose=self.verbose, base_dir=self.base_dir)
+
+        return soa
+
+    # -------------------------------------------------------------------------
+    def get_soa_by_dns(self, *nameservers):
+
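+        # Resolves the SOA via dnspython; without explicitly given nameservers
+        # the system resolver configuration (on Unix: /etc/resolv.conf) is used.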
+        soa = None
+
+        resolver = Resolver()
+
+        if nameservers:
+            resolver.nameservers = []
+            for ns in nameservers:
+                LOG.debug("Adding nameserver to use: {!r}.".format(ns))
+                resolver.nameservers.append(ns)
+
+        try:
+            answers = resolver.query(self.name, 'SOA', raise_on_no_answer=False)
+        except NoAnswer as e:
+            LOG.error("Got no answer from nameservers: {}".format(e))
+            return None
+
+        for rdata in answers:
+            soa = PdnsSoaData(
+                primary=rdata.mname, email=rdata.rname, serial=rdata.serial, refresh=rdata.refresh,
+                retry=rdata.retry, expire=rdata.expire, ttl=rdata.minimum,
+                appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
+            return soa
+
+        return None
+
+    # -------------------------------------------------------------------------
+    def get_new_serial(self, *compare_serials):
+
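+        # Serials follow the YYYYMMDDnn scheme: year * 1000000 + month * 10000 +
+        # day * 100 + counter; after counter 99 the date part moves to the next day.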
+        i = 0
+        today = datetime.date.today()
+        new_serial = today.year * 1000000 + today.month * 10000 + today.day * 100 + i
+        one_day = datetime.timedelta(1)
+
+        compare = []
+        compare.append(self.serial)
+        compare.append(self.notified_serial)
+        if compare_serials:
+            for serial in compare_serials:
+                compare.append(serial)
+        if self.verbose > 3:
+            LOG.debug("Compare serials: {}".format(pp(compare)))
+
+        found = False
+        while not found:
+            if self.verbose > 3:
+                LOG.debug("Trying new serial {} ...".format(new_serial))
+            found = True
+            for serial in compare:
+                if serial is None:
+                    continue
+                if serial >= new_serial:
+                    found = False
+                    break
+            if found:
+                return new_serial
+            i += 1
+            if i > 99:
+                today += one_day
+                i = 0
+            new_serial = today.year * 1000000 + today.month * 10000 + today.day * 100 + i
+
+        return new_serial
+
+    # -------------------------------------------------------------------------
+    def get_ns_rrset(self, for_zone=None):
+
+        if self.verbose > 3:
+            rrrr = []
+            for rrset in self.rrsets:
+                rrrr.append(rrset.as_dict())
+            LOG.debug("Searching NS record set in:\n{}".format(pp(rrrr)))
+
+        for rrset in self.rrsets:
+            if rrset.type == 'NS':
+                if for_zone:
+                    if for_zone.lower() != rrset.name.lower():
+                        continue
+                else:
+                    if self.name.lower() != rrset.name.lower():
+                        continue
+                if self.verbose > 3:
+                    LOG.debug("Found NS RRSet:\n{}".format(pp(rrset.as_dict())))
+                return rrset
+        return None
+
+    # -------------------------------------------------------------------------
+    def get_ns_records(self, for_zone=None):
+
+        rrset = self.get_ns_rrset(for_zone=for_zone)
+        if self.verbose > 2:
+            LOG.debug("Got NS RRset:\n{}".format(rrset))
+        if not rrset:
+            return None
+        if not rrset.records:
+            return None
+
+        ns_records = []
+        for record in rrset.records:
+            ns_records.append(record)
+        return ns_records
+
+    # -------------------------------------------------------------------------
+    def get_zone_nameservers(self, for_zone=None):
+
+        nameservers = []
+
+        ns_records = self.get_ns_records(for_zone=for_zone)
+        if not ns_records:
+            return nameservers
+
+        for record in ns_records:
+            ns = RE_DOT_AT_END.sub('.', record.content).lower()
+            nameservers.append(ns)
+        nameservers.sort()
+        return nameservers
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/pidfile.py b/lib/pp_lib/pidfile.py
new file mode 100644 (file)
index 0000000..b938c2a
--- /dev/null
@@ -0,0 +1,527 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: A module for a pidfile object.
+          It provides methods to define, check, create
+          and remove a pidfile.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import sys
+import logging
+
+import re
+import signal
+import errno
+
+# Third party modules
+import six
+from six import reraise
+
+# Own modules
+
+from .errors import ReadTimeoutError
+
+from .obj import PpBaseObjectError
+from .obj import PpBaseObject
+
+from .common import to_utf8
+
+__version__ = '0.2.5'
+
+LOG = logging.getLogger(__name__)
+
+# =============================================================================
+class PidFileError(PpBaseObjectError):
+    """Base error class for all exceptions happened during
+    handling a pidfile."""
+
+    pass
+
+
+# =============================================================================
+class InvalidPidFileError(PidFileError):
+    """An error class indicating, that the given pidfile is unusable"""
+
+    def __init__(self, pidfile, reason=None):
+        """
+        Constructor.
+
+        @param pidfile: the filename of the invalid pidfile.
+        @type pidfile: str
+        @param reason: the reason, why the pidfile is invalid.
+        @type reason: str
+
+        """
+
+        self.pidfile = pidfile
+        self.reason = reason
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """Typecasting into a string for error output."""
+
+        msg = None
+        if self.reason:
+            msg = "Invalid pidfile {!r} given: {}".format(self.pidfile, self.reason)
+        else:
+            msg = "Invalid pidfile {!r} given.".format(self.pidfile)
+
+        return msg
+
+# =============================================================================
+class PidFileInUseError(PidFileError):
+    """
+    An error class indicating that the given pidfile is in use
+    by another application.
+    """
+
+    def __init__(self, pidfile, pid):
+        """
+        Constructor.
+
+        @param pidfile: the filename of the pidfile.
+        @type pidfile: str
+        @param pid: the PID of the process owning the pidfile
+        @type pid: int
+
+        """
+
+        self.pidfile = pidfile
+        self.pid = pid
+
+    # -------------------------------------------------------------------------
+    def __str__(self):
+        """Typecasting into a string for error output."""
+
+        msg = "The pidfile {!r} is currently in use by the application with the PID {}.".format(
+            self.pidfile, self.pid)
+
+        return msg
+
+
+# =============================================================================
+class PidFile(PpBaseObject):
+    """
+    Base class for a pidfile object.
+    """
+
+    open_args = {}
+    if six.PY3:
+        open_args = {
+            'encoding': 'utf-8',
+            'errors': 'surrogateescape',
+        }
+
+    # -------------------------------------------------------------------------
+    def __init__(
+        self, filename, auto_remove=True, appname=None, verbose=0,
+            version=__version__, base_dir=None,
+            initialized=False, simulate=False, timeout=10):
+        """
+        Initialisation of the pidfile object.
+
+        @raise ValueError: no filename was given
+        @raise PidFileError: on some errors.
+
+        @param filename: the filename of the pidfile
+        @type filename: str
+        @param auto_remove: Remove the self created pidfile on destroying
+                            the current object
+        @type auto_remove: bool
+        @param appname: name of the current running application
+        @type appname: str
+        @param verbose: verbose level
+        @type verbose: int
+        @param version: the version string of the current object or application
+        @type version: str
+        @param base_dir: the base directory of all operations
+        @type base_dir: str
+        @param initialized: initialisation is complete after __init__()
+                            of this object
+        @type initialized: bool
+        @param simulate: simulation mode
+        @type simulate: bool
+        @param timeout: timeout in seconds for IO operations on pidfile
+        @type timeout: int
+
+        @return: None
+        """
+
+        self._created = False
+        """
+        @ivar: the pidfile was created by this current object
+        @type: bool
+        """
+
+        super(PidFile, self).__init__(
+            appname=appname,
+            verbose=verbose,
+            version=version,
+            base_dir=base_dir,
+            initialized=False,
+        )
+
+        if not filename:
+            raise ValueError('No filename given on initializing PidFile object.')
+
+        self._filename = os.path.abspath(str(filename))
+        """
+        @ivar: The filename of the pidfile
+        @type: str
+        """
+
+        self._auto_remove = bool(auto_remove)
+        """
+        @ivar: Remove the self created pidfile on destroying the current object
+        @type: bool
+        """
+
+        self._simulate = bool(simulate)
+        """
+        @ivar: Simulation mode
+        @type: bool
+        """
+
+        self._timeout = int(timeout)
+        """
+        @ivar: timeout in seconds for IO operations on pidfile
+        @type: int
+        """
+
+    # -----------------------------------------------------------
+    @property
+    def filename(self):
+        """The filename of the pidfile."""
+        return self._filename
+
+    # -----------------------------------------------------------
+    @property
+    def auto_remove(self):
+        """Remove the self created pidfile on destroying the current object."""
+        return self._auto_remove
+
+    @auto_remove.setter
+    def auto_remove(self, value):
+        self._auto_remove = bool(value)
+
+    # -----------------------------------------------------------
+    @property
+    def simulate(self):
+        """Simulation mode."""
+        return self._simulate
+
+    # -----------------------------------------------------------
+    @property
+    def created(self):
+        """The pidfile was created by this current object."""
+        return self._created
+
+    # -----------------------------------------------------------
+    @property
+    def timeout(self):
+        """The timeout in seconds for IO operations on pidfile."""
+        return self._timeout
+
+    # -----------------------------------------------------------
+    @property
+    def parent_dir(self):
+        """The directory containing the pidfile."""
+        return os.path.dirname(self.filename)
+
+    # -------------------------------------------------------------------------
+    def as_dict(self, short=True):
+        """
+        Transforms the elements of the object into a dict
+
+        @param short: don't include local properties in resulting dict.
+        @type short: bool
+
+        @return: structure as dict
+        @rtype:  dict
+        """
+
+        res = super(PidFile, self).as_dict(short=short)
+        res['filename'] = self.filename
+        res['auto_remove'] = self.auto_remove
+        res['simulate'] = self.simulate
+        res['created'] = self.created
+        res['timeout'] = self.timeout
+        res['parent_dir'] = self.parent_dir
+        res['open_args'] = self.open_args
+
+        return res
+
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        """Typecasting into a string for reproduction."""
+
+        out = "<%s(" % (self.__class__.__name__)
+
+        fields = []
+        fields.append("filename=%r" % (self.filename))
+        fields.append("auto_remove=%r" % (self.auto_remove))
+        fields.append("appname=%r" % (self.appname))
+        fields.append("verbose=%r" % (self.verbose))
+        fields.append("base_dir=%r" % (self.base_dir))
+        fields.append("initialized=%r" % (self.initialized))
+        fields.append("simulate=%r" % (self.simulate))
+        fields.append("timeout=%r" % (self.timeout))
+
+        out += ", ".join(fields) + ")>"
+        return out
+
+    # -------------------------------------------------------------------------
+    def __del__(self):
+        """Destructor. Removes the pidfile, if it was created by ourselfes."""
+
+        if not self.created:
+            return
+
+        if not os.path.exists(self.filename):
+            if self.verbose > 3:
+                LOG.debug("Pidfile {!r} doesn't exists, not removing.".format(self.filename))
+            return
+
+        if not self.auto_remove:
+            if self.verbose > 3:
+                LOG.debug("Auto removing disabled, don't deleting {!r}.".format(self.filename))
+            return
+
+        if self.verbose > 1:
+            LOG.debug("Removing pidfile {!r} ...".format(self.filename))
+        if self.simulate:
+            if self.verbose > 1:
+                LOG.debug("Just kidding ..")
+            return
+        try:
+            os.remove(self.filename)
+        except OSError as e:
+            LOG.error("Could not delete pidfile {!r}: {}".format(self.filename, e))
+        except Exception as e:
+            self.handle_error(str(e), e.__class__.__name__, True)
+
+    # -------------------------------------------------------------------------
+    def create(self, pid=None):
+        """
+        The main method of this class. It tries to write the PID of the process
+        into the pidfile.
+
+        @param pid: the pid to write into the pidfile. If not given, the PID of
+                    the current process will be taken.
+        @type pid: int
+
+        """
+
+        if pid:
+            pid = int(pid)
+            if pid <= 0:
+                msg = "Invalid PID {} for creating pidfile {!r} given.".format(pid, self.filename)
+                raise PidFileError(msg)
+        else:
+            pid = os.getpid()
+
+        if self.check():
+
+            LOG.info("Deleting pidfile {!r} ...".format(self.filename))
+            if self.simulate:
+                LOG.debug("Just kidding ..")
+            else:
+                try:
+                    os.remove(self.filename)
+                except OSError as e:
+                    raise InvalidPidFileError(self.filename, str(e))
+
+        if self.verbose > 1:
+            LOG.debug("Trying opening {!r} exclusive ...".format(self.filename))
+
+        if self.simulate:
+            LOG.debug("Simulation mode - don't real writing in {!r}.".format(self.filename))
+            self._created = True
+            return
+
+        fd = None
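+        # os.O_CREAT | os.O_EXCL creates the pidfile atomically and fails if
+        # another process created the file in the meantime.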
+        try:
+            fd = os.open(
+                self.filename, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
+        except OSError as e:
+            error_tuple = sys.exc_info()
+            msg = "Error on creating pidfile {!r}: {}".format(self.filename, e)
+            reraise(PidFileError, msg, error_tuple[2])
+
+        if self.verbose > 2:
+            LOG.debug("Writing {} into {!r} ...".format(pid, self.filename))
+
+        out = to_utf8("%d\n" % (pid))
+        try:
+            os.write(fd, out)
+        finally:
+            os.close(fd)
+
+        self._created = True
+
+    # -------------------------------------------------------------------------
+    def recreate(self, pid=None):
+        """
+        Rewrites an already created pidfile with the current PID.
+
+        @param pid: the pid to write into the pidfile. If not given, the PID of
+                    the current process will be taken.
+        @type pid: int
+
+        """
+
+        if not self.created:
+            msg = "Calling recreate() on a not self created pidfile."
+            raise PidFileError(msg)
+
+        if pid:
+            pid = int(pid)
+            if pid <= 0:
+                msg = "Invalid PID {} for creating pidfile {!r} given.".format(pid, self.filename)
+                raise PidFileError(msg)
+        else:
+            pid = os.getpid()
+
+        if self.verbose > 1:
+            LOG.debug("Trying opening {!r} for recreate ...".format(self.filename))
+
+        if self.simulate:
+            LOG.debug("Simulation mode - don't real writing in {!r}.".format(self.filename))
+            return
+
+        fh = None
+        try:
+            fh = open(self.filename, 'w', **self.open_args)
+        except OSError as e:
+            error_tuple = sys.exc_info()
+            msg = "Error on recreating pidfile {!r}: {}".format(self.filename, e)
+            reraise(PidFileError, msg, error_tuple[2])
+
+        if self.verbose > 2:
+            LOG.debug("Writing {} into {!r} ...".format(pid, self.filename))
+
+        try:
+            fh.write("%d\n" % (pid))
+        finally:
+            fh.close()
+
+    # -------------------------------------------------------------------------
+    def check(self):
+        """
+        This method checks the usability of the pidfile.
+        If the method doesn't raise an exception, the pidfile is usable.
+
+        It returns whether the pidfile exists and can be deleted, or not.
+
+        @raise InvalidPidFileError: if the pidfile is unusable
+        @raise PidFileInUseError: if the pidfile is in use by another application
+        @raise ReadTimeoutError: on timeout reading an existing pidfile
+        @raise OSError: on some other reasons, why the existing pidfile
+                        couldn't be read
+
+        @return: the pidfile exists, but can be deleted - or it doesn't
+                 exist.
+        @rtype: bool
+
+        """
+
+        if not os.path.exists(self.filename):
+            if not os.path.exists(self.parent_dir):
+                reason = "Pidfile parent directory {!r} doesn't exists.".format(
+                    self.parent_dir)
+                raise InvalidPidFileError(self.filename, reason)
+            if not os.path.isdir(self.parent_dir):
+                reason = "Pidfile parent directory {!r} is not a directory.".format(
+                    self.parent_dir)
+                raise InvalidPidFileError(self.filename, reason)
+            if not os.access(self.parent_dir, os.W_OK | os.X_OK):
+                reason = "No write access to pidfile parent directory {!r}.".format(
+                    self.parent_dir)
+                raise InvalidPidFileError(self.filename, reason)
+
+            return False
+
+        if not os.path.isfile(self.filename):
+            reason = "It is not a regular file."
+            raise InvalidPidFileError(self.filename, reason)
+
+        # ---------
+        def pidfile_read_alarm_caller(signum, sigframe):
+            """
+            This nested function will be called in the event of a timeout.
+
+            @param signum: the signal number (POSIX) which happened
+            @type signum: int
+            @param sigframe: the frame of the signal
+            @type sigframe: object
+            """
+
+            raise ReadTimeoutError(self.timeout, self.filename)
+
+        if self.verbose > 1:
+            LOG.debug("Reading content of pidfile {!r} ...".format(self.filename))
+
+        signal.signal(signal.SIGALRM, pidfile_read_alarm_caller)
+        signal.alarm(self.timeout)
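+        # SIGALRM interrupts a read that blocks (e.g. on a hanging NFS mount) after
+        # self.timeout seconds; the alarm is cancelled again in the finally block below.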
+
+        content = ''
+        fh = None
+
+        try:
+            fh = open(self.filename, 'r')
+            for line in fh.readlines():
+                content += line
+        finally:
+            if fh:
+                fh.close()
+            signal.alarm(0)
+
+        # Evaluating the content of the pidfile
+
+        pid = None
+        line = content.strip()
+        match = re.search(r'^\s*(\d+)\s*$', line)
+        if match:
+            pid = int(match.group(1))
+        else:
+            msg = "No useful information found in pidfile {!r}: {!r}".format(self.filename, line)
+            return True
+
+        if self.verbose > 1:
+            LOG.debug("Trying check for process with PID {} ...".format(pid))
+
+        try:
+            os.kill(pid, 0)
+        except OSError as err:
+            if err.errno == errno.ESRCH:
+                LOG.info("Process with PID {} anonymous died.".format(pid))
+                return True
+            elif err.errno == errno.EPERM:
+                error_tuple = sys.exc_info()
+                msg = "No permission to signal the process {} ...".format(pid)
+                reraise(PidFileError, msg, error_tuple[2])
+            else:
+                error_tuple = sys.exc_info()
+                msg = "Got a {}: {}.".format(err.__class__.__name__, err)
+                reraise(PidFileError, msg, error_tuple[2])
+        else:
+            raise PidFileInUseError(self.filename, pid)
+
+        return False
+
+
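+# Illustrative usage sketch (editorial addition, not part of the original
+# module; the class name shown, its constructor signature and the name of the
+# pidfile-creating method are assumptions here):
+#
+#     pidfile = PidFile(filename='/run/my-app.pid', timeout=10)
+#     if pidfile.check():
+#         # A stale pidfile exists and may safely be removed.
+#         os.remove(pidfile.filename)
+#     # Afterwards the pidfile can be (re-)created with the appropriate
+#     # method of this class.
+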
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/quota_check.py b/lib/pp_lib/quota_check.py
new file mode 100644 (file)
index 0000000..9ad82be
--- /dev/null
@@ -0,0 +1,710 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the quota-check application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import os
+import datetime
+import logging
+import logging.config
+import re
+import textwrap
+import pwd
+import glob
+import stat
+import pipes
+import gzip
+import shutil
+import time
+import locale
+import sys
+
+from subprocess import Popen, PIPE
+
+# Third party modules
+import six
+import yaml
+
+# Own modules
+from .common import pp, to_str
+
+from .homes_admin import PpHomesAdminError, PpHomesAdminApp
+
+__version__ = '0.6.3'
+LOG = logging.getLogger(__name__)
+ZERO = datetime.timedelta(0)
+
+# A Utc class.
+
+class Utc(datetime.tzinfo):
+    """Utc"""
+
+    def utcoffset(self, dt):
+        return ZERO
+
+    def tzname(self, dt):
+        return "UTC"
+
+    def dst(self, dt):
+        return ZERO
+
+UTC = Utc()
+# UTC = datetime.timezone.utc
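+# Illustrative note (editorial addition): the UTC instance can be attached to
+# otherwise naive datetime objects, e.g.
+#     datetime.datetime(2018, 1, 1, tzinfo=UTC).isoformat()
+# returns '2018-01-01T00:00:00+00:00'.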
+
+
+# =============================================================================
+class PpQuotaCheckError(PpHomesAdminError):
+    pass
+
+
+# =============================================================================
+class PpQuotaCheckApp(PpHomesAdminApp):
+    """
+    Class for the 'quota-check' application to check the utilization
+    of the home share on the NFS server.
+    """
+
+    default_quota_kb = 300 * 1024
+
+    # /var/lib/quota-check
+    default_status_dir = os.sep + os.path.join('var', 'lib', 'quota-check')
+    # /var/lib/quota-check/quota-check.yaml
+    default_statusfile_base = 'quota-check.yaml'
+
+    du_line_re = re.compile(r'^\s*(\d+)\s+(.*)')
+
+    default_max_age = 365.25 * 4 * 24 * 60 * 60
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.default_reply_to = 'noreply@pixelpark.com'
+
+        self.quota_kb = self.default_quota_kb
+
+        self.status_dir = self.default_status_dir
+        self.statusfile_base = self.default_statusfile_base
+        self.statusfile = os.path.join(self.status_dir, self.statusfile_base)
+        self.status_data = {}
+        self.max_age = self.default_max_age
+
+        self.passwd_data = {}
+        self.map_uid = {}
+        self.now = datetime.datetime.utcnow()
+        self.du_cmd = self.get_command('du', quiet=True)
+        self.do_statistics = False
+
+        self.show_simulate_opt = True
+        self._simulate_opt_help = textwrap.dedent('''\
+            Retrieving all utilization values, but not writing them
+            into the status file.
+            ''').strip()
+
+        description = textwrap.dedent('''\
+            This checks the utilization of the home directories on the NFS server
+            and sends a mail per request about all home directories, which are
+            exceeding the given quota (default {} MB).
+            ''').strip().format(self.default_quota_kb // 1024)
+
+        super(PpQuotaCheckApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='quota-check'
+        )
+
+        if not self.du_cmd:
+            LOG.error("Command {!r} not found.".format('du'))
+            self.exit(7)
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def init_arg_parser(self):
+        """
+        Method to initiate the argument parser.
+
+        This method should be explicitly called by all init_arg_parser()
+        methods in descendant classes.
+        """
+
+        super(PpQuotaCheckApp, self).init_arg_parser()
+
+        def_mb = self.quota_kb / 1024
+
+        self.arg_parser.add_argument(
+            '-Q', '--quota',
+            metavar="MB", type=int, dest='quota_mb',
+            help="Quota value in MB (default: {} MB).".format(def_mb),
+        )
+
+        self.arg_parser.add_argument(
+            '--status-file',
+            metavar='FILE', dest='status_file',
+            help=(
+                "The YAML file containing the statistics of the current week "
+                "(default: {!r}).").format(self.statusfile)
+        )
+
+        self.arg_parser.add_argument(
+            '-S', '--stats',
+            action="store_true", dest="stats",
+            help=(
+                "Generate statistics, mail them to the administrators and "
+                "rotate the status data file. Without this option the current "
+                "utilization is determined and saved in the status data file."),
+        )
+
+    # -------------------------------------------------------------------------
+    def perform_config(self):
+
+        super(PpQuotaCheckApp, self).perform_config()
+
+        for section_name in self.cfg.keys():
+
+            if self.verbose > 3:
+                LOG.debug("Checking config section {!r} ...".format(section_name))
+
+            if section_name.lower() not in ('quota-check', 'quota_check', 'quotacheck'):
+                continue
+
+            section = self.cfg[section_name]
+            if self.verbose > 2:
+                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
+                    n=section_name, s=pp(section)))
+
+            if 'quota_mb' in section:
+                v = section['quota_mb']
+                quota = self.quota_kb / 1024
+                try:
+                    quota = int(v)
+                except (ValueError, TypeError):
+                    msg = "Found invalid quota MB {!r} in configuration.".format(v)
+                    LOG.error(msg)
+                else:
+                    if quota < 0:
+                        msg = "Found invalid quota MB {!r} in configuration.".format(quota)
+                        LOG.error(msg)
+                    else:
+                        self.quota_kb = quota * 1024
+
+            if 'quota_kb' in section:
+                v = section['quota_kb']
+                quota = self.quota_kb
+                try:
+                    quota = int(v)
+                except (ValueError, TypeError):
+                    msg = "Found invalid quota KB {!r} in configuration.".format(v)
+                    LOG.error(msg)
+                else:
+                    if quota < 0:
+                        msg = "Found invalid quota KB {!r} in configuration.".format(quota)
+                        LOG.error(msg)
+                    else:
+                        self.quota_kb = quota
+
+            if 'status_file' in section:
+                v = section['status_file']
+                if os.path.isabs(v):
+                    self.status_dir = os.path.normpath(os.path.dirname(v))
+                    self.statusfile_base = os.path.basename(v)
+                    self.statusfile = os.path.normpath(v)
+                else:
+                    self.statusfile = os.path.normpath(
+                        os.path.join(self.status_dir, v))
+                    self.status_dir = os.path.dirname(self.statusfile)
+                    self.statusfile_base = os.path.basename(self.statusfile)
+
+        cmdline_quota = getattr(self.args, 'quota_mb', None)
+        if cmdline_quota is not None:
+            self.quota_kb = cmdline_quota * 1024
+
+        sfile = getattr(self.args, 'status_file')
+        if sfile:
+            if os.path.isabs(sfile):
+                self.status_dir = os.path.normpath(os.path.dirname(sfile))
+                self.statusfile_base = os.path.basename(sfile)
+                self.statusfile = os.path.normpath(sfile)
+            else:
+                self.statusfile = os.path.normpath(os.path.join(self.status_dir, sfile))
+                self.status_dir = os.path.dirname(self.statusfile)
+                self.statusfile_base = os.path.basename(self.statusfile)
+
+        self.do_statistics = bool(getattr(self.args, 'stats', False))
+
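+    # Illustrative example (editorial addition; the exact file format and the
+    # section name depend on the configuration handling of the base classes):
+    # the values evaluated in perform_config() could come from a section like
+    #
+    #     [quota-check]
+    #     quota_mb = 500
+    #     status_file = quota-check.yaml
+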
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        self.status_data = self.read_status_data()
+        self.status_data['last_check'] = self.now
+        self.read_passwd_data()
+        self.check_home_utilization()
+
+        self.write_status_data()
+
+        if self.do_statistics:
+            self.perform_statistics()
+            self.compress_old_status_files()
+
+    # -------------------------------------------------------------------------
+    def pre_run(self):
+        """
+        Checks before the main routine, whether the application is
+        executed by the root user.
+
+        """
+
+        if os.geteuid():
+            msg = "Only root may execute this application."
+            LOG.error(msg)
+            self.exit(1)
+
+        super(PpQuotaCheckApp, self).pre_run()
+
+    # -------------------------------------------------------------------------
+    def read_status_data(self):
+
+        LOG.info("Reading status data from {!r} ...".format(self.statusfile))
+
+        if not os.path.exists(self.statusfile):
+            LOG.debug("Status file {!r} does not exists.".format(self.statusfile))
+            return {}
+
+        if not os.path.isfile(self.statusfile):
+            msg = "Status file {!r} is not a regular file.".format(self.statusfile)
+            LOG.error(msg)
+            self.exit(5)
+
+        if not os.access(self.statusfile, os.R_OK):
+            msg = "No read access to status file {!r}.".format(self.statusfile)
+            LOG.error(msg)
+            self.exit(6)
+
+        open_args = {}
+        if six.PY3:
+            open_args['encoding'] = 'utf-8'
+            open_args['errors'] = 'surrogateescape'
+
+        status = {}
+
+        with open(self.statusfile, 'r', **open_args) as fh:
+            try:
+                status = yaml.load(fh)
+            except yaml.YAMLError as e:
+                msg = "YAML error in status file {f!r}: {e}".format(
+                    f=self.statusfile, e=e)
+                LOG.error(msg)
+                return {}
+
+        if not isinstance(status, dict):
+            status = {}
+
+        if self.verbose > 2:
+            LOG.debug("Status from {f!r}:\n{s}".format(
+                f=self.statusfile, s=pp(status)))
+
+        return status
+
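+    # Note (editorial addition): yaml.load() without an explicit Loader argument
+    # is deprecated in newer PyYAML releases; yaml.safe_load() would be
+    # sufficient here, since the status file only contains mappings, scalars
+    # and timestamps.
+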
+    # -------------------------------------------------------------------------
+    def rotate_status_file(self, date=None):
+
+        if not os.path.isfile(self.statusfile):
+            LOG.debug("File {!r} to rotate does not exists.".format(self.statusfile))
+            return
+
+        if not date:
+            file_stat = os.stat(self.statusfile)
+            date = datetime.datetime.utcfromtimestamp(file_stat.st_mtime)
+        (stem, ext) = os.path.splitext(self.statusfile)
+
+        new_fname = "{s}.{d}{e}".format(
+            s=stem, d=date.strftime('%Y-%m-%d_%H:%M:%S'), e=ext)
+        LOG.info("Renaming {o!r} -> {n!r}.".format(o=self.statusfile, n=new_fname))
+        if self.simulate:
+            LOG.info("Simulation mode, status file will not be renamed.")
+            return
+        os.rename(self.statusfile, new_fname)
+
+    # -------------------------------------------------------------------------
+    def compress_old_status_files(self):
+
+        if self.simulate:
+            LOG.info("Simulation mode, status file rotation will not be executed.")
+            return
+
+        (stem, ext) = os.path.splitext(self.statusfile_base)
+        search_base = "{s}.20*{e}".format(s=stem, e=ext)
+        search_pattern = os.path.join(self.status_dir, search_base)
+        files = glob.glob(search_pattern)
+        if len(files) <= 1:
+            return
+
+        files.sort()
+        for filename in files[:-1]:
+            file_stat = os.stat(filename)
+            if not file_stat.st_size:
+                LOG.debug("Not compressing {!r} because of zero size.".format(filename))
+                continue
+            LOG.info("Compressing {!r} ...".format(filename))
+            new_name = filename + '.gz'
+            with open(filename, 'rb') as f_in:
+                with gzip.open(new_name, 'wb') as f_out:
+                    shutil.copyfileobj(f_in, f_out)
+            shutil.copystat(filename, new_name)
+            LOG.debug("Removing {!r} ...".format(filename))
+            os.remove(filename)
+
+        files_to_remove = []
+        files = glob.glob(search_pattern)
+        search_base = "{s}.20*{e}.gz".format(s=stem, e=ext)
+        search_pattern = os.path.join(self.status_dir, search_base)
+        files += glob.glob(search_pattern)
+        files.sort()
+        # Removing all files older than the maximum age (default: 4 years)
+        limit_age = time.time() - self.max_age
+        limit_age_dt = datetime.datetime.utcfromtimestamp(limit_age)
+        LOG.info("Removing all status files older than {!r} ...".format(
+            limit_age_dt.isoformat(' ')))
+
+        for filename in files[:-1]:
+            if not os.path.isfile(filename):
+                continue
+            file_stat = os.stat(filename)
+            if file_stat.st_mtime < limit_age:
+                files_to_remove.append(filename)
+
+        for filename in files_to_remove:
+            LOG.info("Removing {!r} ...".format(filename))
+            os.remove(filename)
+
+    # -------------------------------------------------------------------------
+    def write_status_data(self):
+
+        LOG.info("Writing status data from {!r} ...".format(self.statusfile))
+
+        if self.verbose > 2:
+            # LOG.debug("Status to write:\n{!r}".format(self.status_data))
+            LOG.debug("Status to write:\n{}".format(pp(self.status_data)))
+
+        if self.simulate:
+            LOG.info("Simulation mode, status file will not be really written.")
+            return
+
+        open_args = {}
+        if six.PY3:
+            open_args['encoding'] = 'utf-8'
+            open_args['errors'] = 'surrogateescape'
+
+        if not os.path.exists(self.status_dir):
+            LOG.info("Creating {!r} ...".format(self.status_dir))
+            mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IXOTH
+            try:
+                os.makedirs(self.status_dir, mode)
+            except os.error as e:
+                LOG.error("Could not create {!r}: {}".format(self.status_dir, e))
+                sys.exit(9)
+        elif not os.path.isdir(self.status_dir):
+            msg = "Status directory {!r} exists, but is not a directory.".format(self.status_dir)
+            LOG.error(msg)
+            return
+
+        status_dump = yaml.dump(self.status_data, default_flow_style=False)
+        if self.verbose > 2:
+            LOG.debug("Writing YAML data:\n{}".format(status_dump))
+
+        with open(self.statusfile, 'w', **open_args) as fh:
+            fh.write(status_dump)
+
+    # -------------------------------------------------------------------------
+    def read_passwd_data(self):
+
+        LOG.info("Reading all necessary data from 'getent passwd' ...")
+
+        entries = pwd.getpwall()
+
+        for entry in entries:
+            user_name = entry.pw_name
+            uid = entry.pw_uid
+            if user_name not in self.passwd_data:
+                self.passwd_data[user_name] = entry
+            if uid not in self.map_uid:
+                self.map_uid[uid] = user_name
+
+        LOG.debug("Found {} appropriate user entries in passwd.".format(
+            len(self.passwd_data.keys())))
+        if self.verbose > 2:
+            LOG.debug("User data in passwd:\n{}".format(pp(self.passwd_data)))
+
+    # -------------------------------------------------------------------------
+    def get_util_dir_kb(self, directory):
+
+        if not os.path.isdir(directory):
+            return 0
+
+        cmd = [self.du_cmd, '-sk', directory]
+        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
+        if self.verbose > 2:
+            LOG.debug("Executing: {}".format(cmd_str))
+
+        p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+        (stdoutdata, stderrdata) = p.communicate()
+        ret = p.wait()
+        if stdoutdata:
+            stdoutdata = to_str(stdoutdata)
+        if stderrdata:
+            stderrdata = to_str(stderrdata)
+
+        if ret:
+            msg = "Return value of \"{c}\": {r}.".format(c=cmd_str, r=ret)
+            if stderrdata:
+                msg += "\nError message: {}".format(stderrdata)
+            LOG.error(msg)
+            return 0
+
+        if not stdoutdata:
+            return 0
+
+        result = 0
+        for line in stdoutdata.splitlines():
+            line = line.strip()
+            match = self.du_line_re.search(line)
+            if not match:
+                continue
+            result = int(match.group(1))
+            break
+
+        return result
+
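+    # Illustrative note (editorial addition, path is hypothetical): for a home
+    # directory occupying roughly 2 GiB, ``du -sk /home/alice`` prints a line like
+    #     2097152 /home/alice
+    # from which get_util_dir_kb() returns 2097152 (the size in KiB).
+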
+    # -------------------------------------------------------------------------
+    def _silly_open_homedirs(self):
+
+        # Seemingly senseless opening of all user home directories to trigger the automounter
+        for user_name in self.passwd_data:
+            entry = self.passwd_data[user_name]
+            pwd_home_dir = entry.pw_dir
+            if not pwd_home_dir:
+                continue
+            if not pwd_home_dir.startswith(self.home_root_abs):
+                if self.verbose > 2:
+                    LOG.debug("Home dir {!r} is not below {!r}.".format(
+                        pwd_home_dir, self.home_root_abs))
+                continue
+            abs_home_dir = os.path.join(self.chroot_homedir, os.path.relpath(pwd_home_dir, os.sep))
+            LOG.debug("Trying to open {!r} ...".format(abs_home_dir))
+            try:
+                os.listdir(abs_home_dir)
+                if self.verbose > 2:
+                    LOG.debug("Found home directory {!r} ...".format(abs_home_dir))
+            except OSError:
+                LOG.warn("Directory {!r} does not exists.".format(abs_home_dir))
+
+    # -------------------------------------------------------------------------
+    def check_home_utilization(self):
+
+        LOG.info("Checking utilization of home directories ...")
+
+        self._silly_open_homedirs()
+
+        glob_pattern = os.path.join(self.home_root_real, '*')
+        all_home_entries = glob.glob(glob_pattern)
+
+        if 'checks' not in self.status_data:
+            self.status_data['checks'] = {}
+        self.status_data['checks'][self.now] = {}
+        check = self.status_data['checks'][self.now]
+        check['data'] = {}
+        check['stats'] = {
+            'begin': self.now,
+            'end': None,
+            'duration': None,
+            'total_kb': 0,
+            'total_mb': 0,
+            'total_gb': 0.0,
+            'number_dirs': 0,
+        }
+
+        total_kb = 0
+        number_dirs = 0
+
+        i = 0
+
+        for path in all_home_entries:
+
+            LOG.debug("Searching for {!r} ...".format(path))
+
+            if not os.path.isdir(path):
+                continue
+            number_dirs += 1
+            i += 1
+            home_rel = os.sep + os.path.relpath(path, self.chroot_homedir)
+            if self.verbose > 2:
+                LOG.debug("Checking {p!r} ({h!r}) ...".format(
+                    p=path, h=home_rel))
+            dir_stat = os.stat(path)
+            dir_uid = dir_stat.st_uid
+            dir_owner = str(dir_uid)
+            username = dir_owner
+            if dir_uid == 0:
+                username = 'root'
+                dir_owner = 'root'
+            elif dir_uid in self.map_uid:
+                dir_owner = self.map_uid[dir_uid]
+                username = dir_owner
+                if dir_owner in self.passwd_data and self.passwd_data[dir_owner].pw_gecos:
+                    dir_owner = self.passwd_data[dir_owner].pw_gecos
+            util = self.get_util_dir_kb(path)
+            total_kb += util
+            result = {
+                'checked': datetime.datetime.utcnow(),
+                'util_kb': util,
+                'uid': dir_uid,
+                'gid': dir_stat.st_gid,
+                'user': username,
+                'gecos': dir_owner,
+            }
+            check['data'][home_rel] = result
+
+        end_ts = datetime.datetime.utcnow()
+        duration = end_ts - self.now
+        dur_days = duration.days
+        dur_secs = duration.seconds
+        if duration.microseconds >= 500000:
+            dur_secs += 1
+        dur_mins = 0
+        dur_hours = 0
+        if dur_secs >= 60:
+            dur_mins = int(dur_secs / 60)
+            dur_secs = dur_secs % 60
+            if dur_mins >= 60:
+                dur_hours = int(dur_mins / 60)
+                dur_mins = dur_mins % 60
+        dur_parts = []
+        if dur_days:
+            dur_parts.append("{} days".format(dur_days))
+        if dur_days or dur_hours:
+            dur_parts.append("{} hours".format(dur_hours))
+        if dur_days or dur_hours or dur_mins:
+            dur_parts.append("{} minutes".format(dur_mins))
+        dur_parts.append("{} seconds".format(dur_secs))
+        check['stats']['end'] = end_ts
+        check['stats']['duration'] = ', '.join(dur_parts)
+        check['stats']['number_dirs'] = number_dirs
+        check['stats']['total_kb'] = total_kb
+        check['stats']['total_mb'] = total_kb // 1024
+        check['stats']['total_gb'] = float(total_kb) / 1024.0 / 1024.0
+
+    # -------------------------------------------------------------------------
+    def send_results(self, total_dirs_top):
+
+        locale_conv = locale.localeconv()
+        dp = ','
+        ts = '.'
+        if 'decimal_point' in locale_conv and locale_conv['decimal_point'] != '.':
+            dp = locale_conv['decimal_point']
+        if 'thousands_sep' in locale_conv:
+            ts = locale_conv['thousands_sep']
+
+        subject = "Quota weekly summary (>= {:.0f} MB)".format(self.quota_kb / 1024)
+
+        body = "Hallo Berlin dudes!\n\n"
+
+        if total_dirs_top.keys():
+
+            max_len_home = 2
+            max_len_size = 4
+            for home in total_dirs_top.keys():
+                if len(home) > max_len_home:
+                    max_len_home = len(home)
+                size = total_dirs_top[home]['util_kb_avg'] / 1024
+                size_out = "{:,.0f} MB".format(size)
+                size_out = size_out.replace('.', ';').replace(',', ts).replace(';', dp)
+                if len(size_out) > max_len_size:
+                    max_len_size = len(size_out)
+
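+            # Note (editorial addition): "{:,.0f}".format(307200.0) yields
+            # '307,200'; the replace() chain below swaps '.' and ',' for the
+            # locale's separators, so a German locale renders it as '307.200 MB'.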
+            for home in sorted(
+                    total_dirs_top.keys(),
+                    key=lambda x: total_dirs_top[x]['util_kb_avg'],
+                    reverse=True):
+                size = total_dirs_top[home]['util_kb_avg'] / 1024
+                user = total_dirs_top[home]['user']
+                gecos = total_dirs_top[home]['gecos']
+                size_out = "{:,.0f} MB".format(size)
+                size_out = size_out.replace('.', ';').replace(',', ts).replace(';', dp)
+                line = " * {h:<{wh}} - {s:>{ws}} ({u} -> {g})\n".format(
+                    h=home, wh=max_len_home, s=size_out, ws=max_len_size, u=user, g=gecos)
+                body += line
+
+        else:
+
+            body += (
+                "No home directory found with a recursive size "
+                "greater or equal than {:.f} MB.").format(self.quota_kb / 1024)
+
+        body += "\n\nCheers\n\n" + self.mail_from + '\n'
+
+        LOG.debug("Subject: {!r}".format(subject))
+        LOG.debug("Body:\n{}".format(body))
+
+        self.send_mail(subject, body)
+
+    # -------------------------------------------------------------------------
+    def perform_statistics(self):
+
+        if 'checks' in self.status_data and len(self.status_data['checks'].keys()):
+            total_dirs = {}
+            for check_date in self.status_data['checks'].keys():
+                check = self.status_data['checks'][check_date]
+                if 'data' not in check or not check['data'].keys():
+                    continue
+                # Consolidating data ...
+                for home in check['data'].keys():
+                    pdata = check['data'][home]
+                    old_kb = 0
+                    nr_checks = 0
+                    if home in total_dirs:
+                        old_kb = total_dirs[home]['util_kb']
+                        nr_checks = total_dirs[home]['nr_checks']
+                    nr_checks += 1
+                    util_kb = old_kb + pdata['util_kb']
+                    total_dirs[home] = {
+                        'gecos': pdata['gecos'],
+                        'gid': pdata['gid'],
+                        'uid': pdata['uid'],
+                        'user': pdata['user'],
+                        'util_kb': util_kb,
+                        'nr_checks': nr_checks,
+                        'util_kb_avg': util_kb / nr_checks,
+                    }
+
+            total_dirs_top = {}
+            for home in total_dirs.keys():
+                if total_dirs[home]['util_kb_avg'] < self.quota_kb:
+                    continue
+                total_dirs_top[home] = total_dirs[home]
+            del total_dirs
+
+            if self.verbose > 1:
+                LOG.debug("Got top home directories:\n{}".format(pp(total_dirs_top)))
+
+            self.send_results(total_dirs_top)
+
+        # Rotate status file and rewrite an empty status file
+        self.rotate_status_file(self.now)
+        self.status_data = {}
+        self.status_data['last_check'] = self.now
+        self.write_status_data()
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/lib/pp_lib/rec_dict.py b/lib/pp_lib/rec_dict.py
new file mode 100644 (file)
index 0000000..4060a89
--- /dev/null
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@summary: The module provides an object class with a dict, which can
+          be updated in a recursive way.
+          It originates from Jannis Andrija Schnitzer::
+            https://gist.github.com/114831
+"""
+
+# Standard modules
+# import sys
+# import os
+import logging
+
+__author__ = 'jannis@itisme.org (Jannis Andrija Schnitzer)'
+__copyright__ = '(c) 2009 Jannis Andrija Schnitzer'
+__contact__ = 'jannis@itisme.org'
+__version__ = '0.2.1'
+__license__ = 'GPL3'
+
+log = logging.getLogger(__name__)
+
+
+# =============================================================================
+class RecursiveDictionary(dict):
+    """RecursiveDictionary provides the methods rec_update and iter_rec_update
+    that can be used to update member dictionaries rather than overwriting
+    them."""
+
+    # -------------------------------------------------------------------------
+    def rec_update(self, other, **third):
+        """Recursively update the dictionary with the contents of other and
+        third like dict.update() does - but don't overwrite sub-dictionaries.
+        Example:
+        >>> d = RecursiveDictionary({'foo': {'bar': 42}})
+        >>> d.rec_update({'foo': {'baz': 36}})
+        >>> d
+        {'foo': {'bar': 42, 'baz': 36}}
+        """
+
+        try:
+            iterator = iter(other.items())
+        except AttributeError:
+            iterator = other
+
+        self.iter_rec_update(iterator)
+        self.iter_rec_update(iter(third.items()))
+
+    # -------------------------------------------------------------------------
+    def iter_rec_update(self, iterator):
+        for (key, value) in iterator:
+            if key in self and \
+                    isinstance(self[key], dict) and isinstance(value, dict):
+                self[key] = RecursiveDictionary(self[key])
+                self[key].rec_update(value)
+            else:
+                self[key] = value
+
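+    # Illustrative note (editorial addition): nested dicts are merged
+    # recursively, so rec_update({'a': {'b': 1}}) followed by
+    # rec_update({'a': {'c': 2}}) leaves {'a': {'b': 1, 'c': 2}} instead of
+    # replacing the whole 'a' sub-dict as dict.update() would.
+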
+    # -------------------------------------------------------------------------
+    def __repr__(self):
+        return super(self.__class__, self).__repr__()
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
diff --git a/lib/pp_lib/test_home_app.py b/lib/pp_lib/test_home_app.py
new file mode 100644 (file)
index 0000000..99ac907
--- /dev/null
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+@author: Frank Brehm
+@contact: frank.brehm@pixelpark.com
+@copyright: © 2018 by Frank Brehm, Berlin
+@summary: The module for the mk-home application object.
+"""
+from __future__ import absolute_import
+
+# Standard modules
+import logging
+import logging.config
+import textwrap
+
+# Third party modules
+
+# Own modules
+from .homes_admin import PpHomesAdminError, PpHomesAdminApp
+
+__version__ = '0.5.3'
+LOG = logging.getLogger(__name__)
+
+
+# =============================================================================
+class PpTestHomeError(PpHomesAdminError):
+    pass
+
+
+# =============================================================================
+class PpTestHomeApp(PpHomesAdminApp):
+    """
+    Class for the 'test-home' application to check for unnecessary home directories.
+    """
+
+    # -------------------------------------------------------------------------
+    def __init__(self, appname=None, version=__version__):
+
+        self.default_reply_to = 'noreply@pixelpark.com'
+
+        description = textwrap.dedent('''\
+            This script detects unnecessary home directories - directories without
+            an appropriate user entry in the passwd database and not excluded
+            in {!r}.
+            ''').strip().format(self.default_exclude_file)
+
+        super(PpTestHomeApp, self).__init__(
+            appname=appname, version=version, description=description,
+            cfg_stems='test-home',
+        )
+
+        if not self.mail_recipients:
+            self.exit(5)
+
+        self.initialized = True
+
+    # -------------------------------------------------------------------------
+    def _run(self):
+
+        self.read_exclude_dirs()
+        self.read_passwd_homes()
+        self.check_homes()
+        self.send_results()
+
+    # -------------------------------------------------------------------------
+    def send_results(self):
+
+        if not self.unnecessary_dirs:
+            LOG.debug("No unnecessary home directories, nothing to inform.")
+            return
+
+        subject = 'Nicht benötigte Home-Verzeichnisse'
+        body = textwrap.dedent('''\
+            Die folgenden Home-Verzeichnisse befinden sich weder
+            in der lokalen passwd-Datei, im LDAP oder in der exclude-Liste.
+            Sie können damit archiviert und gelöscht werden.''')
+        body += '\n\n'
+        for home in self.unnecessary_dirs:
+            body += ' - ' + home + '\n'
+
+        self.send_mail(subject, body)
+
+
+# =============================================================================
+
+if __name__ == "__main__":
+
+    pass
+
+# =============================================================================
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/pp_lib/__init__.py b/pp_lib/__init__.py
deleted file mode 100644 (file)
index 99e1da9..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/env python3
-# -*- coding: utf-8 -*-
-
-__version__ = '0.1.5'
-
-# vim: ts=4 et list
diff --git a/pp_lib/app.py b/pp_lib/app.py
deleted file mode 100644 (file)
index 755c8dc..0000000
+++ /dev/null
@@ -1,809 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import sys
-import os
-import logging
-import re
-import traceback
-
-# Third party modules
-import argparse
-
-# Own modules
-from .errors import FunctionNotImplementedError, PpAppError
-
-from .common import terminal_can_colors
-from .common import caller_search_path
-
-from .colored import ColoredFormatter, colorstr
-
-from .obj import PpBaseObject
-
-__version__ = '0.3.6'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpApplication(PpBaseObject):
-    """
-    Class for the application objects.
-    """
-
-    re_prefix = re.compile(r'^[a-z0-9][a-z0-9_]*$', re.IGNORECASE)
-    re_anum = re.compile(r'[^A-Z0-9_]+', re.IGNORECASE)
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=False, usage=None, description=None,
-            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None):
-
-        self.arg_parser = None
-        """
-        @ivar: argparser object to parse commandline parameters
-        @type: argparse.ArgumentParser
-        """
-
-        self.args = None
-        """
-        @ivar: an object containing all commandline parameters
-               after parsing them
-        @type: Namespace
-        """
-
-        self._exit_value = 0
-        """
-        @ivar: return value of the application for exiting with sys.exit().
-        @type: int
-        """
-
-        self._usage = usage
-        """
-        @ivar: usage text used on argparse
-        @type: str
-        """
-
-        self._description = description
-        """
-        @ivar: a short text describing the application
-        @type: str
-        """
-
-        self._argparse_epilog = argparse_epilog
-        """
-        @ivar: an epilog displayed at the end of the argparse help screen
-        @type: str
-        """
-
-        self._argparse_prefix_chars = argparse_prefix_chars
-        """
-        @ivar: The set of characters that prefix optional arguments.
-        @type: str
-        """
-
-        self._terminal_has_colors = False
-        """
-        @ivar: flag, that the current terminal understands color ANSI codes
-        @type: bool
-        """
-
-        self._quiet = False
-        self._force = False
-        self._simulate = False
-
-        self.env = {}
-        """
-        @ivar: a dictionary with all application specific environment variables;
-               they will be detected by the env_prefix property of this object,
-               and their names will be transformed before saving their values in
-               self.env by removing the env_prefix from the variable name.
-        @type: dict
-        """
-
-        self._env_prefix = None
-        """
-        @ivar: a prefix for environment variables to detect them and to assign
-               their transformed names and their values in self.env
-        @type: str
-        """
-
-        super(PpApplication, self).__init__(
-            appname=appname,
-            verbose=verbose,
-            version=version,
-            base_dir=base_dir,
-            initialized=False,
-        )
-
-        if env_prefix:
-            ep = str(env_prefix).strip()
-            if not ep:
-                msg = "Invalid env_prefix {!r} given - it may not be empty.".format(env_prefix)
-                raise PpAppError(msg)
-            match = self.re_prefix.search(ep)
-            if not match:
-                msg = (
-                    "Invalid characters found in env_prefix {!r}, only "
-                    "alphanumeric characters and digits and underscore "
-                    "(this not as the first character) are allowed.").format(env_prefix)
-                raise PpAppError(msg)
-            self._env_prefix = ep
-        else:
-            ep = self.appname.upper() + '_'
-            self._env_prefix = self.re_anum.sub('_', ep)
-
-        self._init_arg_parser()
-        self._perform_arg_parser()
-
-        self._init_env()
-        self._perform_env()
-
-    # -----------------------------------------------------------
-    @property
-    def exit_value(self):
-        """The return value of the application for exiting with sys.exit()."""
-        return self._exit_value
-
-    @exit_value.setter
-    def exit_value(self, value):
-        v = int(value)
-        if v >= 0:
-            self._exit_value = v
-        else:
-            LOG.warn("Wrong exit_value {!r}, must be >= 0".format(value))
-
-    # -----------------------------------------------------------
-    @property
-    def exitvalue(self):
-        """The return value of the application for exiting with sys.exit()."""
-        return self._exit_value
-
-    @exitvalue.setter
-    def exitvalue(self, value):
-        self.exit_value = value
-
-    # -----------------------------------------------------------
-    @property
-    def usage(self):
-        """The usage text used on argparse."""
-        return self._usage
-
-    # -----------------------------------------------------------
-    @property
-    def description(self):
-        """A short text describing the application."""
-        return self._description
-
-    # -----------------------------------------------------------
-    @property
-    def argparse_epilog(self):
-        """An epilog displayed at the end of the argparse help screen."""
-        return self._argparse_epilog
-
-    # -----------------------------------------------------------
-    @property
-    def argparse_prefix_chars(self):
-        """The set of characters that prefix optional arguments."""
-        return self._argparse_prefix_chars
-
-    # -----------------------------------------------------------
-    @property
-    def terminal_has_colors(self):
-        """A flag, that the current terminal understands color ANSI codes."""
-        return self._terminal_has_colors
-
-    # -----------------------------------------------------------
-    @property
-    def env_prefix(self):
-        """A prefix for environment variables to detect them."""
-        return self._env_prefix
-
-    # -----------------------------------------------------------
-    @property
-    def usage_term(self):
-        """The localized version of 'usage: '"""
-        return 'Usage: '
-
-    # -----------------------------------------------------------
-    @property
-    def usage_term_len(self):
-        """The length of the localized version of 'usage: '"""
-        return len(self.usage_term)
-
-    # -----------------------------------------------------------
-    @property
-    def quiet(self):
-        """Quiet execution of the application,
-            only warnings and errors are emitted."""
-        return self._quiet
-
-    @quiet.setter
-    def quiet(self, value):
-        self._quiet = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def force(self):
-        """Forced execution of the application."""
-        return self._force
-
-    @force.setter
-    def force(self, value):
-        self._force = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def simulate(self):
-        """Simulation mode, nothing is really done."""
-        return self._simulate
-
-    @simulate.setter
-    def simulate(self, value):
-        self._simulate = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def show_force_opt(self):
-        """Flag, whether the command line option '--force' should be shown."""
-        return getattr(self, '_show_force_opt', False)
-
-    @show_force_opt.setter
-    def show_force_opt(self, value):
-        self._show_force_opt = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def show_simulate_opt(self):
-        """Flag, whether the command line option '--simulate' should be shown."""
-        return getattr(self, '_show_simulate_opt', False)
-
-    @show_simulate_opt.setter
-    def show_simulate_opt(self, value):
-        self._show_simulate_opt = bool(value)
-
-    # -------------------------------------------------------------------------
-    def exit(self, retval=-1, msg=None, trace=False):
-        """
-        Universal method to call sys.exit(). If fake_exit is set, a
-        FakeExitError exception is raised instead (useful for unittests.)
-
-        @param retval: the return value to give back to the operating system
-        @type retval: int
-        @param msg: a last message, which should be emitted before exit.
-        @type msg: str
-        @param trace: flag to output a stack trace before exiting
-        @type trace: bool
-
-        @return: None
-
-        """
-
-        retval = int(retval)
-        trace = bool(trace)
-
-        root_logger = logging.getLogger()
-        has_handlers = False
-        if root_logger.handlers:
-            has_handlers = True
-
-        if msg:
-            if has_handlers:
-                if retval:
-                    LOG.error(msg)
-                else:
-                    LOG.info(msg)
-            if not has_handlers:
-                if hasattr(sys.stderr, 'buffer'):
-                    sys.stderr.buffer.write(str(msg) + "\n")
-                else:
-                    sys.stderr.write(str(msg) + "\n")
-
-        if trace:
-            if has_handlers:
-                if retval:
-                    LOG.error(traceback.format_exc())
-                else:
-                    LOG.info(traceback.format_exc())
-            else:
-                traceback.print_exc()
-
-        sys.exit(retval)
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PpApplication, self).as_dict(short=short)
-        res['exit_value'] = self.exit_value
-        res['usage'] = self.usage
-        res['quiet'] = self.quiet
-        res['force'] = self.force
-        res['simulate'] = self.simulate
-        res['description'] = self.description
-        res['argparse_epilog'] = self.argparse_epilog
-        res['argparse_prefix_chars'] = self.argparse_prefix_chars
-        res['terminal_has_colors'] = self.terminal_has_colors
-        res['env_prefix'] = self.env_prefix
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_logging(self):
-        """
-        Initialize the logger object.
-        It creates a colored loghandler with all output to STDERR.
-        Maybe overridden in descendant classes.
-
-        @return: None
-        """
-
-        log_level = logging.INFO
-        if self.verbose:
-            log_level = logging.DEBUG
-        elif self.quiet:
-            log_level = logging.WARNING
-
-        root_logger = logging.getLogger()
-        root_logger.setLevel(log_level)
-
-        # create formatter
-        format_str = ''
-        if self.verbose > 1:
-            format_str = '[%(asctime)s]: '
-        format_str += self.appname + ': '
-        if self.verbose:
-            if self.verbose > 1:
-                format_str += '%(name)s(%(lineno)d) %(funcName)s() '
-            else:
-                format_str += '%(name)s '
-        format_str += '%(levelname)s - %(message)s'
-        formatter = None
-        if self.terminal_has_colors:
-            formatter = ColoredFormatter(format_str)
-        else:
-            formatter = logging.Formatter(format_str)
-
-        # create log handler for console output
-        lh_console = logging.StreamHandler(sys.stderr)
-        lh_console.setLevel(log_level)
-        lh_console.setFormatter(formatter)
-
-        root_logger.addHandler(lh_console)
-
-        return
-
-    # -------------------------------------------------------------------------
-    def terminal_can_color(self):
-        """
-        Method to detect, whether the current terminal (stdout and stderr)
-        is able to perform ANSI color sequences.
-
-        @return: both stdout and stderr can perform ANSI color sequences
-        @rtype: bool
-
-        """
-
-        term_debug = False
-        if self.verbose > 3:
-            term_debug = True
-        return terminal_can_colors(debug=term_debug)
-
-    # -------------------------------------------------------------------------
-    def post_init(self):
-        """
-        Method to execute before calling run(). Here some finishing actions
-        can be done after reading in commandline parameters,
-        configuration and so on.
-
-        This method could be overwritten by descendant classes; these
-        methods should always include a call to post_init() of the
-        parent class.
-
-        """
-
-        self.perform_arg_parser()
-        self.init_logging()
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if self.simulate:
-            LOG.warn("Simulation mode - nothing is really done.")
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """
-        Dummy function as main routine.
-
-        MUST be overwritten by descendant classes.
-
-        """
-
-        raise FunctionNotImplementedError('_run', self.__class__.__name__)
-
-    # -------------------------------------------------------------------------
-    def __call__(self):
-        """
-        Helper method to make the resulting object callable, e.g.::
-
-            app = PBApplication(...)
-            app()
-
-        @return: None
-
-        """
-
-        self.run()
-
-    # -------------------------------------------------------------------------
-    def run(self):
-        """
-        The visible start point of this object.
-
-        @return: None
-
-        """
-
-        LOG.debug("Executing {} ...".format(self.__class__.__name__))
-
-        if not self.initialized:
-            self.handle_error(
-                "The application is not completely initialized.", '', True)
-            self.exit(9)
-
-        try:
-            self.pre_run()
-        except Exception as e:
-            self.handle_error(str(e), e.__class__.__name__, True)
-            self.exit(98)
-
-        if not self.initialized:
-            raise PpAppError(
-                "Object {!r} seems not to be completely initialized.".format(
-                    self.__class__.__name__))
-
-        try:
-            self._run()
-        except Exception as e:
-            self.handle_error(str(e), e.__class__.__name__, True)
-            self.exit_value = 99
-
-        if self.verbose > 1:
-            LOG.info("Ending.")
-
-        try:
-            self.post_run()
-        except Exception as e:
-            self.handle_error(str(e), e.__class__.__name__, True)
-            self.exit_value = 97
-
-        self.exit(self.exit_value)
-
-    # -------------------------------------------------------------------------
-    def post_run(self):
-        """
-        Dummy function to run after the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if self.verbose > 1:
-            LOG.info("executing post_run() ...")
-
-    # -------------------------------------------------------------------------
-    def _init_arg_parser(self):
-        """
-        Local called method to initiate the argument parser.
-
-        @raise PBApplicationError: on some errors
-
-        """
-
-        self.arg_parser = argparse.ArgumentParser(
-            prog=self.appname,
-            description=self.description,
-            usage=self.usage,
-            epilog=self.argparse_epilog,
-            prefix_chars=self.argparse_prefix_chars,
-            add_help=False,
-        )
-
-        self.init_arg_parser()
-
-        general_group = self.arg_parser.add_argument_group('General options')
-        general_group.add_argument(
-            '--color',
-            action="store",
-            dest='color',
-            const='yes',
-            default='auto',
-            nargs='?',
-            choices=['yes', 'no', 'auto'],
-            help="Use colored output for messages.",
-        )
-
-        verbose_group = general_group.add_mutually_exclusive_group()
-
-        verbose_group.add_argument(
-            "-v", "--verbose",
-            action="count",
-            dest='verbose',
-            help='Increase the verbosity level',
-        )
-
-        verbose_group.add_argument(
-            "-q", "--quiet",
-            action="store_true",
-            dest='quiet',
-            help='Silent execution, only warnings and errors are emitted.',
-        )
-
-        if self.show_force_opt:
-            general_group.add_argument(
-                "-f", "--force",
-                action="store_true", dest="force",
-                help="Forced execution of this application",
-            )
-
-        if self.show_simulate_opt:
-            help_msg = getattr(self, '_simulate_opt_help', None)
-            if not help_msg or str(help_msg) == '':
-                help_msg = "Simulation af all actions, nothing is really done."
-            general_group.add_argument(
-                "-s", "--simulate",
-                action="store_true", dest="simulate", help=help_msg,
-            )
-
-        general_group.add_argument(
-            "-h", "--help",
-            action='help',
-            dest='help',
-            help='Show this help message and exit'
-        )
-        general_group.add_argument(
-            "--usage",
-            action='store_true',
-            dest='usage',
-            help="Display brief usage message and exit"
-        )
-        general_group.add_argument(
-            "-V", '--version',
-            action='version',
-            version='Version of %(prog)s: {}'.format(self.version),
-            help="Show program's version number and exit"
-        )
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Public available method to initiate the argument parser.
-
-        Note::
-             avoid adding the general options '--verbose', '--help', '--usage'
-             and '--version'. These options are always added after executing
-             this method.
-
-        Descendant classes may override this method.
-
-        """
-
-        pass
-
-    # -------------------------------------------------------------------------
-    def _perform_arg_parser(self):
-        """
-        Underlying method for parsing arguments.
-        """
-
-        self.args = self.arg_parser.parse_args()
-
-        if self.args.usage:
-            self.arg_parser.print_usage(sys.stdout)
-            self.exit(0)
-
-        if self.args.verbose is not None and self.args.verbose > self.verbose:
-            self.verbose = self.args.verbose
-
-        if self.args.quiet:
-            self.quiet = self.args.quiet
-
-        if self.args.color == 'yes':
-            self._terminal_has_colors = True
-        elif self.args.color == 'no':
-            self._terminal_has_colors = False
-        else:
-            self._terminal_has_colors = self.terminal_can_color()
-
-        if getattr(self.args, 'force', False):
-            self.force = True
-
-        if getattr(self.args, 'simulate', False):
-            self.simulate = True
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Public available method to execute some actions after parsing
-        the command line parameters.
-
-        Descendant classes may override this method.
-        """
-
-        pass
-
-    # -------------------------------------------------------------------------
-    def _init_env(self):
-        """
-        Initialization of self.env by application specific environment
-        variables.
-
-        It calls self.init_env() after it has done its job.
-
-        """
-
-        for (key, value) in list(os.environ.items()):
-
-            if not key.startswith(self.env_prefix):
-                continue
-
-            newkey = key.replace(self.env_prefix, '', 1)
-            self.env[newkey] = value
-
-        self.init_env()
-
-    # -------------------------------------------------------------------------
-    def init_env(self):
-        """
-        Public available method to initiate self.env in addition to the implicit
-        initialization done by this module.
-        Maybe it can be used to import environment variables whose
-        names do not start with self.env_prefix.
-
-        Currently a dummy method, which can be overridden by descendant classes.
-
-        """
-
-        pass
-
-    # -------------------------------------------------------------------------
-    def _perform_env(self):
-        """
-        Method to do some useful things with the found environment.
-
-        It calls self.perform_env() after it has done its job.
-
-        """
-
-        # try to detect verbosity level from environment
-        if 'VERBOSE' in self.env and self.env['VERBOSE']:
-            v = 0
-            try:
-                v = int(self.env['VERBOSE'])
-            except ValueError:
-                v = 1
-            if v > self.verbose:
-                self.verbose = v
-
-        self.perform_env()
-
-    # -------------------------------------------------------------------------
-    def perform_env(self):
-        """
-        Public available method to evaluate the found environment variables
-        after the initialization of self.env.
-
-        Currently a dummy method, which can be overridden by descendant classes.
-
-        """
-
-        pass
-
-    # -------------------------------------------------------------------------
-    def colored(self, msg, color):
-        """
-        Wrapper function to colorize the message. Depending on whether the current
-        terminal can display ANSI colors, the message is colorized or not.
-
-        @param msg: The message to colorize
-        @type msg: str
-        @param color: The color to use, must be one of the keys of COLOR_CODE
-        @type color: str
-
-        @return: the colorized message
-        @rtype: str
-
-        """
-
-        if not self.terminal_has_colors:
-            return msg
-        return colorstr(msg, color)
-
-    # -------------------------------------------------------------------------
-    def get_command(self, cmd, quiet=False):
-        """
-        Searches the OS search path for the given command and gives back the
-        normalized position of this command.
-        If the command is given as an absolute path, it checks the existence
-        of this command.
-
-        @param cmd: the command to search
-        @type cmd: str
-        @param quiet: No warning message, if the command could not be found,
-                      only a debug message
-        @type quiet: bool
-
-        @return: normalized complete path of this command, or None,
-                 if not found
-        @rtype: str or None
-
-        """
-
-        if self.verbose > 2:
-            LOG.debug("Searching for command {!r} ...".format(cmd))
-
-        # Checking an absolute path
-        if os.path.isabs(cmd):
-            if not os.path.exists(cmd):
-                LOG.warning("Command {!r} doesn't exists.".format(cmd))
-                return None
-            if not os.access(cmd, os.X_OK):
-                msg = "Command {!r} is not executable.".format(cmd)
-                LOG.warning(msg)
-                return None
-            return os.path.normpath(cmd)
-
-        # Checking a relative path
-        for d in caller_search_path():
-            if self.verbose > 3:
-                LOG.debug("Searching command in {!r} ...".format(d))
-            p = os.path.join(d, cmd)
-            if os.path.exists(p):
-                if self.verbose > 2:
-                    LOG.debug("Found {!r} ...".format(p))
-                if os.access(p, os.X_OK):
-                    return os.path.normpath(p)
-                else:
-                    LOG.debug("Command {!r} is not executable.".format(p))
-
-        # command not found, sorry
-        if quiet:
-            if self.verbose > 2:
-                LOG.debug("Command {!r} not found.".format(cmd))
-        else:
-            LOG.warning("Command {!r} not found.".format(cmd))
-
-        return None
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
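The get_command() helper documented above resolves a command name against the OS search path. The following is a minimal, self-contained sketch of that lookup, not part of pp_lib: it substitutes os.environ['PATH'] for the module's caller_search_path() helper and omits the logging and verbosity handling.

#!/usr/bin/env python3
# Standalone sketch only - illustrates the PATH lookup described in
# PpApplication.get_command(): absolute paths are merely checked for
# existence and executability, relative names are searched in $PATH.
import os


def find_command(cmd):
    """Return the normalized full path of 'cmd', or None if not found."""
    if os.path.isabs(cmd):
        if os.path.exists(cmd) and os.access(cmd, os.X_OK):
            return os.path.normpath(cmd)
        return None
    for d in os.environ.get('PATH', '').split(os.pathsep):
        p = os.path.join(d, cmd)
        if os.path.exists(p) and os.access(p, os.X_OK):
            return os.path.normpath(p)
    return None


if __name__ == '__main__':
    print(find_command('ls'))            # e.g. /usr/bin/ls
    print(find_command('/no/such/cmd'))  # None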
diff --git a/pp_lib/barracuda_sync_app.py b/pp_lib/barracuda_sync_app.py
deleted file mode 100644 (file)
index 6ed67f1..0000000
+++ /dev/null
@@ -1,498 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the barracuda-sync application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import re
-import textwrap
-import copy
-import shlex
-
-# Third party modules
-import six
-
-from ldap3 import ObjectDef
-from ldap3 import BASE, LEVEL, SUBTREE                              # noqa
-
-# Own modules
-from .common import pp
-
-from .ldap_app import PpLdapAppError, PpLdapApplication
-
-from .mailaddress import MailAddress
-
-__version__ = '0.4.4'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpBarracudaSyncError(PpLdapAppError):
-    pass
-
-
-# =============================================================================
-class PpBarracudaSyncApp(PpLdapApplication):
-    """Class for the 'barracuda-sync' application to ensure a synchronisation
-        of all existing aliases and virtual aliases in Postfix with the
-        LDAP entries used by Barracuda to ensure the existence of aliases.
-    """
-
-    default_barracuda_base_dn = 'ou=barracuda,ou=Applications,o=Pixelpark,o=isp'
-    postfix_config_dir = os.sep + os.path.join('etc', 'postfix')
-    postfix_maps_dir = os.path.join(postfix_config_dir, 'maps')
-
-    default_virtaliases_files = [
-        os.path.join(postfix_maps_dir, 'virtual-aliases'),
-    ]
-
-    default_ignore_aliases = [
-        'root',
-    ]
-
-    default_origin = 'pixelpark.com'
-
-    re_virtaliases_line = re.compile(r'^([^#\s:]+)\s', re.MULTILINE)
-
-    open_args = {}
-    if six.PY3:
-        open_args = {
-            'encoding': 'utf-8',
-            'errors': 'surrogateescape',
-        }
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.barracuda_base_dn = self.default_barracuda_base_dn
-        self.virtaliases_files = copy.copy(self.default_virtaliases_files)
-        self.origin = self.default_origin
-        self.ignore_aliases = copy.copy(self.default_ignore_aliases)
-
-        self.existing_aliases = []
-        self.ldap_aliases = []
-        self.aliases_to_create = []
-        self.aliases_to_remove = []
-        self.ignore_aliases_res = []
-
-        self._show_simulate_opt = True
-
-        description = textwrap.dedent('''\
-            Synchronization of existing virtual aliases
-            with alias definitions in LDAP for Barracuda.
-            ''').strip()
-
-        super(PpBarracudaSyncApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='barracuda-sync'
-        )
-
-        self._check_virtaliases_files()
-        self._init_ignore_aliases_res()
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PpBarracudaSyncApp, self).as_dict(short=short)
-        res['default_barracuda_base_dn'] = self.default_barracuda_base_dn
-        res['postfix_config_dir'] = self.postfix_config_dir
-        res['postfix_maps_dir'] = self.postfix_maps_dir
-        res['default_virtaliases_files'] = self.default_virtaliases_files
-        res['default_origin'] = self.default_origin
-        res['open_args'] = self.open_args
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initialize the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        self.arg_parser.add_argument(
-            '-P', '--postfix-dir',
-            metavar="DIR", dest='postfix_dir',
-            help="Configuration directory for Postfix (default: {!r}).".format(
-                self.postfix_config_dir)
-        )
-
-        super(PpBarracudaSyncApp, self).init_arg_parser()
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpBarracudaSyncApp, self).perform_config()
-
-        virtaliases_files = None
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 2:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            if section_name.lower() not in ('barracuda-sync', 'barracuda_sync', 'barracudasync'):
-                continue
-
-            section = self.cfg[section_name]
-            if self.verbose > 2:
-                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                    n=section_name, s=pp(section)))
-
-            if 'postfix_dir' in section:
-                self._init_postfix_dir(section['postfix_dir'])
-
-            if 'virtaliases_files' in section:
-                virtaliases_files = self._cfg_virtaliases_files(
-                    section['virtaliases_files'], virtaliases_files)
-
-            if 'ignore_aliases' in section:
-                self._cfg_ignore_aliases(section['ignore_aliases'])
-
-            if 'base_dn' in section:
-                v = section['base_dn'].strip()
-                if v:
-                    self.barracuda_base_dn = v
-
-            if 'origin' in section:
-                v = section['origin'].strip()
-                if v:
-                    self.origin = v
-
-        if hasattr(self.args, 'postfix_dir') and self.args.postfix_dir:
-            self._init_postfix_dir(self.args.postfix_dir)
-
-        if not os.path.isdir(self.postfix_config_dir):
-            LOG.error("Postfix directory {!r} does not exists or is not a directory.".format(
-                self.postfix_config_dir))
-            self.exit(1)
-
-        if not os.path.isdir(self.postfix_maps_dir):
-            LOG.error("Postfix maps directory {!r} does not exists or is not a directory.".format(
-                self.postfix_maps_dir))
-            self.exit(1)
-
-        self._init_virtaliases_files(virtaliases_files)
-
-    # -------------------------------------------------------------------------
-    def _cfg_virtaliases_files(self, value, virtaliases_files):
-
-        ret = None
-        if virtaliases_files is not None:
-            ret = copy.copy(virtaliases_files)
-
-        files = shlex.split(value)
-        if files:
-            if ret is None:
-                ret = []
-            for f in files:
-                if f not in ret:
-                    ret.append(f)
-
-        return ret
-
-    # -------------------------------------------------------------------------
-    def _cfg_ignore_aliases(self, value):
-
-        aliases = shlex.split(value)
-        if aliases:
-            for alias in aliases:
-                if alias.startswith('-'):
-                    alias = alias[1:]
-                    if alias == '':
-                        continue
-                    if alias in self.ignore_aliases:
-                        self.ignore_aliases.remove(alias)
-                elif alias not in self.ignore_aliases:
-                    self.ignore_aliases.append(alias)
-
-    # -------------------------------------------------------------------------
-    def _init_virtaliases_files(self, virtaliases_files):
-
-        self.virtaliases_files = copy.copy(self.default_virtaliases_files)
-        if virtaliases_files is None:
-            return
-
-        self.virtaliases_files = []
-        for afile in virtaliases_files:
-            if not os.path.isabs(afile):
-                afile = os.path.join(self.postfix_config_dir, afile)
-            afile = os.path.normpath(afile)
-            if afile not in self.virtaliases_files:
-                self.virtaliases_files.append(afile)
-
-    # -------------------------------------------------------------------------
-    def _check_virtaliases_files(self):
-
-        ok = True
-        for afile in self.virtaliases_files:
-
-            if not os.path.exists(afile):
-                LOG.error("Virtual aliases file {!r} does not exists.".format(afile))
-                ok = False
-                continue
-
-            if not os.path.isfile(afile):
-                LOG.error("Virtual aliases file {!r} is not a regular file.".format(afile))
-                ok = False
-                continue
-
-            if not os.access(afile, os.R_OK):
-                LOG.error("No read access to virtual aliases file {!r}.".format(afile))
-                ok = False
-                continue
-
-        if not ok:
-            self.exit(1)
-
-    # -------------------------------------------------------------------------
-    def _init_postfix_dir(self, value):
-
-        if os.path.isdir(value):
-            d = os.path.abspath(value)
-            self.postfix_config_dir = d
-            self.postfix_maps_dir = os.path.join(d, 'maps')
-            self.default_aliases_files = [
-                os.path.join(self.postfix_maps_dir, 'aliases'),
-            ]
-            self.default_virtaliases_files = [
-                os.path.join(self.postfix_maps_dir, 'virtual-aliases'),
-            ]
-        else:
-            LOG.warn("Postfix directory {!r} does not exists or is not a directory.".format(
-                value))
-
-    # -------------------------------------------------------------------------
-    def _init_ignore_aliases_res(self):
-
-        LOG.debug("Preparing regexes for aliases to ignore ...")
-        self.ignore_aliases_res = []
-
-        for alias in self.ignore_aliases:
-
-            a = alias.strip()
-            if a == '':
-                continue
-            pattern = r'^' + alias
-            if not MailAddress.valid_address(alias):
-                pattern += r'(?:@(?:.*\.)?' + re.escape(self.origin) + r')?'
-            pattern += r'\s*$'
-            regex = re.compile(pattern, re.IGNORECASE)
-            self.ignore_aliases_res.append(regex)
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overridden by descendant classes.
-
-        """
-
-        super(PpBarracudaSyncApp, self).pre_run()
-
-        self._check_ldap_barracuda_container()
-
-    # -------------------------------------------------------------------------
-    def _check_ldap_barracuda_container(self):
-
-        LOG.debug("Checking existence of Baracuda LDAP container {!r}.".format(
-            self.barracuda_base_dn))
-        query = '(objectclass=organizationalunit)'
-
-        self.ldap_connection.search(
-            search_base=self.barracuda_base_dn, search_filter=query,
-            search_scope=BASE, attributes='*')
-
-        LOG.debug("Found {} entries.".format(len(self.ldap_connection.response)))
-        if len(self.ldap_connection.response) < 1:
-            LOG.error((
-                "Did not found LDAP container {!r} for "
-                "Barracuda alias definitions.").format(
-                self.barracuda_base_dn))
-            self.exit(5)
-
-        entry = self.ldap_connection.response[0]
-        if self.verbose > 1:
-            LOG.debug("Container entry - class {cl!r}, content:\n{co}".format(
-                cl=entry.__class__.__name__, co=pp(entry)))
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        LOG.info("Starting ...")
-
-        self.read_virtaliases_files()
-        self.existing_aliases.sort(key=str.lower)
-        LOG.info("Found {} existing aliases.".format(len(self.existing_aliases)))
-        if self.verbose > 1:
-            LOG.debug("Existing aliases:\n{}".format(pp(self.existing_aliases)))
-
-        self.read_ldap_aliases()
-        self.eval_diffs()
-
-        self.add_failing_ldap_entries()
-        self.remove_unnecessary_aliases()
-
-        LOG.info("Fertsch!")
-
-    # -------------------------------------------------------------------------
-    def read_virtaliases_files(self):
-
-        LOG.info("Reading all virtual aliases files ...")
-        for afile in self.virtaliases_files:
-            if self.verbose > 1:
-                LOG.debug("Checking for virtaliases file {!r} ...".format(afile))
-            if not os.path.isfile(afile):
-                continue
-            content = ''
-            LOG.debug("Reading virtaliases file {!r} ...".format(afile))
-            with open(afile, 'r', **self.open_args) as fh:
-                content = fh.read()
-            aliases = self.re_virtaliases_line.findall(content)
-            for alias in aliases:
-                do_add = True
-                for regex in self.ignore_aliases_res:
-                    if regex.search(alias):
-                        do_add = False
-                        break
-                if not do_add:
-                    continue
-                if alias not in self.existing_aliases:
-                    if self.verbose > 2:
-                        LOG.debug("Registring existing alias {!r}.".format(alias))
-                    self.existing_aliases.append(alias)
-
-    # -------------------------------------------------------------------------
-    def read_ldap_aliases(self):
-
-        LOG.info("Reading all aliases from LDAP ...")
-
-        alias = ObjectDef(['mailRecipient'])
-        alias += ['cn', 'mail']
-
-        query_filter = '(&(objectclass=mailRecipient)(mail=*))'
-
-        entries = self.ldap_search_subtree(alias, query_filter, base=self.barracuda_base_dn)
-
-        for entry in entries:
-            dn = entry.entry_dn
-            cn = entry['cn'][0]
-            mail = entry['mail'][0]
-            if self.verbose > 3:
-                LOG.debug("Found LDAP alias, DN: {dn!r}, CN: {cn!r}, Mail: {m!r}.".format(
-                    dn=dn, cn=cn, m=mail))
-
-            if not cn:
-                continue
-            if cn not in self.ldap_aliases:
-                if self.verbose > 2:
-                    LOG.debug("Registring LDAP alias {!r}.".format(cn))
-                self.ldap_aliases.append(cn)
-
-        self.ldap_aliases.sort(key=str.lower)
-        LOG.info("Found {} LDAP aliases.".format(len(self.ldap_aliases)))
-        if self.verbose > 1:
-            LOG.debug("LDAP aliases:\n{}".format(pp(self.ldap_aliases)))
-
-    # -------------------------------------------------------------------------
-    def eval_diffs(self):
-
-        LOG.info("Evaluating differences ...")
-
-        self.aliases_to_create = []
-        self.aliases_to_remove = []
-
-        for alias in self.existing_aliases:
-            if alias not in self.ldap_aliases and alias not in self.aliases_to_create:
-                self.aliases_to_create.append(alias)
-
-        for alias in self.ldap_aliases:
-            if alias not in self.existing_aliases and alias not in self.aliases_to_remove:
-                self.aliases_to_remove.append(alias)
-
-        LOG.info("Aliases to create in LDAP:\n{}".format(pp(self.aliases_to_create)))
-        LOG.info("Aliases to remove from LDAP:\n{}".format(pp(self.aliases_to_remove)))
-
-    # -------------------------------------------------------------------------
-    def add_failing_ldap_entries(self):
-
-        LOG.info("Adding failing LDAP aliases ...")
-
-        for alias in self.aliases_to_create:
-
-            mail = alias
-            if not MailAddress.valid_address(alias):
-                mail += '@' + self.origin
-
-            dn = 'cn=' + alias + ',' + self.barracuda_base_dn
-            object_class = ["top", "mailRecipient"]
-            attributes = {
-                'mail': mail,
-            }
-            LOG.info("Creating LDAP alias {a!r} => {dn!r}.".format(a=alias, dn=dn))
-            LOG.debug("Object-Classes: {}".format(pp(object_class)))
-            LOG.debug("Attributes: {}".format(pp(attributes)))
-            if not self.simulate:
-                self.ldap_connection.add(dn, object_class, attributes)
-                LOG.debug("Result: {}".format(self.ldap_connection.result))
-
-    # -------------------------------------------------------------------------
-    def remove_unnecessary_aliases(self):
-
-        LOG.info("Removing unnecessary LDAP aliases ...")
-
-        attributes = ['cn', 'mail']
-
-        for alias in self.aliases_to_remove:
-
-            query = '(&(objectclass=mailRecipient)(cn=' + alias + '))'
-            LOG.debug("Searching for entry with CN {!r}.".format(alias))
-            self.ldap_connection.search(
-                search_base=self.barracuda_base_dn,
-                search_filter=query,
-                search_scope=LEVEL,
-                attributes=attributes)
-
-            LOG.debug("Found {} entries.".format(len(self.ldap_connection.response)))
-
-            if not self.ldap_connection.response:
-                LOG.error("No LDAP entry found for CN {!r}.".format(alias))
-                continue
-            entry = self.ldap_connection.response[0]
-            dn = entry['dn']
-
-            LOG.info("Removing LDAP entry {!r} ...".format(dn))
-            if not self.simulate:
-                self.ldap_connection.delete(dn)
-                LOG.debug("Result: {}".format(self.ldap_connection.result))
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
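As a rough illustration of the synchronisation logic above - parsing the Postfix virtual-aliases map with re_virtaliases_line and diffing the result against the LDAP aliases, as eval_diffs() does - here is a minimal standalone sketch. The sample data is invented and the snippet is not part of pp_lib.

#!/usr/bin/env python3
# Standalone sketch only - mirrors the parsing and diff steps of
# PpBarracudaSyncApp.read_virtaliases_files() and eval_diffs().
import re

# Same pattern as re_virtaliases_line above: the alias is the first token of a
# line that is neither empty nor a comment.
RE_VIRTALIASES_LINE = re.compile(r'^([^#\s:]+)\s', re.MULTILINE)


def eval_alias_diffs(existing_aliases, ldap_aliases):
    """Return (aliases to create in LDAP, aliases to remove from LDAP)."""
    to_create = [a for a in existing_aliases if a not in ldap_aliases]
    to_remove = [a for a in ldap_aliases if a not in existing_aliases]
    return to_create, to_remove


if __name__ == '__main__':
    sample_map = (
        "# virtual aliases\n"
        "postmaster  root@example.com\n"
        "hostmaster  admins@example.com\n"
    )
    existing = RE_VIRTALIASES_LINE.findall(sample_map)  # ['postmaster', 'hostmaster']
    ldap = ['postmaster', 'old-alias']
    print(eval_alias_diffs(existing, ldap))             # (['hostmaster'], ['old-alias'])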
diff --git a/pp_lib/cfg_app.py b/pp_lib/cfg_app.py
deleted file mode 100644 (file)
index 6efbe10..0000000
+++ /dev/null
@@ -1,939 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the application object with support
-          for configuration files.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import re
-import copy
-import json
-import socket
-import pwd
-import pipes
-import codecs
-import ipaddress
-
-from subprocess import Popen, PIPE
-
-from email.mime.text import MIMEText
-from email import charset
-
-import smtplib
-
-# Third party modules
-import six
-
-from six import StringIO
-from six.moves import configparser
-
-from configparser import Error as ConfigParseError
-
-# Own modules
-from .global_version import __version__ as __global_version__
-
-from .errors import PpAppError
-
-from .common import pp, to_bool, RE_DOT_AT_END
-
-from .merge import merge_structure
-
-from .mailaddress import MailAddress
-
-from .app import PpApplication
-
-__version__ = '0.7.1'
-LOG = logging.getLogger(__name__)
-
-VALID_MAIL_METHODS = ('smtp', 'sendmail')
-
-
-# =============================================================================
-class PpCfgAppError(PpAppError):
-    """Base error class for all exceptions happened during
-    execution this configured application"""
-
-    pass
-
-
-# =============================================================================
-class PpConfigApplication(PpApplication):
-    """
-    Class for configured application objects.
-    """
-
-    default_mail_recipients = [
-        'frank.brehm@pixelpark.com'
-    ]
-    default_mail_cc = [
-        'thomas.kotschok@pixelpark.com',
-    ]
-
-    default_reply_to = 'frank.brehm@pixelpark.com'
-
-    default_mail_server = 'mx.pixelpark.net'
-
-    current_user_name = pwd.getpwuid(os.getuid()).pw_name
-    current_user_gecos = pwd.getpwuid(os.getuid()).pw_gecos
-    default_mail_from = MailAddress(current_user_name, socket.getfqdn())
-
-    whitespace_re = re.compile(r'(?:[,;]+|\s*[,;]*\s+)+')
-
-    charset.add_charset('utf-8', charset.SHORTEST, charset.QP)
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=None, usage=None, description=None,
-            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
-            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False):
-
-        self.cfg_encoding = cfg_encoding
-        self._need_config_file = bool(need_config_file)
-
-        self.cfg = {}
-
-        self._cfg_dir = None
-        self.cfg_stems = []
-        self.cfg_files = []
-        self.log_cfg_files = []
-
-        self.mail_recipients = copy.copy(self.default_mail_recipients)
-        self.mail_from = '{n} <{m}>'.format(
-            n=self.current_user_gecos, m=self.default_mail_from)
-        self.mail_cc = copy.copy(self.default_mail_cc)
-        self.reply_to = self.default_reply_to
-        self.mail_method = 'smtp'
-        self.mail_server = self.default_mail_server
-        self.smtp_port = 25
-        self._config_has_errors = None
-
-        super(PpConfigApplication, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
-            initialized=False, usage=usage, description=description,
-            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
-            env_prefix=env_prefix,
-        )
-
-        if cfg_dir is None:
-            self._cfg_dir = 'pixelpark'
-        else:
-            d = str(cfg_dir).strip()
-            if d == '':
-                self._cfg_dir = None
-            else:
-                self._cfg_dir = d
-
-        if cfg_stems:
-            if isinstance(cfg_stems, list):
-                for stem in cfg_stems:
-                    s = str(stem).strip()
-                    if not s:
-                        msg = "Invalid configuration stem {!r} given.".format(stem)
-                        raise PpCfgAppError(msg)
-                    self.cfg_stems.append(s)
-            else:
-                s = str(cfg_stems).strip()
-                if not s:
-                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
-                    raise PpCfgAppError(msg)
-                self.cfg_stems.append(s)
-        else:
-            self.cfg_stems = self.appname
-
-        self._init_cfgfiles()
-
-        enc = getattr(self.args, 'cfg_encoding', None)
-        if enc:
-            self.cfg_encoding = enc
-
-        self.perform_arg_parser()
-        self.init_logging()
-
-        self._read_config()
-        self._perform_config()
-
-        self._init_log_cfgfiles()
-        self.reinit_logging()
-
-    # -----------------------------------------------------------
-    @property
-    def need_config_file(self):
-        """
-        Flag showing whether this application requires a configuration file.
-        """
-        return getattr(self, '_need_config_file', False)
-
-    # -----------------------------------------------------------
-    @property
-    def cfg_encoding(self):
-        """The encoding character set of the configuration files."""
-        return self._cfg_encoding
-
-    @cfg_encoding.setter
-    def cfg_encoding(self, value):
-        try:
-            codec = codecs.lookup(value)
-        except Exception as e:
-            msg = "{c} on setting encoding {v!r}: {e}".format(
-                c=e.__class__.__name__, v=value, e=e)
-            LOG.error(msg)
-        else:
-            self._cfg_encoding = codec.name
-
-    # -----------------------------------------------------------
-    @property
-    def config_has_errors(self):
-        """A flag, showing, that there are errors in configuration."""
-        return self._config_has_errors
-
-    @config_has_errors.setter
-    def config_has_errors(self, value):
-        if value is None:
-            self._config_has_errors = None
-        else:
-            self._config_has_errors = to_bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def cfg_dir(self):
-        """The directory containing the configuration files."""
-        return self._cfg_dir
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PpConfigApplication, self).as_dict(short=short)
-        res['need_config_file'] = self.need_config_file
-        res['cfg_encoding'] = self.cfg_encoding
-        res['cfg_dir'] = self.cfg_dir
-        res['config_has_errors'] = self.config_has_errors
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initialize the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        mail_group = self.arg_parser.add_argument_group('Mailing options')
-
-        mail_group.add_argument(
-            '--recipients', '--mail-recipients',
-            metavar="ADDRESS", nargs='+', dest="mail_recipients",
-            help="Mail addresses of all recipients for mails generated by this script."
-        )
-
-        mail_group.add_argument(
-            '--cc', '--mail-cc',
-            metavar="ADDRESS", nargs='*', dest="mail_cc",
-            help="Mail addresses of all CC recipients for mails generated by this script."
-        )
-
-        mail_group.add_argument(
-            '--reply-to', '--mail-reply-to',
-            metavar="ADDRESS", dest="mail_reply_to",
-            help="Reply mail address for mails generated by this script."
-        )
-
-        mail_group.add_argument(
-            '--mail-method',
-            metavar="METHOD", choices=VALID_MAIL_METHODS, dest="mail_method",
-            help=(
-                "Method for sending the mails generated by this script. "
-                "Valid values: {v}, default: {d!r}.".format(
-                    v=', '.join(map(lambda x: repr(x), VALID_MAIL_METHODS)),
-                    d=self.mail_method))
-        )
-
-        mail_group.add_argument(
-            '--mail-server',
-            metavar="SERVER", dest="mail_server",
-            help=(
-                "Mail server for submitting generated by this script if "
-                "the mail method of this script is 'smtp'. Default: {!r}.").format(
-                self.mail_server)
-        )
-
-        mail_group.add_argument(
-            '--smtp-port',
-            metavar="PORT", type=int, dest='smtp_port',
-            help=(
-                "The port to use for submitting generated by this script if "
-                "the mail method of this script is 'smtp'. Default: {}.".format(self.smtp_port))
-        )
-
-        cfgfile_group = self.arg_parser.add_argument_group('Config file options')
-
-        cfgfile_group.add_argument(
-            "-C", "--cfgfile", "--cfg-file", "--config",
-            metavar="FILE", nargs='+', dest="cfg_file",
-            help="Configuration files to use additional to the standard configuration files.",
-        )
-
-        cfgfile_group.add_argument(
-            "--log-cfgfile",
-            metavar="FILE", dest="log_cfgfile",
-            help=(
-                "Configuration file for logging in JSON format. "
-                "See https://docs.python.org/3/library/logging.config.html"
-                "#logging-config-dictschema how the structures has to be defined.")
-        )
-
-        cfgfile_group.add_argument(
-            "--cfg-encoding",
-            metavar="ENCODING", dest="cfg_encoding", default=self.cfg_encoding,
-            help=(
-                "The encoding character set of the configuration files "
-                "(default: %(default)r)."),
-        )
-
-    # -------------------------------------------------------------------------
-    def _init_cfgfiles(self):
-        """Method to generate the self.cfg_files list."""
-
-        self.cfg_files = []
-
-        cfg_basenames = []
-        for stem in self.cfg_stems:
-            cfg_basename = '{}.ini'.format(stem)
-            cfg_basenames.append(cfg_basename)
-
-        # add /etc/app/app.ini or $VIRTUAL_ENV/etc/app/app.ini
-        etc_dir = os.sep + 'etc'
-        if 'VIRTUAL_ENV' in os.environ:
-            etc_dir = os.path.join(os.environ['VIRTUAL_ENV'], 'etc')
-        for cfg_basename in cfg_basenames:
-            syscfg_fn = None
-            if self.cfg_dir:
-                syscfg_fn = os.path.join(etc_dir, self.cfg_dir, cfg_basename)
-            else:
-                syscfg_fn = os.path.join(etc_dir, cfg_basename)
-            self.cfg_files.append(syscfg_fn)
-
-        # add <WORKDIR>/etc/app.ini
-        mod_dir = os.path.dirname(__file__)
-        work_dir = os.path.abspath(os.path.join(mod_dir, '..'))
-        work_etc_dir = os.path.join(work_dir, 'etc')
-        if self.verbose > 1:
-            LOG.debug("Searching for {!r} ...".format(work_etc_dir))
-        for cfg_basename in cfg_basenames:
-            self.cfg_files.append(os.path.join(work_etc_dir, cfg_basename))
-
-        # add $HOME/.config/app.ini
-        usercfg_fn = None
-        user_cfg_dir = os.path.expanduser('~/.config')
-        if user_cfg_dir:
-            if self.cfg_dir:
-                user_cfg_dir = os.path.join(user_cfg_dir, self.cfg_dir)
-            if self.verbose > 1:
-                LOG.debug("user_cfg_dir: {!r}".format(user_cfg_dir))
-            for cfg_basename in cfg_basenames:
-                usercfg_fn = os.path.join(user_cfg_dir, cfg_basename)
-                self.cfg_files.append(usercfg_fn)
-
-        # add a configfile given on command line with --cfg-file
-        cmdline_cfg = getattr(self.args, 'cfg_file', None)
-        if cmdline_cfg:
-            for usercfg_fn in cmdline_cfg:
-                self.cfg_files.append(usercfg_fn)
-
-    # -------------------------------------------------------------------------
-    def _init_log_cfgfiles(self):
-        """Method to generate the self.log_cfg_files list."""
-
-        self.log_cfg_files = []
-
-        cfg_basename = 'logging.json'
-
-        # add /etc/app/logging.json or $VIRTUAL_ENV/etc/app/logging.json
-        etc_dir = os.sep + 'etc'
-        if 'VIRTUAL_ENV' in os.environ:
-            etc_dir = os.path.join(os.environ['VIRTUAL_ENV'], 'etc')
-        syscfg_fn = None
-        if self.cfg_dir:
-            syscfg_fn = os.path.join(etc_dir, self.cfg_dir, cfg_basename)
-        else:
-            syscfg_fn = os.path.join(etc_dir, cfg_basename)
-        self.log_cfg_files.append(syscfg_fn)
-
-        # add <WORKDIR>/etc/logging.json
-        mod_dir = os.path.dirname(__file__)
-        work_dir = os.path.abspath(os.path.join(mod_dir, '..'))
-        work_etc_dir = os.path.join(work_dir, 'etc')
-        if self.verbose > 1:
-            LOG.debug("Searching for {!r} ...".format(work_etc_dir))
-        self.log_cfg_files.append(os.path.join(work_etc_dir, cfg_basename))
-
-        # add $HOME/.config/logging.json
-        usercfg_fn = None
-        user_cfg_dir = os.path.expanduser('~/.config')
-        if user_cfg_dir:
-            if self.cfg_dir:
-                user_cfg_dir = os.path.join(user_cfg_dir, self.cfg_dir)
-            if self.verbose > 1:
-                LOG.debug("user_cfg_dir: {!r}".format(user_cfg_dir))
-            usercfg_fn = os.path.join(user_cfg_dir, cfg_basename)
-            self.log_cfg_files.append(usercfg_fn)
-
-        # add a configfile given on command line with --log-cfgfile
-        cmdline_cfg = getattr(self.args, 'log_cfgfile', None)
-        if cmdline_cfg:
-            self.log_cfg_files.append(cmdline_cfg)
-
-        if self.verbose > 1:
-            LOG.debug("Log config files:\n{}".format(pp(self.log_cfg_files)))
-
-    # -------------------------------------------------------------------------
-    def _init_logging_from_jsonfile(self):
-
-        open_opts = {}
-        if six.PY3:
-            open_opts['encoding'] = 'utf-8'
-            open_opts['errors'] = 'surrogateescape'
-
-        found = False
-        for cfg_file in reversed(self.log_cfg_files):
-
-            if self.verbose > 1:
-                LOG.debug("Searching for {!r} ...".format(cfg_file))
-
-            if not os.path.exists(cfg_file):
-                continue
-            if not os.path.isfile(cfg_file):
-                continue
-            if not os.access(cfg_file, os.R_OK):
-                msg = "No read access to {!r}.".format(cfg_file)
-                self.handle_error(msg, "File error")
-                continue
-
-            log_cfg = None
-            if self.verbose > 1:
-                LOG.debug("Reading and evaluating {!r} ...".format(cfg_file))
-            with open(cfg_file, 'r', **open_opts) as fh:
-                try:
-                    log_cfg = json.load(fh)
-                except (ValueError, TypeError) as e:
-                    msg = "Wrong file {!r} - ".format(cfg_file) + str(e)
-                    self.handle_error(msg, e.__class__.__name__)
-                    continue
-            if self.verbose:
-                if 'root' in log_cfg:
-                    log_cfg['root']['level'] = 'DEBUG'
-                if 'handlers' in log_cfg:
-                    for handler_name in log_cfg['handlers'].keys():
-                        handler = log_cfg['handlers'][handler_name]
-                        handler['level'] = 'DEBUG'
-            if self.verbose > 1:
-                LOG.debug("Evaluated configuration from JSON:\n{} ...".format(pp(log_cfg)))
-            try:
-                logging.config.dictConfig(log_cfg)
-            except Exception as e:
-                msg = "Wrong file {!r} - ".format(cfg_file) + str(e)
-                self.handle_error(msg, e.__class__.__name__)
-                continue
-            found = True
-            break
-
-        return found
-
-    # -------------------------------------------------------------------------
-    def reinit_logging(self):
-        """
-        Re-Initialize the logger object.
-        It creates a colored loghandler with all output to STDERR.
-        May be overridden in descendant classes.
-
-        @return: None
-        """
-
-        root_logger = logging.getLogger()
-
-        if self._init_logging_from_jsonfile():
-            if self.verbose:
-                root_logger.setLevel(logging.DEBUG)
-            return
-
-        return
-
-    # -------------------------------------------------------------------------
-    def _read_config(self):
-
-        if self.verbose > 2:
-            LOG.debug("Reading config files with character set {!r} ...".format(
-                self.cfg_encoding))
-        self._config_has_errors = None
-
-        open_opts = {}
-        if six.PY3 and self.cfg_encoding:
-            open_opts['encoding'] = self.cfg_encoding
-            open_opts['errors'] = 'surrogateescape'
-
-        for cfg_file in self.cfg_files:
-            if self.verbose > 2:
-                LOG.debug("Searching for {!r} ...".format(cfg_file))
-            if not os.path.isfile(cfg_file):
-                if self.verbose > 3:
-                    LOG.debug("Config file {!r} not found.".format(cfg_file))
-                continue
-            if self.verbose > 1:
-                LOG.debug("Reading {!r} ...".format(cfg_file))
-
-            config = configparser.ConfigParser()
-            try:
-                with open(cfg_file, 'r', **open_opts) as fh:
-                    stream = StringIO("[default]\n" + fh.read())
-                    if six.PY2:
-                        config.readfp(stream)
-                    else:
-                        config.read_file(stream)
-            except ConfigParseError as e:
-                msg = "Wrong configuration in {!r} found: ".format(cfg_file)
-                msg += str(e)
-                self.handle_error(msg, "Configuration error")
-                continue
-
-            cfg = {}
-            for section in config.sections():
-                if section not in cfg:
-                    cfg[section] = {}
-                for (key, value) in config.items(section):
-                    k = key.lower()
-                    cfg[section][k] = value
-            if self.verbose > 2:
-                LOG.debug("Evaluated config from {f!r}:\n{c}".format(
-                    f=cfg_file, c=pp(cfg)))
-            self.cfg = merge_structure(self.cfg, cfg)
-
-        if self.verbose > 1:
-            LOG.debug("Evaluated config total:\n{}".format(pp(self.cfg)))
-
-    # -------------------------------------------------------------------------
-    def _perform_config(self):
-        """Execute some actions after reading the configuration."""
-
-        for section_name in self.cfg.keys():
-
-            section = self.cfg[section_name]
-
-            if section_name.lower() == 'general':
-                self._perform_config_general(section, section_name)
-                continue
-
-            if section_name.lower() == 'mail':
-                self._perform_config_mail(section, section_name)
-                continue
-
-        self.perform_config()
-
-        self._perform_mail_cmdline_options()
-
-        if self.config_has_errors:
-            LOG.error("There are errors in configuration.")
-            self.exit(1)
-        else:
-            LOG.debug("There are no errors in configuration.")
-            self.config_has_errors = False
-
-    # -------------------------------------------------------------------------
-    def _perform_config_general(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'verbose' in section:
-            v = section['verbose']
-            if to_bool(v):
-                try:
-                    v = int(v)
-                except ValueError:
-                    v = 1
-                    pass
-                except TypeError:
-                    v = 1
-                    pass
-                if v > self.verbose:
-                    self.verbose = v
-                root_logger = logging.getLogger()
-                root_logger.setLevel(logging.DEBUG)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        self._perform_config_mail_rcpt(section, section_name)
-        self._perform_config_mail_cc(section, section_name)
-        self._perform_config_mail_reply_to(section, section_name)
-        self._perform_config_mail_method(section, section_name)
-        self._perform_config_mail_server(section, section_name)
-        self._perform_config_smtp_port(section, section_name)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail_rcpt(self, section, section_name):
-
-        if 'mail_recipients' not in section:
-            return
-
-        v = section['mail_recipients'].strip()
-        self.mail_recipients = []
-        if v:
-            tokens = self.whitespace_re.split(v)
-            for token in tokens:
-                if MailAddress.valid_address(token):
-                    if token not in self.mail_recipients:
-                        self.mail_recipients.append(token)
-                else:
-                    msg = (
-                        "Found invalid recipient mail address {!r} "
-                        "in configuration.").format(
-                        token)
-                    LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail_cc(self, section, section_name):
-
-        if 'mail_cc' not in section:
-            return
-
-        v = section['mail_cc'].strip()
-        self.mail_cc = []
-        if v:
-            tokens = self.whitespace_re.split(v)
-            if self.verbose > 1:
-                LOG.debug("CC addresses:\n{}".format(pp(tokens)))
-            for token in tokens:
-                if MailAddress.valid_address(token):
-                    if token not in self.mail_cc:
-                        self.mail_cc.append(token)
-                else:
-                    msg = "Found invalid cc mail address {!r} in configuration.".format(
-                        token)
-                    LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail_reply_to(self, section, section_name):
-
-        if 'reply_to' not in section:
-            return
-
-        v = section['reply_to'].strip()
-        self.reply_to = None
-        if v:
-            tokens = self.whitespace_re.split(v)
-            if len(tokens):
-                if MailAddress.valid_address(tokens[0]):
-                    self.reply_to = tokens[0]
-                else:
-                    msg = "Found invalid reply mail address {!r} in configuration.".format(
-                        tokens[0])
-                    LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail_method(self, section, section_name):
-
-        if 'mail_method' not in section:
-            return
-
-        v = section['mail_method'].strip().lower()
-        if v:
-            if v in VALID_MAIL_METHODS:
-                self.mail_method = v
-            else:
-                msg = "Found invalid mail method {!r} in configuration.".format(
-                    section['mail_method'])
-                LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_config_mail_server(self, section, section_name):
-
-        if 'mail_server' not in section:
-            return
-
-        v = section['mail_server'].strip()
-        if v:
-            self.mail_server = v
-
-    # -------------------------------------------------------------------------
-    def _perform_config_smtp_port(self, section, section_name):
-
-        if 'smtp_port' not in section:
-            return
-
-        v = section['smtp_port']
-        port = self.smtp_port
-        try:
-            port = int(v)
-        except (ValueError, TypeError):
-            msg = "Found invalid SMTP port number {!r} in configuration.".format(v)
-            LOG.error(msg)
-        else:
-            if port <= 0:
-                msg = "Found invalid SMTP port number {!r} in configuration.".format(port)
-                LOG.error(msg)
-            else:
-                self.smtp_port = port
-
-    # -------------------------------------------------------------------------
-    def _perform_mail_cmdline_options(self):
-
-        self._perform_cmdline_mail_rcpt()
-        self._perform_cmdline_mail_cc()
-        self._perform_cmdline_reply_to()
-
-        v = getattr(self.args, 'mail_method', None)
-        if v:
-            self.mail_method = v
-
-        v = getattr(self.args, 'mail_server', None)
-        if v:
-            self.mail_server = v
-
-        v = getattr(self.args, 'smtp_port', None)
-        if v is not None:
-            if v <= 0:
-                msg = "Got invalid SMTP port number {!r}.".format(v)
-                LOG.error(msg)
-            else:
-                self.smtp_port = v
-
-    # -------------------------------------------------------------------------
-    def _perform_cmdline_mail_rcpt(self):
-
-        v = getattr(self.args, 'mail_recipients', None)
-        if v is not None:
-            self.mail_recipients = []
-            for addr in v:
-                tokens = self.whitespace_re.split(addr)
-                for token in tokens:
-                    if MailAddress.valid_address(token):
-                        if token not in self.mail_recipients:
-                            self.mail_recipients.append(token)
-                    else:
-                        msg = "Got invalid recipient mail address {!r}.".format(token)
-                        LOG.error(msg)
-        if not self.mail_recipients:
-            msg = "Did not found any valid recipient mail addresses."
-            LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_cmdline_mail_cc(self):
-
-        v = getattr(self.args, 'mail_cc', None)
-        if v is None:
-            return
-
-        self.mail_cc = []
-        for addr in v:
-            tokens = self.whitespace_re.split(addr)
-            for token in tokens:
-                if MailAddress.valid_address(token):
-                    if token not in self.mail_cc:
-                        self.mail_cc.append(token)
-                else:
-                    msg = "Got invalid CC mail address {!r}.".format(token)
-                    LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def _perform_cmdline_reply_to(self):
-
-        v = getattr(self.args, 'mail_reply_to', None)
-        if not v:
-            return
-
-        tokens = self.whitespace_re.split(v)
-        if len(tokens):
-            if MailAddress.valid_address(tokens[0]):
-                self.reply_to = tokens[0]
-            else:
-                msg = "Got invalid reply mail address {!r}.".format(
-                    tokens[0])
-                LOG.error(msg)
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-        """
-        Execute some actions after reading the configuration.
-
-        This method should be explicitly called by all perform_config()
-        methods in descendant classes.
-        """
-
-        pass
-
-    # -------------------------------------------------------------------------
-    def send_mail(self, subject, body):
-
-        xmailer = "{a} (Admin Tools version {v})".format(
-            a=self.appname, v=__global_version__)
-
-        mail = MIMEText(body, 'plain', 'utf-8')
-        mail['Subject'] = subject
-        mail['From'] = self.mail_from
-        mail['To'] = ', '.join(self.mail_recipients)
-        mail['Reply-To'] = self.reply_to
-        mail['X-Mailer'] = xmailer
-        if self.mail_cc:
-            mail['Cc'] = ', '.join(self.mail_cc)
-
-        if self.verbose > 1:
-            LOG.debug("Mail to send:\n{}".format(mail.as_string(unixfrom=True)))
-
-        if self.mail_method == 'smtp':
-            self._send_mail_smtp(mail)
-        else:
-            self._send_mail_sendmail(mail)
-
-    # -------------------------------------------------------------------------
-    def _send_mail_smtp(self, mail):
-
-        with smtplib.SMTP(self.mail_server, self.smtp_port) as smtp:
-            if self.verbose > 2:
-                smtp.set_debuglevel(2)
-            elif self.verbose > 1:
-                smtp.set_debuglevel(1)
-
-            smtp.send_message(mail)
-
-    # -------------------------------------------------------------------------
-    def _send_mail_sendmail(self, mail):
-
-        # Searching for the location of sendmail ...
-        paths = (
-            '/usr/sbin/sendmail',
-            '/usr/lib/sendmail',
-        )
-        sendmail = None
-        for path in paths:
-            if os.path.isfile(path) and os.access(path, os.X_OK):
-                sendmail = path
-                break
-
-        if not sendmail:
-            msg = "Did not found sendmail executable."
-            LOG.error(msg)
-            return
-
-        cmd = [sendmail, "-t", "-oi"]
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        p = Popen(cmd, stdin=PIPE, universal_newlines=True)
-        p.communicate(mail.as_string())
-
-    # -------------------------------------------------------------------------
-    def post_init(self):
-        """
-        Method to execute before calling run(). Here some finishing actions
-        can be done after reading in command line parameters, configuration
-        and so on.
-
-        This method may be overridden by descendant classes; these
-        methods should always include a call to post_init() of the
-        parent class.
-
-        """
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def is_local_domain(self, domain):
-
-        zone_name = RE_DOT_AT_END.sub('', domain)
-
-        if self.verbose > 1:
-            LOG.debug("Checking, whether {!r} is a local zone.".format(zone_name))
-
-        tld = zone_name.split('.')[-1]
-        if tld in ('intern', 'internal', 'local', 'localdomain', 'lokal'):
-            LOG.debug("Zone {!r} has a local TLD {!r}.".format(zone_name, tld))
-            return True
-
-        zone_base = zone_name.split('.')[0]
-        if zone_base in ('intern', 'internal', 'local', 'localdomain', 'lokal'):
-            LOG.debug("Zone {!r} has a local base {!r}.".format(zone_name, tld))
-            return True
-
-        if tld != 'arpa':
-            if self.verbose > 2:
-                LOG.debug("Zone {!r} has a public TLD {!r}.".format(zone_name, tld))
-            return False
-
-        if zone_name.endswith('.in-addr.arpa'):
-            tupels = []
-            for tupel in reversed(zone_name.replace('.in-addr.arpa', '').split('.')):
-                tupels.append(tupel)
-            if self.verbose > 2:
-                LOG.debug("Got IPv4 tupels from zone {!r}: {}".format(zone_name, pp(tupels)))
-            bitmask = None
-            if len(tupels) == 1:
-                bitmask = 8
-                tupels.append('0')
-                tupels.append('0')
-                tupels.append('0')
-            elif len(tupels) == 2:
-                tupels.append('0')
-                tupels.append('0')
-                bitmask = 16
-            elif len(tupels) == 3:
-                bitmask = 24
-                tupels.append('0')
-            else:
-                LOG.warn("Could not interprete reverse IPv4 zone {!r}.".format(zone_name))
-                return False
-            net_address = '.'.join(tupels) + '/{}'.format(bitmask)
-            if self.verbose > 2:
-                LOG.debug(
-                    "Got IPv4 network address of zone {!r}: {!r}.".format(
-                        zone_name, net_address))
-            network = ipaddress.ip_network(net_address)
-            if network.is_global:
-                if self.verbose > 1:
-                    LOG.debug(
-                        "The network {!r} of zone {!r} is allocated for public networks.".format(
-                            net_address, zone_name))
-                return False
-            LOG.debug("The network {!r} of zone {!r} is allocated for local networks.".format(
-                net_address, zone_name))
-            return True
-
-        if self.verbose > 2:
-            LOG.debug(
-                "Zone {!r} seems to be a reverse zone for a public network.".format(zone_name))
-        return False
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
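The is_local_domain() check above contains the only non-obvious arithmetic in this module: a reverse in-addr.arpa zone name is padded to four octets, given an /8, /16 or /24 prefix length and classified with the ipaddress module. A minimal standalone sketch of just that step follows; it is not part of pp_lib and the sample zones are invented.

#!/usr/bin/env python3
# Standalone sketch only - mirrors the reverse-zone classification in
# PpConfigApplication.is_local_domain().
import ipaddress


def reverse_zone_is_local(zone_name):
    """Classify an IPv4 reverse zone like '168.192.in-addr.arpa'."""
    octets = list(reversed(zone_name.replace('.in-addr.arpa', '').split('.')))
    prefix_len = 8 * len(octets)         # 1 octet -> /8, 2 -> /16, 3 -> /24
    octets += ['0'] * (4 - len(octets))  # pad to a full IPv4 address
    network = ipaddress.ip_network('.'.join(octets) + '/{}'.format(prefix_len))
    return not network.is_global


if __name__ == '__main__':
    print(reverse_zone_is_local('10.in-addr.arpa'))       # True  (10.0.0.0/8)
    print(reverse_zone_is_local('168.192.in-addr.arpa'))  # True  (192.168.0.0/16)
    print(reverse_zone_is_local('8.8.8.in-addr.arpa'))    # False (8.8.8.0/24)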
diff --git a/pp_lib/check_puppet_env_app.py b/pp_lib/check_puppet_env_app.py
deleted file mode 100644 (file)
index 8025c20..0000000
+++ /dev/null
@@ -1,769 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the check-puppet-env application
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import re
-import copy
-import json
-import socket
-import pwd
-import sys
-import glob
-import datetime
-import warnings
-
-# Third party modules
-import six
-import yaml
-import requests
-
-from six import StringIO
-from six.moves import configparser
-
-from configparser import Error as ConfigParseError
-
-# Own modules
-from .global_version import __version__ as __global_version__
-
-from .errors import PpAppError
-
-from .common import pp, to_bool, RE_DOT_AT_END
-
-from .merge import merge_structure
-
-from .app import PpApplication
-
-__version__ = '0.6.2'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class CheckPuppetEnvError(PpAppError):
-    """Base error class for all exceptions happened during
-    execution this application"""
-
-    pass
-
-
-# =============================================================================
-class CheckPuppetEnvApp(PpApplication):
-    """
-    Class for the check-puppet-env application objects.
-    """
-
-    default_puppet_root_env_dir = os.sep + os.path.join('etc', 'puppetlabs', 'code', 'environments')
-
-    open_args = {}
-    if six.PY3:
-        open_args = {
-            'encoding': 'utf-8',
-            'errors': 'surrogateescape',
-        }
-
-    dev_null = os.sep + os.path.join('dev', 'null')
-
-    default_forge_uri = 'https://forgeapi.puppet.com/v3/modules'
-    default_http_timeout = 30
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=None, usage=None, description=None,
-            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
-            puppet_root_env_dir=None, out_dir=None,
-            ):
-
-        self.puppet_root_env_dir = puppet_root_env_dir
-        if not self.puppet_root_env_dir:
-            self.puppet_root_env_dir = self.default_puppet_root_env_dir
-        self.out_dir = None
-        self.environments = []
-        self.env_name = None
-        self.env_dir = None
-        self.modules_root_dir = None
-        self.modules = {}
-        self.dependencies = []
-        self.rev_dep = {}
-        self.no_write = False
-        self.forge_uri = self.default_forge_uri
-        self.http_timeout = self.default_http_timeout
-
-        super(CheckPuppetEnvApp, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
-            initialized=False, usage=usage, description=description,
-            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
-            env_prefix=env_prefix,
-        )
-
-        self.initialized = False
-
-        if out_dir:
-            self.out_dir = out_dir
-        else:
-            self.out_dir = os.path.join(self.base_dir, 'tmp')
-
-        self.post_init()
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(CheckPuppetEnvApp, self).as_dict(short=short)
-        res['dev_null'] = self.dev_null
-        res['open_args'] = self.open_args
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initiate the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        self.arg_parser.add_argument(
-            '-D', '--env-dir', metavar="DIRECTORY", dest="env_dir",
-            help="Parent directory of all puppet environments, default: {!r}".format(
-                self.puppet_root_env_dir)
-        )
-
-        self.arg_parser.add_argument(
-            '-E', '--env', '--environment',
-            dest="env", required=True, metavar="ENVIRONMENT",
-            help="The Puppet environment to analyze."
-        )
-
-        self.arg_parser.add_argument(
-            '-O', '--out', '--output-dir',
-            metavar="DIRECTORY", dest="out_dir",
-            help="Output directory of all analyzing results, default: {!r}".format(
-                os.path.join(self.base_dir, 'tmp'))
-        )
-
-        self.arg_parser.add_argument(
-            '-N', '--no-write', action="store_true", dest="no_write",
-            help="Do not generate output files.",
-        )
-
-        self.arg_parser.add_argument(
-            '--forge-uri', metavar="URL", dest='forge_uri',
-            help="URL of the Puppetforge-API-Server, default: {!r}".format(
-                self.default_forge_uri)
-        )
-
-        self.arg_parser.add_argument(
-            '-T', '--timeout', '--http-timeout',
-            metavar="SECS", dest='http_timeout', type=int,
-            help=(
-                "Timeout for requesting current version of a module from Puppetforge, "
-                "default: {} seconds.").format(self.default_http_timeout)
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-
-        if self.args.no_write:
-            self.no_write = True
-
-        puppet_root_env_dir = self.puppet_root_env_dir
-        retval = 5
-
-        if self.args.env_dir:
-            puppet_root_env_dir = self.args.env_dir
-            retval = 0
-
-        if not os.path.exists(puppet_root_env_dir):
-            msg = (
-                self.appname + ': ' +
-                "Puppet environment directory {!r} does not exists.".format(puppet_root_env_dir))
-            sys.stderr.write(msg + '\n\n')
-            self.exit(retval)
-
-        if not os.path.isdir(puppet_root_env_dir):
-            msg = (
-                self.appname + ': ' +
-                "Path for Puppet environment directory {!r} is not a directory.".format(
-                    puppet_root_env_dir))
-            sys.stderr.write(msg + '\n\n')
-            self.exit(retval)
-
-        self.puppet_root_env_dir = puppet_root_env_dir
-
-        if self.args.forge_uri:
-            self.forge_uri = self.args.forge_uri
-        if self.args.http_timeout:
-            self.http_timeout = self.args.http_timeout
-
-        self._init_puppet_environments()
-        self.env_name = self.args.env
-        self.env_dir = os.path.join(self.puppet_root_env_dir, self.env_name)
-
-        if not os.path.exists(self.env_dir):
-            msg = (
-                self.appname + ': ' +
-                "Invalid Puppet environment {e!r} - directory {d!r} does not exists.".format(
-                    e=self.env_name, d=self.env_dir))
-            sys.stderr.write(msg + '\n\n')
-            msg = "Valid environments are:\n"
-            for env in self.environments:
-                msg += "  * {}\n".format(env)
-            sys.stderr.write(msg + '\n')
-            self.arg_parser.print_usage(sys.stdout)
-            self.exit(0)
-
-        if not os.path.isdir(self.env_dir):
-            msg = (
-                self.appname + ': ' +
-                "Invalid Puppet environment {e!r} - path {d!r} is not a directory.".format(
-                    e=self.env_name, d=self.env_dir))
-            sys.stderr.write(msg + '\n\n')
-            msg = "Valid environments are:\n"
-            for env in self.environments:
-                msg += "  * {}\n".format(env)
-            sys.stderr.write(msg + '\n')
-            self.exit(retval)
-
-        out_dir = self.out_dir
-        retval = 6
-
-        if self.args.out_dir:
-            out_dir = self.args.out_dir
-            retval = 0
-
-        if not os.path.exists(out_dir):
-            msg = (
-                self.appname + ': ' +
-                "Output directory {!r} does not exists.".format(out_dir))
-            sys.stderr.write(msg + '\n\n')
-            self.exit(retval)
-
-        if not os.path.isdir(out_dir):
-            msg = (
-                self.appname + ': ' +
-                "Path for Output directory {!r} is not a directory.".format(
-                    out_dir))
-            sys.stderr.write(msg + '\n\n')
-            self.exit(retval)
-
-        self.out_dir = out_dir
-
-    # -------------------------------------------------------------------------
-    def _init_puppet_environments(self):
-
-        pattern = os.path.join(self.puppet_root_env_dir, '*')
-        dirs = glob.glob(pattern)
-
-        self.environments = []
-        for path in dirs:
-            if os.path.isdir(path):
-                env = os.path.basename(path)
-                self.environments.append(env)
-
-        self.environments.sort(key=str.lower)
-
-    # -------------------------------------------------------------------------
-    def post_init(self):
-        """
-        Method to execute before calling run(). Finishing actions after
-        reading in command line parameters, configuration and so on can
-        be done here.
-
-        This method may be overridden by descendant classes; these
-        methods should always include a call to post_init() of the
-        parent class.
-
-        """
-
-        self.perform_arg_parser()
-        self.init_logging()
-        self.modules_root_dir = os.path.join(self.env_dir, 'modules')
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """
-        Main application routine.
-        """
-
-        self.collect_modules()
-        self.print_modules()
-        self.verify_dependencies()
-        self.write_modinfo_yaml()
-        self.print_not_depended()
-        self.write_dependencies()
-
-    # -------------------------------------------------------------------------
-    def write_modinfo_yaml(self):
-
-        outfile_base = 'modules-info.{e}.{d}.yaml'.format(
-            e=self.env_name, d=datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S'))
-        out_file = os.path.join(self.out_dir, outfile_base)
-
-        LOG.info("Writing information about modules in {!r}...".format(out_file))
-        if self.no_write:
-            LOG.debug("Don't writing into {!r}...".format(out_file))
-            if self.verbose:
-                print()
-                print("Module information:")
-                print()
-                print('---')
-                print(yaml.dump(self.modules, width=240))
-                print()
-                return
-
-        with open(out_file, 'w', **self.open_args) as fh:
-            fh.write('---\n')
-            fh.write(yaml.dump(self.modules, width=240))
-
-    # -------------------------------------------------------------------------
-    def print_not_depended(self):
-
-        print()
-        print("Module, von denen keine anderen Module abhängen:")
-        print("================================================")
-        print()
-
-        len_base = 1
-        for b_name in self.modules.keys():
-            base_name = str(b_name)
-            if len(base_name) > len_base:
-                len_base = len(base_name)
-
-        template = '  * {{b:<{}}} -> {{n}}'.format(len_base)
-
-        for b_name in sorted(self.modules.keys(), key=str.lower):
-
-            module_info = self.modules[b_name]
-            base_name = str(b_name)
-
-            if base_name not in self.rev_dep or not self.rev_dep[base_name]:
-                print(template.format(b=base_name, n=module_info['name']))
-
-        print()
-
-    # -------------------------------------------------------------------------
-    def verify_dependencies(self):
-
-        LOG.info("Verifying dependencies ...")
-
-        mods = {}
-        self.dependencies = []
-        self.rev_dep = {}
-        re_name_split = re.compile(r'([^/_-]+)[/_-](.*)')
-        connectors = ('-', '_', '/')
-
-        for b_name in self.modules.keys():
-            module_info = self.modules[b_name]
-            base_name = str(b_name)
-            if module_info['name']:
-                mod_name = module_info['name']
-                mods[module_info['name']] = base_name
-            else:
-                LOG.warn("Did not found complete name of module {!r}.".format(base_name))
-
-        for b_name in self.modules.keys():
-            module_info = self.modules[b_name]
-            base_name = str(b_name)
-            if base_name not in self.rev_dep:
-                self.rev_dep[base_name] = []
-            if not module_info['dependencies']:
-                continue
-            if not module_info['name']:
-                LOG.debug("Did not found complete name of module {!r}.".format(base_name))
-                continue
-            if not module_info['vendor']:
-                LOG.warn("Did not found vendor of module {!r}.".format(base_name))
-            mod_name = module_info['name']
-            if self.verbose > 1:
-                LOG.debug("Checking dependencies of module {!r}...".format(mod_name))
-
-            for dep_key in module_info['dependencies'].keys():
-                dep_mod = str(dep_key)
-                if dep_mod in mods:
-                    dep = (dep_mod, mod_name)
-                    self.dependencies.append(dep)
-                    if mods[dep_mod] not in self.rev_dep:
-                        self.rev_dep[mods[dep_mod]] = []
-                    if base_name not in self.rev_dep[mods[dep_mod]]:
-                        self.rev_dep[mods[dep_mod]].append(base_name)
-                    module_info['dependencies'][dep_key]['module'] = mods[dep_mod]
-                    continue
-                if self.verbose > 2:
-                    LOG.debug("Dependency to {d!r} of module {m!r} wrong formatted.".format(
-                        d=dep_mod, m=mod_name))
-                match = re_name_split.match(dep_mod)
-                found = False
-                if match:
-                    dep_mod_vendor = match.group(1)
-                    dep_mod_base = match.group(2)
-                    # Only try the vendor/base combinations if the split succeeded,
-                    # otherwise dep_mod_vendor and dep_mod_base are undefined.
-                    for connector in connectors:
-                        dep_mod_name = dep_mod_vendor + connector + dep_mod_base
-                        if dep_mod_name in mods:
-                            dep = (dep_mod_name, mod_name)
-                            self.dependencies.append(dep)
-                            if mods[dep_mod_name] not in self.rev_dep:
-                                self.rev_dep[mods[dep_mod_name]] = []
-                            if base_name not in self.rev_dep[mods[dep_mod_name]]:
-                                self.rev_dep[mods[dep_mod_name]].append(base_name)
-                            module_info['dependencies'][dep_key]['module'] = mods[dep_mod_name]
-                            found = True
-                            break
-                if found:
-                    continue
-                LOG.warn("Did not found dependency to {d!r} of module {m!r}.".format(
-                    d=dep_mod, m=mod_name))
-
-        if self.verbose > 2:
-            LOG.debug("Found dependencies:\n{}".format(pp(self.dependencies)))
-            LOG.debug("Reverse dependencies:\n{}".format(pp(self.rev_dep)))
-
-    # -------------------------------------------------------------------------
-    def write_dependencies(self):
-
-        outfile_base = 'modules-deps.{e}.{d}.dot'.format(
-            e=self.env_name, d=datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S'))
-        out_file = os.path.join(self.out_dir, outfile_base)
-
-        LOG.info("Writing graphviz dot file about module dependecies in {!r}...".format(out_file))
-
-        header_lines = (
-            'digraph Dependencies {',
-            '',
-            '\t// Graph attributes',
-            '\tnodesep=0.7;',
-        )
-
-        def printout(fh, line):
-            if self.verbose:
-                print(line)
-            fh.write(line + '\n')
-
-        if self.no_write:
-            LOG.debug("Don't writing into {!r}...".format(out_file))
-            out_file = self.dev_null
-
-        with open(out_file, 'w', **self.open_args) as fh:
-
-            # File header
-            for line in header_lines:
-                printout(fh, line)
-
-            # Print nodes
-            line = '\n\t// Modules as nodes'
-            printout(fh, line)
-
-            for b_name in sorted(self.modules.keys(), key=str.lower):
-
-                module_info = self.modules[b_name]
-                base_name = str(b_name)
-
-                mod_name = base_name
-                if module_info['name']:
-                    mod_name = module_info['name']
-                tgt_dot_id = module_info['dot_id']
-
-                line = '\t{};'.format(tgt_dot_id)
-                printout(fh, line)
-
-            line = '\n\t// #############################\n\t// Dependencies'
-            printout(fh, line)
-
-            # Print dependencies as edges
-            for b_name in sorted(self.modules.keys(), key=str.lower):
-
-                module_info = self.modules[b_name]
-                base_name = str(b_name)
-
-                mod_name = base_name
-                if module_info['name']:
-                    mod_name = module_info['name']
-                tgt_dot_id = module_info['dot_id']
-
-                line = '\n\t// {i} ({n})'.format(i=tgt_dot_id, n=mod_name)
-                printout(fh, line)
-
-                for dep_key in module_info['dependencies'].keys():
-                    dep_mod = str(dep_key)
-                    src_module = module_info['dependencies'][dep_key]['module']
-                    if src_module in self.modules:
-                        src_dot_id = self.modules[src_module]['dot_id']
-                        line = '\t{src} -> {tgt};'.format(
-                            src=src_dot_id, tgt=tgt_dot_id)
-                        printout(fh, line)
-
-            # File footer
-            printout(fh, '\n}\n')
-
-    # -------------------------------------------------------------------------
-    def print_modules(self):
-
-        title_base = 'Module'
-        title_name = 'Complete name'
-        title_vendor = 'Vendor'
-        title_uversion = "Upstream Version"
-        title_version = 'Version'
-
-        len_base = len(title_base)
-        len_name = len(title_name)
-        len_vendor = len(title_vendor)
-        len_uversion = len(title_uversion)
-        len_version = len(title_version)
-
-        for b_name in self.modules.keys():
-            module_info = self.modules[b_name]
-            base_name = str(b_name)
-            if len(base_name) > len_base:
-                len_base = len(base_name)
-            if module_info['name']:
-                if len(module_info['name']) > len_name:
-                    len_name = len(module_info['name'])
-            if module_info['vendor']:
-                if len(module_info['vendor']) > len_vendor:
-                    len_vendor = len(module_info['vendor'])
-            if module_info['upstream_version']:
-                if len(module_info['upstream_version']) > len_uversion:
-                    len_uversion = len(module_info['upstream_version'])
-            if module_info['version']:
-                if len(module_info['version']) > len_version:
-                    len_version = len(module_info['version'])
-
-        template = ((
-            '{{base:<{lb}}}  {{name:<{ln}}}  {{vendor:<{lven}}}    '
-            '{{uversion:<{luver}}} {{version:<{lver}}}').format(
-                lb=len_base, ln=len_name, lven=len_vendor,
-                luver=len_uversion, lver=len_version))
-        len_total = len_base + len_name + len_vendor + len_uversion + len_version + 12
-        if self.verbose > 1:
-            LOG.debug("Module line template: {!r}".format(template))
-        print()
-        print(template.format(
-            base=title_base, name=title_name, vendor=title_vendor,
-            uversion=title_uversion, version=title_version))
-        print('=' * len_total)
-
-        for b_name in sorted(self.modules.keys(), key=str.lower):
-
-            module_info = self.modules[b_name]
-            base_name = str(b_name)
-
-            mod_name = '~'
-            if module_info['name']:
-                mod_name = module_info['name']
-
-            vendor_name = '~'
-            if module_info['vendor']:
-                vendor_name = module_info['vendor']
-
-            uver = '~'
-            if module_info['upstream_version']:
-                uver = module_info['upstream_version']
-
-            version = '~'
-            if module_info['version']:
-                version = module_info['version']
-
-            print(template.format(
-                base=base_name, name=mod_name, vendor=vendor_name,
-                uversion=uver, version=version))
-
-        print()
-
-    # -------------------------------------------------------------------------
-    def collect_modules(self):
-
-        LOG.info("Collecting all modules from {!r} ...".format(self.modules_root_dir))
-        self.modules = {}
-
-        if not os.path.exists(self.modules_root_dir):
-            LOG.error("Directory {!r} does not exists.".format(self.modules_root_dir))
-            self.exit(7)
-
-        if not os.path.isdir(self.modules_root_dir):
-            LOG.error("Path {!r} is not a directory".format(self.modules_root_dir))
-            self.exit(7)
-
-        pattern = os.path.join(self.modules_root_dir, '*')
-        if self.verbose > 2:
-            LOG.debug("Globbing pattern for module directories: {!r}".format(pattern))
-        for module_dir in glob.glob(pattern):
-            module_info = self.get_module_info(module_dir)
-            if module_info:
-                base_name = module_info['base_name']
-                self.modules[base_name] = module_info
-                upstream_version = self.get_upstream_version(module_info)
-                self.modules[base_name]['upstream_version'] = upstream_version
-                if not self.verbose:
-                    if upstream_version:
-                        print('.', end='', flush=True)
-                    else:
-                        print('~', end='', flush=True)
-                else:
-                    print('!', end='', flush=True)
-        if not self.verbose:
-            print()
-
-        if self.verbose > 2:
-            LOG.debug("Found module information:\n{}".format(pp(self.modules)))
-
-    # -------------------------------------------------------------------------
-    def get_upstream_version(self, module_info):
-
-        version = None
-
-        url = "{url}/{user}-{module}".format(
-            url=self.forge_uri, user=module_info['vendor'], module=module_info['base_name'])
-
-        LOG.debug((
-            "Trying to get current version of module {user}-{module} from Puppet forge.").format(
-                user=module_info['vendor'], module=module_info['base_name']))
-        if self.verbose > 2:
-            LOG.debug("URL to request: {}".format(url))
-
-        session = requests.Session()
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always")
-            response = session.request('GET', url, timeout=self.http_timeout)
-            if w:
-                warn_class = w[-1].category.__name__
-                warn_msg = '{}: {}'.format(
-                    warn_class, w[-1].message)
-                if warn_class == 'SubjectAltNameWarning':
-                    LOG.debug(warn_msg)
-                else:
-                    LOG.warn(warn_msg)
-
-        LOG.debug("Got status code: {}.".format(response.status_code))
-        if not response.ok:
-            LOG.debug("Did not found module {user}-{module} on Puppet forge.".format(
-                user=module_info['vendor'], module=module_info['base_name']))
-            return None
-
-        if not response.text:
-            LOG.warn("No output for URL {!r}".format(url))
-            return None
-        if self.verbose > 2:
-            msg = "Output:\n{}".format(response.text)
-            LOG.debug(msg)
-
-        js_info = response.json()
-        if 'current_release' in js_info:
-            if 'version' in js_info['current_release']:
-                version = js_info['current_release']['version']
-            else:
-                msg = "Did not found version of current_release of module {user}-{module}.".format(
-                    user=module_info['vendor'], module=module_info['base_name'])
-                LOG.warn(msg)
-        else:
-            msg = "Did not found current_release of module {user}-{module}.".format(
-                user=module_info['vendor'], module=module_info['base_name'])
-            LOG.warn(msg)
-
-        LOG.debug("Vurrent version of module {user}-{module} is {version}.".format(
-            user=module_info['vendor'], module=module_info['base_name'], version=version))
-
-        return version
-
-    # -------------------------------------------------------------------------
-    def get_module_info(self, module_dir):
-
-        if self.verbose > 2:
-            LOG.debug("Get module information from {!r}.".format(module_dir))
-
-        if not os.path.exists(module_dir):
-            LOG.warn("Directory {!r} does not exists.".format(module_dir))
-            return None
-
-        if not os.path.isdir(module_dir):
-            LOG.warn("Path {!r} is not a directory".format(module_dir))
-            return None
-
-        re_dot_id = re.compile(r'[/-]+')
-
-        module_info = {}
-        module_info['base_name'] = os.path.basename(module_dir)
-        metadata_file = os.path.join(module_dir, 'metadata.json')
-        if not os.path.exists(metadata_file):
-            LOG.warn("Metadatafile {!r} does not exists.".format(metadata_file))
-            return None
-        if not os.path.isfile(metadata_file):
-            LOG.warn("Metadatafile {!r} is not a regular file.".format(metadata_file))
-            return None
-        if not os.access(metadata_file, os.R_OK):
-            LOG.warn("Metadatafile {!r} is readable.".format(metadata_file))
-            return None
-        if self.verbose > 2:
-            LOG.debug("Reading and evaluating {!r}.".format(metadata_file))
-        meta_info = {}
-
-        try:
-            with open(metadata_file, 'r', **self.open_args) as fh:
-                meta_info = json.load(fh)
-        except json.JSONDecodeError as e:
-            LOG.warn((
-                "Could not interprete {f!r} (line {l}, column {c}) "
-                "as a regular JSON file: {e}").format(
-                f=metadata_file, l=e.lineno, c=e.colno, e=e.msg))
-            return None
-
-        module_info['name'] = None
-        module_info['dot_id'] = None
-        module_info['vendor'] = None
-        module_info['version'] = None
-        module_info['dependencies'] = {}
-        if 'name' in meta_info:
-            module_info['name'] = meta_info['name']
-            pat_vendor = r'^(\S+)[-_/]' + re.escape(module_info['base_name']) + r'$'
-            match = re.match(pat_vendor, module_info['name'])
-            if match:
-                module_info['vendor'] = match.group(1)
-            module_info['dot_id'] = re_dot_id.sub('_', module_info['name'])
-        else:
-            module_info['dot_id'] = re_dot_id.sub('_', module_info['base_name'])
-
-        if 'version' in meta_info:
-            module_info['version'] = meta_info['version']
-
-        if 'dependencies' in meta_info:
-            for dep in meta_info['dependencies']:
-                if 'name' in dep:
-                    dep_info = {
-                        'name': dep['name'],
-                        'version': None,
-                        'module': None,
-                    }
-                    if 'version_requirement' in dep:
-                        dep_info['version'] = dep['version_requirement']
-                    module_info['dependencies'][dep['name']] = dep_info
-
-        return module_info
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
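The CheckPuppetEnvApp class above is meant to be driven from a small console script. A minimal sketch of such an entry point, assuming the old pp_lib import path and that the PpApplication base class exposes a run() method dispatching to the _run() hook shown above (neither the wrapper script nor run() is part of this diff):

#!/usr/bin/env python
# Hypothetical wrapper for illustration only.
from pp_lib.check_puppet_env_app import CheckPuppetEnvApp

if __name__ == '__main__':
    app = CheckPuppetEnvApp(appname='check-puppet-env')
    app.run()   # assumed base-class entry point that calls _run()

Invoked e.g. as 'check-puppet-env -E production -O /tmp --no-write', it collects the modules of the given environment, prints the version table, and (unless --no-write is given) writes the modules-info YAML and the Graphviz dot file into the output directory.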
diff --git a/pp_lib/colored.py b/pp_lib/colored.py
deleted file mode 100644 (file)
index 12264f9..0000000
+++ /dev/null
@@ -1,219 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@summary: additional logging formatter for colored output via console
-"""
-
-# Standard modules
-import logging
-# import os.path
-# import sys
-import copy
-
-# Third party modules
-
-# Own modules
-
-# import pb_provisioning.common
-
-# from pb_provisioning.common import to_unicode_or_bust, to_utf8_or_bust
-
-__version__ = '0.1.4'
-
-# =============================================================================
-# Color coding module variables and helper functions
-
-COLOR_CODE = {
-    'ENDC': 0,  # RESET COLOR
-    'BOLD': 1,
-    'UNDERLINE': 4,
-    'BLINK': 5,
-    'INVERT': 7,
-    'CONCEALD': 8,
-    'STRIKE': 9,
-    'GREY30': 90,
-    'GREY40': 2,
-    'GREY65': 37,
-    'GREY70': 97,
-    'GREY20_BG': 40,
-    'GREY33_BG': 100,
-    'GREY80_BG': 47,
-    'GREY93_BG': 107,
-    'DARK_RED': 31,
-    'RED': 91,
-    'RED_BG': 41,
-    'LIGHT_RED_BG': 101,
-    'DARK_YELLOW': 33,
-    'YELLOW': 93,
-    'YELLOW_BG': 43,
-    'LIGHT_YELLOW_BG': 103,
-    'DARK_BLUE': 34,
-    'BLUE': 94,
-    'BLUE_BG': 44,
-    'LIGHT_BLUE_BG': 104,
-    'DARK_MAGENTA': 35,
-    'PURPLE': 95,
-    'MAGENTA_BG': 45,
-    'LIGHT_PURPLE_BG': 105,
-    'DARK_CYAN': 36,
-    'AUQA': 96,
-    'AQUA': 96,
-    'CYAN_BG': 46,
-    'LIGHT_AUQA_BG': 106,
-    'LIGHT_AQUA_BG': 106,
-    'DARK_GREEN': 32,
-    'GREEN': 92,
-    'GREEN_BG': 42,
-    'LIGHT_GREEN_BG': 102,
-    'BLACK': 30,
-}
-
-
-# -----------------------------------------------------------------------------
-def termcode(num):
-    """
-    Output of an ANSI terminal code.
-    """
-
-    return('\033[%sm' % (num))
-
-
-# -----------------------------------------------------------------------------
-def colorstr(message, color):
-    """
-    Wrapper function to colorize the message.
-
-    @param message: The message to colorize
-    @type message: str
-    @param color: The color to use, must be one of the keys of COLOR_CODE
-    @type color: str
-
-    @return: the colorized message
-    @rtype: str
-
-    """
-
-    tcode = ''
-    if isinstance(color, (list, tuple)):
-        for clr in color:
-            tcode += termcode(COLOR_CODE[clr])
-    else:
-        tcode = termcode(COLOR_CODE[color])
-
-    return tcode + message + termcode(COLOR_CODE['ENDC'])
-
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class ColoredFormatter(logging.Formatter):
-    """
-    A variant of code found at:
-    http://stackoverflow.com/questions/384076/how-can-i-make-the-python-logging-output-to-be-colored
-    """
-
-    LEVEL_COLOR = {
-        'DEBUG': None,
-        'INFO': 'GREEN',
-        'WARNING': 'YELLOW',
-        'ERROR': ('BOLD', 'RED'),
-        'CRITICAL': 'RED_BG',
-    }
-
-    # -------------------------------------------------------------------------
-    def __init__(self, fmt=None, datefmt=None):
-        """
-        Initialize the formatter with specified format strings.
-
-        Initialize the formatter either with the specified format string, or a
-        default. Allow for specialized date formatting with the optional
-        datefmt argument (if omitted, you get the ISO8601 format).
-        """
-
-        logging.Formatter.__init__(self, fmt, datefmt)
-
-    # -----------------------------------------------------------
-    @property
-    def color_debug(self):
-        """The color used to output debug messages."""
-        return self.LEVEL_COLOR['DEBUG']
-
-    @color_debug.setter
-    def color_debug(self, value):
-        self.LEVEL_COLOR['DEBUG'] = value
-
-    # -----------------------------------------------------------
-    @property
-    def color_info(self):
-        """The color used to output info messages."""
-        return self.LEVEL_COLOR['INFO']
-
-    @color_info.setter
-    def color_info(self, value):
-        self.LEVEL_COLOR['INFO'] = value
-
-    # -----------------------------------------------------------
-    @property
-    def color_warning(self):
-        """The color used to output warning messages."""
-        return self.LEVEL_COLOR['WARNING']
-
-    @color_warning.setter
-    def color_warning(self, value):
-        self.LEVEL_COLOR['WARNING'] = value
-
-    # -----------------------------------------------------------
-    @property
-    def color_error(self):
-        """The color used to output error messages."""
-        return self.LEVEL_COLOR['ERROR']
-
-    @color_error.setter
-    def color_error(self, value):
-        self.LEVEL_COLOR['ERROR'] = value
-
-    # -----------------------------------------------------------
-    @property
-    def color_critical(self):
-        """The color used to output critical messages."""
-        return self.LEVEL_COLOR['CRITICAL']
-
-    @color_critical.setter
-    def color_critical(self, value):
-        self.LEVEL_COLOR['CRITICAL'] = value
-
-    # -------------------------------------------------------------------------
-    def format(self, record):
-        """
-        Format the specified record as text.
-        """
-
-        record = copy.copy(record)
-        levelname = record.levelname
-
-        if levelname in self.LEVEL_COLOR:
-
-            record.name = colorstr(record.name, 'BOLD')
-            record.filename = colorstr(record.filename, 'BOLD')
-            record.module = colorstr(record.module, 'BOLD')
-            record.funcName = colorstr(record.funcName, 'BOLD')
-            record.pathname = colorstr(record.pathname, 'BOLD')
-            record.processName = colorstr(record.processName, 'BOLD')
-            record.threadName = colorstr(record.threadName, 'BOLD')
-
-            if self.LEVEL_COLOR[levelname] is not None:
-                record.levelname = colorstr(
-                    levelname, self.LEVEL_COLOR[levelname])
-                record.msg = colorstr(record.msg, self.LEVEL_COLOR[levelname])
-
-        return logging.Formatter.format(self, record)
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
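A short usage sketch for the ColoredFormatter above, attaching it to a console handler (the format string and logger name are only examples; the import path reflects the old pp_lib location):

import logging

from pp_lib.colored import ColoredFormatter

LOG = logging.getLogger('demo')
handler = logging.StreamHandler()
handler.setFormatter(ColoredFormatter('%(levelname)s - %(name)s: %(message)s'))
LOG.addHandler(handler)
LOG.setLevel(logging.DEBUG)

LOG.info('shown with a green level name and message')
LOG.error('shown in bold red')

Because format() works on a copy of the log record, the coloring does not leak into other handlers attached to the same logger.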
diff --git a/pp_lib/common.py b/pp_lib/common.py
deleted file mode 100644 (file)
index e0d7729..0000000
+++ /dev/null
@@ -1,387 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for common used functions.
-"""
-
-# Standard modules
-import sys
-import os
-import logging
-import re
-import pprint
-import platform
-import locale
-
-# Third party modules
-import six
-
-# Own modules
-
-__version__ = '0.5.3'
-
-LOG = logging.getLogger(__name__)
-
-RE_YES = re.compile(r'^\s*(?:y(?:es)?|true)\s*$', re.IGNORECASE)
-RE_NO = re.compile(r'^\s*(?:no?|false|off)\s*$', re.IGNORECASE)
-PAT_TO_BOOL_TRUE = locale.nl_langinfo(locale.YESEXPR)
-RE_TO_BOOL_TRUE = re.compile(PAT_TO_BOOL_TRUE)
-PAT_TO_BOOL_FALSE = locale.nl_langinfo(locale.NOEXPR)
-RE_TO_BOOL_FALSE = re.compile(PAT_TO_BOOL_FALSE)
-
-RE_DOT = re.compile(r'\.')
-RE_DOT_AT_END = re.compile(r'(\.)*$')
-RE_DECIMAL = re.compile(r'^\d+$')
-RE_IPV4_PTR = re.compile(r'\.in-addr\.arpa\.$', re.IGNORECASE)
-RE_IPV6_PTR = re.compile(r'\.ip6\.arpa\.$', re.IGNORECASE)
-
-
-# =============================================================================
-def pp(value, indent=4, width=99, depth=None):
-    """
-    Returns a pretty print string of the given value.
-
-    @return: pretty print string
-    @rtype: str
-    """
-
-    pretty_printer = pprint.PrettyPrinter(
-        indent=indent, width=width, depth=depth)
-    return pretty_printer.pformat(value)
-
-
-# =============================================================================
-def terminal_can_colors(debug=False):
-    """
-    Method to detect whether the current terminal (stdout and stderr)
-    is able to perform ANSI color sequences.
-
-    @return: both stdout and stderr can perform ANSI color sequences
-    @rtype: bool
-
-    """
-
-    cur_term = ''
-    if 'TERM' in os.environ:
-        cur_term = os.environ['TERM'].lower().strip()
-
-    colored_term_list = (
-        r'ansi',
-        r'linux.*',
-        r'screen.*',
-        r'[xeak]term.*',
-        r'gnome.*',
-        r'rxvt.*',
-        r'interix',
-    )
-    term_pattern = r'^(?:' + r'|'.join(colored_term_list) + r')$'
-    re_term = re.compile(term_pattern)
-
-    ansi_term = False
-    env_term_has_colors = False
-
-    if cur_term:
-        if cur_term == 'ansi':
-            env_term_has_colors = True
-            ansi_term = True
-        elif re_term.search(cur_term):
-            env_term_has_colors = True
-    if debug:
-        sys.stderr.write(
-            "ansi_term: %r, env_term_has_colors: %r\n" % (
-                ansi_term, env_term_has_colors))
-
-    has_colors = False
-    if env_term_has_colors:
-        has_colors = True
-    for handle in [sys.stdout, sys.stderr]:
-        if (hasattr(handle, "isatty") and handle.isatty()):
-            if debug:
-                sys.stderr.write("%s is a tty.\n" % (handle.name))
-            if (platform.system() == 'Windows' and not ansi_term):
-                if debug:
-                    sys.stderr.write("platform is Windows and not ansi_term.\n")
-                has_colors = False
-        else:
-            if debug:
-                sys.stderr.write("%s is not a tty.\n" % (handle.name))
-            if ansi_term:
-                pass
-            else:
-                has_colors = False
-
-    return has_colors
-
-
-# =============================================================================
-def to_bool(value):
-    """
-    Converter from string to boolean values (e.g. from configurations)
-    """
-
-    if not value:
-        return False
-
-    try:
-        v_int = int(value)
-    except ValueError:
-        pass
-    except TypeError:
-        pass
-    else:
-        if v_int == 0:
-            return False
-        else:
-            return True
-
-    global PAT_TO_BOOL_TRUE
-    global RE_TO_BOOL_TRUE
-    global PAT_TO_BOOL_FALSE
-    global RE_TO_BOOL_FALSE
-
-    c_yes_expr = locale.nl_langinfo(locale.YESEXPR)
-    if c_yes_expr != PAT_TO_BOOL_TRUE:
-        PAT_TO_BOOL_TRUE = c_yes_expr
-        RE_TO_BOOL_TRUE = re.compile(PAT_TO_BOOL_TRUE)
-    # LOG.debug("Current pattern for 'yes': %r.", c_yes_expr)
-
-    c_no_expr = locale.nl_langinfo(locale.NOEXPR)
-    if c_no_expr != PAT_TO_BOOL_FALSE:
-        PAT_TO_BOOL_FALSE = c_no_expr
-        RE_TO_BOOL_FALSE = re.compile(PAT_TO_BOOL_FALSE)
-    # LOG.debug("Current pattern for 'no': %r.", c_no_expr)
-
-    v_str = ''
-    if isinstance(value, str):
-        v_str = value
-        if six.PY2:
-            if isinstance(value, unicode):                      # noqa
-                v_str = value.encode('utf-8')
-    elif six.PY3 and isinstance(value, bytes):
-        v_str = value.decode('utf-8')
-    else:
-        v_str = str(value)
-
-    match = RE_YES.search(v_str)
-    if match:
-        return True
-    match = RE_TO_BOOL_TRUE.search(v_str)
-    if match:
-        return True
-
-    match = RE_NO.search(v_str)
-    if match:
-        return False
-    match = RE_TO_BOOL_FALSE.search(v_str)
-    if match:
-        return False
-
-    return bool(value)
-
-
-# =============================================================================
-def to_unicode(obj, encoding='utf-8'):
-
-    do_decode = False
-    if six.PY2:
-        if isinstance(obj, str):
-            do_decode = True
-    else:
-        if isinstance(obj, bytes):
-            do_decode = True
-
-    if do_decode:
-        obj = obj.decode(encoding)
-
-    return obj
-
-
-# =============================================================================
-def to_utf8(obj):
-
-    return encode_or_bust(obj, 'utf-8')
-
-
-# =============================================================================
-def encode_or_bust(obj, encoding='utf-8'):
-
-    do_encode = False
-    if six.PY2:
-        if isinstance(obj, unicode):                            # noqa
-            do_encode = True
-    else:
-        if isinstance(obj, str):
-            do_encode = True
-
-    if do_encode:
-        obj = obj.encode(encoding)
-
-    return obj
-
-
-# =============================================================================
-def to_bytes(obj, encoding='utf-8'):
-    "Wrapper for encode_or_bust()"
-
-    return encode_or_bust(obj, encoding)
-
-
-# =============================================================================
-def to_str(obj, encoding='utf-8'):
-    """
-    Transforms the given string-like object into the str type according
-    to the current Python version.
-    """
-
-    if six.PY2:
-        return encode_or_bust(obj, encoding)
-    else:
-        return to_unicode(obj, encoding)
-
-
-# =============================================================================
-def caller_search_path():
-    """
-    Builds a search path for executables from environment $PATH
-    including some standard paths.
-
-    @return: all existing search paths
-    @rtype: list
-    """
-
-    path_list = []
-    search_path = os.environ.get('PATH', '')
-    if not search_path:
-        search_path = os.defpath
-
-    search_path_list = [
-        '/opt/PPlocal/bin',
-    ]
-
-    for d in search_path.split(os.pathsep):
-        search_path_list.append(d)
-
-    default_path = [
-        '/bin',
-        '/usr/bin',
-        '/usr/local/bin',
-        '/sbin',
-        '/usr/sbin',
-        '/usr/local/sbin',
-        '/usr/ucb',
-        '/usr/sfw/bin',
-        '/opt/csw/bin',
-        '/usr/openwin/bin',
-        '/usr/ccs/bin',
-    ]
-
-    for d in default_path:
-        search_path_list.append(d)
-
-    for d in search_path_list:
-        if not os.path.exists(d):
-            continue
-        if not os.path.isdir(d):
-            continue
-        d_abs = os.path.realpath(d)
-        if d_abs not in path_list:
-            path_list.append(d_abs)
-
-    return path_list
-
-# =============================================================================
-def compare_fqdn(x, y):
-
-    # LOG.debug("Comparing {!r} <=> {!r}.".format(x, y))
-
-    # First check for None values
-    if x is None and y is None:
-        return 0
-    if x is None:
-        return -1
-    if y is None:
-        return 1
-
-    # Check for empty FQDNs
-    xs = str(x).strip().lower()
-    ys = str(y).strip().lower()
-
-    if xs == '' and ys == '':
-        return 0
-    if xs == '':
-        return -1
-    if ys == '':
-        return 1
-
-    # Ensure a dot at end
-    xs = RE_DOT_AT_END.sub('.', xs)
-    ys = RE_DOT_AT_END.sub('.', ys)
-
-    if xs == ys:
-        return 0
-
-    # Reverse IPv4 zones first, then reverse IPv6 zones
-    if RE_IPV4_PTR.search(xs):
-        if not RE_IPV4_PTR.search(ys):
-            return -1
-    elif RE_IPV4_PTR.search(ys):
-        if not RE_IPV4_PTR.search(xs):
-            return 1
-    elif RE_IPV6_PTR.search(xs):
-        if not RE_IPV6_PTR.search(ys):
-            return -1
-    elif RE_IPV6_PTR.search(ys):
-        if not RE_IPV6_PTR.search(xs):
-            return 1
-
-    return compare_fqdn_tokens(xs, ys)
-
-# =============================================================================
-def compare_fqdn_tokens(xs, ys):
-
-    xa = RE_DOT.split(xs)
-    xa.reverse()
-    xa.pop(0)
-
-    ya = RE_DOT.split(ys)
-    ya.reverse()
-    ya.pop(0)
-
-    # Compare token from the last to the first
-    nr_tokens = min(len(xa), len(ya))
-    while nr_tokens > 0:
-        token_x = xa.pop(0)
-        token_y = ya.pop(0)
-        if RE_DECIMAL.match(token_x) and RE_DECIMAL.match(token_y):
-            num_x = int(token_x)
-            num_y = int(token_y)
-            if num_x < num_y:
-                return -1
-            elif num_x > num_y:
-                return 1
-        else:
-            if token_x < token_y:
-                return -1
-            elif token_x > token_y:
-                return 1
-        nr_tokens -= 1
-
-    if len(xa):
-        return 1
-    if len(ya):
-        return -1
-
-    return 0
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
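The helpers in common.py above are used throughout the library. A brief sketch of to_bool() and of sorting zone names with compare_fqdn() via functools.cmp_to_key (the zone names are invented; the import path is the old pp_lib location):

import functools

from pp_lib.common import pp, to_bool, compare_fqdn

print(to_bool('yes'), to_bool('off'), to_bool(0))    # True False False

zones = ['example.com.', '2.1.10.in-addr.arpa.', 'a.example.com.']
print(pp(sorted(zones, key=functools.cmp_to_key(compare_fqdn))))
# reverse (in-addr.arpa) zones sort before forward zones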
diff --git a/pp_lib/config_named_app.py b/pp_lib/config_named_app.py
deleted file mode 100644 (file)
index 9ec3133..0000000
+++ /dev/null
@@ -1,1900 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: A module for the application class for configuring named
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import re
-import pwd
-import copy
-import textwrap
-import socket
-import grp
-import tempfile
-import time
-import datetime
-import ipaddress
-import stat
-import shutil
-import shlex
-
-from subprocess import Popen, TimeoutExpired, PIPE
-
-# Third party modules
-import six
-import requests
-
-from six.moves.urllib.parse import urlunsplit
-
-# Own modules
-from .common import pp, to_bool, to_str
-
-from .cfg_app import PpCfgAppError, PpConfigApplication
-
-from .pidfile import PidFileError, PidFile
-
-__version__ = '0.7.4'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpConfigNamedError(PpCfgAppError):
-    pass
-
-
-# =============================================================================
-class PpConfigNamedApp(PpConfigApplication):
-    """
-    Class for the application 'config-named' for configuring the BIND named daemon.
-    """
-
-    default_pidfile = '/run/dns-deploy-zones.pid'
-
-    default_pdns_api_host = 'systemshare.pixelpark.com'
-    default_pdns_api_port = 8081
-    default_pdns_api_root_path = '/api/v1'
-    default_pdns_api_server_id = 'localhost'
-
-    default_named_conf_dir = '/etc'
-    default_named_conf = 'named.conf'
-    default_named_bindkeys_file = 'named.iscdlv.key'
-    default_named_rootkeys_file = 'named.root.key'
-    default_named_def_zones_file = 'named.rfc1912.zones'
-    default_named_acl_cfg_file = 'named.acl.conf'
-    default_named_log_cfg_file = 'named.log.conf'
-    default_named_zones_cfg_file = 'named.zones.conf'
-
-    default_named_basedir = '/var/named'
-    default_named_datadir = 'data'
-    default_named_slavedir = 'slaves'
-    default_named_managed_keysdir = 'dynamic'
-    default_named_root_zone_file = 'named.ca'
-
-    default_named_rundir = '/run/named'
-    default_named_pidfile = 'named.pid'
-    default_named_session_keyfile = 'session.key'
-
-    default_named_log_dir = '/var/log/named'
-
-    default_named_version2show = 'none'
-
-    default_zone_masters = [
-        '217.66.53.86',
-    ]
-
-    default_cmd_checkconf = '/usr/sbin/named-checkconf'
-    default_cmd_reload = '/usr/sbin/rndc reload'
-    default_cmd_status = '/usr/bin/systemctl status named.service'
-    default_cmd_start = '/usr/bin/systemctl start named.service'
-    default_cmd_restart = '/usr/bin/systemctl restart named.service'
-
-    re_split_addresses = re.compile(r'[,;\s]+')
-    re_integer = re.compile(r'^\s*(\d+)\s*$')
-
-    re_ipv4_zone = re.compile(r'^((?:\d+\.)+)in-addr\.arpa\.$')
-    re_ipv6_zone = re.compile(r'^((?:[\da-f]\.)+)ip6\.arpa\.$')
-
-    re_block_comment = re.compile(r'/\*.*?\*/', re.MULTILINE | re.DOTALL)
-    re_line_comment = re.compile(r'(?://|#).*$', re.MULTILINE)
-
-    open_args = {}
-    if six.PY3:
-        open_args = {
-            'encoding': 'utf-8',
-            'errors': 'surrogateescape',
-        }
-
-    log_channels = {
-        'named': {
-            'type': 'file',
-            'print-time': True,
-            'print-category': True,
-            'print-severity': True,
-        },
-        'syslog': {
-            'type': 'syslog',
-            'facility': 'daemon',
-            'print-category': True,
-        },
-        'security': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'config': {
-            'type': 'file',
-            'severity': 'debug',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'dnssec': {
-            'type': 'file',
-            'severity': 'dynamic',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'ratelimit': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'query': {
-            'type': 'file',
-            'severity': 'debug',
-            'print-time': True,
-        },
-        'query-error': {
-            'type': 'file',
-            'severity': 'notice',
-            'print-time': True,
-        },
-        'resolver': {
-            'type': 'file',
-            'severity': 'dynamic',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'xfer-in': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'xfer-out': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'update': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'notify': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'unmatched': {
-            'type': 'file',
-            'print-time': True,
-            'print-category': True,
-            'print-severity': True,
-        },
-        'network': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-        'lame-servers': {
-            'type': 'file',
-            'print-time': True,
-            'print-severity': True,
-        },
-    }
-
-    log_categories = {
-        'client': ['null'],
-        'config': ['syslog', 'named', 'config'],
-        'database': ['syslog', 'named'],
-        'default': ['syslog', 'named'],
-        'delegation-only': ['syslog', 'named'],
-        'dispatch': ['syslog', 'named'],
-        'dnssec': ['syslog', 'named', 'dnssec'],
-        'general': ['syslog', 'named'],
-        'lame-servers': ['lame-servers'],
-        'network': ['syslog', 'named', 'network'],
-        'notify': ['syslog', 'named', 'notify'],
-        'queries': ['query', 'query-error'],
-        'resolver': ['syslog', 'named', 'resolver'],
-        'rpz': ['syslog', 'named'],
-        'rate-limit': ['syslog', 'named', 'ratelimit'],
-        'security': ['syslog', 'named', 'security'],
-        'unmatched': ['syslog', 'named', 'unmatched'],
-        'update': ['syslog', 'named', 'update'],
-        'update-security': ['syslog', 'named', 'update', 'security'],
-        'xfer-in': ['syslog', 'named', 'xfer-in'],
-        'xfer-out': ['syslog', 'named', 'xfer-out'],
-    }
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self._show_simulate_opt = True
-
-        self.pidfile_name = self.default_pidfile
-
-        self.pdns_api_host = self.default_pdns_api_host
-        self.pdns_api_port = self.default_pdns_api_port
-        self.pdns_api_root_path = self.default_pdns_api_root_path
-        self.pdns_api_server_id = self.default_pdns_api_server_id
-        self.pdns_api_key = None
-
-        self.is_internal = False
-        self.named_listen_on_v6 = False
-
-        # Configuration files and directories
-        self.named_conf_dir = self.default_named_conf_dir
-        self._named_conf = self.default_named_conf
-        self._named_bindkeys_file = self.default_named_bindkeys_file
-        self._named_rootkeys_file = self.default_named_rootkeys_file
-        self._named_def_zones_file = self.default_named_def_zones_file
-        self._named_acl_cfg_file = self.default_named_acl_cfg_file
-        self._named_log_cfg_file = self.default_named_log_cfg_file
-        self._named_zones_cfg_file = self.default_named_zones_cfg_file
-
-        # Variable status directories and files
-        self.named_basedir = self.default_named_basedir
-        self._named_datadir = self.default_named_datadir
-        self._named_slavedir = self.default_named_slavedir
-        self._named_managed_keysdir = self.default_named_managed_keysdir
-        self._named_root_zone_file = self.default_named_root_zone_file
-
-        # Runtime volatile directories and files
-        self.named_rundir = self.default_named_rundir
-        self._named_pidfile = self.default_named_pidfile
-        self._named_session_keyfile = self.default_named_session_keyfile
-
-        # Runtime user and group
-        self.named_user = 'named'
-        self.named_uid = None
-        self.named_group = 'named'
-        self.named_gid = None
-
-        self.named_dnssec = False
-
-        # Logging configuration
-        self.named_logdir = '/var/log/named'
-        self.query_log = False
-
-        self.named_show_bind_version = False
-        self.named_version2show = self.default_named_version2show
-
-        self.zone_masters = copy.copy(self.default_zone_masters)
-
-        self.zones = []
-        self.pidfile = None
-
-        self.tempdir = None
-        self.temp_named_conf = None
-        self.temp_acl_cfg_file = None
-        self.temp_log_cfg_file = None
-        self.temp_zones_cfg_file = None
-        self.keep_tempdir = False
-
-        self.backup_suffix = (
-            '.' + datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S') + '.bak')
-
-        self.reload_necessary = False
-        self.restart_necessary = False
-
-        self.cmd_checkconf = self.default_cmd_checkconf
-        self.cmd_reload = self.default_cmd_reload
-        self.cmd_status = self.default_cmd_status
-        self.cmd_start = self.default_cmd_start
-        self.cmd_restart = self.default_cmd_restart
-
-        self.files2replace = {}
-        self.moved_files = {}
-
-        self.acls = {
-            'allow-notify': ['dnsmaster.pixelpark.com'],
-            'allow-transfer': ['dnsmaster.pixelpark.com'],
-        }
-
-        description = textwrap.dedent('''\
-            Generation of configuration of named (the BIND 9 name daemon).
-            ''').strip()
-
-        super(PpConfigNamedApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='dns-deploy-zones',
-        )
-
-        self.post_init()
-
-    # -------------------------------------------
-    @property
-    def named_conf(self):
-        """The named.conf as an absolute pathname."""
-        return os.path.join(self.named_conf_dir, self._named_conf)
-
-    # -------------------------------------------
-    @property
-    def named_acl_cfg_file(self):
-        """The config file for ACLs as an absolute pathname."""
-        return os.path.join(self.named_conf_dir, self._named_acl_cfg_file)
-
-    # -------------------------------------------
-    @property
-    def named_bindkeys_file(self):
-        """The file for bind keys."""
-        return os.path.join(self.named_conf_dir, self._named_bindkeys_file)
-
-    # -------------------------------------------
-    @property
-    def named_rootkeys_file(self):
-        """The file for root keys."""
-        return os.path.join(self.named_conf_dir, self._named_rootkeys_file)
-
-    # -------------------------------------------
-    @property
-    def named_def_zones_file(self):
-        """The file for default zones."""
-        return os.path.join(self.named_conf_dir, self._named_def_zones_file)
-
-    # -------------------------------------------
-    @property
-    def named_log_cfg_file(self):
-        """The file for logging configuration."""
-        return os.path.join(self.named_conf_dir, self._named_log_cfg_file)
-
-    # -------------------------------------------
-    @property
-    def named_zones_cfg_file(self):
-        """The file for configuration of all own zones."""
-        return os.path.join(self.named_conf_dir, self._named_zones_cfg_file)
-
-    # -------------------------------------------
-    @property
-    def rndc_config_file(self):
-        """The config file for RNDC (included in named.conf)"""
-        return os.path.join(self.named_conf_dir, 'rndc.key')
-
-    # -------------------------------------------
-    @property
-    def named_pidfile(self):
-        """The PID file for the named daemon."""
-        return os.path.join(self.named_rundir, self._named_pidfile)
-
-    # -------------------------------------------
-    @property
-    def named_session_keyfile(self):
-        """The file for the named session key."""
-        return os.path.join(self.named_rundir, self._named_session_keyfile)
-
-    # -------------------------------------------
-    @property
-    def named_datadir_abs(self):
-        """The directory for additional data of named."""
-        return os.path.join(self.named_basedir, self._named_datadir)
-
-    # -------------------------------------------
-    @property
-    def named_datadir_rel(self):
-        """The directory for additional data of named."""
-        return self._named_datadir
-
-    # -------------------------------------------
-    @property
-    def named_dump_dir(self):
-        """Directory name of the named dump file."""
-        return os.path.join(self.named_basedir, 'dump')
-
-    # -------------------------------------------
-    @property
-    def named_dump_file(self):
-        """File name of the named dump file."""
-        return os.path.join(self.named_dump_dir, 'named_dump.db')
-
-    # -------------------------------------------
-    @property
-    def named_stats_dir(self):
-        """Directory name of the named statistics."""
-        return os.path.join(self.named_basedir, 'stats')
-
-    # -------------------------------------------
-    @property
-    def named_stats_file(self):
-        """File name of the named statistics file."""
-        return os.path.join(self.named_stats_dir, 'named.stats')
-
-    # -------------------------------------------
-    @property
-    def named_slavedir_rel(self):
-        """The directory for zone files of slave zones."""
-        return self._named_slavedir
-
-    # -------------------------------------------
-    @property
-    def named_slavedir_abs(self):
-        """The directory for zone files of slave zones."""
-        return os.path.join(self.named_basedir, self._named_slavedir)
-
-    # -------------------------------------------
-    @property
-    def named_root_zone_file_rel(self):
-        """The filename of the root zone."""
-        return self._named_root_zone_file
-
-    # -------------------------------------------
-    @property
-    def named_root_zone_file_abs(self):
-        """The filename of the root zone."""
-        return os.path.join(self.named_basedir, self._named_root_zone_file)
-
-    # -------------------------------------------
-    @property
-    def named_managed_keysdir(self):
-        """The directory for managed session keys."""
-        return os.path.join(self.named_basedir, self._named_managed_keysdir)
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PpConfigNamedApp, self).as_dict(short=short)
-        res['named_conf'] = self.named_conf
-        res['named_acl_cfg_file'] = self.named_acl_cfg_file
-        res['named_pidfile'] = self.named_pidfile
-        res['named_session_keyfile'] = self.named_session_keyfile
-        res['named_bindkeys_file'] = self.named_bindkeys_file
-        res['named_rootkeys_file'] = self.named_rootkeys_file
-        res['named_def_zones_file'] = self.named_def_zones_file
-        res['named_log_cfg_file'] = self.named_log_cfg_file
-        res['named_zones_cfg_file'] = self.named_zones_cfg_file
-        res['rndc_config_file'] = self.rndc_config_file
-        res['named_dump_dir'] = self.named_dump_dir
-        res['named_dump_file'] = self.named_dump_file
-        res['named_stats_dir'] = self.named_stats_dir
-        res['named_stats_file'] = self.named_stats_file
-        res['named_datadir_abs'] = self.named_datadir_abs
-        res['named_datadir_rel'] = self.named_datadir_rel
-        res['named_slavedir_abs'] = self.named_slavedir_abs
-        res['named_slavedir_rel'] = self.named_slavedir_rel
-        res['named_managed_keysdir'] = self.named_managed_keysdir
-        res['named_root_zone_file_rel'] = self.named_root_zone_file_rel
-        res['named_root_zone_file_abs'] = self.named_root_zone_file_abs
-        res['default_pidfile'] = self.default_pidfile
-        res['default_pdns_api_host'] = self.default_pdns_api_host
-        res['default_pdns_api_port'] = self.default_pdns_api_port
-        res['default_pdns_api_root_path'] = self.default_pdns_api_root_path
-        res['default_pdns_api_server_id'] = self.default_pdns_api_server_id
-        res['default_named_conf_dir'] = self.default_named_conf_dir
-        res['default_named_conf'] = self.default_named_conf
-        res['default_named_bindkeys_file'] = self.default_named_bindkeys_file
-        res['default_named_rootkeys_file'] = self.default_named_rootkeys_file
-        res['default_named_def_zones_file'] = self.default_named_def_zones_file
-        res['default_named_acl_cfg_file'] = self.default_named_acl_cfg_file
-        res['default_named_log_cfg_file'] = self.default_named_log_cfg_file
-        res['default_named_zones_cfg_file'] = self.default_named_zones_cfg_file
-        res['default_named_basedir'] = self.default_named_basedir
-        res['default_named_datadir'] = self.default_named_datadir
-        res['default_named_slavedir'] = self.default_named_slavedir
-        res['default_named_managed_keysdir'] = self.default_named_managed_keysdir
-        res['default_named_root_zone_file'] = self.default_named_root_zone_file
-        res['default_named_rundir'] = self.default_named_rundir
-        res['default_named_pidfile'] = self.default_named_pidfile
-        res['default_named_session_keyfile'] = self.default_named_session_keyfile
-        res['default_named_log_dir'] = self.default_named_log_dir
-        res['default_named_version2show'] = self.default_named_version2show
-        res['default_zone_masters'] = copy.copy(self.default_zone_masters)
-        res['default_cmd_checkconf'] = copy.copy(self.default_cmd_checkconf)
-        res['default_cmd_reload'] = copy.copy(self.default_cmd_reload)
-        res['default_cmd_start'] = copy.copy(self.default_cmd_start)
-        res['default_cmd_status'] = copy.copy(self.default_cmd_status)
-        res['default_cmd_restart'] = copy.copy(self.default_cmd_restart)
-        res['re_split_addresses'] = self.re_split_addresses
-        res['re_integer'] = self.re_integer
-        res['re_ipv4_zone'] = self.re_ipv4_zone
-        res['re_ipv6_zone'] = self.re_ipv6_zone
-        res['open_args'] = self.open_args
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-
-        is_internal_group = self.arg_parser.add_mutually_exclusive_group()
-
-        is_internal_group.add_argument(
-            '--internal', '--not-public', dest='internal', action='store_true',
-            help="Creating a named configuration for a internal name server.",
-        )
-
-        is_internal_group.add_argument(
-            '--public', '--not-internal', dest='public', action='store_true',
-            help="Creating a named configuration for a public name server.",
-        )
-
-        query_log_group = self.arg_parser.add_mutually_exclusive_group()
-
-        query_log_group.add_argument(
-            '--querylog', dest='querylog', action='store_true',
-            help="Enabling query logging in the named configuration.",
-        )
-
-        query_log_group.add_argument(
-            '--no-querylog', dest='no_querylog', action='store_true',
-            help="Disabling query logging in the named configuration.",
-        )
-
-        self.arg_parser.add_argument(
-            '-K', '--keep-tempdir', dest='keep_tempdir', action='store_true',
-            help=(
-                "Keeping the temporary directory instead of removing it at the end "
-                "(e.g. for debugging purposes)"),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpConfigNamedApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 3:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            section = self.cfg[section_name]
-
-            if section_name.lower() == 'app':
-                self._check_path_config(section, section_name, 'pidfile', 'pidfile_name', True)
-
-            if section_name.lower() in (
-                    'powerdns-api', 'powerdns_api', 'powerdnsapi',
-                    'pdns-api', 'pdns_api', 'pdnsapi'):
-                self.set_api_options(section, section_name)
-
-            if section_name.lower() == 'named':
-                self.set_named_options(section, section_name)
-
-            if section_name.lower() == 'acl':
-                self.read_acl_lists(section, section_name)
-
-        self._perform_cmdline_opts()
-
-    # -------------------------------------------------------------------------
-    def _perform_cmdline_opts(self):
-
-        if hasattr(self.args, 'internal') and self.args.internal:
-            self.is_internal = True
-        elif hasattr(self.args, 'public') and self.args.public:
-            self.is_internal = False
-
-        if hasattr(self.args, 'querylog') and self.args.querylog:
-            self.query_log = True
-        elif hasattr(self.args, 'no_querylog') and self.args.no_querylog:
-            self.query_log = False
-
-        self.keep_tempdir = getattr(self.args, 'keep_tempdir', False)
-
-    # -------------------------------------------------------------------------
-    def set_api_options(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'host' in section:
-            v = section['host']
-            host = v.lower().strip()
-            if host:
-                self.pdns_api_host = host
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-                if port <= 0 or port >= 2**16:
-                    raise ValueError(
-                        "a port must be greater than 0 and less than {}.".format(2**16))
-            except (TypeError, ValueError) as e:
-                LOG.error(
-                    "Wrong port number {!r} in configuration section {!r}: {}".format(
-                        section['port'], section_name, e))
-                self.config_has_errors = True
-            else:
-                self.pdns_api_port = port
-
-        self._check_path_config(
-            section, section_name, 'root_path',
-            'pdns_api_root_path', True, 'root path of the PowerDNS API')
-
-        if 'server_id' in section and section['server_id'].strip():
-            self.pdns_api_server_id = section['server_id'].strip().lower()
-
-        if 'key' in section:
-            key = section['key'].strip()
-            self.pdns_api_key = key
-
-    # -------------------------------------------------------------------------
-    def set_named_options(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'is_internal' in section:
-            if section['is_internal'] is None:
-                self.is_internal = False
-            else:
-                self.is_internal = to_bool(section['is_internal'])
-
-        if 'listen_on_v6' in section and section['listen_on_v6'] is not None:
-            self.named_listen_on_v6 = to_bool(section['listen_on_v6'])
-
-        if 'dnssec' in section and section['dnssec'] is not None:
-            self.named_dnssec = to_bool(section['dnssec'])
-
-        # Configuration files and directories
-        self._check_path_config(section, section_name, 'config_dir', 'named_conf_dir', True)
-        self._check_path_config(section, section_name, 'named_conf', '_named_conf', False)
-        self._check_path_config(
-            section, section_name, 'bindkeys_file', '_named_bindkeys_file', False)
-        self._check_path_config(
-            section, section_name, 'rootkeys_file', '_named_rootkeys_file', False)
-        self._check_path_config(
-            section, section_name, 'default_zones_file', '_named_def_zones_file', False)
-        self._check_path_config(
-            section, section_name, 'acl_cfg_file', '_named_acl_cfg_file', False)
-        self._check_path_config(
-            section, section_name, 'log_cfg_file', '_named_log_cfg_file', False)
-        self._check_path_config(
-            section, section_name, 'zones_cfg_file', '_named_zones_cfg_file', False)
-
-        # Variable status directories and files
-        self._check_path_config(section, section_name, 'base_dir', 'named_basedir', True)
-        self._check_path_config(section, section_name, 'data_dir', '_named_datadir', False)
-        self._check_path_config(section, section_name, 'slave_dir', '_named_slavedir', False)
-        self._check_path_config(
-            section, section_name, 'managed_keys_dir', '_named_managed_keysdir', False)
-        self._check_path_config(
-            section, section_name, 'root_zone_file', '_named_root_zone_file', False)
-
-        # Runtime volatile directories and files
-        self._check_path_config(section, section_name, 'run_dir', 'named_rundir', True)
-        self._check_path_config(section, section_name, 'pidfile', '_named_pidfile', False)
-        self._check_path_config(
-            section, section_name, 'session_keyfile', '_named_session_keyfile', False)
-
-        # Logging configuration
-        self._check_path_config(section, section_name, 'log_dir', 'named_logdir', True)
-        if 'query_log' in section:
-            self.query_log = to_bool(section['query_log'])
-
-        if 'show_bind_version' in section and section['show_bind_version'] is not None:
-            self.named_show_bind_version = to_bool(section['show_bind_version'])
-
-        if 'version_to_show' in section and section['version_to_show'] is not None:
-            self.named_version2show = section['version_to_show'].strip()
-
-        # Runtime user and group
-        if 'named_user' in section and section['named_user'] is not None:
-            self.named_user = section['named_user'].strip()
-        if 'named_group' in section and section['named_group'] is not None:
-            self.named_group = section['named_group'].strip()
-
-        if 'masters' in section:
-            self._get_masters_from_cfg(section['masters'], section_name)
-
-        for item in ('cmd_checkconf', 'cmd_reload', 'cmd_status', 'cmd_start', 'cmd_restart'):
-            if item in section and section[item].strip():
-                setattr(self, item, section[item].strip())
-
-    # -------------------------------------------------------------------------
-    def _check_path_config(self, section, section_name, key, class_prop, absolute=True, desc=None):
-
-        if key not in section:
-            return
-
-        d = ''
-        if desc:
-            d = ' ' + str(desc).strip()
-
-        path = section[key].strip()
-        if not path:
-            msg = "No path given for{} [{}]/{} in configuration.".format(
-                d, section_name, key)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        if absolute and not os.path.isabs(path):
-            msg = "Path {!r} for{} [{}]/{} in configuration must be an absolute path.".format(
-                path, d, section_name, key)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        setattr(self, class_prop, path)
-
-    # -------------------------------------------------------------------------
-    def _get_masters_from_cfg(self, value, section_name):
-
-        value = value.strip()
-        if not value:
-            msg = "No masters given in [{}]/masters.".format(section_name)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        masters = []
-
-        for m in self.re_split_addresses.split(value):
-            if m:
-                m = m.strip().lower()
-                try:
-                    addr_info = socket.getaddrinfo(                                         # noqa
-                        m, 53, proto=socket.IPPROTO_TCP, family=socket.AF_INET)             # noqa
-                except socket.gaierror as e:
-                    msg = (
-                        "Invalid hostname or address {!r} found in "
-                        "[{}]/masters: {}").format(m, section_name, e)
-                    LOG.error(msg)
-                    self.config_has_errors = True
-                    m = None
-            if m:
-                masters.append(m)
-        if masters:
-            self.zone_masters = masters
-
-    # -------------------------------------------------------------------------
-    def read_acl_lists(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        for acl_name in section.keys():
-
-            entries_str = section[acl_name].strip()
-            entries = self.re_split_addresses.split(entries_str)
-            self.acls[acl_name] = entries
-
-    # -------------------------------------------------------------------------
-    def post_init(self):
-
-        super(PpConfigNamedApp, self).post_init()
-        self.initialized = False
-
-        cred_ok = True
-        LOG.debug("Checking named user {!r} and group {!r} ...".format(
-            self.named_user, self.named_group))
-
-        match = self.re_integer.search(self.named_user)
-        if match:
-            self.named_uid = int(match.group(1))
-        else:
-            try:
-                uid = pwd.getpwnam(self.named_user).pw_uid
-            except KeyError:
-                msg = "Username {!r} not found.".format(self.named_user)
-                LOG.error(msg)
-                cred_ok = False
-            else:
-                self.named_uid = uid
-
-        match = self.re_integer.search(self.named_group)
-        if match:
-            self.named_gid = int(match.group(1))
-        else:
-            try:
-                gid = grp.getgrnam(self.named_group).gr_gid
-            except KeyError:
-                msg = "Group {!r} not found.".format(self.named_group)
-                LOG.error(msg)
-                cred_ok = False
-            else:
-                self.named_gid = gid
-
-        if not cred_ok:
-            self.exit(1)
-
-        self.pidfile = PidFile(
-            filename=self.pidfile_name, appname=self.appname, verbose=self.verbose,
-            base_dir=self.base_dir, simulate=self.simulate)
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        my_uid = os.geteuid()
-        if my_uid:
-            if self.simulate:
-                LOG.warn("You must be root to execute this script.")
-                group_ids = {}
-                for group in grp.getgrall():
-                    for g_username in group.gr_mem:
-                        g_uid = pwd.getpwnam(g_username).pw_uid
-                        if g_uid == my_uid:
-                            group_ids[group.gr_name] = group.gr_gid
-                if self.verbose > 2:
-                    LOG.debug("You are a member of the groups:\n{}".format(group_ids))
-                if self.named_group in group_ids:
-                    LOG.warn((
-                        "But hey - this is simulation mode, and and so it's "
-                        "sufficient to be a member of group {!r} ...").format(
-                        self.named_group))
-                else:
-                    LOG.error((
-                        "But also in simulation mode you has to be a member "
-                        "of group {!r}!").format(self.named_group))
-                    self.exit(1)
-                time.sleep(1)
-            else:
-                LOG.error("You must be root to execute this script.")
-                self.exit(1)
-
-        try:
-            self.pidfile.create()
-        except PidFileError as e:
-            LOG.error("Could not occupy pidfile: {}".format(e))
-            self.exit(7)
-            return
-
-        try:
-
-            self.get_api_zones()
-            self.init_temp_objects()
-            self.create_temp_files()
-            self.compare_files()
-            self.check_directories()
-
-            try:
-                self.replace_configfiles()
-                if not self.check_namedconf():
-                    self.restore_configfiles()
-                    self.exit(99)
-                self.apply_config()
-            except Exception:
-                self.restore_configfiles()
-                raise
-
-        finally:
-            self.cleanup()
-            self.pidfile = None
-
-    # -------------------------------------------------------------------------
-    def create_temp_files(self):
-
-        LOG.info("Generating all config files in a temporary directory ...")
-
-        self.generate_acl_file()
-        self.generate_named_conf()
-        self.generate_log_cfg_file()
-        self.generate_slave_cfg_file()
-
-    # -------------------------------------------------------------------------
-    def init_temp_objects(self):
-        """Init temporary objects and properties."""
-
-        self.tempdir = tempfile.mkdtemp(
-            prefix=(self.appname + '.'), suffix='.tmp.d'
-        )
-        LOG.debug("Temporary directory: {!r}.".format(self.tempdir))
-
-        self.temp_named_conf = os.path.join(
-            self.tempdir, self.default_named_conf)
-        self.temp_acl_cfg_file = os.path.join(
-            self.tempdir, self.default_named_acl_cfg_file)
-        self.temp_log_cfg_file = os.path.join(
-            self.tempdir, self.default_named_log_cfg_file)
-        self.temp_zones_cfg_file = os.path.join(
-            self.tempdir, self.default_named_zones_cfg_file)
-
-        if self.verbose > 1:
-            LOG.debug("Temporary named.conf: {!r}".format(self.temp_named_conf))
-            LOG.debug("Temporary ACL conf: {!r}".format(self.temp_acl_cfg_file))
-            LOG.debug("Temporary LOG conf: {!r}".format(self.temp_log_cfg_file))
-            LOG.debug("Temporary zones conf: {!r}".format(self.temp_zones_cfg_file))
-
-    # -------------------------------------------------------------------------
-    def generate_acl_file(self):
-
-        LOG.info("Generating {} ...".format(self.default_named_acl_cfg_file))
-
-        cur_date = datetime.datetime.now().isoformat(' ')
-
-        lines = []
-        lines.append('###############################################################')
-        lines.append('')
-        lines.append(' Bind9 configuration file for ACLs')
-        lines.append(' {}'.format(self.named_acl_cfg_file))
-        lines.append('')
-        lines.append(' Generated at: {}'.format(cur_date))
-        lines.append('')
-        lines.append('###############################################################')
-        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
-
-        content = header
-
-        for acl_name in sorted(self.acls.keys()):
-
-            lines = []
-            lines.append('')
-            lines.append('// ---------------------------------------------------------------')
-            lines.append('acl {} {{'.format(acl_name))
-            if acl_name in ('allow-notify', 'allow-transfer'):
-                lines.append('\t// Localhost')
-                lines.append('\t127.0.0.1;')
-                lines.append('\t::1;')
-
-            ips_done = []
-
-            for entry in self.acls[acl_name]:
-
-                hostname = entry
-                ip = None
-                ips = []
-                try:
-                    ip = ipaddress.ip_address(entry)
-                    ips.append(entry)
-                    hostname = socket.getfqdn(entry)
-                except ValueError:
-                    for info in socket.getaddrinfo(entry, 53):
-                        if info[0] not in (socket.AF_INET, socket.AF_INET6):
-                            continue
-                        if info[0] == socket.AF_INET:
-                            ips.append(info[4][0])
-                        elif self.named_listen_on_v6:
-                            ips.append(info[4][0])
-
-                if ips and hostname:
-                    lines.append('\t// {}'.format(hostname))
-                    for ip in sorted(ips):
-                        if ip not in ips_done:
-                            lines.append('\t{};'.format(ip))
-                            ips_done.append(ip)
-                else:
-                    msg = "Did not found IP address of {!r} for ACL {!r}.".format(
-                        entry, acl_name)
-                    LOG.error(msg)
-
-            lines.append('};')
-
-            content += '\n'.join(lines) + '\n'
-
-        content += '\n// vim: ts=8 filetype=named noet noai\n'
-
-        with open(self.temp_acl_cfg_file, 'w', **self.open_args) as fh:
-            fh.write(content)
-
-        if self.verbose > 2:
-            LOG.debug("Generated {!r}:\n{}".format(self.temp_acl_cfg_file, content.strip()))
-
-    # -------------------------------------------------------------------------
-    def generate_named_conf(self):                                                          # noqa
-
-        LOG.info("Generating {} ...".format(self.default_named_conf))
-
-        cur_date = datetime.datetime.now().isoformat(' ')
-
-        lines = []
-        lines.append('###############################################################')
-        lines.append('')
-        lines.append(' Main Bind9 configuration file')
-        lines.append(' {}'.format(self.named_conf))
-        lines.append('')
-        lines.append(' Provided by Red Hat bind package to configure the ISC BIND named(8) DNS server.')
-        lines.append('')
-        lines.append(' See /usr/share/doc/bind*/sample/ for example named configuration files.')
-        lines.append('')
-        lines.append(' See the BIND Administrator\'s Reference Manual (ARM) for details about the')
-        lines.append(' configuration located in /usr/share/doc/bind-{version}/Bv9ARM.html')
-        lines.append('')
-        lines.append(' Generated at: {}'.format(cur_date))
-        lines.append('')
-        lines.append('###############################################################')
-        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
-
-        content = header
-
-        content += '\n// access control lists\n'
-        content += 'include "{}";\n'.format(self.named_acl_cfg_file)
-
-        option_lines = []
-        option_lines.append('options {')
-        option_lines.append('\tlisten-on { any; };')
-        if self.named_listen_on_v6:
-            option_lines.append('\tlisten-on-v6 { any; };')
-        else:
-            option_lines.append('\tlisten-on-v6 { ::1; };')
-        option_lines.append('')
-        option_lines.append('\trecursion no;')
-        option_lines.append('')
-        option_lines.append('\tdirectory "{}";'.format(self.named_basedir))
-        option_lines.append('\tpid-file "{}";'.format(self.named_pidfile))
-        option_lines.append('\tdump-file "{}";'.format(self.named_dump_file))
-        option_lines.append('\tstatistics-file "{}";'.format(self.named_stats_file))
-        option_lines.append('\tsession-keyfile "{}";'.format(self.named_session_keyfile))
-
-        option_lines.append('')
-        option_lines.append('\t// DNSSEC')
-        option_lines.append('\tdnssec-enable yes;')
-        option_lines.append('\tdnssec-validation yes;')
-
-        option_lines.append('')
-        option_lines.append('\t// Path to ISC DLV key')
-        option_lines.append('\tbindkeys-file "{}";'.format(self.named_bindkeys_file))
-
-        option_lines.append('')
-        option_lines.append('\tmanaged-keys-directory "{}";'.format(self.named_managed_keysdir))
-
-        option_lines.append('')
-        option_lines.append('\tallow-transfer {')
-        option_lines.append('\t\tallow-transfer;')
-        option_lines.append('\t};')
-
-        option_lines.append('')
-        option_lines.append('\tallow-notify {')
-        option_lines.append('\t\tallow-notify;')
-        option_lines.append('\t};')
-
-        if not self.named_show_bind_version:
-            option_lines.append('')
-            option_lines.append('\tversion "{}";'.format(self.named_version2show))
-
-        option_lines.append('')
-        option_lines.append('};')
-        content += '\n' + '\n'.join(option_lines) + '\n'
-
-        if not os.path.exists(self.rndc_config_file):
-            LOG.error("File {!r} does not exists, please generate it with `rndc-confgen`.".format(
-                self.rndc_config_file))
-            if not self.simulate:
-                self.exit(8)
-        elif not os.path.isfile(self.rndc_config_file):
-            LOG.error("File {!r} is not a regular file.".format(self.rndc_config_file))
-            if not self.simulate:
-                self.exit(8)
-        content += '\n// Managed Keys of RNDC\n'
-        content += 'include "{}";\n'.format(self.rndc_config_file)
-        content += '\ncontrols {\n'
-        content += '\tinet 127.0.0.1 port 953 allow {\n'
-        content += '\t\t127.0.0.1;\n'
-        content += '\t\t::1/128;\n'
-        content += '\t} keys {\n'
-        content += '\t\t"rndc-key";\n'
-        content += '\t};\n'
-        content += '};\n'
-
-        content += '\n// logging configuration\n'
-        content += 'include "{}";\n'.format(self.named_log_cfg_file)
-
-        # Defining root zone file
-        if not os.path.exists(self.named_root_zone_file_abs):
-            LOG.error("File {!r} does not exists.".format(self.named_root_zone_file_abs))
-            if not self.simulate:
-                self.exit(8)
-        elif not os.path.isfile(self.named_root_zone_file_abs):
-            LOG.error("File {!r} is not a regular file.".format(self.named_root_zone_file_abs))
-            if not self.simulate:
-                self.exit(8)
-        fname = self.named_root_zone_file_rel
-        if os.path.isabs(fname):
-            fname_rel = os.path.relpath(fname, self.named_basedir)
-            if not fname_rel.startswith('../'):
-                fname = fname_rel
-        content += '\nzone "." {\n'
-        content += '\ttype hint;\n'
-        content += '\tfile "{}";\n'.format(fname)
-        content += '};\n'
-
-        # Including zone definitions for default zones
-        if not os.path.exists(self.named_def_zones_file):
-            LOG.error("File {!r} does not exists.".format(self.named_def_zones_file))
-            if not self.simulate:
-                self.exit(8)
-        elif not os.path.isfile(self.named_def_zones_file):
-            LOG.error("File {!r} is not a regular file.".format(self.named_def_zones_file))
-            if not self.simulate:
-                self.exit(8)
-        content += '\n// Default zones per RFC 1912\n'
-        content += 'include "{}";\n'.format(self.named_def_zones_file)
-
-        # Including root keys for DNSSEC
-        if not os.path.exists(self.named_rootkeys_file):
-            LOG.error("File {!r} does not exists.".format(self.named_rootkeys_file))
-            if not self.simulate:
-                self.exit(8)
-        elif not os.path.isfile(self.named_rootkeys_file):
-            LOG.error("File {!r} is not a regular file.".format(self.named_rootkeys_file))
-            if not self.simulate:
-                self.exit(8)
-        content += '\n// Including root keys for DNSSEC\n'
-        content += 'include "{}";\n'.format(self.named_rootkeys_file)
-
-        content += '\n// Including definitions of all slave zones\n'
-        content += 'include "{}";\n'.format(self.named_zones_cfg_file)
-
-        content += '\n// vim: ts=8 filetype=named noet noai\n'
-
-        with open(self.temp_named_conf, 'w', **self.open_args) as fh:
-            fh.write(content)
-
-        if self.verbose > 2:
-            LOG.debug("Generated {!r}:\n{}".format(self.temp_named_conf, content.strip()))
-
-    # -------------------------------------------------------------------------
-    def generate_log_cfg_file(self):                                                        # noqa
-
-        LOG.info("Generating {} ...".format(self.default_named_log_cfg_file))
-
-        cur_date = datetime.datetime.now().isoformat(' ')
-
-        lines = []
-        lines.append('###############################################################')
-        lines.append('')
-        lines.append(' Bind9 configuration for logging')
-        lines.append(' {}'.format(self.named_log_cfg_file))
-        lines.append('')
-        lines.append(' Generated at: {}'.format(cur_date))
-        lines.append('')
-        lines.append('###############################################################')
-        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
-
-        content = header
-
-        content += '\nlogging {\n'
-
-        content += '\n\t// -----------------------------------\n'
-        content += '\t// Channels\n'
-        for channel in sorted(self.log_channels.keys()):
-            channel_def = self.log_channels[channel]
-            lines = []
-            lines.append('')
-            lines.append('\tchannel {} {{'.format(channel))
-            ctype = 'file'
-            if 'type' in channel_def:
-                if channel_def['type'].lower() in ('file', 'syslog', 'stderr', 'null'):
-                    ctype = channel_def['type'].lower()
-                else:
-                    LOG.error("Wrong type {!r} for logging channel {!r}.".format(
-                        channel_def['type'], channel))
-                    continue
-            if ctype == 'file':
-                filename = os.path.join(self.named_logdir, channel + '.log')
-                lines.append('\t\tfile "{}";'.format(filename))
-            elif ctype == 'syslog':
-                fac = 'daemon'
-                if 'facility' in channel_def and channel_def['facility'].strip():
-                    fac = channel_def['facility'].strip().lower()
-                lines.append('\t\tsyslog {};'.format(fac))
-            else:
-                lines.append('\t\t{};'.format(ctype))
-
-            if 'severity' in channel_def and channel_def['severity'].strip():
-                lines.append('\t\tseverity {};'.format(channel_def['severity'].strip().lower()))
-
-            if 'print-category' in channel_def:
-                if to_bool(channel_def['print-category']):
-                    lines.append('\t\tprint-category yes;')
-
-            if 'print-severity' in channel_def:
-                if to_bool(channel_def['print-severity']):
-                    lines.append('\t\tprint-severity yes;')
-
-            if 'print-time' in channel_def:
-                if to_bool(channel_def['print-time']):
-                    lines.append('\t\tprint-time yes;')
-
-            lines.append('\t};')
-
-            content += '\n'.join(lines) + '\n'
-
-        content += '\n\t// -----------------------------------\n'
-        content += '\t// Categories\n'
-        for cat_name in sorted(self.log_categories.keys()):
-            lines = []
-            lines.append('')
-            channels = self.log_categories[cat_name]
-            lines.append('\tcategory {} {{'.format(cat_name))
-
-            if not channels:
-                channels = ['null']
-            if cat_name == 'queries':
-                if self.query_log:
-                    if 'query' not in channels:
-                        channels.append('query')
-                else:
-                    if 'query' in channels:
-                        channels.remove('query')
-
-            for channel in channels:
-                lines.append('\t\t{};'.format(channel))
-
-            lines.append('\t};')
-
-            content += '\n'.join(lines) + '\n'
-
-        content += '\n};\n'
-        content += '\n// vim: ts=8 filetype=named noet noai\n'
-
-        with open(self.temp_log_cfg_file, 'w', **self.open_args) as fh:
-            fh.write(content)
-
-        if self.verbose > 2:
-            LOG.debug("Generated {!r}:\n{}".format(self.temp_log_cfg_file, content.strip()))
-
-    # -------------------------------------------------------------------------
-    def generate_slave_cfg_file(self):
-
-        LOG.info("Generating {} ...".format(self.default_named_zones_cfg_file))
-
-        cur_date = datetime.datetime.now().isoformat(' ')
-        re_rev = re.compile(r'^rev\.', re.IGNORECASE)
-        re_trail_dot = re.compile(r'\.+$')
-
-        lines = []
-        lines.append('###############################################################')
-        lines.append('')
-        lines.append(' Bind9 configuration file for slave zones')
-        lines.append(' {}'.format(self.named_zones_cfg_file))
-        lines.append('')
-        lines.append(' Generated at: {}'.format(cur_date))
-        lines.append('')
-        lines.append('###############################################################')
-        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
-
-        content = header
-
-        for zone in self.zones:
-
-            account = str(zone['account']).lower().strip()
-
-            zname = re_trail_dot.sub('', zone['name'])
-            show_name = zone['canonical_name']
-            show_name = re_rev.sub('Reverse ', show_name)
-            show_name = re_trail_dot.sub('', show_name)
-            if account.startswith('intern') or account.startswith('local'):
-                if not self.is_internal:
-                    LOG.debug("Ignoring zone {!r}, because it's an internal zone.".format(zname))
-                    continue
-            else:
-                if self.is_internal:
-                    LOG.debug("Ignoring zone {!r}, because it's a public zone.".format(zname))
-                    continue
-
-            zfile = os.path.join(
-                self.named_slavedir_rel, re_trail_dot.sub('', zone['canonical_name']) + '.zone')
-
-            lines = []
-            lines.append('')
-            lines.append('// {}'.format(show_name))
-            lines.append('zone "{}" in {{'.format(zname))
-            lines.append('\tmasters {')
-            for master in self.zone_masters:
-                lines.append('\t\t{};'.format(master))
-            lines.append('\t};')
-            lines.append('\ttype slave;')
-            lines.append('\tfile "{}";'.format(zfile))
-            lines.append('};')
-
-            content += '\n'.join(lines) + '\n'
-
-        content += '\n// vim: ts=8 filetype=named noet noai\n'
-
-        with open(self.temp_zones_cfg_file, 'w', **self.open_args) as fh:
-            fh.write(content)
-
-        if self.verbose > 2:
-            LOG.debug("Generated {!r}:\n{}".format(self.temp_zones_cfg_file, content.strip()))
-
-    # -------------------------------------------------------------------------
-    def get_api_zones(self):
-
-        LOG.info("Trying to get all zones from PDNS API ...")
-
-        headers = {}
-        if self.pdns_api_key:
-            headers['X-API-Key'] = self.pdns_api_key
-
-        path = os.path.join(
-            self.pdns_api_root_path, 'servers', self.pdns_api_server_id, 'zones')
-        server = self.pdns_api_host
-        if self.pdns_api_port != 80:
-            server = '{}:{}'.format(server, self.pdns_api_port)
-        url = urlunsplit(('http', server, path, None, None))
-        LOG.debug("URL to send API call: {!r}.".format(url))
-        if self.verbose > 1:
-            LOG.debug("Headers:\n%s", pp(headers))
-        session = requests.Session()
-        response = session.request(
-            'GET', url, headers=headers, timeout=10)
-        if self.verbose > 1:
-            LOG.debug("Response status code: {}".format(response.status_code))
-        if not response.ok:
-            try:
-                err = response.json()
-                code = err['httpStatus']
-                msg = err['messages']
-                LOG.error("Got an error from API ({}) with status {}: {}".format(
-                    url, code, msg))
-                self.exit(6)
-            except ValueError:
-                msg = 'Failed to parse the response from {!r}: {}'.format(
-                    url, response.text)
-                LOG.error(msg)
-                self.exit(6)
-
-        json_response = response.json()
-        if self.verbose > 3:
-            LOG.debug("Got a response:\n{}".format(pp(json_response)))
-
-        for entry in json_response:
-
-            # {   'account': '',
-            #     'dnssec': False,
-            #     'id': '56.66.217.in-addr.arpa.',
-            #     'kind': 'Master',
-            #     'last_check': 0,
-            #     'masters': [],
-            #     'name': '56.66.217.in-addr.arpa.',
-            #     'notified_serial': 2018080202,
-            #     'serial': 2018080202,
-            #     'url': 'api/v1/servers/localhost/zones/56.66.217.in-addr.arpa.'},
-
-            zone_name = entry['name']
-            zone = {
-                'name': zone_name,
-                'account': entry['account'],
-                'kind': entry['kind'],
-                'serial': entry['serial'],
-            }
-
-            if entry['dnssec']:
-                self.named_dnssec = True
-            if self.verbose > 1:
-                LOG.debug("Found zone {!r}.".format(zone_name))
-
-            uni_name = None
-            match = self.re_ipv4_zone.search(zone_name)
-            if match:
-                prefix = self._get_ipv4_prefix(match.group(1))
-                if prefix:
-                    if prefix == '127.0.0':
-                        LOG.debug("Pure local zone {!r} will not be considered.".format(prefix))
-                        continue
-                    uni_name = 'rev.' + prefix
-
-            if not uni_name:
-                match = self.re_ipv6_zone.search(zone_name)
-                if match:
-                    prefix = self._get_ipv6_prefix(match.group(1))
-                    if prefix:
-                        uni_name = 'rev.' + prefix
-
-            if not uni_name:
-                uni_name = zone_name.encode('utf-8').decode('idna')
-
-            zone['canonical_name'] = uni_name
-
-            self.zones.append(zone)
-
-        self.zones.sort(key=lambda x: x['canonical_name'])
-
-        if self.verbose > 2:
-            LOG.debug("Got zones:\n{}".format(pp(self.zones)))
-
-    # -------------------------------------------------------------------------
-    def _get_ipv4_prefix(self, match):
-
-        tuples = []
-        for t in match.split('.'):
-            if t:
-                tuples.insert(0, t)
-        if self.verbose > 2:
-            LOG.debug("Got IPv4 tuples: {}".format(pp(tuples)))
-        return '.'.join(tuples)
-
-    # -------------------------------------------------------------------------
-    def _get_ipv6_prefix(self, match):
-
-        tuples = []
-        for t in match.split('.'):
-            if t:
-                tuples.insert(0, t)
-
-        tokens = []
-        while len(tuples):
-            token = ''.join(tuples[0:4]).ljust(4, '0')
-            if token.startswith('000'):
-                token = token[3:]
-            elif token.startswith('00'):
-                token = token[2:]
-            elif token.startswith('0'):
-                token = token[1:]
-            tokens.append(token)
-            del tuples[0:4]
-
-        if self.verbose > 2:
-            LOG.debug("Got IPv6 tokens: {}".format(pp(tokens)))
-
-        return ':'.join(tokens)
-
-    # -------------------------------------------------------------------------
-    def compare_files(self):
-
-        LOG.info("Comparing generated files with existing ones.")
-
-        if not self.files_equal_content(self.temp_named_conf, self.named_conf):
-            self.reload_necessary = True
-            self.restart_necessary = True
-            self.files2replace[self.named_conf] = self.temp_named_conf
-
-        if not self.files_equal_content(self.temp_acl_cfg_file, self.named_acl_cfg_file):
-            self.reload_necessary = True
-            self.files2replace[self.named_acl_cfg_file] = self.temp_acl_cfg_file
-
-        if not self.files_equal_content(self.temp_log_cfg_file, self.named_log_cfg_file):
-            self.reload_necessary = True
-            self.restart_necessary = True
-            self.files2replace[self.named_log_cfg_file] = self.temp_log_cfg_file
-
-        if not self.files_equal_content(self.temp_zones_cfg_file, self.named_zones_cfg_file):
-            self.reload_necessary = True
-            self.files2replace[self.named_zones_cfg_file] = self.temp_zones_cfg_file
-
-        if self.verbose > 1:
-            LOG.debug("Files to replace:\n{}".format(pp(self.files2replace)))
-
-    # -------------------------------------------------------------------------
-    def files_equal_content(self, file_src, file_tgt):
-
-        LOG.debug("Comparing {!r} with {!r} ...".format(file_src, file_tgt))
-
-        if not file_src:
-            raise PpConfigNamedError("Source file not defined.")
-        if not file_tgt:
-            raise PpConfigNamedError("Target file not defined.")
-
-        if not os.path.exists(file_src):
-            raise PpConfigNamedError("Source file {!r} does not exists.".format(file_src))
-        if not os.path.isfile(file_src):
-            raise PpConfigNamedError("Source file {!r} is not a regular file.".format(file_src))
-
-        if not os.path.exists(file_tgt):
-            LOG.debug("Target file {!r} does not exists.".format(file_tgt))
-            return False
-        if not os.path.isfile(file_tgt):
-            raise PpConfigNamedError("Target file {!r} is not a regular file.".format(file_tgt))
-
-        content_src = ''
-        if self.verbose > 2:
-            LOG.debug("Reading {!r} ...".format(file_src))
-        with open(file_src, 'r', **self.open_args) as fh:
-            content_src = fh.read()
-        lines_str_src = self.re_block_comment.sub('', content_src)
-        lines_str_src = self.re_line_comment.sub('', lines_str_src)
-        lines_src = []
-        for line in lines_str_src.splitlines():
-            line = line.strip()
-            if line:
-                lines_src.append(line)
-        if self.verbose > 3:
-            LOG.debug("Cleaned version of {!r}:\n{}".format(
-                file_src, '\n'.join(lines_src)))
-
-        content_tgt = ''
-        if self.verbose > 2:
-            LOG.debug("Reading {!r} ...".format(file_tgt))
-        with open(file_tgt, 'r', **self.open_args) as fh:
-            content_tgt = fh.read()
-        lines_str_tgt = self.re_block_comment.sub('', content_tgt)
-        lines_str_tgt = self.re_line_comment.sub('', lines_str_tgt)
-        lines_tgt = []
-        for line in lines_str_tgt.splitlines():
-            line = line.strip()
-            if line:
-                lines_tgt.append(line)
-        if self.verbose > 3:
-            LOG.debug("Cleaned version of {!r}:\n{}".format(
-                file_tgt, '\n'.join(lines_tgt)))
-
-        if len(lines_src) != len(lines_tgt):
-            LOG.debug((
-                "Source file {!r} has different number essential lines ({}) than "
-                "the target file {!r} ({} lines).").format(
-                file_src, len(lines_src), file_tgt, len(lines_tgt)))
-            return False
-
-        i = 0
-        while i < len(lines_src):
-            if lines_src[i] != lines_tgt[i]:
-                LOG.debug((
-                    "Source file {!r} has a different content than "
-                    "the target file {!r}.").format(file_src, lines_tgt))
-                return False
-            i += 1
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def check_directories(self):
-
-        LOG.info("Checking all necessary directories for existence and ownership.")
-
-        self.check_directory(self.named_conf_dir)
-        self.check_directory(self.named_basedir, None, self.named_gid, 0o750)
-        self.check_directory(self.named_datadir_abs, self.named_uid, self.named_gid, 0o770)
-        self.check_directory(self.named_dump_dir, self.named_uid, self.named_gid, 0o770)
-        self.check_directory(self.named_stats_dir, self.named_uid, self.named_gid, 0o770)
-        self.check_directory(self.named_slavedir_abs, self.named_uid, self.named_gid, 0o770)
-        self.check_directory(self.named_managed_keysdir, self.named_uid, self.named_gid, 0o770)
-        self.check_directory(self.named_logdir, self.named_uid, self.named_gid, 0o755)
-
-    # -------------------------------------------------------------------------
-    def check_directory(self, dirname, owner_id=None, group_id=None, mode=None):            # noqa
-
-        LOG.debug("Checking directory {!r} ...".format(dirname))
-
-        if not os.path.exists(dirname):
-            LOG.info("Creating directory {!r} ...".format(dirname))
-            if not self.simulate:
-                os.makedirs(dirname, mode=0o755)
-        elif not os.path.isdir(dirname):
-            LOG.error("Path {!r} exists, but is not a directory.".format(dirname))
-            return False
-        else:
-            LOG.debug("Directory {!r} already exists.".format(dirname))
-
-        fstat = None
-        if os.path.exists(dirname):
-            fstat = os.lstat(dirname)
-        else:
-            fstat = os.lstat('/etc')
-
-        uid_set = -1
-        gid_set = -1
-        if owner_id is not None:
-            if fstat.st_uid != owner_id:
-                uid_set = owner_id
-        if group_id is not None:
-            if fstat.st_gid != group_id:
-                gid_set = group_id
-
-        if owner_id is not None and group_id is not None:
-            cur_user = fstat.st_uid
-            cur_group = fstat.st_gid
-            try:
-                cur_user = '{!r}'.format(pwd.getpwuid(fstat.st_uid).pw_name)
-            except KeyError as e:
-                LOG.warn("User id {} not known: {}".format(fstat.st_uid, e))
-            try:
-                cur_group = '{!r}'.format(grp.getgrgid(fstat.st_gid).gr_name)
-            except KeyError as e:
-                LOG.warn("Group id {} not known: {}".format(fstat.st_gid, e))
-            LOG.debug("Current owners of {!r} are {}:{} ({}:{}).".format(
-                dirname, fstat.st_uid, fstat.st_gid, cur_user, cur_group))
-
-        if uid_set != -1 or gid_set != -1:
-            LOG.info("Setting ownership of {!r} to {}:{} ...".format(
-                dirname, uid_set, gid_set))
-            if not self.simulate:
-                os.chown(dirname, uid_set, gid_set)
-
-        if mode is not None:
-            current_permissions = stat.S_IMODE(fstat.st_mode)
-            LOG.debug("Current permissions of {!r} are {:04o}.".format(
-                dirname, current_permissions))
-            new_mode = mode
-
-            if new_mode & stat.S_IWUSR:
-                new_mode |= stat.S_IRUSR
-            if new_mode & stat.S_IRUSR:
-                new_mode |= stat.S_IXUSR
-
-            if new_mode & stat.S_IWGRP:
-                new_mode |= stat.S_IRGRP
-            if new_mode & stat.S_IRGRP:
-                new_mode |= stat.S_IXGRP
-
-            if new_mode & stat.S_IWOTH:
-                new_mode |= stat.S_IROTH
-            if new_mode & stat.S_IROTH:
-                new_mode |= stat.S_IXOTH
-
-            if new_mode != current_permissions:
-                LOG.info("Setting permissions of {!r} to {:04o} ...".format(
-                    dirname, new_mode))
-                if not self.simulate:
-                    os.chmod(dirname, new_mode)
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def replace_configfiles(self):
-
-        if not self.files2replace:
-            LOG.debug("No replacement of any config files necessary.")
-            return
-
-        LOG.debug("Start replacing of config files ...")
-
-        for tgt_file in self.files2replace.keys():
-
-            backup_file = tgt_file + self.backup_suffix
-
-            if os.path.exists(tgt_file):
-                self.moved_files[tgt_file] = backup_file
-                LOG.info("Copying {!r} => {!r} ...".format(tgt_file, backup_file))
-                if not self.simulate:
-                    shutil.copy2(tgt_file, backup_file)
-
-        if self.verbose > 1:
-            LOG.debug("All backuped config files:\n{}".format(pp(self.moved_files)))
-
-        for tgt_file in self.files2replace.keys():
-            src_file = self.files2replace[tgt_file]
-            LOG.info("Copying {!r} => {!r} ...".format(src_file, tgt_file))
-            if not self.simulate:
-                shutil.copy2(src_file, tgt_file)
-
-    # -------------------------------------------------------------------------
-    def restore_configfiles(self):
-
-        LOG.error("Restoring of original config files because of an exception.")
-
-        for tgt_file in self.moved_files.keys():
-            backup_file = self.moved_files[tgt_file]
-            LOG.info("Moving {!r} => {!r} ...".format(backup_file, tgt_file))
-            if not self.simulate:
-                if os.path.exists(backup_file):
-                    os.rename(backup_file, tgt_file)
-                else:
-                    LOG.error("Could not find backup file {!r}.".format(backup_file))
-
-    # -------------------------------------------------------------------------
-    def cleanup(self):
-
-        LOG.info("Cleaning up ...")
-
-        for tgt_file in self.moved_files.keys():
-            backup_file = self.moved_files[tgt_file]
-            LOG.debug("Searching for {!r}.".format(backup_file))
-            if os.path.exists(backup_file):
-                LOG.info("Removing {!r} ...".format(backup_file))
-                if not self.simulate:
-                    os.remove(backup_file)
-
-        # -----------------------
-        def emit_rm_err(function, path, excinfo):
-            LOG.error("Error removing {!r} - {}: {}".format(
-                path, excinfo[1].__class__.__name__, excinfo[1]))
-
-        if self.tempdir:
-            if self.keep_tempdir:
-                msg = (
-                    "Temporary directory {!r} will not be removed. "
-                    "It's on yours to remove it manually.").format(self.tempdir)
-                LOG.warn(msg)
-            else:
-                LOG.debug("Destroying temporary directory {!r} ...".format(self.tempdir))
-                shutil.rmtree(self.tempdir, False, emit_rm_err)
-                self.tempdir = None
-
-    # -------------------------------------------------------------------------
-    def check_namedconf(self):
-
-        LOG.info("Checking syntax correctness of named.conf ...")
-        cmd = shlex.split(self.cmd_checkconf)
-        LOG.debug("Executing: {}".format(' '.join(cmd)))
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=10)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.warn("Output on STDOUT: {}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.warn("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def apply_config(self):
-
-        if not self.reload_necessary and not self.restart_necessary:
-            LOG.info("Reload or restart of named is not necessary.")
-            return
-
-        running = self.named_running()
-        if not running:
-            LOG.warn("Named is not running, please start it manually.")
-            return
-
-        if self.restart_necessary:
-            self.restart_named()
-        else:
-            self.reload_named()
-
-    # -------------------------------------------------------------------------
-    def named_running(self):
-
-        LOG.debug("Checking, whether named is running ...")
-
-        cmd = shlex.split(self.cmd_status)
-        LOG.debug("Executing: {}".format(' '.join(cmd)))
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=10)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.warn("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def start_named(self):
-
-        LOG.info("Starting named ...")
-
-        cmd = shlex.split(self.cmd_start)
-        LOG.debug("Executing: {}".format(' '.join(cmd)))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def restart_named(self):
-
-        LOG.info("Restarting named ...")
-
-        cmd = shlex.split(self.cmd_restart)
-        LOG.debug("Executing: {}".format(' '.join(cmd)))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def reload_named(self):
-
-        LOG.info("Reloading named ...")
-
-        cmd = shlex.split(self.cmd_reload)
-        LOG.debug("Executing: {}".format(' '.join(cmd)))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/pp_lib/deploy_zones_from_pdns.py b/pp_lib/deploy_zones_from_pdns.py
deleted file mode 100644 (file)
index 4c404d0..0000000
+++ /dev/null
@@ -1,884 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: A module for the 'dns-deploy-zones' application class for configuring slave zones of the BIND named daemon
-"""
-from __future__ import absolute_import
-
-import os
-import logging
-import logging.config
-import textwrap
-import re
-import shlex
-import copy
-import datetime
-import socket
-import tempfile
-import time
-import shutil
-import pipes
-
-from subprocess import Popen, TimeoutExpired, PIPE
-
-from functools import cmp_to_key
-
-# Third party modules
-import six
-from pytz import timezone, UnknownTimeZoneError
-
-# Own modules
-from .common import pp, compare_fqdn, to_str, to_bool
-
-from .pdns_app import PpPDNSAppError, PpPDNSApplication
-
-from .pidfile import PidFileError, PidFile
-
-__version__ = '0.5.4'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpDeployZonesError(PpPDNSAppError):
-    pass
-
-
-# =============================================================================
-class PpDeployZonesApp(PpPDNSApplication):
-    """
-    Class for the application 'dns-deploy-zones' for configuring slave
-    zones of the BIND named daemon.
-    """
-
-    default_pidfile = '/run/dns-deploy-zones.pid'
-
-    default_named_conf_dir = '/etc'
-    default_named_zones_cfg_file = 'named.zones.conf'
-    default_named_basedir = '/var/named'
-    default_named_slavedir = 'slaves'
-
-    zone_masters_local = [
-        '217.66.53.87',
-    ]
-
-    zone_masters_public = [
-        '217.66.53.97',
-    ]
-
-    default_cmd_checkconf = '/usr/sbin/named-checkconf'
-    default_cmd_reload = '/usr/sbin/rndc reload'
-    default_cmd_status = '/usr/bin/systemctl status named.service'
-    default_cmd_start = '/usr/bin/systemctl start named.service'
-    default_cmd_restart = '/usr/bin/systemctl restart named.service'
-
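-    # Regular expressions matching the address part of reverse zone names
-    # (in-addr.arpa for IPv4, nibble format ip6.arpa for IPv6).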
-    re_ipv4_zone = re.compile(r'^((?:\d+\.)+)in-addr\.arpa\.$')
-    re_ipv6_zone = re.compile(r'^((?:[\da-f]\.)+)ip6\.arpa\.$')
-
-    re_block_comment = re.compile(r'/\*.*?\*/', re.MULTILINE | re.DOTALL)
-    re_line_comment = re.compile(r'(?://|#).*$', re.MULTILINE)
-
-    re_split_addresses = re.compile(r'[,;\s]+')
-    re_integer = re.compile(r'^\s*(\d+)\s*$')
-
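-    # Keyword arguments for open() on Python 3: read and write the
-    # configuration files as UTF-8 and pass undecodable bytes through.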
-    open_args = {}
-    if six.PY3:
-        open_args = {
-            'encoding': 'utf-8',
-            'errors': 'surrogateescape',
-        }
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, base_dir=None, version=__version__):
-
-        self.zones = []
-        self.pidfile = None
-
-        self._show_simulate_opt = True
-
-        self.is_internal = False
-        self.named_listen_on_v6 = False
-        self.pidfile_name = self.default_pidfile
-
-        # Configuration files and directories
-        self.named_conf_dir = self.default_named_conf_dir
-        self._named_zones_cfg_file = self.default_named_zones_cfg_file
-        self.named_basedir = self.default_named_basedir
-        self._named_slavedir = self.default_named_slavedir
-
-        self.zone_masters = copy.copy(self.zone_masters_public)
-        self.masters_configured = False
-
-        self.tempdir = None
-        self.temp_zones_cfg_file = None
-        self.keep_tempdir = False
-        self.keep_backup = False
-
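-        # Suffix for backup copies of replaced config files,
-        # e.g. '.2021-01-19_22-18-34.bak' (UTC timestamp).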
-        self.backup_suffix = (
-            '.' + datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S') + '.bak')
-
-        self.reload_necessary = False
-        self.restart_necessary = False
-
-        self.cmd_checkconf = self.default_cmd_checkconf
-        self.cmd_reload = self.default_cmd_reload
-        self.cmd_status = self.default_cmd_status
-        self.cmd_start = self.default_cmd_start
-        self.cmd_restart = self.default_cmd_restart
-
-        self.files2replace = {}
-        self.moved_files = {}
-
-        description = textwrap.dedent('''\
-            Generation of the BIND9 configuration file for slave zones.
-            ''')
-
-        super(PpDeployZonesApp, self).__init__(
-            appname=appname, version=version, description=description,
-            base_dir=base_dir, cfg_stems='dns-deploy-zones', environment="public",
-        )
-
-        self.post_init()
-
-    # -------------------------------------------
-    @property
-    def named_zones_cfg_file(self):
-        """The file for configuration of all own zones."""
-        return os.path.join(self.named_conf_dir, self._named_zones_cfg_file)
-
-    # -------------------------------------------
-    @property
-    def named_slavedir_rel(self):
-        """The directory for zone files of slave zones."""
-        return self._named_slavedir
-
-    # -------------------------------------------
-    @property
-    def named_slavedir_abs(self):
-        """The directory for zone files of slave zones."""
-        return os.path.join(self.named_basedir, self._named_slavedir)
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-
-        super(PpDeployZonesApp, self).init_arg_parser()
-
-        self.arg_parser.add_argument(
-            '-B', '--backup', dest="keep_backup", action='store_true',
-            help=("Keep a backup file for each changed configuration file."),
-        )
-
-        self.arg_parser.add_argument(
-            '-K', '--keep-tempdir', dest='keep_tempdir', action='store_true',
-            help=(
-                "Keeping the temporary directory instead of removing it at the end "
-                "(e.g. for debugging purposes)"),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Publicly available method to execute some actions after parsing
-        the command-line parameters.
-        """
-
-        super(PpDeployZonesApp, self).perform_arg_parser()
-
-        if self.args.keep_tempdir:
-            self.keep_tempdir = True
-
-        if self.args.keep_backup:
-            self.keep_backup = True
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpDeployZonesApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 3:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            section = self.cfg[section_name]
-
-            if section_name.lower() == 'app':
-                self._check_path_config(section, section_name, 'pidfile', 'pidfile_name', True)
-                if 'keep-backup' in section:
-                    self.keep_backup = to_bool(section['keep-backup'])
-                if 'keep_backup' in section:
-                    self.keep_backup = to_bool(section['keep_backup'])
-
-            if section_name.lower() == 'named':
-                self.set_named_options(section, section_name)
-
-        if not self.masters_configured:
-            if self.environment == 'local':
-                self.zone_masters = copy.copy(self.zone_masters_local)
-            else:
-                self.zone_masters = copy.copy(self.zone_masters_public)
-
-    # -------------------------------------------------------------------------
-    def set_named_options(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        # Configuration files and directories
-        self._check_path_config(
-            section, section_name, 'config_dir', 'named_conf_dir', True)
-        self._check_path_config(
-            section, section_name, 'zones_cfg_file', '_named_zones_cfg_file', False)
-        self._check_path_config(section, section_name, 'base_dir', 'named_basedir', True)
-        self._check_path_config(section, section_name, 'slave_dir', '_named_slavedir', False)
-
-        if 'listen_on_v6' in section and section['listen_on_v6'] is not None:
-            self.named_listen_on_v6 = to_bool(section['listen_on_v6'])
-
-        if 'masters' in section:
-            self._get_masters_from_cfg(section['masters'], section_name)
-
-        for item in ('cmd_checkconf', 'cmd_reload', 'cmd_status', 'cmd_start', 'cmd_restart'):
-            if item in section and section[item].strip():
-                setattr(self, item, section[item].strip())
-
-    # -------------------------------------------------------------------------
-    def _get_masters_from_cfg(self, value, section_name):
-
-        value = value.strip()
-        if not value:
-            msg = "No masters given in [{}]/masters.".format(section_name)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        masters = []
-
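-        # Resolve every configured master name or address via DNS and collect
-        # the resulting IPs; IPv6 results are skipped unless named is
-        # configured to listen on IPv6.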
-        for m in self.re_split_addresses.split(value):
-            if m:
-                m = m.strip().lower()
-                LOG.debug("Checking given master address {!r} ...".format(m))
-                try:
-                    addr_infos = socket.getaddrinfo(
-                        m, 53, proto=socket.IPPROTO_TCP)
-                    for addr_info in addr_infos:
-                        addr = addr_info[4][0]
-                        if not self.named_listen_on_v6 and addr_info[0] == socket.AF_INET6:
-                            msg = (
-                                "Not using {!r} as a master IP address, because "
-                                "we are not using IPv6.").format(addr)
-                            LOG.debug(msg)
-                            continue
-                        if addr in masters:
-                            LOG.debug("Address {!r} is already in masters.".format(addr))
-                        else:
-                            LOG.debug("Address {!r} not in masters yet.".format(addr))
-                            masters.append(addr)
-
-                except socket.gaierror as e:
-                    msg = (
-                        "Invalid hostname or address {!r} found in "
-                        "[{}]/masters: {}").format(m, section_name, e)
-                    LOG.error(msg)
-                    self.config_has_errors = True
-                    m = None
-        if masters:
-            if self.verbose > 2:
-                LOG.debug("Using configured masters: {}".format(pp(masters)))
-            self.zone_masters = masters
-            self.masters_configured = True
-        else:
-            LOG.warn("No valid masters found in configuration.")
-
-    # -------------------------------------------------------------------------
-    def post_init(self):
-
-        super(PpDeployZonesApp, self).post_init()
-        self.initialized = False
-
-        if not self.quiet:
-            print('')
-
-        LOG.debug("Post init phase.")
-
-        LOG.debug("Checking for masters which are local addresses ...")
-        ext_masters = []
-        for addr in self.zone_masters:
-            if addr in self.local_addresses:
-                LOG.debug(
-                    "Address {!r} IS in list of local addresses.".format(addr))
-            else:
-                LOG.debug(
-                    "Address {!r} not in list of local addresses.".format(addr))
-                ext_masters.append(addr)
-        self.zone_masters = ext_masters
-        LOG.info("Using masters for slave zones: {}".format(
-            ', '.join(map(lambda x: '{!r}'.format(x), self.zone_masters))))
-
-        self.pidfile = PidFile(
-            filename=self.pidfile_name, appname=self.appname, verbose=self.verbose,
-            base_dir=self.base_dir, simulate=self.simulate)
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overridden by descendant classes.
-
-        """
-
-        super(PpDeployZonesApp, self).pre_run()
-
-        if self.environment == 'global':
-            LOG.error(
-                "Using the global DNS master is not supported, "
-                "please use 'local' or 'public'")
-            self.exit(1)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        local_tz_name = 'Europe/Berlin'
-        if 'TZ' in os.environ and os.environ['TZ']:
-            local_tz_name = os.environ['TZ']
-        try:
-            local_tz = timezone(local_tz_name)
-        except UnknownTimeZoneError:
-            LOG.error("Unknown time zone: {!r}.".format(local_tz_name))
-            self.exit(6)
-
-        my_uid = os.geteuid()
-        if my_uid:
-            msg = "You must be root to execute this script."
-            if self.simulate:
-                LOG.warn(msg)
-                time.sleep(1)
-            else:
-                LOG.error(msg)
-                self.exit(1)
-
-        try:
-            self.pidfile.create()
-        except PidFileError as e:
-            LOG.error("Could not occupy pidfile: {}".format(e))
-            self.exit(7)
-            return
-
-        try:
-
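-            # Full deployment cycle: fetch the zones from the PowerDNS API,
-            # render the slave zone config into a temporary directory, compare
-            # it with the live files, swap them in, verify with named-checkconf
-            # and reload or restart named; on any error the original files are
-            # restored.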
-            LOG.info("Starting: {}".format(
-                datetime.datetime.now(local_tz).strftime('%Y-%m-%d %H:%M:%S %Z')))
-
-            self.zones = self.get_api_zones()
-            self.zones.sort(key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode))
-
-            self.init_temp_objects()
-            self.generate_slave_cfg_file()
-            self.compare_files()
-
-            try:
-                self.replace_configfiles()
-                if not self.check_namedconf():
-                    self.restore_configfiles()
-                    self.exit(99)
-                self.apply_config()
-            except Exception:
-                self.restore_configfiles()
-                raise
-
-        finally:
-            self.cleanup()
-            self.pidfile = None
-            LOG.info("Ending: {}".format(
-                datetime.datetime.now(local_tz).strftime('%Y-%m-%d %H:%M:%S %Z')))
-
-    # -------------------------------------------------------------------------
-    def cleanup(self):
-
-        LOG.info("Cleaning up ...")
-
-        for tgt_file in self.moved_files.keys():
-            backup_file = self.moved_files[tgt_file]
-            LOG.debug("Searching for {!r}.".format(backup_file))
-            if os.path.exists(backup_file):
-                if self.keep_backup:
-                    LOG.info("Keep existing backup file {!r}.".format(backup_file))
-                else:
-                    LOG.info("Removing {!r} ...".format(backup_file))
-                    if not self.simulate:
-                        os.remove(backup_file)
-
-        # -----------------------
-        def emit_rm_err(function, path, excinfo):
-            LOG.error("Error removing {!r} - {}: {}".format(
-                path, excinfo[1].__class__.__name__, excinfo[1]))
-
-        if self.tempdir:
-            if self.keep_tempdir:
-                msg = (
-                    "Temporary directory {!r} will not be removed. "
-                    "It is up to you to remove it manually.").format(self.tempdir)
-                LOG.warn(msg)
-            else:
-                LOG.debug("Destroying temporary directory {!r} ...".format(self.tempdir))
-                shutil.rmtree(self.tempdir, False, emit_rm_err)
-                self.tempdir = None
-
-    # -------------------------------------------------------------------------
-    def init_temp_objects(self):
-        """Init temporary objects and properties."""
-
-        self.tempdir = tempfile.mkdtemp(
-            prefix=(self.appname + '.'), suffix='.tmp.d'
-        )
-        LOG.debug("Temporary directory: {!r}.".format(self.tempdir))
-
-        self.temp_zones_cfg_file = os.path.join(
-            self.tempdir, self.default_named_zones_cfg_file)
-
-        if self.verbose > 1:
-            LOG.debug("Temporary zones conf: {!r}".format(self.temp_zones_cfg_file))
-
-    # -------------------------------------------------------------------------
-    def generate_slave_cfg_file(self):
-
-        LOG.info("Generating {} ...".format(self.default_named_zones_cfg_file))
-
-        cur_date = datetime.datetime.now().isoformat(' ')
-        re_rev = re.compile(r'^rev\.', re.IGNORECASE)
-        re_trail_dot = re.compile(r'\.+$')
-
-        lines = []
-        lines.append('###############################################################')
-        lines.append('')
-        lines.append(' BIND9 configuration file for slave zones')
-        lines.append(' {}'.format(self.named_zones_cfg_file))
-        lines.append('')
-        lines.append(' Generated at: {}'.format(cur_date))
-        lines.append('')
-        lines.append('###############################################################')
-        header = textwrap.indent('\n'.join(lines), '//', lambda line: True) + '\n'
-
-        content = header
-
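-        # Build one "type slave" zone stanza per zone; reverse zones get a
-        # canonical file name of the form "rev.<prefix>" derived from their
-        # in-addr.arpa / ip6.arpa name.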
-        for zone in self.zones:
-
-            canonical_name = zone.name_unicode
-            match = self.re_ipv4_zone.search(zone.name)
-            if match:
-                prefix = self._get_ipv4_prefix(match.group(1))
-                if prefix:
-                    if prefix == '127.0.0':
-                        LOG.debug("Pure local zone {!r} will not be considered.".format(prefix))
-                        continue
-                    canonical_name = 'rev.' + prefix
-            else:
-                match = self.re_ipv6_zone.search(zone.name)
-                if match:
-                    prefix = self._get_ipv6_prefix(match.group(1))
-                    if prefix:
-                        canonical_name = 'rev.' + prefix
-
-            show_name = canonical_name
-            show_name = re_rev.sub('Reverse ', show_name)
-            show_name = re_trail_dot.sub('', show_name)
-            zname = re_trail_dot.sub('', zone.name)
-
-            zfile = os.path.join(
-                self.named_slavedir_rel, re_trail_dot.sub('', canonical_name) + '.zone')
-
-            lines = []
-            lines.append('')
-            lines.append('// {}'.format(show_name))
-            lines.append('zone "{}" in {{'.format(zname))
-            lines.append('\tmasters {')
-            for master in self.zone_masters:
-                lines.append('\t\t{};'.format(master))
-            lines.append('\t};')
-            lines.append('\ttype slave;')
-            lines.append('\tfile "{}";'.format(zfile))
-            lines.append('};')
-
-            content += '\n'.join(lines) + '\n'
-
-        content += '\n// vim: ts=8 filetype=named noet noai\n'
-
-        with open(self.temp_zones_cfg_file, 'w', **self.open_args) as fh:
-            fh.write(content)
-
-        if self.verbose > 2:
-            LOG.debug("Generated {!r}:\n{}".format(self.temp_zones_cfg_file, content.strip()))
-
-    # -------------------------------------------------------------------------
-    def _get_ipv4_prefix(self, match):
-
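-        # Reverse the octet order of the captured in-addr.arpa labels to get
-        # the natural network prefix (e.g. '53.66.217.' -> '217.66.53').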
-        tuples = []
-        for t in match.split('.'):
-            if t:
-                tuples.insert(0, t)
-        if self.verbose > 2:
-            LOG.debug("Got IPv4 tuples: {}".format(pp(tuples)))
-        return '.'.join(tuples)
-
-    # -------------------------------------------------------------------------
-    def _get_ipv6_prefix(self, match):
-
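-        # Reverse the nibble order of the captured ip6.arpa labels, group them
-        # into hextets of four nibbles each and strip up to three leading
-        # zeros per hextet to get a readable IPv6 prefix.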
-        tuples = []
-        for t in match.split('.'):
-            if t:
-                tuples.insert(0, t)
-
-        tokens = []
-        while len(tuples):
-            token = ''.join(tuples[0:4]).ljust(4, '0')
-            if token.startswith('000'):
-                token = token[3:]
-            elif token.startswith('00'):
-                token = token[2:]
-            elif token.startswith('0'):
-                token = token[1:]
-            tokens.append(token)
-            del tuples[0:4]
-
-        if self.verbose > 2:
-            LOG.debug("Got IPv6 tokens: {}".format(pp(tokens)))
-
-        return ':'.join(tokens)
-
-    # -------------------------------------------------------------------------
-    def compare_files(self):
-
-        LOG.info("Comparing generated files with existing ones.")
-
-        if not self.files_equal_content(self.temp_zones_cfg_file, self.named_zones_cfg_file):
-            self.reload_necessary = True
-            self.files2replace[self.named_zones_cfg_file] = self.temp_zones_cfg_file
-
-        if self.verbose > 1:
-            LOG.debug("Files to replace:\n{}".format(pp(self.files2replace)))
-
-    # -------------------------------------------------------------------------
-    def files_equal_content(self, file_src, file_tgt):
-
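-        # Compare both files line by line after stripping block comments,
-        # line comments and empty lines, so purely cosmetic differences do
-        # not trigger a reload.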
-        LOG.debug("Comparing {!r} with {!r} ...".format(file_src, file_tgt))
-
-        if not file_src:
-            raise PpDeployZonesError("Source file not defined.")
-        if not file_tgt:
-            raise PpDeployZonesError("Target file not defined.")
-
-        if not os.path.exists(file_src):
-            raise PpDeployZonesError("Source file {!r} does not exist.".format(file_src))
-        if not os.path.isfile(file_src):
-            raise PpDeployZonesError("Source file {!r} is not a regular file.".format(file_src))
-
-        if not os.path.exists(file_tgt):
-            LOG.debug("Target file {!r} does not exist.".format(file_tgt))
-            return False
-        if not os.path.isfile(file_tgt):
-            raise PpDeployZonesError("Target file {!r} is not a regular file.".format(file_tgt))
-
-        content_src = ''
-        if self.verbose > 2:
-            LOG.debug("Reading {!r} ...".format(file_src))
-        with open(file_src, 'r', **self.open_args) as fh:
-            content_src = fh.read()
-        lines_str_src = self.re_block_comment.sub('', content_src)
-        lines_str_src = self.re_line_comment.sub('', lines_str_src)
-        lines_src = []
-        for line in lines_str_src.splitlines():
-            line = line.strip()
-            if line:
-                lines_src.append(line)
-        if self.verbose > 3:
-            LOG.debug("Cleaned version of {!r}:\n{}".format(
-                file_src, '\n'.join(lines_src)))
-
-        content_tgt = ''
-        if self.verbose > 2:
-            LOG.debug("Reading {!r} ...".format(file_tgt))
-        with open(file_tgt, 'r', **self.open_args) as fh:
-            content_tgt = fh.read()
-        lines_str_tgt = self.re_block_comment.sub('', content_tgt)
-        lines_str_tgt = self.re_line_comment.sub('', lines_str_tgt)
-        lines_tgt = []
-        for line in lines_str_tgt.splitlines():
-            line = line.strip()
-            if line:
-                lines_tgt.append(line)
-        if self.verbose > 3:
-            LOG.debug("Cleaned version of {!r}:\n{}".format(
-                file_tgt, '\n'.join(lines_tgt)))
-
-        if len(lines_src) != len(lines_tgt):
-            LOG.debug((
-                "Source file {!r} has a different number of essential lines ({}) than "
-                "the target file {!r} ({} lines).").format(
-                file_src, len(lines_src), file_tgt, len(lines_tgt)))
-            return False
-
-        i = 0
-        while i < len(lines_src):
-            if lines_src[i] != lines_tgt[i]:
-                LOG.debug((
-                    "Source file {!r} has a different content than "
-                    "the target file {!r}.").format(file_src, file_tgt))
-                return False
-            i += 1
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def replace_configfiles(self):
-
-        if not self.files2replace:
-            LOG.debug("No replacement of any config files necessary.")
-            return
-
-        LOG.debug("Start replacing of config files ...")
-
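-        # First copy every target file to a timestamped backup, then replace
-        # it with the freshly generated file from the temporary directory.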
-        for tgt_file in self.files2replace.keys():
-
-            backup_file = tgt_file + self.backup_suffix
-
-            if os.path.exists(tgt_file):
-                self.moved_files[tgt_file] = backup_file
-                LOG.info("Copying {!r} => {!r} ...".format(tgt_file, backup_file))
-                if not self.simulate:
-                    shutil.copy2(tgt_file, backup_file)
-
-        if self.verbose > 1:
-            LOG.debug("All backed-up config files:\n{}".format(pp(self.moved_files)))
-
-        for tgt_file in self.files2replace.keys():
-            src_file = self.files2replace[tgt_file]
-            LOG.info("Copying {!r} => {!r} ...".format(src_file, tgt_file))
-            if not self.simulate:
-                shutil.copy2(src_file, tgt_file)
-
-    # -------------------------------------------------------------------------
-    def restore_configfiles(self):
-
-        LOG.error("Restoring the original config files because of an exception.")
-
-        for tgt_file in self.moved_files.keys():
-            backup_file = self.moved_files[tgt_file]
-            LOG.info("Moving {!r} => {!r} ...".format(backup_file, tgt_file))
-            if not self.simulate:
-                if os.path.exists(backup_file):
-                    os.rename(backup_file, tgt_file)
-                else:
-                    LOG.error("Could not find backup file {!r}.".format(backup_file))
-
-    # -------------------------------------------------------------------------
-    def check_namedconf(self):
-
-        LOG.info("Checking syntax correctness of named.conf ...")
-        cmd = shlex.split(self.cmd_checkconf)
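-        # With high verbosity let named-checkconf also print the parsed
-        # configuration ('-p').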
-        if 'named-checkconf' in self.cmd_checkconf and self.verbose > 2:
-            cmd.append('-p')
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=10)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.warn("Output on STDOUT: {}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.warn("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def apply_config(self):
-
-        if not self.reload_necessary and not self.restart_necessary:
-            LOG.info("Reload or restart of named is not necessary.")
-            return
-
-        running = self.named_running()
-        if not running:
-            LOG.warn("Named is not running, please start it manually.")
-            return
-
-        if self.restart_necessary:
-            self.restart_named()
-        else:
-            self.reload_named()
-
-    # -------------------------------------------------------------------------
-    def named_running(self):
-
-        LOG.debug("Checking whether named is running ...")
-
-        cmd = shlex.split(self.cmd_status)
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=10)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.warn("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def start_named(self):
-
-        LOG.info("Starting named ...")
-
-        cmd = shlex.split(self.cmd_start)
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def restart_named(self):
-
-        LOG.info("Restarting named ...")
-
-        cmd = shlex.split(self.cmd_restart)
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def reload_named(self):
-
-        LOG.info("Reloading named ...")
-
-        cmd = shlex.split(self.cmd_reload)
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        LOG.debug("Executing: {}".format(cmd_str))
-
-        if self.simulate:
-            return
-
-        std_out = None
-        std_err = None
-        ret_val = None
-
-        with Popen(cmd, stdout=PIPE, stderr=PIPE) as proc:
-            try:
-                std_out, std_err = proc.communicate(timeout=30)
-            except TimeoutExpired:
-                proc.kill()
-                std_out, std_err = proc.communicate()
-            ret_val = proc.wait()
-
-        LOG.debug("Return value: {!r}".format(ret_val))
-        if std_out and std_out.strip():
-            s = to_str(std_out.strip())
-            LOG.debug("Output on STDOUT:\n{}".format(s))
-        if std_err and std_err.strip():
-            s = to_str(std_err.strip())
-            LOG.error("Output on STDERR: {}".format(s))
-
-        if ret_val:
-            return False
-
-        return True
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/pp_lib/differ.py b/pp_lib/differ.py
deleted file mode 100644 (file)
index 24b7d1d..0000000
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@summary: The module for the ConfigDiffer and ConfigFileDiffer objects.
-"""
-
-# Standard modules
-import os
-import logging
-import re
-from datetime import datetime, timezone
-import difflib
-import pprint
-import copy
-
-from difflib import Differ, IS_LINE_JUNK, IS_CHARACTER_JUNK
-# from difflib import SequenceMatcher
-
-# Third party modules
-import six
-
-# Own modules
-
-__version__ = '0.2.4'
-LOG = logging.getLogger(__name__)
-
-DEFAULT_COMMENT_CHAR = '#'
-
-
-# =============================================================================
-def pp(value, indent=4, width=99, depth=None):
-
-    pretty_printer = pprint.PrettyPrinter(
-        indent=indent, width=width, depth=depth)
-    return pretty_printer.pformat(value)
-
-
-# =============================================================================
-class ConfigDiffer(Differ):
-    """
-    A class for comparing the contents of two configuration files
-    without considering comments and whitespace.
-    """
-
-    pat_linejunk = r'^\s*(?:\#.*)?$'
-    re_linejunk = re.compile(pat_linejunk)
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def is_line_junk(cls, line):
-        return cls.re_linejunk.search(line) is not None
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, comment_chars=None, ignore_empty=False,
-            ignore_whitespace=False, ignore_comment=False, case_insensitive=False):
-
-        self.comment_chars = []
-        self.re_comment_list = []
-        self.re_token_list = []
-        self.ignore_empty = ignore_empty
-        self.ignore_whitespace = ignore_whitespace
-        self.ignore_comment = ignore_comment
-        self.case_insensitive = case_insensitive
-
-        if self.ignore_comment:
-            self.ignore_empty = True
-
-        if comment_chars:
-            if isinstance(comment_chars, (list, set, tuple)):
-                for char in comment_chars:
-                    if not char:
-                        continue
-                    self.comment_chars.append(str(char))
-            else:
-                self.comment_chars.append(str(comment_chars))
-        elif comment_chars is None:
-            self.comment_chars.append(DEFAULT_COMMENT_CHAR)
-
-        super(ConfigDiffer, self).__init__(
-            linejunk=IS_LINE_JUNK, charjunk=IS_CHARACTER_JUNK)
-
-        re_flags = re.MULTILINE
-        if six.PY3:
-            re_flags = re.MULTILINE | re.UNICODE
-
-        # a single quoted token
-        pat = r"^(\s*'(?:\\(?!')|\\'|(?:(?<!\\)[^']))*)(?#single-q-token)"
-        self.re_token_list.append(re.compile(pat, re_flags))
-
-        # a double quoted token
-        pat = r'^(\s*"(?:\\(?!")|\\"|(?:(?<!\\)[^"]))*")(?#double-q-token)'
-        self.re_token_list.append(re.compile(pat, re_flags))
-
-        # a token without quotings
-        pat = r'^(\s*(?:[^\s"' + r"'" + r']+|\\["' + r"'" + r'])+)(?#token-wo-quote)'
-        self.re_token_list.append(re.compile(pat, re_flags))
-
-        self.re_whitespace = re.compile(r'\s+', re_flags)
-
-        self.re_empty = re.compile(r'^(\s*)$')
-
-        if self.comment_chars:
-            i = 0
-            for char in self.comment_chars:
-
-                pat = r'^\s*' + re.escape(char) + r'.*$(?#sole-comment)'
-                self.re_comment_list.append(re.compile(pat, re_flags))
-
-                pat = (
-                    r'^(\s*"(?:[^"](?!' + re.escape(char) + r'))*)\s*' +
-                    re.escape(char) + r'.*$(?#comment-{}-wo-dq)'.format(i))
-                self.re_token_list.append(re.compile(pat, re_flags))
-
-                pat = (
-                    r"^(\s*'(?:[^'](?!" + re.escape(char) + r'))*)\s*' +
-                    re.escape(char) + r'.*$(?#comment-{}-wo-sq)'.format(i))
-                self.re_token_list.append(re.compile(pat, re_flags))
-
-                i += 1
-
-    # -------------------------------------------------------------------------
-    def remove_comments(self, line):
-
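-        # Strip comments from a single line while leaving comment characters
-        # inside quoted tokens untouched: consume the line token by token and
-        # cut it off as soon as a real comment pattern matches.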
-        if not self.re_comment_list:
-            # LOG.debug('line false     %r', line)
-            return line
-
-        if self.re_empty.match(line):
-            # LOG.debug('line empty     %r', line)
-            return line
-
-        old_line = line
-        new_line = ''
-
-        while True:
-
-            # LOG.debug('loop:          old_line: %r, new_line: %r', old_line, new_line)
-
-            for regex in self.re_comment_list:
-                if regex.search(old_line):
-                    new_line += regex.sub('', old_line)
-                    # LOG.debug(
-                    #     'found comment: old_line: %r, new_line: %r, pattern: %r',
-                    #     old_line, new_line, regex.pattern)
-                    return new_line
-
-            token_found = False
-            for regex in self.re_token_list:
-                match = regex.search(old_line)
-                if match:
-                    new_line += match.group(1)
-                    old_line = regex.sub('', old_line)
-                    # LOG.debug(
-                    #     'found token:   old_line: %r, new_line: %r, pattern: %r',
-                    #     old_line, new_line, regex.pattern)
-                    token_found = True
-                    break
-
-            match = self.re_empty.search(old_line)
-            if match:
-                # LOG.debug('old_line empty %r', old_line)
-                new_line += match.group(1)
-                return new_line
-
-            if token_found:
-                continue
-
-            return new_line + old_line
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting function for translating object structure
-        into a string
-
-        @return: structure as string
-        @rtype:  str
-        """
-
-        return pp(self.__dict__)
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("comment_chars=%r" % (self.comment_chars))
-        fields.append("ignore_empty=%r" % (self.ignore_empty))
-        fields.append("ignore_whitespace=%r" % (self.ignore_whitespace))
-        fields.append("ignore_comment=%r" % (self.ignore_comment))
-        fields.append("case_insensitive=%r" % (self.case_insensitive))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    def _mangle_lines(self, lines):
-
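-        # Normalize the given lines according to the ignore_* and
-        # case_insensitive flags before handing them to difflib.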
-        if isinstance(lines, (list, set, tuple)):
-            line_list = copy.copy(lines)
-        else:
-            line_list = [str(lines)]
-
-        if (not self.ignore_empty and not self.ignore_whitespace and
-                not self.ignore_comment and not self.case_insensitive):
-            return line_list
-
-        result_list = []
-        for item in line_list:
-            if self.ignore_empty and self.re_empty.search(item):
-                continue
-            item_cp = str(item)
-            if self.ignore_whitespace:
-                item_cp = self.re_whitespace.sub(' ', item_cp)
-            if self.ignore_comment:
-                item_cp = self.remove_comments(item_cp)
-            if self.case_insensitive:
-                item_cp = item_cp.lower()
-            result_list.append(item_cp)
-
-        return result_list
-
-    # -------------------------------------------------------------------------
-    def compare(self, a, b):
-
-        list_a = self._mangle_lines(a)
-        list_b = self._mangle_lines(b)
-
-        return super(ConfigDiffer, self).compare(list_a, list_b)
-
-    # -------------------------------------------------------------------------
-    def unified_diff(self, a, b, n=3, lineterm='\n'):
-
-        list_a = self._mangle_lines(a)
-        list_b = self._mangle_lines(b)
-
-        return difflib.unified_diff(list_a, list_b, n=n, lineterm=lineterm)
-
-    # -------------------------------------------------------------------------
-    def is_equal(self, a, b):
-
-        equal = True
-        for line in self.compare(a, b):
-            if not line.startswith(' '):
-                LOG.debug("Difference line: {}".format(line))
-                equal = False
-
-        return equal
-
-
-# =============================================================================
-class ConfigFileDiffer(ConfigDiffer):
-    """
-    A class for comparing the contents of two configuration files
-    without consideration of comments and whitespaces.
-    """
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def file_mtime(cls, path):
-
-        mtime = 0
-        if os.path.exists(path):
-            mtime = os.stat(path).st_mtime
-        t = datetime.fromtimestamp(mtime, timezone.utc)
-        return t.astimezone().isoformat(" ")
-
-    # -------------------------------------------------------------------------
-    def __init__(self):
-
-        super(ConfigFileDiffer, self).__init__()
-
-    # -------------------------------------------------------------------------
-    def compare(self, from_file, to_file):
-
-        from_content = []
-        to_content = []
-
-        open_args = {}
-        if six.PY3:
-            open_args = {
-                'encoding': 'utf-8',
-                'errors': 'surrogateescape',
-            }
-
-        if from_file:
-            if os.path.isfile(from_file):
-                LOG.debug("Reading {!r} ...".format(from_file))
-                with open(from_file, 'r', **open_args) as fh:
-                    from_content = fh.readlines()
-
-        if to_file:
-            if os.path.isfile(to_file):
-                LOG.debug("Reading {!r} ...".format(to_file))
-                with open(to_file, 'r', **open_args) as fh:
-                    to_content = fh.readlines()
-
-        return super(ConfigFileDiffer, self).compare(from_content, to_content)
-
-    # -------------------------------------------------------------------------
-    def is_equal(self, from_file, to_file):
-
-        equal = True
-        for line in self.compare(from_file, to_file):
-            if line.startswith('+') or line.startswith('-'):
-                subline = line[1:].rstrip()
-                if self.is_line_junk(subline):
-                    LOG.debug("Line {!r} is junk.".format(subline))
-                else:
-                    LOG.debug(line.rstrip())
-                    equal = False
-
-        return equal
-
-    # -------------------------------------------------------------------------
-    def unified_diff(self, from_file, to_file, n=3, lineterm='\n'):
-
-        from_content = []
-        to_content = []
-        null_time = datetime.fromtimestamp(0, timezone.utc).astimezone().isoformat(" ")
-        from_mtime = null_time
-        to_mtime = null_time
-
-        open_args = {}
-        if six.PY3:
-            open_args = {
-                'encoding': 'utf-8',
-                'errors': 'surrogateescape',
-            }
-
-        if from_file:
-            if os.path.isfile(from_file):
-                from_mtime = self.file_mtime(from_file)
-                with open(from_file, 'r', **open_args) as fh:
-                    from_content = fh.readlines()
-        else:
-            from_file = '<None>'
-
-        if to_file:
-            if os.path.isfile(to_file):
-                to_mtime = self.file_mtime(to_file)
-                with open(to_file, 'r', **open_args) as fh:
-                    to_content = fh.readlines()
-        else:
-            to_file = '<None>'
-
-        return difflib.unified_diff(
-            from_content, to_content,
-            fromfile=from_file, tofile=to_file,
-            fromfiledate=from_mtime, tofiledate=to_mtime,
-            n=n, lineterm=lineterm)
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/pp_lib/dnsui_users.py b/pp_lib/dnsui_users.py
deleted file mode 100644 (file)
index 2f72638..0000000
+++ /dev/null
@@ -1,650 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the check-dnsui-users application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import textwrap
-import socket
-import re
-import traceback
-
-# Third party modules
-# from ldap3 import ObjectDef, AttrDef, Reader, Writer
-from ldap3 import ObjectDef
-import psycopg2
-
-# Own modules
-from .common import pp
-
-from .ldap_app import PpLdapAppError, PpLdapApplication
-
-__version__ = '0.4.5'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class DnsuiUsersError(PpLdapAppError):
-    pass
-
-
-# =============================================================================
-class DnsuiUsersApp(PpLdapApplication):
-    """Class for the 'check-dnsui-users' application to ensure:
-        * all users in DNSUI DB, which are not existing in LDAP, are disabled
-        * all users in LDAP, which are members of group 'Administratoren Pixelpark Berlin',
-          are existing and have administrator access.
-    """
-
-    default_admin_group = "cn=Administratoren Pixelpark Berlin"
-
-    # DB data
-    default_db_host = 'master.pp-dns.com'
-    default_db_port = 5432
-    default_db_db = 'dnsui'
-    default_db_user = 'pdnsadm'
-
-    re_ldap_node = re.compile(r'^\s*[a-z]+[a-z0-9]*\s*=\s*\S+', re.IGNORECASE)
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.admin_users = []
-        self.admin_user_dns = []
-        self.admin_group = self.default_admin_group
-
-        self.db_user_index = {}
-        self.ldap_user_index = {}
-        self.users_to_add = []
-        self.users_to_update = []
-        self.db_users_deactivate = []
-
-        self.db_host = self.default_db_host
-        self.db_port = self.default_db_port
-        self.db_db = self.default_db_db
-        self.db_user = self.default_db_user
-        self.db_pass = None
-
-        self.db_users = []
-
-        self.db_connection = None
-
-        self._show_simulate_opt = True
-
-        description = textwrap.dedent('''\
-            Checking that all LDAP users in the DNSUI database still exist in LDAP,
-            otherwise deactivating them. Checking that all members of the defined admin
-            group in LDAP exist in the database and are enabled administrators.
-            ''').strip()
-
-        super(DnsuiUsersApp, self).__init__(
-            appname=appname, version=version, description=description)
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-        """
-        Execute some actions after reading the configuration.
-
-        This method should be explicitly called by all perform_config()
-        methods in descendant classes.
-        """
-
-        super(DnsuiUsersApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 2:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-            section = self.cfg[section_name]
-
-            if section_name.lower() == 'ldap':
-                self.do_admin_group_config(section_name, section)
-
-            if section_name.lower() in ('db', 'database'):
-                self.do_db_cfg(section_name, section)
-
-    # -------------------------------------------------------------------------
-    def do_admin_group_config(self, section_name, section):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'admin_group' not in section:
-            return
-
-        admin_group = str(section['admin_group']).strip()
-        if not admin_group:
-            msg = "Empty value {v!r} given for admin group in {s}/admin_group.".format(
-                s=section_name, v=section['admin_group'])
-            raise DnsuiUsersError(msg)
-
-        if not self.re_ldap_node.match(admin_group):
-            msg = "Invalid value {v!r} given for admin group in {s}/admin_group.".format(
-                s=section_name, v=section['admin_group'])
-            raise DnsuiUsersError(msg)
-
-        self.admin_group = admin_group
-
-    # -------------------------------------------------------------------------
-    def do_db_cfg(self, section_name, section):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'host' in section:
-            host = section['host'].lower().strip()
-            if not host:
-                LOG.error('Invalid hostname {!r} in configuration section {!r} found.'.format(
-                    section['host'], section_name))
-            else:
-                try:
-                    _ = socket.getaddrinfo(host, 5432, proto=socket.IPPROTO_TCP)            # noqa
-                except socket.gaierror as e:
-                    msg = 'Invalid hostname {!r} in configuration section {!r}: {}'.format(
-                        section['host'], section_name, e)
-                    LOG.error(msg)
-                    self.config_has_errors = True
-                else:
-                    self.db_host = host
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-                if port <= 0:
-                    raise ValueError("port number must be greater than zero.")
-                elif port >= (2 ** 16):
-                    raise ValueError("port number must be less than {}".format((2 ** 16)))
-            except (ValueError, TypeError) as e:
-                msg = 'Invalid port number {!r} in configuration section {!r}: {}'.format(
-                    section['port'], section_name, e)
-                LOG.error(msg)
-                self.config_has_errors = True
-            else:
-                self.db_port = port
-
-        if 'db' in section:
-            db = section['db'].lower().strip()
-            if not db:
-                LOG.error('Invalid database name {!r} in configuration section {!r} found.'.format(
-                    section['db'], section_name))
-            else:
-                self.db_db = db
-
-        if 'user' in section:
-            user = section['user'].lower().strip()
-            if not user:
-                LOG.error('Invalid database user {!r} in configuration section {!r} found.'.format(
-                    section['user'], section_name))
-                self.config_has_errors = True
-            else:
-                self.db_user = user
-
-        if 'password' in section:
-            self.db_pass = section['password']
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-
-        super(DnsuiUsersApp, self).pre_run()
-        self.connect_db()
-
-    # -------------------------------------------------------------------------
-    def connect_db(self):
-
-        result = None
-
-        LOG.debug("Connecting to PostgreSQL database on {}@{}:{}/{} ...".format(
-            self.db_user, self.db_host, self.db_port, self.db_db))
-        try:
-            self.db_connection = psycopg2.connect(
-                host=self.db_host,
-                port=self.db_port,
-                dbname=self.db_db,
-                user=self.db_user,
-                password=self.db_pass,
-            )
-
-            sql = 'SHOW server_version'
-            if self.verbose > 1:
-                LOG.debug("SQL: {}".format(sql))
-            with self.db_connection.cursor() as cursor:
-                cursor.execute(sql)
-                result = cursor.fetchone()
-            if self.verbose > 2:
-                LOG.debug("Got version info:\n{}".format(pp(result)))
-            LOG.info("Database is PostgreSQL version {!r}.".format(result[0]))
-
-        except psycopg2.OperationalError as e:
-            LOG.error("Could not connect to database ({}): {}".format(
-                e.__class__.__name__, e))
-            self.exit(7)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        try:
-
-            LOG.info("Starting user checks ...")
-
-            self.get_admin_user_dns()
-            self.get_admin_users()
-            self.get_db_users()
-
-            self.check_current_admin_users()
-            self.check_current_db_users()
-
-            self.insert_db_users()
-            self.change_db_users()
-            self.deactivate_db_users()
-
-        finally:
-            self._close_db()
-
-    # -------------------------------------------------------------------------
-    def get_admin_user_dns(self):
-
-        LOG.info("Getting list of admin users.")
-
-        self.admin_user_dns = []
-
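-        # Search LDAP for the configured admin group (a groupOfUniqueNames or
-        # groupOfURLs entry) and collect the DNs of its unique members.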
-        query_filter = (
-            '(&(|(objectclass=groupOfUniqueNames)(objectclass=groupOfURLs))({}))').format(
-                self.admin_group)
-        LOG.debug("Query filter: {!r}".format(query_filter))
-
-        group = ObjectDef(['objectclass', 'groupOfURLs'])
-        group += ['cn', 'memberURL', 'uniqueMember']
-
-        group_entries = self.ldap_search_subtree(group, query_filter)
-        if self.verbose > 1:
-            LOG.debug("Found {} LDAP entries.".format(len(group_entries)))
-
-        if not group_entries:
-            LOG.warn("Did not found any admin groups.")
-            return
-
-        for entry in group_entries:
-            member_urls = []
-            member_dns = []
-            for url in entry['memberURL']:
-                member_urls.append(url)
-            for dn in entry['uniqueMember']:
-                member_dns.append(dn)
-
-            if self.verbose > 2:
-                LOG.debug("Found memberURL: {}".format(pp(member_urls)))
-                LOG.debug("Found unique members:\n{}".format(pp(member_dns)))
-
-            for dn in member_dns:
-                if 'servicedesk' in dn:
-                    continue
-                if dn not in self.admin_user_dns:
-                    self.admin_user_dns.append(dn)
-
-        self.admin_user_dns.sort()
-        LOG.debug("Found admin user dn's:\n{}".format(pp(self.admin_user_dns)))
-
-    # -------------------------------------------------------------------------
-    def get_admin_users(self):
-
-        if not self.admin_user_dns:
-            LOG.warn("Did not found any admin users.")
-            return
-
-        LOG.info("Getting data of admin users from LDAP.")
-
-        person = ObjectDef(['posixAccount', 'shadowAccount'])
-        person += ["uid", "givenName", "sn", "mail"]
-
-        index = 0
-        for dn in self.admin_user_dns:
-
-            if self.verbose > 1:
-                LOG.debug("Searching for admin user {!r}.".format(dn))
-            entries = self.ldap_search_object(person, dn)
-            if self.verbose >= 2:
-                LOG.debug("Found {} LDAP entries.".format(len(entries)))
-            if not entries:
-                LOG.error("No LDAP entry found for DN {!r}.".format(dn))
-                continue
-
-            entry = entries[0]
-            sn = entry['sn'][0].strip()
-            fn = None
-            if entry['givenName'] and entry['givenName'][0]:
-                fn = entry['givenName'][0].strip()
-                if fn == '':
-                    fn = None
-            mail = None
-            if entry['mail'] and entry['mail'][0]:
-                mail = entry['mail'][0].strip()
-                if mail == '':
-                    mail = None
-            name = sn
-            if fn:
-                name = fn + ' ' + sn
-            uid = entry['uid'][0]
-            user = {
-                'dn': dn,
-                'uid': uid,
-                'givenName': fn,
-                'sn': sn,
-                'mail': mail,
-                'name': name
-            }
-            self.admin_users.append(user)
-            self.ldap_user_index[uid] = index
-            index += 1
-
-        LOG.debug("Found admin users:\n{}".format(pp(self.admin_users)))
-
-    # -------------------------------------------------------------------------
-    def get_db_users(self):
-
-        LOG.info("Get list of current users in DB.")
-
-        self.db_users = []
-
-        sql = textwrap.dedent('''\
-            SELECT id, uid, name, email, active, admin, developer
-              FROM public.user
-             WHERE auth_realm = 'LDAP'
-             ORDER BY uid
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("SQL:\n{}".format(sql))
-
-        with self.db_connection.cursor() as db_cursor:
-
-            db_cursor.execute(sql)
-            results = db_cursor.fetchall()
-
-            if self.verbose > 2:
-                LOG.debug("Got users:\n{}".format(pp(results)))
-
-            index = 0
-            for result in results:
-                uid = result[1]
-                user = {
-                    'id': result[0],
-                    'uid': uid,
-                    'name': result[2],
-                    'email': result[3],
-                    'active': result[4],
-                    'admin': result[5],
-                    'developer': result[6],
-                }
-                self.db_users.append(user)
-                self.db_user_index[uid] = index
-                index += 1
-
-        if self.verbose > 1:
-            LOG.debug("Found database users:\n{}".format(pp(self.db_users)))
-            LOG.debug("Uid index:\n{}".format(pp(self.db_user_index)))
-
-    # -------------------------------------------------------------------------
-    def check_current_admin_users(self):
-
-        LOG.info("Checking admin users from LDAP for existence in DB.")
-
-        for ldap_user in self.admin_users:
-
-            uid = ldap_user['uid']
-            if uid in self.db_user_index:
-
-                db_user = self.db_users[self.db_user_index[uid]]
-                change_data = {}
-                if db_user['name'] != ldap_user['name']:
-                    change_data['name'] = ldap_user['name']
-                if db_user['email'] != ldap_user['mail']:
-                    change_data['email'] = ldap_user['mail']
-                if db_user['active'] != 1:
-                    change_data['active'] = 1
-                if db_user['admin'] != 1:
-                    change_data['admin'] = 1
-                if db_user['developer'] != 1:
-                    change_data['developer'] = 1
-                if change_data.keys():
-                    change_data['id'] = db_user['id']
-                    self.users_to_update.append(change_data)
-
-            else:
-
-                db_user = {
-                    'uid': uid,
-                    'name': ldap_user['name'],
-                    'email': ldap_user['mail'],
-                    'active': 1,
-                    'admin': 1,
-                    'developer': 1,
-                }
-                self.users_to_add.append(db_user)
-
-    # -------------------------------------------------------------------------
-    def check_current_db_users(self):
-
-        LOG.info("Checking current users in DB for existence in LDAP.")
-
-        person = ObjectDef(['posixAccount', 'shadowAccount'])
-        person += ["uid", "givenName", "sn", "mail"]
-
-        for db_user in self.db_users:
-
-            uid = db_user['uid']
-            db_id = db_user['id']
-            LOG.debug("Checking DB user {n!r} ({u}) ...".format(n=db_user['name'], u=uid))
-
-            if uid in self.ldap_user_index:
-                if self.verbose > 1:
-                    LOG.debug("DB user {!r} is an active administrator.".format(uid))
-                continue
-
-            query_filter = (
-                '(&(objectclass=posixAccount)(objectclass=shadowAccount)'
-                '(inetuserstatus=active)(objectclass=pppixelaccount)'
-                '(!(ou=*Extern))(uid={}))').format(uid)
-            if self.verbose > 1:
-                LOG.debug("Query filter: {!r}".format(query_filter))
-
-            entries = self.ldap_search_subtree(person, query_filter)
-            if self.verbose > 1:
-                LOG.debug("Found {} LDAP entries.".format(len(entries)))
-            if entries:
-
-                entry = entries[0]
-                change_data = {}
-
-                if db_user['active'] != 1:
-                    change_data['active'] = 1
-
-                if db_user['admin'] != 0:
-                    change_data['admin'] = 0
-
-                sn = entry['sn'][0].strip()
-                fn = None
-                if entry['givenName'] and entry['givenName'][0]:
-                    fn = entry['givenName'][0].strip()
-                    if fn == '':
-                        fn = None
-                mail = None
-                if entry['mail'] and entry['mail'][0]:
-                    mail = entry['mail'][0].strip()
-                    if mail == '':
-                        mail = None
-                name = sn
-                if fn:
-                    name = fn + ' ' + sn
-
-                if db_user['name'] != name:
-                    change_data['name'] = name
-                if db_user['email'] != mail:
-                    change_data['email'] = mail
-                if db_user['developer'] != 1:
-                    change_data['developer'] = 1
-
-                if change_data.keys():
-                    change_data['id'] = db_id
-                    self.users_to_update.append(change_data)
-                else:
-                    LOG.debug("Data uf user {n!r} ({u}) are still correct.".format(
-                        n=db_user['name'], u=uid))
-            else:
-                if db_user['active'] != 0:
-                    LOG.warn(
-                        "DB user {n!r} ({u}) does not exists anymore, will be dectivated.".format(
-                            n=db_user['name'], u=uid))
-                    self.db_users_deactivate.append(db_id)
-                else:
-                    LOG.debug("User {n!r} ({u}) is already dectivated.".format(
-                        n=db_user['name'], u=uid))
-
-    # -------------------------------------------------------------------------
-    def insert_db_users(self):
-
-        if not self.users_to_add:
-            LOG.info("No user data to add to database.")
-            return
-
-        LOG.info("Adding new users to database.")
-        if self.verbose > 1:
-            LOG.debug("User data to insert:\n{}".format(pp(self.users_to_add)))
-
-        sql = textwrap.dedent('''\
-            INSERT INTO public.user (uid, name, email, auth_realm, active, admin, developer)
-                VALUES (%(uid)s, %(name)s, %(email)s, 'LDAP', %(active)s, %(admin)s, %(developer)s)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Insert SQL:\n{}".format(sql))
-
-        with self.db_connection.cursor() as db_cursor:
-
-            for db_user in self.users_to_add:
-
-                LOG.warn("Adding user {n!r} ({u}) ...".format(n=db_user['name'], u=db_user['uid']))
-
-                if self.verbose > 1:
-                    show_sql = db_cursor.mogrify(sql, db_user)
-                    LOG.debug("Executing:\n{}".format(show_sql))
-                if not self.simulate:
-                    db_cursor.execute(sql, db_user)
-
-        LOG.debug("Commiting changes ...")
-        self.db_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def change_db_users(self):
-
-        if not self.users_to_update:
-            LOG.info("No user data to update.")
-            return
-
-        LOG.info("Updating user data in database.")
-        if self.verbose > 1:
-            LOG.debug("User data to update:\n{}".format(pp(self.users_to_update)))
-
-        with self.db_connection.cursor() as db_cursor:
-
-            for db_user in self.users_to_update:
-
-                # LOG.warn(
-                #     "Updating user {n!r} ({u}) ...".format(
-                #         n=db_user['name'], u=db_user['uid']))
-                msg = "Udating user db id {}:".format(db_user['id'])
-
-                sql = 'UPDATE public.user SET'
-                updates = []
-                msg_list = []
-                if 'name' in db_user:
-                    updates.append(' name = %(name)s')
-                    msg_list.append("name = {!r}".format(db_user['name']))
-                if 'email' in db_user:
-                    updates.append(' email = %(email)s')
-                    msg_list.append("email = {!r}".format(db_user['email']))
-                if 'active' in db_user:
-                    updates.append(' active = %(active)s')
-                    msg_list.append("active = {!r}".format(db_user['active']))
-                if 'admin' in db_user:
-                    updates.append(' admin = %(admin)s')
-                    msg_list.append("admin = {!r}".format(db_user['admin']))
-                if 'developer' in db_user:
-                    updates.append(' developer = %(developer)s')
-                    msg_list.append("developer = {!r}".format(db_user['developer']))
-                sql += ', '.join(updates)
-                sql += ' WHERE id = %(id)s'
-                msg += ' ' + ', '.join(msg_list)
-
-                LOG.warn(msg)
-
-                if self.verbose > 1:
-                    show_sql = db_cursor.mogrify(sql, db_user)
-                    LOG.debug("Executing:\n{}".format(show_sql))
-                if not self.simulate:
-                    db_cursor.execute(sql, db_user)
-
-        LOG.debug("Commiting changes ...")
-        self.db_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def deactivate_db_users(self):
-
-        if not self.db_users_deactivate:
-            LOG.info("No user data to deactivate.")
-            return
-
-        LOG.info("Deactivating users in database.")
-        if self.verbose > 1:
-            LOG.debug("User Ids to deactivate:\n{}".format(pp(self.db_users_deactivate)))
-
-        sql = "UPDATE public.user SET active = 0 WHERE id = %s"
-
-        with self.db_connection.cursor() as db_cursor:
-
-            for db_id in self.db_users_deactivate:
-                if self.verbose > 1:
-                    show_sql = db_cursor.mogrify(sql, (db_id, ))
-                    LOG.debug("Executing:\n{}".format(show_sql))
-                if not self.simulate:
-                    db_cursor.execute(sql, (db_id, ))
-
-        LOG.debug("Commiting changes ...")
-        self.db_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def _close_db(self):
-
-        if self.db_connection:
-            LOG.debug("Closing database connection.")
-            try:
-                self.db_connection.close()
-            except Exception as e:
-                LOG.error("Could not close database connection ({}): {}".format(
-                    e.__class__.__name__, e))
-                traceback.print_exc()
-            self.db_connection = None
-
-    # -------------------------------------------------------------------------
-    def post_run(self):
-
-        if self.verbose > 1:
-            LOG.info("executing post_run() ...")
-        self._close_db()
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
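The change_db_users() method above assembles a parameterized UPDATE statement from whatever keys happen to be present in each change record, so only the columns that actually changed are touched. For illustration, a minimal standalone sketch of that idea (not part of this commit; the helper name build_update_sql and the sample data are made up):

#!/usr/bin/env python3
# Hypothetical sketch mirroring change_db_users(): build a partial UPDATE of
# public.user from a dict that contains only the changed columns plus the id.

def build_update_sql(change_data):
    """Return (sql, params) suitable for psycopg2's cursor.execute()."""
    columns = ('name', 'email', 'active', 'admin', 'developer')
    updates = ['{0} = %({0})s'.format(col) for col in columns if col in change_data]
    sql = 'UPDATE public.user SET ' + ', '.join(updates) + ' WHERE id = %(id)s'
    return sql, change_data


if __name__ == '__main__':
    change = {'id': 42, 'email': 'jane.doe@example.com', 'active': 1}
    sql, params = build_update_sql(change)
    print(sql)      # UPDATE public.user SET email = %(email)s, active = %(active)s WHERE id = %(id)s
    print(params)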
diff --git a/pp_lib/du.py b/pp_lib/du.py
deleted file mode 100644 (file)
index 8f731b1..0000000
+++ /dev/null
@@ -1,315 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import locale
-import re
-
-# Third party modules
-import six
-
-# Own modules
-from .common import pp, to_str
-
-from .obj import PpBaseObjectError, PpBaseObject
-
-
-__version__ = '0.4.2'
-
-LOG = logging.getLogger(__name__)
-
-DU_UNITS = ['K', 'k', 'M', 'm', 'G', 'g', 'T', 't', 'H', 'h']
-DU_UNIT_EXP = {
-    'K': 0,
-    'M': 1,
-    'G': 2,
-    'T': 3,
-}
-
-
-# =============================================================================
-class DuError(PpBaseObjectError):
-    pass
-
-
-# =============================================================================
-class DuParseError(DuError):
-
-    # -------------------------------------------------------------------------
-    def __init__(self, line):
-        self.line = line
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-
-        msg = "Could not parse line from DU output: {!r}".format(self.line)
-        return msg
-
-
-# =============================================================================
-class DuListError(DuError):
-    pass
-
-
-# =============================================================================
-class DuEntry(PpBaseObject):
-    """
-    Class encapsulating one DU entry.
-    """
-
-    kilo = 1024
-    if six.PY2:
-        kilo = long(1024)                                   # noqa
-
-    human_limit = 1.5
-
-    factor = {}
-    hlimits = {}
-    for unit in DU_UNIT_EXP.keys():
-        exp = DU_UNIT_EXP[unit]
-        factor[unit] = kilo ** exp
-        hlimits[unit] = human_limit * float(factor[unit])
-
-    locale_conv = locale.localeconv()
-    dp = '.'
-    ts = ','
-    if 'decimal_point' in locale_conv and locale_conv['decimal_point'] != '.':
-        dp = locale_conv['decimal_point']
-    if 'thousands_sep' in locale_conv:
-        ts = locale_conv['thousands_sep']
-
-    parse_pattern = r'^\s*(\d+(?:' + re.escape(dp) + r'\d*)?)([KMGT])?\s+(\S+.*)'
-    parse_re = re.compile(parse_pattern, re.IGNORECASE)
-
-    # -------------------------------------------------------------------------
-    def __init__(
-            self, size_kb, path, appname=None, verbose=0, base_dir=None):
-
-        self._size_kb = None
-        self._path = None
-
-        super(DuEntry, self).__init__(
-            appname=appname, verbose=verbose, version=__version__,
-            base_dir=base_dir, initialized=False)
-
-        self.size_kb = size_kb
-        self.path = path
-
-        self.initialized = True
-
-    # -----------------------------------------------------------
-    @property
-    def size_kb(self):
-        """The size of the entry in KiBytes."""
-        if not hasattr(self, '_size_kb'):
-            if six.PY2:
-                return long(0)                              # noqa
-            return 0
-        return getattr(self, '_size_kb', 0)
-
-    @size_kb.setter
-    def size_kb(self, value):
-        if six.PY2:
-            v = long(value)                                 # noqa
-        else:
-            v = int(value)
-        if v >= 0:
-            self._size_kb = v
-        else:
-            LOG.warn("Wrong size for DU entry{!r}, must be >= 0".format(value))
-
-    # -----------------------------------------------------------
-    @property
-    def path(self):
-        """The path name of the DU entry."""
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        if value is not None:
-            self._path = str(to_str(value))
-        else:
-            self._path = None
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("size_kb={!r}".format(self.size_kb))
-        fields.append("path={!r}".format(self.path))
-        fields.append("appname={!r}".format(self.appname))
-        fields.append("verbose={!r}".format(self.verbose))
-        fields.append("base_dir={!r}".format(self.base_dir))
-        fields.append("initialized={!r}".format(self.initialized))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(DuEntry, self).as_dict(short=short)
-        res['size_kb'] = self.size_kb
-        res['path'] = self.path
-        res['dp'] = self.dp
-        res['ts'] = self.ts
-        res['parse_pattern'] = self.parse_pattern
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        return self.to_str()
-
-    # -------------------------------------------------------------------------
-    def size_str(self, unit='K', precision=0):
-
-        u = unit.upper()
-        unit_show = u
-        if u in self.factor:
-            size_float = float(self.size_kb) / float(self.factor[u])
-            if u == 'K':
-                unit_show = ''
-                precision = 0
-        else:
-            # Human readable
-            unit_show = 'K'
-            size_float = float(self.size_kb)
-            precision = 0
-            if self.verbose > 2:
-                LOG.debug("Checking size_float {s} for factors\n{f}".format(
-                    s=size_float, f=pp(self.hlimits)))
-
-            if size_float > self.hlimits['T']:
-                unit_show = 'T'
-                size_float = size_float / float(self.factor['T'])
-                precision = 1
-            elif size_float > self.hlimits['G']:
-                unit_show = 'G'
-                size_float = size_float / float(self.factor['G'])
-                precision = 1
-            elif size_float > self.hlimits['M']:
-                unit_show = 'M'
-                size_float = size_float / float(self.factor['M'])
-                precision = 1
-            if self.verbose > 2:
-                LOG.debug("Computed size_float: {s} {u}".format(
-                    s=size_float, u=unit_show))
-
-        if unit_show != '':
-            unit_show = ' ' + unit_show
-        template = "{{:,.{:d}f}}".format(precision) + unit_show
-        size_show = template.format(size_float)
-
-        # Localisation
-        if self.dp != '.':
-            size_show = size_show.replace('.', ';').replace(',', self.ts).replace(';', self.dp)
-
-        return size_show
-
-    # -------------------------------------------------------------------------
-    def to_str(self, unit='K', precision=0, size_width=None):
-
-        width = 16
-        unit = unit.upper()
-        if size_width is None:
-            if unit == 'K':
-                width = 16
-            elif unit == 'M':
-                width = 13
-                if precision:
-                    width += 1 + precision
-            elif unit == 'G':
-                width = 9
-                if precision:
-                    width += 1 + precision
-            elif unit == 'T':
-                width = 5
-                if precision:
-                    width += 1 + precision
-            else:
-                # Human
-                width = 9
-        else:
-            width = size_width
-
-        size_show = self.size_str(unit=unit, precision=precision)
-        line = "{0:>{w}s}      {1}".format(size_show, self.path, w=width)
-
-        return line
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def init_locales(cls):
-
-        cls.locale_conv = locale.localeconv()
-        cls.dp = '.'
-        if 'decimal_point' in cls.locale_conv and cls.locale_conv['decimal_point'] != '.':
-            cls.dp = cls.locale_conv['decimal_point']
-        if 'thousands_sep' in cls.locale_conv:
-            cls.ts = cls.locale_conv['thousands_sep']
-
-        cls.parse_pattern = r'^\s*(\d+(?:' + re.escape(cls.dp) + r'\d*)?)([KMGT])?\s+(\S+.*)'
-        cls.parse_re = re.compile(cls.parse_pattern, re.IGNORECASE)
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def from_line(cls, line, appname=None, verbose=0, base_dir=None):
-
-        match = cls.parse_re.match(line)
-        if not match:
-            raise DuParseError(line)
-
-        if verbose > 3:
-            LOG.debug("Got matching groups: {}.".format(match.groups()))
-
-        sz = match.group(1)
-        if cls.ts:
-            sz = sz.replace(cls.ts, '')
-        if cls.dp != '.':
-            sz = sz.replace(cls.dp, '.')
-        if verbose > 2:
-            LOG.debug("De-localized size: {!r}.".format(sz))
-        size = float(sz)
-        unit = match.group(2)
-        path = match.group(3)
-
-        if unit is not None:
-            unit = unit.upper()
-            if unit in cls.factor:
-                size *= cls.factor[unit]
-
-        entry = cls(
-            size_kb=size, path=path, appname=appname, verbose=verbose, base_dir=base_dir)
-
-        return entry
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
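For illustration, a standalone sketch of the parsing that DuEntry.from_line() above performs on one line of 'du -k' output (assuming a locale with '.' as decimal point; the sample lines are made up):

#!/usr/bin/env python3
# Hypothetical sketch of the DU line parsing: a size (optionally with a K/M/G/T
# suffix) followed by whitespace and the path; the result is always in KiBytes.
import re

DU_UNIT_EXP = {'K': 0, 'M': 1, 'G': 2, 'T': 3}
PARSE_RE = re.compile(r'^\s*(\d+(?:\.\d*)?)([KMGT])?\s+(\S+.*)', re.IGNORECASE)


def parse_du_line(line):
    match = PARSE_RE.match(line)
    if not match:
        raise ValueError('Could not parse line from DU output: {!r}'.format(line))
    size_kb = float(match.group(1))
    unit = match.group(2)
    if unit is not None:
        size_kb *= 1024 ** DU_UNIT_EXP[unit.upper()]
    return size_kb, match.group(3)


if __name__ == '__main__':
    print(parse_du_line('1524\t/var/log'))         # (1524.0, '/var/log')
    print(parse_du_line('2.5G  /home/someuser'))   # (2621440.0, '/home/someuser')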
diff --git a/pp_lib/errors.py b/pp_lib/errors.py
deleted file mode 100644 (file)
index 2a566e7..0000000
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@summary: module for some commonly used error classes
-"""
-
-# Standard modules
-import errno
-
-
-__version__ = '0.4.1'
-
-# =============================================================================
-class PpError(Exception):
-    """
-    Base error class for all other self defined exceptions.
-    """
-
-    pass
-
-
-# =============================================================================
-class PpAppError(PpError):
-
-    pass
-
-
-# =============================================================================
-class InvalidMailAddressError(PpError):
-    """Class for a exception in case of a malformed mail address."""
-
-    # -------------------------------------------------------------------------
-    def __init__(self, address, msg=None):
-
-        self.address = address
-        self.msg = msg
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-
-        msg = "Wrong mail address {a!r} ({c})".format(
-            a=self.address, c=self.address.__class__.__name__)
-        if self.msg:
-            msg += ': ' + self.msg
-        else:
-            msg += '.'
-        return msg
-
-
-# =============================================================================
-class FunctionNotImplementedError(PpError, NotImplementedError):
-    """
-    Error class for not implemented functions.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, function_name, class_name):
-        """
-        Constructor.
-
-        @param function_name: the name of the not implemented function
-        @type function_name: str
-        @param class_name: the name of the class of the function
-        @type class_name: str
-
-        """
-
-        self.function_name = function_name
-        if not function_name:
-            self.function_name = '__unknown_function__'
-
-        self.class_name = class_name
-        if not class_name:
-            self.class_name = '__unknown_class__'
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting into a string for error output.
-        """
-
-        msg = "Function {func}() has to be overridden in class {cls!r}."
-        return msg.format(func=self.function_name, cls=self.class_name)
-
-# =============================================================================
-class IoTimeoutError(PpError, IOError):
-    """
-    Special error class indicating a timeout error on a read/write operation
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, strerror, timeout, filename=None):
-        """
-        Constructor.
-
-        @param strerror: the error message about the operation
-        @type strerror: str
-        @param timeout: the timeout in seconds leading to the error
-        @type timeout: float
-        @param filename: the filename leading to the error
-        @type filename: str
-
-        """
-
-        t_o = None
-        try:
-            t_o = float(timeout)
-        except ValueError:
-            pass
-        self.timeout = t_o
-
-        if t_o is not None:
-            strerror += " (timeout after {:0.1f} secs)".format(t_o)
-
-        if filename is None:
-            super(IoTimeoutError, self).__init__(errno.ETIMEDOUT, strerror)
-        else:
-            super(IoTimeoutError, self).__init__(
-                errno.ETIMEDOUT, strerror, filename)
-
-# =============================================================================
-class ReadTimeoutError(IoTimeoutError):
-    """
-    Special error class indicating a timeout error on reading a file.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, timeout, filename):
-        """
-        Constructor.
-
-        @param timeout: the timeout in seconds leading to the error
-        @type timeout: float
-        @param filename: the filename leading to the error
-        @type filename: str
-
-        """
-
-        strerror = "Timeout error on reading"
-        super(ReadTimeoutError, self).__init__(strerror, timeout, filename)
-
-
-# =============================================================================
-class WriteTimeoutError(IoTimeoutError):
-    """
-    Special error class indicating a timeout error on writing into a file.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, timeout, filename):
-        """
-        Constructor.
-
-        @param timeout: the timeout in seconds leading to the error
-        @type timeout: float
-        @param filename: the filename leading to the error
-        @type filename: str
-
-        """
-
-        strerror = "Timeout error on writing"
-        super(WriteTimeoutError, self).__init__(strerror, timeout, filename)
-
-# =============================================================================
-class CouldntOccupyLockfileError(PpError):
-    """
-    Special error class indicating that a lockfile couldn't be occupied
-    after a defined time.
-    """
-
-    # -----------------------------------------------------
-    def __init__(self, lockfile, duration, tries):
-        """
-        Constructor.
-
-        @param lockfile: the lockfile, which couldn't be occupied.
-        @type lockfile: str
-        @param duration: The duration in seconds, which has led to this situation
-        @type duration: float
-        @param tries: the number of tries creating the lockfile
-        @type tries: int
-
-        """
-
-        self.lockfile = str(lockfile)
-        self.duration = float(duration)
-        self.tries = int(tries)
-
-    # -----------------------------------------------------
-    def __str__(self):
-
-        return "Couldn't occupy lockfile {!r} in {:0.1f} seconds with {} tries.".format(
-            self.lockfile, self.duration, self.tries)
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
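A short usage sketch of the error classes above (assuming the package is importable as pp_lib, i.e. lib/ or the old top-level directory is on sys.path; the file and lockfile names are made up):

#!/usr/bin/env python3
# Hypothetical usage of the timeout and lockfile error classes.
from pp_lib.errors import ReadTimeoutError, CouldntOccupyLockfileError


def read_with_timeout(filename):
    # A real implementation would use signal- or select-based timeouts; here
    # the exception is raised directly just to show its interface.
    raise ReadTimeoutError(2.5, filename)


if __name__ == '__main__':
    try:
        read_with_timeout('/var/log/messages')
    except ReadTimeoutError as e:
        print('I/O problem: {}'.format(e))

    err = CouldntOccupyLockfileError('/run/lock/example.lock', 10.0, 5)
    print(err)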
diff --git a/pp_lib/format_du.py b/pp_lib/format_du.py
deleted file mode 100644 (file)
index 596f400..0000000
+++ /dev/null
@@ -1,240 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import textwrap
-import sys
-import copy
-
-# Third party modules
-import six
-
-# Own modules
-from .common import pp
-
-from .app import PpApplication
-
-from .du import DuParseError, DuEntry
-from .du import DU_UNITS, DU_UNIT_EXP
-
-try:
-    from .local_version import __version__ as my_version
-except ImportError:
-    from .global_version import __version__ as my_version
-
-__version__ = '0.4.4'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class FormatDuApp(PpApplication):
-    """
-    Application class for the format-du command
-    """
-
-    units = copy.copy(DU_UNITS)
-    unit_exp = copy.copy(DU_UNIT_EXP)
-
-    # -------------------------------------------------------------------------
-    def __init__(
-            self, appname=None, verbose=0, version=my_version, *arg, **kwargs):
-
-        indent = ' ' * self.usage_term_len
-
-        usage = textwrap.dedent("""\
-        %(prog)s [--color [{{yes,no,auto}}]] [-v] [Format options] [FILE]
-
-        {i}%(prog)s --usage
-        {i}%(prog)s -h|--help
-        {i}%(prog)s -V|--version
-        """).strip().format(i=indent)
-
-        desc = """Formats the output of 'du -k' for various modifiers."""
-
-        self.precision = 0
-        self.unit = 'K'
-        self.factor = 1
-        self.total = False
-
-        super(FormatDuApp, self).__init__(
-            usage=usage,
-            description=desc,
-            appname=appname,
-            verbose=verbose,
-            version=version,
-            *arg, **kwargs
-        )
-
-        self.post_init()
-        DuEntry.init_locales()
-        if self.verbose > 2:
-            LOG.debug("Locale conversions:\n{}".format(pp(DuEntry.locale_conv)))
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initiate the argument parser.
-        """
-
-        super(FormatDuApp, self).init_arg_parser()
-
-        format_options = self.arg_parser.add_argument_group('Format options')
-
-        format_options.add_argument(
-            '-c', '--total',
-            action='store_true', dest='total',
-            help="Produces a grand total",
-        )
-
-        format_options.add_argument(
-            '-u', '--unit',
-            dest='unit', metavar='UNIT',
-            choices=self.units,
-            help=(
-                "Unit for displaying the results. Valid units are: 'K' (KiBytes, the default), "
-                "'M' (MiBytes), 'G' (GiBytes) and 'H' (human readable, the most appropriate unit "
-                "will be used. In case of 'K', no unit will be displayed.")
-        )
-
-        format_options.add_argument(
-            '-p', '--precision',
-            type=int, default=0, metavar='DIGITS',
-            help="Number of digits for displaying the result (default: %(default)r).",
-        )
-
-        self.arg_parser.add_argument(
-            'file',
-            metavar='FILE', type=str, nargs='?',
-            help=(
-                'A file with the output of "du -k". If not given or "-", then '
-                'the standard input will be read.'),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Public available method to execute some actions after parsing
-        the command line parameters.
-
-        Descendant classes may override this method.
-        """
-
-        if self.args.total:
-            self.total = True
-
-        if self.args.unit:
-            self.unit = self.args.unit.upper()
-            if self.unit in self.unit_exp:
-                exp = self.unit_exp[self.unit]
-                self.factor = 1024 ** exp
-
-        if self.args.precision is not None:
-            if self.args.precision < 0:
-                p = self.colored('{!r}'.format(self.args.precision), 'RED')
-                LOG.error("Invalid precision {}, it must not be less than zero.".format(p))
-                sys.stderr.write('\n')
-                self.arg_parser.print_help(sys.stderr)
-                self.exit(1)
-            self.precision = self.args.precision
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """The underlaying startpoint of the application."""
-
-        fh = None
-        opened = False
-        open_args = {}
-        if six.PY3:
-            open_args['encoding'] = 'utf-8'
-            open_args['errors'] = 'surrogateescape'
-        filename = None
-
-        try:
-            if self.args.file and self.args.file != '-':
-                fh = open(self.args.file, 'r', **open_args)
-                opened = True
-                filename = '{!r}'.format(self.args.file)
-            else:
-                fh = sys.stdin
-                filename = '<standard input>'
-
-            LOG.debug("Reading DU info from {}.".format(filename))
-            self.read_file(fh)
-
-        finally:
-            if opened:
-                fh.close()
-
-    # -------------------------------------------------------------------------
-    def read_file(self, fh):
-
-        line = None
-        eof = False
-        lnr = 0
-
-        total = 0
-        if six.PY2:
-            total = long(0)                                             # noqa
-
-        while not eof:
-            lnr += 1
-            line = fh.readline()
-            if not line:
-                eof = True
-                break
-            line = line.strip()
-            if not line:
-                continue
-            entry = self.eval_line(line, lnr)
-            if entry:
-                total += entry.size_kb
-
-        if self.total:
-            total_entry = DuEntry(
-                size_kb=total, path='total', appname=self.appname,
-                verbose=self.verbose, base_dir=self.base_dir)
-            if self.verbose > 1:
-                LOG.debug("Total entry:\n{}".format(pp(total_entry.as_dict())))
-            print(total_entry.to_str(unit=self.unit, precision=self.precision))
-
-        if self.verbose > 1:
-            LOG.debug("Finished reading.")
-
-    # -------------------------------------------------------------------------
-    def eval_line(self, line, lnr):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating line {!r} ...".format(line))
-
-        try:
-            entry = DuEntry.from_line(
-                line=line, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
-        except DuParseError as e:
-            LOG.error("Could not parse line {lnr}: {e}".format(lnr=lnr, e=e))
-            LOG.debug("Parsing pattern: {!r}".format(DuEntry.parse_pattern))
-            return None
-
-        if self.verbose > 1:
-            LOG.debug("Entry:\n{}".format(pp(entry.as_dict())))
-        print(entry.to_str(unit=self.unit, precision=self.precision))
-
-        return entry
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
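Assuming the FormatDuApp above is wired to a console script (the command name format-du and the path below are assumptions, not taken from this commit), a typical invocation pipes the output of 'du -k' through it and asks for a grand total in GiBytes with one decimal digit:

du -k /var/www | format-du --total --unit G --precision 1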
diff --git a/pp_lib/global_version.py b/pp_lib/global_version.py
deleted file mode 100644 (file)
index 33a817e..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
-@summary: Module's global version number
-"""
-
-__author__ = 'Frank Brehm <frank.brehm@pixelpark.com>'
-__contact__ = 'frank.brehm@pixelpark.com'
-__version__ = '0.7.0'
-__license__ = 'LGPL3+'
-
-# vim: fileencoding=utf-8 filetype=python ts=4
diff --git a/pp_lib/homes_admin.py b/pp_lib/homes_admin.py
deleted file mode 100644 (file)
index af5f03d..0000000
+++ /dev/null
@@ -1,324 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The base module for all scripts maintaining the home directories
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import re
-import pwd
-import glob
-
-# Third party modules
-import six
-
-# Own modules
-from .common import pp
-
-from .cfg_app import PpCfgAppError, PpConfigApplication
-
-__version__ = '0.1.3'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpHomesAdminError(PpCfgAppError):
-    pass
-
-
-# =============================================================================
-class PpHomesAdminApp(PpConfigApplication):
-    """
-    Base class for applications maintaining the global Home directories.
-    """
-
-    # /mnt/nfs
-    default_chroot_homedir = os.sep + os.path.join('mnt', 'nfs')
-    # /home
-    default_home_root = os.sep + 'home'
-
-    # /etc/pixelpark/exclude_homes
-    default_exclude_file = os.sep + os.path.join('etc', 'pixelpark', 'exclude_homes')
-
-    comment_re = re.compile(r'\s*#.*')
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, description=None,
-            cfg_stems='homes-admin', version=__version__):
-
-        self.default_mail_recipients = [
-            'admin.berlin@pixelpark.com'
-        ]
-        self.default_mail_cc = []
-
-        self.chroot_homedir = self.default_chroot_homedir
-        self.home_root_abs = self.default_home_root
-        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
-
-        self.exclude_file = self.default_exclude_file
-
-        self.exclude_dirs = []
-        self.passwd_home_dirs = []
-        self.unnecessary_dirs = []
-
-        super(PpHomesAdminApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems=cfg_stems,
-        )
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-
-        homes_group = self.arg_parser.add_argument_group('Homes administration options.')
-
-        homes_group.add_argument(
-            '-R', '--chroot-dir',
-            metavar='DIR', dest='chroot_homedir',
-            help=(
-                "Directory, where the {h!r} share is mounted from the "
-                "NFS server. Maybe '/', default: {d!r}.").format(
-                    h=self.default_home_root, d=self.default_chroot_homedir)
-        )
-
-        homes_group.add_argument(
-            '-H', '--homes',
-            metavar='DIR', dest='home_root',
-            help=(
-                "The shared directory on the NFS server for all home directories. "
-                "Default: {!r}.").format(self.default_home_root)
-        )
-
-        homes_group.add_argument(
-            '-E', '--exclude-file',
-            metavar='FILE', dest='exclude_file',
-            help=(
-                "The file containing all directories underneath {h!r}, which are  "
-                "excluded from all operations. Default: {f!r}.").format(
-                    h=self.default_home_root, f=self.default_exclude_file)
-        )
-
-        super(PpHomesAdminApp, self).init_arg_parser()
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpHomesAdminApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 3:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            section = self.cfg[section_name]
-
-            if section_name.lower() not in (
-                    'test-home', 'test_home', 'testhome', 'homes', 'admin'):
-                continue
-
-            if self.verbose > 2:
-                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                    n=section_name, s=pp(section)))
-
-            if section_name.lower() == 'homes':
-
-                if 'chroot_homedir' in section:
-                    v = section['chroot_homedir']
-                    if not os.path.isabs(v):
-                        msg = (
-                            "The chrooted path of the home directories must be an "
-                            "absolute pathname (found [{s}]/chroot_homedir "
-                            "=> {v!r} in configuration.").format(s=section_name, v=v)
-                        raise PpHomesAdminError(msg)
-                    self.chroot_homedir = v
-
-                if 'home_root' in section:
-                    v = section['home_root']
-                    if not os.path.isabs(v):
-                        msg = (
-                            "The root path of the home directories must be an "
-                            "absolute pathname (found [{s}]/home_root "
-                            "=> {v!r} in configuration.").format(s=section_name, v=v)
-                        raise PpHomesAdminError(msg)
-                    self.home_root_abs = v
-
-            elif section_name.lower() == 'admin':
-
-                if 'exclude_file' in section:
-                    v = section['exclude_file']
-                    if not os.path.isabs(v):
-                        msg = (
-                            "The path of file of excluded directories must be an "
-                            "absolute pathname (found [{s}]/exclude_file "
-                            "=> {v!r} in configuration.").format(s=section_name, v=v)
-                        raise PpHomesAdminError(msg)
-                    self.exclude_file = v
-
-        self._perform_home_cmdline_opts()
-
-        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
-        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
-
-    # -------------------------------------------------------------------------
-    def _perform_home_cmdline_opts(self):
-
-        if hasattr(self.args, 'chroot_homedir') and self.args.chroot_homedir:
-            v = self.args.chroot_homedir
-            if not os.path.isabs(v):
-                msg = (
-                    "The chrooted path of the home directories must be an "
-                    "absolute pathname (got {!r} as command line parameter).").format(v)
-                raise PpHomesAdminError(msg)
-            self.chroot_homedir = v
-
-        if hasattr(self.args, 'home_root') and self.args.home_root:
-            v = self.args.home_root
-            if not os.path.isabs(v):
-                msg = (
-                    "The root path of the home directories must be an "
-                    "absolute pathname (got {!r} as command line parameter).").format(v)
-                raise PpHomesAdminError(msg)
-            self.home_root_abs = v
-
-        if hasattr(self.args, 'exclude_file') and self.args.exclude_file:
-            v = self.args.exclude_file
-            if not os.path.isabs(v):
-                msg = (
-                    "The path of file of excluded directories must be an "
-                    "absolute pathname (got {!r} as command line parameter).").format(v)
-                raise PpHomesAdminError(msg)
-            self.exclude_file = v
-
-    # -------------------------------------------------------------------------
-    def read_exclude_dirs(self):
-
-        LOG.info("Reading exclude file {!r} ...".format(self.exclude_file))
-        upper_dir = os.pardir + os.sep
-
-        if not os.path.exists(self.exclude_file):
-            msg = "Exclude file {!r} does not exists.".format(self.exclude_file)
-            LOG.error(msg)
-            return
-
-        if not os.path.isfile(self.exclude_file):
-            msg = "Exclude file {!r} is not a regular file.".format(self.exclude_file)
-            LOG.error(msg)
-            return
-
-        if not os.access(self.exclude_file, os.R_OK):
-            msg = "No read access to exclude file {!r}.".format(self.exclude_file)
-            LOG.error(msg)
-            return
-
-        open_args = {}
-        if six.PY3:
-            open_args['encoding'] = 'utf-8'
-            open_args['errors'] = 'surrogateescape'
-
-        with open(self.exclude_file, 'r', **open_args) as fh:
-            lnr = 0
-            for line in fh.readlines():
-                lnr += 1
-                line = line.strip()
-                if not line:
-                    continue
-                line = self.comment_re.sub('', line)
-                if not line:
-                    continue
-                if self.verbose > 3:
-                    LOG.debug("Evaluating line {l!r} (file {f!r}, line {lnr}).".format(
-                        l=line, f=self.exclude_file, lnr=lnr))
-                tokens = self.whitespace_re.split(line)
-                for token in tokens:
-                    if not os.path.isabs(token):
-                        LOG.warn((
-                            "Entry {e!r} in file {f!r}, line {l}, "
-                            "is not an absolute path.").format(
-                            e=token, f=self.exclude_file, l=lnr))
-                        continue
-                    home_relative = os.path.relpath(token, self.home_root_abs)
-                    if token == os.sep or home_relative.startswith(upper_dir):
-                        LOG.warn((
-                            "Entry {e!r} in file {f!r}, line {l}, "
-                            "is outside home root {h!r}.").format(
-                            e=token, f=self.exclude_file, l=lnr, h=self.home_root_abs))
-                        continue
-                    if token not in self.exclude_dirs:
-                        self.exclude_dirs.append(token)
-
-        self.exclude_dirs.sort(key=str.lower)
-
-        LOG.debug("Found {} directories to exclude.".format(len(self.exclude_dirs)))
-        if self.verbose > 2:
-            LOG.debug("Found directories to exclude:\n{}".format(pp(self.exclude_dirs)))
-
-    # -------------------------------------------------------------------------
-    def read_passwd_homes(self):
-
-        LOG.info("Reading all home directories from 'getent passwd' ...")
-
-        upper_dir = os.pardir + os.sep
-        entries = pwd.getpwall()
-
-        for entry in entries:
-            home = entry.pw_dir
-            if not home:
-                continue
-            home_relative = os.path.relpath(home, self.home_root_abs)
-            if home == os.sep or home_relative.startswith(upper_dir):
-                if self.verbose > 1:
-                    LOG.debug((
-                        "Home directory {d!r} of user {u!r} "
-                        "is outside home root {h!r}.").format(
-                        d=home, u=entry.pw_name, h=self.home_root_abs))
-                continue
-            if home not in self.passwd_home_dirs:
-                self.passwd_home_dirs.append(home)
-
-        self.passwd_home_dirs.sort(key=str.lower)
-
-        LOG.debug("Found {} home directories in passwd.".format(len(self.passwd_home_dirs)))
-        if self.verbose > 2:
-            LOG.debug("Home directories in passwd:\n{}".format(pp(self.passwd_home_dirs)))
-
-    # -------------------------------------------------------------------------
-    def check_homes(self):
-
-        LOG.info("Checking for unnecessary home directories ...")
-
-        glob_pattern = os.path.join(self.home_root_real, '*')
-        all_home_entries = glob.glob(glob_pattern)
-
-        for path in all_home_entries:
-            if not os.path.isdir(path):
-                continue
-            home_rel = os.sep + os.path.relpath(path, self.chroot_homedir)
-            if self.verbose > 2:
-                LOG.debug("Checking {p!r} ({h!r}) ...".format(
-                    p=path, h=home_rel))
-            if home_rel in self.passwd_home_dirs:
-                continue
-            if home_rel in self.exclude_dirs:
-                continue
-            LOG.debug("Marking {!r} as unnecessary.".format(home_rel))
-            self.unnecessary_dirs.append(home_rel)
-
-        self.unnecessary_dirs.sort(key=str.lower)
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
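The exclude file read by read_exclude_dirs() above is a plain text list of absolute directories below the home root; '#' starts a comment and several entries may share one line. For illustration, a standalone sketch of that filtering (the sample entries are invented):

#!/usr/bin/env python3
# Hypothetical sketch of the exclude-file filtering in PpHomesAdminApp:
# strip comments, split on whitespace, keep only absolute paths below /home.
import os
import re

COMMENT_RE = re.compile(r'\s*#.*')


def parse_exclude_lines(lines, home_root='/home'):
    upper_dir = os.pardir + os.sep
    excludes = []
    for line in lines:
        line = COMMENT_RE.sub('', line).strip()
        if not line:
            continue
        for token in line.split():
            if not os.path.isabs(token):
                continue                                   # relative entry, ignored
            rel = os.path.relpath(token, home_root)
            if token == os.sep or rel.startswith(upper_dir):
                continue                                   # outside the home root
            if token not in excludes:
                excludes.append(token)
    return sorted(excludes, key=str.lower)


if __name__ == '__main__':
    sample = [
        '# directories that must never be removed',
        '/home/shared   /home/backup',
        'relative/path  /etc/passwd',
    ]
    print(parse_exclude_lines(sample))   # ['/home/backup', '/home/shared']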
diff --git a/pp_lib/idna_xlate.py b/pp_lib/idna_xlate.py
deleted file mode 100644 (file)
index 86f612c..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the 'idna-xlate' application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import textwrap
-import sys
-import copy
-
-# Third party modules
-import six
-
-# Own modules
-from .common import pp, to_str, to_bytes
-
-from .app import PpApplication
-
-try:
-    from .local_version import __version__ as my_version
-except ImportError:
-    from .global_version import __version__ as my_version
-
-__version__ = '0.2.1'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class IdnaXlateApp(PpApplication):
-    """
-    Application class for the idna-xlate command.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=my_version, *arg, **kwargs):
-
-        self.items = []
-
-        indent = ' ' * self.usage_term_len
-
-        usage = textwrap.dedent("""\
-            %(prog)s [--color [{{yes,no,auto}}]] [-v | -q] ITEM [ITEM ...]
-
-            {i}%(prog)s --usage
-            {i}%(prog)s -h|--help
-            {i}%(prog)s -V|--version
-            """).strip().format(i=indent)
-
-        desc = "Formats the given items into IDNA formatted strings (Punycode)."
-
-        super(IdnaXlateApp, self).__init__(
-            usage=usage,
-            description=desc,
-            verbose=verbose,
-            version=version,
-            *arg, **kwargs
-        )
-
-        self.post_init()
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initiate the argument parser.
-        """
-
-        super(IdnaXlateApp, self).init_arg_parser()
-
-
-        self.arg_parser.add_argument(
-            'items',
-            metavar='ITEM', type=str, nargs='+',
-            help=(
-                'The items to translate into IDNA-encoded strings.'),
-        )
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """The underlaying startpoint of the application."""
-
-        if self.verbose:
-            print("Items to translate:\n")
-
-        for item in self.args.items:
-
-            if item == 'xn--':
-                print(" * {}".format(self.colored(
-                    "Invalid item 'xn--'", ('BOLD', 'RED'))))
-                continue
-
-            item_idna = item
-            if 'xn--' in item:
-                item_idna = to_str(to_bytes(item).decode('idna'))
-            else:
-                item_idna = to_str(item.encode('idna'))
-
-            print(" * {i!r}: {p!r}".format(i=item, p=item_idna))
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
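The translation in _run() above relies on Python's built-in 'idna' codec. A minimal standalone sketch of the same round trip (the domain names are just examples):

#!/usr/bin/env python3
# Hypothetical sketch: translate between Unicode and IDNA (Punycode) form.


def idna_xlate(item):
    if 'xn--' in item:
        return item.encode('ascii').decode('idna')     # Punycode -> Unicode
    return item.encode('idna').decode('ascii')         # Unicode -> Punycode


if __name__ == '__main__':
    for item in ('münchen.example', 'xn--mnchen-3ya.example'):
        print('{!r} -> {!r}'.format(item, idna_xlate(item)))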
diff --git a/pp_lib/import_pdnsdata.py b/pp_lib/import_pdnsdata.py
deleted file mode 100644 (file)
index 93caabe..0000000
+++ /dev/null
@@ -1,1141 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the 'import-pdnsdata' application
-          to import all data from the current PowerDNS database
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import re
-import textwrap
-import traceback
-import socket
-import datetime
-import time
-
-# Third party modules
-import psycopg2
-import pymysql
-
-# Own modules
-from .common import pp, to_str
-from .common import RE_DOT_AT_END
-
-from .cfg_app import PpCfgAppError, PpConfigApplication
-
-from .pdns_record import PdnsSoaData
-
-__version__ = '0.10.3'
-LOG = logging.getLogger(__name__)
-
-# =============================================================================
-class ImportPdnsdataError(PpCfgAppError):
-    pass
-
-# =============================================================================
-class ImportPdnsdataApp(PpConfigApplication):
-    """
-    Application class for the 'import-pdnsdata' application.
-    """
-
-    # Source DB data
-    default_src_db_host = 'mysql-pp06.pixelpark.com'
-    default_src_db_port = 3306
-    default_src_db_schema = 'pdns'
-    default_src_db_user = 'pdns'
-
-    # Target DB data
-    default_tgt_db_type = 'postgresql'
-    default_tgt_db_host = 'systemshare.pixelpark.com'
-    default_tgt_db_port_psql = 5432
-    default_tgt_db_port_mysql = 3306
-    default_tgt_db_schema = 'pdns'
-    default_tgt_db_user = 'pdns'
-
-    re_is_local_account = re.compile(r'(lo[ck]al|privat|intern)', re.IGNORECASE)
-
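-    # Parameterized INSERT statements for the target database; values are
-    # passed as named placeholders to cursor.execute().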
-    sql_insert_domain = textwrap.dedent('''\
-        INSERT INTO domains (id, name, master, last_check, type, notified_serial, account)
-            VALUES (%(id)s, %(name)s, %(master)s, %(last_check)s,
-                %(type)s, %(notified_serial)s, %(account)s)
-        ''').strip()
-
-    sql_insert_dom_meta = textwrap.dedent('''\
-        INSERT INTO domainmetadata (domain_id, kind, content)
-            VALUES (%(domain_id)s, %(kind)s, %(content)s)
-        ''').strip()
-
-    sql_insert_record = textwrap.dedent('''\
-        INSERT INTO records (id, domain_id, name, type, content,
-                             ttl, prio, change_date, disabled,
-                             ordername, auth)
-             VALUES (%(id)s, %(domain_id)s, %(name)s, %(type)s, %(content)s,
-                     %(ttl)s, %(prio)s, %(change_date)s, %(disabled)s,
-                     %(ordername)s, %(auth)s)
-        ''').strip()
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        description = textwrap.dedent('''\
-        Importing the complete PowerDNS database from the old DB into the new one.
-        ''').strip()
-
-        self.default_mail_recipients = ['frank.brehm@pixelpark.com']
-
-        self.src_db_host = self.default_src_db_host
-        self.src_db_port = self.default_src_db_port
-        self.src_db_schema = self.default_src_db_schema
-        self.src_db_user = self.default_src_db_user
-        self.src_db_pass = None
-
-        self.tgt_db_type = self.default_tgt_db_type
-        self.tgt_db_host = self.default_tgt_db_host
-        self.tgt_db_port = self.default_tgt_db_port_psql
-        if self.tgt_db_type == 'mysql':
-            self.tgt_db_port = self.default_tgt_db_port_mysql
-        self.tgt_db_schema = self.default_tgt_db_schema
-        self.tgt_db_user = self.default_tgt_db_user
-        self.tgt_db_pass = None
-
-        self.src_connection = None
-        self.tgt_connection = None
-
-        self.domain_ids = {}
-
-        self._show_simulate_opt = True
-
-        super(ImportPdnsdataApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='import-pdnsdata'
-        )
-
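-        # Per-table row counters for the source database; for tables flagged
-        # with 'has_domain', rows are additionally checked against existing
-        # domain ids to tell valid rows from orphaned ones.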
-        self.nr = {
-            'cryptokeys': {
-                'has_domain': True,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-            'domainmetadata': {
-                'has_domain': True,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-            'domains': {
-                'has_domain': False,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-            'records': {
-                'has_domain': True,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-            'supermasters': {
-                'has_domain': False,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-            'tsigkeys': {
-                'has_domain': False,
-                'total': 0,
-                'valid': 0,
-                'invalid': 0,
-            },
-        }
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(ImportPdnsdataApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 2:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-            section = self.cfg[section_name]
-
-            if section_name.lower() in ('src_db', 'srcdb', 'source', 'src'):
-                self.do_src_db_cfg(section_name, section)
-
-            if section_name.lower() in ('tgt_db', 'tgtdb', 'target', 'tgt'):
-                self.do_tgt_db_cfg(section_name, section)
-
-    # -------------------------------------------------------------------------
-    def do_src_db_cfg(self, section_name, section):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'host' in section:
-            host = section['host'].lower().strip()
-            if not host:
-                LOG.error(
-                    'Invalid source hostname {!r} in configuration section {!r} found.'.format(
-                        section['host'], section_name))
-            else:
-                try:
-                    _ = socket.getaddrinfo(host, 3306, proto=socket.IPPROTO_TCP)            # noqa
-                except socket.gaierror as e:
-                    msg = 'Invalid source hostname {!r} in configuration section {!r}: {}'.format(
-                        section['host'], section_name, e)
-                    LOG.error(msg)
-                    self.config_has_errors = True
-                else:
-                    self.src_db_host = host
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-                if port <= 0:
-                    raise ValueError("port number may not be negative.")
-                elif port >= (2 ** 16):
-                    raise ValueError("port number must be less than {}".format((2 ** 16)))
-            except (ValueError, TypeError) as e:
-                msg = 'Invalid source port number {!r} in configuration section {!r}: {}'.format(
-                    section['port'], section_name, e)
-                LOG.error(msg)
-                self.config_has_errors = True
-            else:
-                self.src_db_port = port
-
-        if 'schema' in section:
-            schema = section['schema'].lower().strip()
-            if not schema:
-                LOG.error((
-                    'Invalid source database name {!r} '
-                    'in configuration section {!r} found.').format(
-                        section['schema'], section_name))
-            else:
-                self.src_db_schema = schema
-
-        if 'user' in section:
-            user = section['user'].lower().strip()
-            if not user:
-                LOG.error((
-                    'Invalid source database user {!r} '
-                    'in configuration section {!r} found.').format(
-                        section['user'], section_name))
-                self.config_has_errors = True
-            else:
-                self.src_db_user = user
-
-        if 'password' in section:
-            self.src_db_pass = section['password']
-
-    # -------------------------------------------------------------------------
-    def do_tgt_db_cfg(self, section_name, section):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'type' in section:
-            db_type = section['type'].lower().strip()
-            if db_type not in ('mysql', 'postgresql', 'postgres', 'psql'):
-                LOG.error('Invalid database type {!r} in configuration section {!r} found.'.format(
-                    section['type'], section_name))
-                self.config_has_errors = True
-            else:
-                if db_type == 'mysql':
-                    self.tgt_db_type = 'mysql'
-                    self.tgt_db_port = self.default_tgt_db_port_mysql
-                else:
-                    self.tgt_db_type = 'postgresql'
-                    self.tgt_db_port = self.default_tgt_db_port_psql
-
-        if 'host' in section:
-            host = section['host'].lower().strip()
-            if not host:
-                LOG.error(
-                    'Invalid target hostname {!r} in configuration section {!r} found.'.format(
-                        section['host'], section_name))
-                self.config_has_errors = True
-            else:
-                try:
-                    _ = socket.getaddrinfo(host, 3306, proto=socket.IPPROTO_TCP)            # noqa
-                except socket.gaierror as e:
-                    msg = 'Invalid target hostname {!r} in configuration section {!r}: {}'.format(
-                        section['host'], section_name, e)
-                    LOG.error(msg)
-                    self.config_has_errors = True
-                else:
-                    self.tgt_db_host = host
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-                if port <= 0:
-                    raise ValueError("port number may not be negative.")
-                elif port >= (2 ** 16):
-                    raise ValueError("port number must be less than {}".format((2 ** 16)))
-            except (ValueError, TypeError) as e:
-                msg = 'Invalid target port number {!r} in configuration section {!r}: {}'.format(
-                    section['port'], section_name, e)
-                LOG.error(msg)
-                self.config_has_errors = True
-            else:
-                self.tgt_db_port = port
-
-        if 'schema' in section:
-            schema = section['schema'].lower().strip()
-            if not schema:
-                LOG.error((
-                    'Invalid target database name {!r} '
-                    'in configuration section {!r} found.').format(
-                        section['schema'], section_name))
-            else:
-                self.tgt_db_schema = schema
-
-        if 'user' in section:
-            user = section['user'].lower().strip()
-            if not user:
-                LOG.error((
-                    'Invalid target database user {!r} '
-                    'in configuration section {!r} found.').format(
-                    section['user'], section_name))
-                self.config_has_errors = True
-            else:
-                self.tgt_db_user = user
-
-        if 'password' in section:
-            self.tgt_db_pass = section['password']
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-
-        self.connect_src_db()
-        self.connect_tgt_db()
-
-    # -------------------------------------------------------------------------
-    def connect_src_db(self):
-
-        result = None
-
-        LOG.debug("Connecting to source MySQL database on {}@{}:{}/{} ...".format(
-            self.src_db_user, self.src_db_host, self.src_db_port, self.src_db_schema))
-        try:
-            self.src_connection = pymysql.connect(
-                host=self.src_db_host,
-                port=self.src_db_port,
-                db=self.src_db_schema,
-                user=self.src_db_user,
-                password=self.src_db_pass,
-                charset='utf8',
-                cursorclass=pymysql.cursors.DictCursor
-            )
-
-            sql = 'SHOW VARIABLES LIKE "version"'
-            if self.verbose > 1:
-                LOG.debug("SQL: {}".format(sql))
-            with self.src_connection.cursor() as cursor:
-                cursor.execute(sql)
-                result = cursor.fetchone()
-            if self.verbose > 2:
-                LOG.debug("Got version info:\n{}".format(pp(result)))
-            LOG.info("Source database is MySQL version {!r}.".format(result['Value']))
-
-        except (pymysql.err.OperationalError) as e:
-            LOG.error("Could not connect to source database ({}): {}".format(
-                e.__class__.__name__, e))
-            self.exit(6)
-
-    # -------------------------------------------------------------------------
-    def connect_tgt_db(self):
-
-        if self.tgt_db_type == 'mysql':
-            self.connect_tgt_db_mysql()
-        else:
-            self.connect_tgt_db_psql()
-
-    # -------------------------------------------------------------------------
-    def connect_tgt_db_mysql(self):
-
-        result = None
-
-        LOG.debug("Connecting to target MySQL database on {}@{}:{}/{} ...".format(
-            self.tgt_db_user, self.tgt_db_host, self.tgt_db_port, self.tgt_db_schema))
-        try:
-            self.tgt_connection = pymysql.connect(
-                host=self.tgt_db_host,
-                port=self.tgt_db_port,
-                db=self.tgt_db_schema,
-                user=self.tgt_db_user,
-                password=self.tgt_db_pass,
-                charset='utf8',
-                cursorclass=pymysql.cursors.DictCursor
-            )
-
-            sql = 'SHOW VARIABLES LIKE "version"'
-            if self.verbose > 1:
-                LOG.debug("SQL: {}".format(sql))
-            with self.tgt_connection.cursor() as cursor:
-                cursor.execute(sql)
-                result = cursor.fetchone()
-            if self.verbose > 2:
-                LOG.debug("Got version info:\n{}".format(pp(result)))
-            LOG.info("Target database is MySQL version {!r}.".format(result['Value']))
-
-        except (pymysql.err.OperationalError) as e:
-            LOG.error("Could not connect to target database ({}): {}".format(
-                e.__class__.__name__, e))
-            self.exit(6)
-
-    # -------------------------------------------------------------------------
-    def connect_tgt_db_psql(self):
-
-        result = None
-
-        LOG.debug("Connecting to target PostgreSQL database on {}@{}:{}/{} ...".format(
-            self.tgt_db_user, self.tgt_db_host, self.tgt_db_port, self.tgt_db_schema))
-        try:
-            self.tgt_connection = psycopg2.connect(
-                host=self.tgt_db_host,
-                port=self.tgt_db_port,
-                dbname=self.tgt_db_schema,
-                user=self.tgt_db_user,
-                password=self.tgt_db_pass,
-            )
-
-            sql = 'SHOW server_version'
-            if self.verbose > 1:
-                LOG.debug("SQL: {}".format(sql))
-            with self.tgt_connection.cursor() as cursor:
-                cursor.execute(sql)
-                result = cursor.fetchone()
-            if self.verbose > 2:
-                LOG.debug("Got version info:\n{}".format(pp(result)))
-            LOG.info("Target database is PostgreSQL version {!r}.".format(result[0]))
-
-        except psycopg2.OperationalError as e:
-            LOG.error("Could not connect to target database ({}): {}".format(
-                e.__class__.__name__, e))
-            self.exit(7)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        try:
-            self.get_src_info()
-            self.clean_tgt_db()
-            self.import_supermasters()
-            self.import_domains()
-            self.import_cryptokeys()
-            self.import_domainmetadata()
-            self.import_records()
-            self.import_tsigkeys()
-#            self.create_ipv6_as_zone()
-        finally:
-            self._close_all()
-
-    # -------------------------------------------------------------------------
-    def get_src_info(self):
-
-        LOG.debug("Retreiving number of source datasets ...")
-
-        result = None
-
-        max_tblname_len = 1
-        for table in self.nr.keys():
-            if len(table) > max_tblname_len:
-                max_tblname_len = len(table)
-        max_tblname_len += 1
-        tpl = "Found {{:<{}}} {{:>8}}".format(max_tblname_len)
-        if self.verbose > 2:
-            LOG.debug("Output template: {!r}".format(tpl))
-
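-        # Count all rows per source table; for tables referencing domains also
-        # count the rows pointing to a non-existing domain (invalid rows).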
-        with self.src_connection.cursor() as cursor:
-
-            for table in sorted(self.nr.keys()):
-                has_domain = self.nr[table].get('has_domain', False)
-                count_total = 0
-                count_valid = 0
-                count_invalid = 0
-                sql = "SELECT COUNT(*) AS count_rows FROM {}".format(table)
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                cursor.execute(sql)
-                result = cursor.fetchone()
-                count_total = int(result['count_rows'])
-                self.nr[table]['total'] = count_total
-                self.nr[table]['valid'] = 0
-                self.nr[table]['invalid'] = 0
-
-                if count_total and has_domain:
-
-                    sql = textwrap.dedent('''\
-                        SELECT COUNT(*) AS count_rows
-                          FROM {}
-                         WHERE domain_id NOT IN (
-                                SELECT id FROM domains)
-                        ''').strip().format(table)
-
-                    if self.verbose > 1:
-                        LOG.debug("SQL: {}".format(sql))
-                    cursor.execute(sql)
-                    result = cursor.fetchone()
-                    count_invalid = int(result['count_rows'])
-                    count_valid = count_total - count_invalid
-                    self.nr[table]['valid'] = count_valid
-                    self.nr[table]['invalid'] = count_invalid
-
-        title = "Number of rows in current PowerDNS database"
-
-        print()
-        print(title)
-        print(('=' * len(title)))
-
-        for table in sorted(self.nr.keys()):
-            has_domain = self.nr[table].get('has_domain', False)
-            msg = tpl.format(table, self.nr[table]['total'])
-            if has_domain:
-                if self.nr[table]['invalid']:
-                    msg += " ({} valid, {} invalid)".format(
-                        self.nr[table]['valid'], self.nr[table]['invalid'])
-                else:
-                    msg += " (all valid)"
-            print(msg)
-        print()
-
-    # -------------------------------------------------------------------------
-    def clean_tgt_db(self):
-
-        tables = [
-            'comments', 'cryptokeys', 'domainmetadata', 'records',
-            'supermasters', 'tsigkeys', 'domains',
-        ]
-
-        sequences = [
-            'comments_id_seq', 'cryptokeys_id_seq', 'domainmetadata_id_seq',
-            'domains_id_seq', 'records_id_seq', 'tsigkeys_id_seq',
-        ]
-
-        LOG.info("Truncating all tables in target database ...")
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-
-            for table in tables:
-
-                LOG.debug("Truncating table {!r} ...".format(table))
-                sql = 'DELETE FROM {}'.format(table)
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                if not self.simulate:
-                    tgt_cursor.execute(sql)
-
-            if self.tgt_db_type != 'mysql':
-
-                for sequence in sequences:
-
-                    LOG.debug("Resetting sequence {!r} ...".format(sequence))
-                    sql = "SELECT SETVAL('{}', 1)".format(sequence)
-                    if self.verbose > 1:
-                        LOG.debug("SQL: {}".format(sql))
-                    if not self.simulate:
-                        tgt_cursor.execute(sql)
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def _import_domain(self, dom_data, tgt_cursor):
-
-        dom_id = dom_data['id']
-        dom_name = dom_data['name']
-        self.domain_ids[dom_id] = dom_name
-
-        if self.is_local_domain(dom_name):
-            LOG.debug("Setting zone {!r} to a local only zone.".format(dom_name))
-            cur_account = dom_data['account']
-            if cur_account is None:
-                cur_account = ''
-            else:
-                cur_account = cur_account.strip()
-            if not self.re_is_local_account.search(cur_account):
-                if cur_account == '':
-                    cur_account = 'local'
-                else:
-                    cur_account += ', local'
-                if self.verbose > 1:
-                    LOG.debug(
-                        "Setting account information of zone {!r} to {!r}.".format(
-                            dom_name, cur_account))
-                dom_data['account'] = cur_account
-        if self.verbose > 1:
-            LOG.debug("SQL for insert domain:\n{}".format(
-                to_str(tgt_cursor.mogrify(self.sql_insert_domain, dom_data))))
-        if not self.simulate:
-            tgt_cursor.execute(self.sql_insert_domain, dom_data)
-
-        # Inserting domain metadata for SOA-EDIT-API
-        params = {
-            'domain_id': dom_id,
-            'kind': 'SOA-EDIT-API',
-            'content': 'INCEPTION-INCREMENT',
-        }
-        if self.verbose > 1:
-            LOG.debug("SQL for insert domain metadata:\n{}".format(
-                to_str(tgt_cursor.mogrify(self.sql_insert_dom_meta, params))))
-        if not self.simulate:
-            tgt_cursor.execute(self.sql_insert_dom_meta, params)
-
-    # -------------------------------------------------------------------------
-    def import_domains(self):
-
-        LOG.info("Importing all domains ...")
-
-        self.domain_ids = {}
-
-        src_sql = textwrap.dedent('''\
-            SELECT id, name, master, last_check, type, notified_serial, account
-              FROM domains
-            ORDER BY name
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                i = 0
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got domains:\n{}".format(pp(results)))
-
-                for result in results:
-                    i += 1
-                    self._import_domain(result, tgt_cursor)
-
-                LOG.info("Imported {} domains.".format(i))
-
-            if self.tgt_db_type != 'mysql':
-                # Get current max domain Id
-                LOG.debug("Get max. Domain Id ...")
-                sql = "SELECT MAX(id) AS max_id FROM domains"
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                tgt_cursor.execute(sql)
-                result = tgt_cursor.fetchone()
-                if self.verbose > 2:
-                    LOG.debug("Got max domain Id:\n{}".format(pp(result)))
-                max_id = int(result[0])
-
-                # Setting this as new value for sequence
-                sql = "SELECT SETVAL('domains_id_seq', %s)"
-                LOG.debug("Setting curval of domains_id_seq to {} ...".format(max_id))
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(to_str(tgt_cursor.mogrify(sql, (max_id, )))))
-                if not self.simulate:
-                    tgt_cursor.execute(sql, (max_id, ))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def import_cryptokeys(self):
-
-        LOG.info("Importing all cryptokeys ...")
-
-        src_sql = textwrap.dedent('''\
-            SELECT id, domain_id, flags, active, content
-              FROM cryptokeys
-             WHERE domain_id IN (
-                    SELECT id FROM domains)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        tgt_sql = textwrap.dedent('''\
-            INSERT INTO cryptokeys (id, domain_id, flags, active, content)
-                 VALUES (%(id)s, %(domain_id)s, %(flags)s, %(active)s, %(content)s)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Target SQL:\n{}".format(tgt_sql))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got cryptokeys:\n{}".format(pp(results)))
-
-                if not results:
-                    LOG.info("No cryptokeys in source database.")
-                    LOG.debug("Commiting changes ...")
-                    self.tgt_connection.commit()
-                    return
-
-                i = 0
-                for result in results:
-                    i += 1
-                    if self.tgt_db_type != 'mysql':
-                        result['active'] = bool(result['active'])
-                    if not self.simulate:
-                        tgt_cursor.execute(tgt_sql, result)
-                LOG.info("Imported {} cryptokeys.".format(i))
-
-            if self.tgt_db_type != 'mysql':
-                LOG.debug("Get max. CryptoKey Id ...")
-                sql = "SELECT MAX(id) AS max_id FROM cryptokeys"
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                tgt_cursor.execute(sql)
-                result = tgt_cursor.fetchone()
-                if self.verbose > 2:
-                    LOG.debug("Got max cryptokey Id:\n{}".format(pp(result)))
-                max_id = int(result[0])
-                sql = "SELECT SETVAL('cryptokeys_id_seq', %s)"
-                LOG.debug("Setting curval of cryptokeys_id_seq to {} ...".format(max_id))
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                if not self.simulate:
-                    tgt_cursor.execute(sql, (max_id, ))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def import_domainmetadata(self):
-
-        LOG.info("Importing all domainmetadata ...")
-
-        src_sql = textwrap.dedent('''\
-            SELECT domain_id, kind, content
-              FROM domainmetadata
-             WHERE domain_id IN (
-                    SELECT id FROM domains)
-             ORDER BY domain_id, kind, content
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        tgt_sql = textwrap.dedent('''\
-            INSERT INTO domainmetadata (domain_id, kind, content)
-                 VALUES (%(domain_id)s, %(kind)s, %(content)s)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Target SQL:\n{}".format(tgt_sql))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                nr_total = 0
-                nr_imported = 0
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got domainmetadata:\n{}".format(pp(results)))
-
-                if not results:
-                    LOG.info("No domainmetadata in source database.")
-                    LOG.debug("Commiting changes ...")
-                    self.tgt_connection.commit()
-                    return
-
-                for result in results:
-                    nr_total += 1
-                    if result['kind'].lower() == 'also-notify':
-                        continue
-                    nr_imported += 1
-                    if not self.simulate:
-                        tgt_cursor.execute(tgt_sql, result)
-                LOG.info("Imported {i} and rejected {r} domainmetadata.".format(
-                    i=nr_imported, r=(nr_total - nr_imported)))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def _import_record(self, record, tgt_cursor):
-
-        if self.tgt_db_type == 'mysql':
-            record['disabled'] = 0
-        else:
-            record['disabled'] = False
-            if record['auth'] is None:
-                record['auth'] = True
-            else:
-                record['auth'] = bool(record['auth'])
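-            # Derive a missing 'ordername' from the record name relative to its zone.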
-            if record['ordername'] is None:
-                dom_id = record['domain_id']
-                if dom_id in self.domain_ids:
-                    dom_name = self.domain_ids[dom_id]
-                    if record['name'] == dom_name:
-                        record['ordername'] = ''
-                    else:
-                        idx = record['name'].rfind('.' + dom_name)
-                        if idx >= 0:
-                            record['ordername'] = record['name'][:idx]
-                        else:
-                            record['ordername'] = ''
-                else:
-                    record['ordername'] = ''
-            if record['type'] in ('NS', 'MX'):
-                record['content'] = RE_DOT_AT_END.sub('', record['content'])
-            elif record['type'] == 'SOA':
-                soa = PdnsSoaData.init_from_data(
-                    record['content'], appname=self.appname,
-                    verbose=self.verbose, base_dir=self.base_dir)
-                soa.primary = RE_DOT_AT_END.sub('', soa.primary)
-                soa.email = RE_DOT_AT_END.sub('', soa.email)
-                record['content'] = soa.data
-        if self.verbose > 3:
-            LOG.debug("SQL for insert record:\n{}".format(
-                to_str(tgt_cursor.mogrify(self.sql_insert_record, record))))
-        if not self.simulate:
-            tgt_cursor.execute(self.sql_insert_record, record)
-
-    # -------------------------------------------------------------------------
-    def import_records(self):
-
-        LOG.info("Importing all records ...")
-
-        src_sql = textwrap.dedent('''\
-            SELECT id, domain_id, name, type, content,
-                   ttl, prio, change_date, ordername, auth
-              FROM records
-             WHERE domain_id IN (
-                    SELECT id FROM domains)
-             ORDER BY name
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        if self.verbose > 1:
-            LOG.debug("Target SQL:\n{}".format(self.sql_insert_record))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                i = 0
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got records:\n{}".format(pp(results)))
-
-                if not results:
-                    LOG.info("No records in source database.")
-                    LOG.debug("Commiting changes ...")
-                    self.tgt_connection.commit()
-                    return
-
-                for result in results:
-                    i += 1
-                    self._import_record(result, tgt_cursor)
-
-                LOG.info("Imported {} records.".format(i))
-
-            if self.tgt_db_type != 'mysql':
-                LOG.debug("Get max. records Id ...")
-                sql = "SELECT MAX(id) AS max_id FROM records"
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                tgt_cursor.execute(sql)
-                result = tgt_cursor.fetchone()
-                if self.verbose > 2:
-                    LOG.debug("Got max records Id:\n{}".format(pp(result)))
-                max_id = int(result[0])
-                sql = "SELECT SETVAL('records_id_seq', %s)"
-                LOG.debug("Setting curval of records_id_seq to {} ...".format(max_id))
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(to_str(tgt_cursor.mogrify(sql, (max_id, )))))
-                if not self.simulate:
-                    tgt_cursor.execute(sql, (max_id, ))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def import_supermasters(self):
-
-        LOG.info("Importing all supermasters ...")
-
-        src_sql = textwrap.dedent('''\
-            SELECT ip, nameserver, account
-              FROM supermasters
-            ORDER BY nameserver
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        tgt_sql = textwrap.dedent('''\
-            INSERT INTO supermasters (ip, nameserver, account)
-                 VALUES (%(ip)s, %(nameserver)s, %(account)s)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Target SQL:\n{}".format(tgt_sql))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                i = 0
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got supermasters:\n{}".format(pp(results)))
-
-                if not results:
-                    LOG.info("No supermasters in source database.")
-                    LOG.debug("Commiting changes ...")
-                    self.tgt_connection.commit()
-                    return
-
-                for result in results:
-                    i += 1
-                    if not self.simulate:
-                        tgt_cursor.execute(tgt_sql, result)
-                LOG.info("Imported {} supermasters.".format(i))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def import_tsigkeys(self):
-
-        LOG.info("Importing all tsigkeys ...")
-
-        src_sql = textwrap.dedent('''\
-            SELECT id, name, algorithm, secret
-              FROM tsigkeys
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Source SQL:\n{}".format(src_sql))
-
-        tgt_sql = textwrap.dedent('''\
-            INSERT INTO tsigkeys (id, name, algorithm, secret)
-                 VALUES (%(id)s, %(name)s, %(algorithm)s, %(secret)s)
-            ''').strip()
-        if self.verbose > 1:
-            LOG.debug("Target SQL:\n{}".format(tgt_sql))
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-            with self.src_connection.cursor() as src_cursor:
-
-                i = 0
-                src_cursor.execute(src_sql)
-                results = src_cursor.fetchall()
-
-                if self.verbose > 3:
-                    LOG.debug("Got tsigkeys:\n{}".format(pp(results)))
-
-                if not results:
-                    LOG.info("No tsigkeys in source database.")
-                    LOG.debug("Commiting changes ...")
-                    self.tgt_connection.commit()
-                    return
-
-                for result in results:
-                    i += 1
-                    if not self.simulate:
-                        tgt_cursor.execute(tgt_sql, result)
-                LOG.info("Imported {} tsigkeys.".format(i))
-
-            if self.tgt_db_type != 'mysql':
-                LOG.debug("Get max. TsigKey Id ...")
-                sql = "SELECT MAX(id) AS max_id FROM tsigkeys"
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                tgt_cursor.execute(sql)
-                result = tgt_cursor.fetchone()
-                if self.verbose > 2:
-                    LOG.debug("Got max TsigKey Id:\n{}".format(pp(result)))
-                max_id = int(result[0])
-                sql = "SELECT SETVAL('tsigkeys_id_seq', %s)"
-                LOG.debug("Setting curval of tsigkeys_id_seq to {} ...".format(max_id))
-                if self.verbose > 1:
-                    LOG.debug("SQL: {}".format(sql))
-                if not self.simulate:
-                    tgt_cursor.execute(sql, (max_id, ))
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def create_ipv6_as_zone(self):
-
-        zone_name = '0.0.7.a.8.7.9.0.1.0.0.2.ip6.arpa'
-        net_addr = '2001:978:a700::'
-        nameservers = (
-            'ns1.pp-dns.com.',
-            'ns2.pp-dns.com.',
-            'ns3.pp-dns.com.',
-            'ns4.pp-dns.com.',
-        )
-        mail_addr = 'hostmaster.pixelpark.net'
-
-        LOG.info("Creating zone {z!r} for AS network {n!r} ...".format(
-            z=zone_name, n=net_addr))
-
-        today = datetime.date.today()
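-        # SOA serial in YYYYMMDDnn form: year, month, day, plus a change counter.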
-        serial = 1000000 * today.year + 10000 * today.month + 100 * today.day + 1
-        domain_id = 9999
-        refresh = 10800
-        retry = 3600
-        expire = 604800
-        ttl = 3600
-        change_date = int(time.time())
-
-        with self.tgt_connection.cursor() as tgt_cursor:
-
-            LOG.debug("Inserting domain ...")
-            sql = textwrap.dedent('''\
-                INSERT INTO domains (name, master, type, notified_serial, account)
-                  VALUES (%(zone_name)s, '', 'MASTER', %(serial)s, 'public')
-                ''').strip()
-            data = {'zone_name': zone_name, 'serial': serial}
-            if self.verbose > 1:
-                LOG.debug("SQL for insert domain:\n{}".format(
-                    to_str(tgt_cursor.mogrify(sql, data))))
-            if not self.simulate:
-                tgt_cursor.execute(sql, data)
-
-            LOG.debug("Retrieving domain_id from DB ...")
-            sql = 'SELECT id FROM domains WHERE name = %s'
-            if self.verbose > 1:
-                LOG.debug("SQL for retrieving domain_id:\n{}".format(
-                    to_str(tgt_cursor.mogrify(sql, [zone_name]))))
-            if not self.simulate:
-                domain_id = None
-                tgt_cursor.execute(sql, [zone_name])
-                results = tgt_cursor.fetchall()
-                if self.verbose > 2:
-                    LOG.debug("Got results:\n{}".format(pp(results)))
-                for result in results:
-                    domain_id = result[0]
-                if domain_id is None:
-                    raise ImportPdnsdataError(
-                        "Did not found Domain Id of zone {!r}.".format(zone_name))
-            LOG.info("Using Id of zone {z!r}: {i}.".format(z=zone_name, i=domain_id))
-
-            ns_used = RE_DOT_AT_END.sub('', nameservers[0])
-            soa = PdnsSoaData(
-                primary=ns_used, email=mail_addr, serial=serial,
-                refresh=refresh, retry=retry, expire=expire, ttl=ttl,
-                appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
-            )
-
-            LOG.debug("Inserting SOA {!r} ...".format(soa.data))
-            sql = textwrap.dedent('''\
-                INSERT INTO records (
-                        domain_id, name, type, content, ttl, prio,
-                        change_date, disabled, ordername, auth)
-                    VALUES (
-                        %(domain_id)s, %(name)s, 'SOA', %(content)s, %(ttl)s, 0,
-                        %(change_date)s, %(disabled)s, '', %(auth)s)
-                ''').strip()
-            data = {
-                'domain_id': domain_id, 'name': zone_name, 'content': soa.data,
-                'ttl': ttl, 'change_date': change_date, 'disabled': False, 'auth': True,
-            }
-            if self.verbose > 1:
-                LOG.debug("SQL for insert SOA:\n{}".format(
-                    to_str(tgt_cursor.mogrify(sql, data))))
-            if not self.simulate:
-                tgt_cursor.execute(sql, data)
-
-            LOG.debug("Inserting nameservers ...")
-            sql = textwrap.dedent('''\
-                INSERT INTO records (
-                        domain_id, name, type, content, ttl, prio,
-                        change_date, disabled, ordername, auth)
-                    VALUES (
-                        %(domain_id)s, %(name)s, 'NS', %(content)s, %(ttl)s, 0,
-                        %(change_date)s, %(disabled)s, '', %(auth)s)
-                ''').strip()
-            for ns in nameservers:
-                ns_used = RE_DOT_AT_END.sub('', ns)
-                data = {
-                    'domain_id': domain_id, 'name': zone_name, 'content': ns_used,
-                    'ttl': ttl, 'change_date': change_date, 'disabled': False, 'auth': True,
-                }
-                if self.verbose > 1:
-                    LOG.debug("SQL for insert nameserver:\n{}".format(
-                        to_str(tgt_cursor.mogrify(sql, data))))
-                if not self.simulate:
-                    tgt_cursor.execute(sql, data)
-
-            LOG.debug("Inserting domain metadata ...")
-            sql = textwrap.dedent('''\
-                INSERT INTO domainmetadata (domain_id, kind, content)
-                    VALUES (%(domain_id)s, %(kind)s, %(content)s)
-                ''').strip()
-            data = {
-                'domain_id': domain_id,
-                'kind': 'SOA-EDIT-API',
-                'content': 'INCEPTION-INCREMENT',
-            }
-            if self.verbose > 1:
-                LOG.debug("SQL for insert domain metadata:\n{}".format(
-                    to_str(tgt_cursor.mogrify(sql, data))))
-            if not self.simulate:
-                tgt_cursor.execute(sql, data)
-
-        LOG.debug("Commiting changes ...")
-        self.tgt_connection.commit()
-
-    # -------------------------------------------------------------------------
-    def _close_all(self):
-
-        if self.src_connection:
-            LOG.debug("Closing source database connection.")
-            try:
-                self.src_connection.close()
-            except Exception as e:
-                LOG.error("Could not close source database connection ({}): {}".format(
-                    e.__class__.__name__, e))
-                traceback.print_exc()
-            self.src_connection = None
-
-        if self.tgt_connection:
-            LOG.debug("Closing target database connection.")
-            try:
-                self.tgt_connection.close()
-            except Exception as e:
-                LOG.error("Could not close target database connection ({}): {}".format(
-                    e.__class__.__name__, e))
-                traceback.print_exc()
-            self.tgt_connection = None
-
-    # -------------------------------------------------------------------------
-    def post_run(self):
-
-        if self.verbose > 1:
-            LOG.info("executing post_run() ...")
-        self._close_all()
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
diff --git a/pp_lib/ldap_app.py b/pp_lib/ldap_app.py
deleted file mode 100644 (file)
index 5b84e3a..0000000
+++ /dev/null
@@ -1,411 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for an LDAP-based application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import re
-import copy
-
-# Third party modules
-
-# ldap3 classes and objects
-from ldap3 import Server, ServerPool, Connection, Reader, Writer, ObjectDef
-# ldap3 constants
-from ldap3 import IP_V4_PREFERRED, ROUND_ROBIN, AUTO_BIND_NONE, ALL_ATTRIBUTES
-from ldap3 import SUBTREE
-
-from ldap3.core.exceptions import LDAPPasswordIsMandatoryError
-
-from ldap3.utils.log import set_library_log_detail_level, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED
-
-# Own modules
-from .common import pp, to_bool
-
-from .cfg_app import PpCfgAppError, PpConfigApplication
-
-__version__ = '0.4.9'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpLdapAppError(PpCfgAppError):
-    """Base error class for all exceptions happened during
-    execution this configured application"""
-
-    pass
-
-
-# =============================================================================
-class PpLdapApplication(PpConfigApplication):
-    """
-    Class for LDAP-based configured application objects.
-    """
-
-    default_ldap_hosts = [
-        'ldap.pixelpark.com'
-    ]
-
-    default_ldap_port = 389
-    default_ldap_port_ssl = 636
-    default_ldap_use_ssl = False
-
-    default_ldap_base_dn = 'o=isp'
-    default_ldap_bind_dn = 'uid=Solaris_NSS,ou=Unix NSS,ou=Applications,o=pixelpark,o=isp'
-    default_ldap_timeout = 30
-
-    fs_re = re.compile(r'(?:\s+|\s*[,;]\s*)')
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=None, usage=None, description=None,
-            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
-            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False):
-
-        self.ldap_hosts = copy.copy(self.default_ldap_hosts)
-        self.ldap_use_ssl = self.default_ldap_use_ssl
-        self.ldap_port = self.default_ldap_port
-        if self.ldap_use_ssl:
-            self.ldap_port = self.default_ldap_port_ssl
-
-        self.ldap_base_dn = self.default_ldap_base_dn
-        self.ldap_bind_dn = self.default_ldap_bind_dn
-        self.ldap_bind_pw = None
-        self.ldap_timeout = self.default_ldap_timeout
-
-        # Either a single Server object or a ServerPool object
-        self.ldap_server = None
-        self.ldap_connection = None
-
-        stems = []
-        if cfg_stems:
-            if isinstance(cfg_stems, list):
-                for stem in cfg_stems:
-                    s = str(stem).strip()
-                    if not s:
-                        msg = "Invalid configuration stem {!r} given.".format(stem)
-                        raise PpLdapAppError(msg)
-                    stems.append(s)
-            else:
-                s = str(cfg_stems).strip()
-                if not s:
-                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
-                    raise PpLdapAppError(msg)
-                stems.append(s)
-        else:
-            stems = [self.appname]
-        if 'ldap' not in stems:
-            stems.insert(0, 'ldap')
-
-        super(PpLdapApplication, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
-            initialized=False, usage=usage, description=description,
-            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
-            env_prefix=env_prefix, cfg_dir=cfg_dir, cfg_stems=stems,
-            cfg_encoding=cfg_encoding, need_config_file=need_config_file,
-        )
-
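-        # Map the application verbosity to the ldap3 library log detail level.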
-        if self.verbose > 5:
-            set_library_log_detail_level(EXTENDED)
-        elif self.verbose > 4:
-            set_library_log_detail_level(NETWORK)
-        elif self.verbose > 3:
-            set_library_log_detail_level(PROTOCOL)
-        elif self.verbose > 2:
-            set_library_log_detail_level(BASIC)
-        else:
-            set_library_log_detail_level(ERROR)
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-        """
-        Execute some actions after reading the configuration.
-
-        This method should be explicitly called by all perform_config()
-        methods in descendant classes.
-        """
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 2:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-            section = self.cfg[section_name]
-
-            if section_name.lower() == 'ldap':
-                self.do_ldap_cfg(section_name, section)
-
-    # -------------------------------------------------------------------------
-    def _ldap_cfg_host(self, section_name, section):
-
-        got_host = False
-
-        if 'host' in section:
-            hosts = self.fs_re.split(section['host'])
-            for host in hosts:
-                if not host:
-                    continue
-                if not got_host:
-                    self.ldap_hosts = []
-                    got_host = True
-                host = host.lower()
-                if host in self.ldap_hosts:
-                    continue
-                self.ldap_hosts.append(host)
-
-    # -------------------------------------------------------------------------
-    def _ldap_cfg_port(self, section_name, section):
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-            except (ValueError, TypeError):
-                msg = "Invalid LDAP port ({s}/port => {v!r}) found in configuration.".format(
-                    s=section_name, v=section['port'])
-                raise PpLdapAppError(msg)
-            if port <= 0 or port >= 2 ** 16:
-                msg = "Invalid LDAP port ({s}/port => {v!r}) found in configuration.".format(
-                    s=section_name, v=port)
-                raise PpLdapAppError(msg)
-            self.ldap_port = port
-
-    # -------------------------------------------------------------------------
-    def _ldap_cfg_other(self, section_name, section):
-
-        if 'ssl' in section:
-            self.ldap_use_ssl = to_bool(section['ssl'])
-
-        if 'tls' in section:
-            self.ldap_use_ssl = to_bool(section['tls'])
-
-        if 'base_dn' in section:
-            self.ldap_base_dn = section['base_dn'].strip()
-        if 'bind_dn' in section:
-            self.ldap_bind_dn = section['bind_dn'].strip()
-        if 'bind_pw' in section:
-            self.ldap_bind_pw = section['bind_pw']
-        if 'timeout' in section:
-            timeout = None
-            try:
-                timeout = int(section['timeout'])
-            except (ValueError, TypeError):
-                msg = (
-                    "Invalid LDAP timeout ({s}/timeout => {v!r}) found in configuration.").format(
-                    s=section_name, v=section['timeout'])
-                LOG.error(msg)
-            if timeout is not None and timeout > 0:
-                self.ldap_timeout = timeout
-
-    # -------------------------------------------------------------------------
-    def do_ldap_cfg(self, section_name, section):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        self._ldap_cfg_host(section_name, section)
-        self._ldap_cfg_port(section_name, section)
-        self._ldap_cfg_other(section_name, section)
-
-        # ----------------------
-        def _get_ldap_server(host):
-            return Server(
-                host, port=self.ldap_port, use_ssl=self.ldap_use_ssl,
-                mode=IP_V4_PREFERRED, connect_timeout=self.ldap_timeout)
-
-        # Init LDAP Server objects
-        if len(self.ldap_hosts):
-            self.ldap_server = ServerPool(None, ROUND_ROBIN)
-            for h in self.ldap_hosts:
-                server = _get_ldap_server(h)
-                self.ldap_server.add(server)
-        else:
-            msg = "No LDAP servers found in configuration."
-            raise PpLdapAppError(msg)
-
-        # Init LDAP connection object
-        self.ldap_connection = Connection(
-            self.ldap_server, user=self.ldap_bind_dn, password=self.ldap_bind_pw,
-            auto_bind=AUTO_BIND_NONE, lazy=True, auto_range=True
-        )
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overridden by descendant classes.
-
-        """
-
-        if self.verbose > 1:
-            LOG.debug("Executing pre_run() ...")
-
-        super(PpLdapApplication, self).pre_run()
-
-        LOG.info("Binding local address for LDAP requests ...")
-        try:
-            self.ldap_connection.bind()
-        except LDAPPasswordIsMandatoryError as e:
-            msg = "Please configure [LDAP]/bind_pw in configuration - " + str(e)
-            self.handle_error(msg, e.__class__.__name__)
-            self.exit(1)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """
-        Dummy function as main routine.
-
-        MUST be overwritten by descendant classes.
-
-        """
-        LOG.debug("Executing nothing ...")
-
-    # -------------------------------------------------------------------------
-    def ldap_search(self, query_filter, dn=None, attributes=ALL_ATTRIBUTES, scope=SUBTREE):
-
-        if self.verbose > 1:
-            LOG.debug("Query string: {q!r}, attributes: {a}".format(
-                q=query_filter, a=pp(attributes)))
-
-        if dn is None:
-            dn = self.ldap_base_dn
-
-        cursor = Reader(
-            self.ldap_connection,
-            query=query_filter, base=dn, attributes=attributes
-        )
-
-        try:
-            cursor.search()
-        except LDAPPasswordIsMandatoryError as e:
-            msg = "Please configure [LDAP]/bind_pw in configuration - " + str(e)
-            LOG.error(msg)
-            return []
-
-        return cursor.entries
-
-    # -------------------------------------------------------------------------
-    def ldap_search_subtree(self, obj_def, query=None, base=None):
-
-        if base is None:
-            base = self.ldap_base_dn
-
-        cursor = Reader(
-            self.ldap_connection,
-            object_def=obj_def, query=query, base=base)
-
-        if self.verbose > 1:
-            LOG.debug("LDAP-Reader:\n{}".format(cursor))
-
-        cursor.search()
-        return cursor.entries
-
-    # -------------------------------------------------------------------------
-    def ldap_search_object(self, obj_def, object_dn, base=None):
-
-        if base is None:
-            base = self.ldap_base_dn
-
-        cursor = Reader(
-            self.ldap_connection,
-            object_def=obj_def, base=base)
-
-        if self.verbose > 1:
-            LOG.debug("LDAP-Reader:\n{}".format(cursor))
-
-        cursor.search_object(entry_dn=object_dn)
-        return cursor.entries
-
-    # -------------------------------------------------------------------------
-    def get_numeric_uid(self, dn, base=None):
-
-        person = ObjectDef(['posixAccount', 'shadowAccount'])
-        person += ["uid", "uidNumber", "gidNumber"]
-
-        entries = self.ldap_search_object(person, dn)
-        LOG.debug("Found {} LDAP entries.".format(len(entries)))
-
-        if not entries:
-            LOG.error("No LDAP entry found for DN {!r}.".format(dn))
-            return None
-
-        entry = entries[0]
-
-        uid = entry['uidNumber'][0]
-        return uid
-
-    # -------------------------------------------------------------------------
-    def set_numeric_uid(self, dn, new_uid, simulate=False, base=None):
-
-        person = ObjectDef(['posixAccount', 'shadowAccount'])
-        person += ["uid", "uidNumber", "gidNumber", 'objectClass']
-
-        if base is None:
-            base = self.ldap_base_dn
-
-        read_cursor = Reader(
-            self.ldap_connection,
-            object_def=person, base=base)
-        read_cursor.search_object(entry_dn=dn)
-
-        if not read_cursor.entries:
-            msg = "Did not found Counter LDAP entry {!r}.".format(dn)
-            raise PpLdapAppError(msg)
-
-        entry = read_cursor.entries[0]
-        if self.verbose:
-            LOG.debug("Found entry:\n{}".format(entry))
-
-        writer_cursor = Writer.from_cursor(read_cursor)
-        entry = writer_cursor.entries[0]
-        entry.uidNumber = new_uid
-        if self.verbose > 1:
-            LOG.debug("Writer entry before commit:\n{}".format(entry))
-
-        LOG.info("Setting numeric user Id of {d!r} to {u} ...".format(
-            d=dn, u=new_uid))
-
-        if not simulate:
-            entry.entry_commit_changes()
-            if self.verbose:
-                LOG.debug("Writer entry after commit:\n{}".format(entry))
-
-    # -------------------------------------------------------------------------
-    def post_run(self):
-        """
-        Dummy function to run after the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if self.verbose > 1:
-            LOG.debug("executing post_run() ...")
-
-        LOG.debug("Unbinding from the LDAP servers ...")
-        self.ldap_connection.unbind()
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
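
The search and update methods above follow the Reader/Writer cursor pattern of the ldap3 abstraction layer. A minimal, self-contained sketch of that pattern, assuming a hypothetical server, bind DN and base DN that are not taken from this repository:

    from ldap3 import Connection, ObjectDef, Reader, Server, Writer

    server = Server('ldaps://ldap.example.com')
    conn = Connection(server, user='cn=admin,o=example', password='secret', auto_bind=True)

    # Describe the entry type and the attributes to read and write.
    person = ObjectDef(['posixAccount', 'shadowAccount'])
    person += ['uid', 'uidNumber', 'gidNumber']

    # Read cursor: search below the base DN with a plain LDAP filter.
    reader = Reader(conn, object_def=person, base='o=example', query='(uid=jdoe)')
    reader.search()

    if reader.entries:
        # Switch to a write cursor, stage the change and commit it.
        writer = Writer.from_cursor(reader)
        entry = writer.entries[0]
        entry.uidNumber = 12345
        entry.entry_commit_changes()
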
diff --git a/pp_lib/mailaddress.py b/pp_lib/mailaddress.py
deleted file mode 100644 (file)
index 11c7f8f..0000000
+++ /dev/null
@@ -1,277 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
-@summary: The module for the MailAddress object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import re
-
-# Own modules
-from .errors import InvalidMailAddressError
-
-from .common import to_str
-
-__version__ = '0.3.2'
-log = logging.getLogger(__name__)
-
-
-# =============================================================================
-class MailAddress(object):
-    """
-    Class for encapsulating a simple mail address.
-    """
-
-    pattern_valid_domain = r'@((?:[a-z0-9](?:[a-z0-9\-]*[a-z0-9])?\.)+[a-z][a-z]+)$'
-
-    pattern_valid_user = r'^([a-z0-9][a-z0-9_\-\.\+\&@]*[a-z0-9]'
-    pattern_valid_user += r'(?:\+[a-z0-9][a-z0-9_\-\.]*[a-z0-9])*)'
-
-    pattern_valid_address = pattern_valid_user + pattern_valid_domain
-
-    re_valid_user = re.compile(pattern_valid_user + r'$', re.IGNORECASE)
-    re_valid_domain = re.compile(r'^' + pattern_valid_domain, re.IGNORECASE)
-    re_valid_address = re.compile(pattern_valid_address, re.IGNORECASE)
-
-    verbose = 0
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def valid_address(cls, address, raise_on_failure=False):
-
-        if not address:
-            e = InvalidMailAddressError(address, "Empty address.")
-            if raise_on_failure:
-                raise e
-            elif cls.verbose > 2:
-                log.debug(str(e))
-            return False
-
-        addr = to_str(address)
-        if not isinstance(addr, str):
-            e = InvalidMailAddressError(address, "Wrong type.")
-            if raise_on_failure:
-                raise e
-            elif cls.verbose > 2:
-                log.debug(str(e))
-            return False
-
-        if cls.re_valid_address.search(addr):
-            return True
-
-        e = InvalidMailAddressError(address, "Invalid address.")
-        if raise_on_failure:
-            raise e
-        elif cls.verbose > 2:
-            log.debug(str(e))
-        return False
-
-    # -------------------------------------------------------------------------
-    def __init__(self, user=None, domain=None):
-
-        self._user = ''
-        self._domain = ''
-
-        if not domain:
-            if user:
-                addr = to_str(user)
-                if self.valid_address(addr):
-                    match = self.re_valid_address.search(addr)
-                    self._user = match.group(1)
-                    self._domain = match.group(2)
-                    return
-                match = self.re_valid_domain.search(addr)
-                if match:
-                    self._domain = match.group(1)
-                    return
-                self._user = addr
-                return
-
-        self._user = to_str(user)
-        self._domain = to_str(domain)
-
-    # -----------------------------------------------------------
-    @property
-    def user(self):
-        """The user part of the address."""
-        if self._user is None:
-            return ''
-        return self._user
-
-    # -----------------------------------------------------------
-    @property
-    def domain(self):
-        """The domain part of the address."""
-        if self._domain is None:
-            return ''
-        return self._domain
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-
-        if not self.user and not self.domain:
-            return ''
-
-        if not self.domain:
-            return self.user
-
-        if not self.user:
-            return '@' + self.domain
-
-        return self.user + '@' + self.domain
-
-    # -------------------------------------------------------------------------
-    def str_for_access(self):
-
-        if not self.user and not self.domain:
-            return None
-
-        if not self.domain:
-            return self.user + '@'
-
-        if not self.user:
-            return self.domain
-
-        return self.user + '@' + self.domain
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("user={!r}".format(self.user))
-        fields.append("domain={!r}".format(self.domain))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    def __hash__(self):
-        return hash(str(self).lower())
-
-    # -------------------------------------------------------------------------
-    def __eq__(self, other):
-
-        if not isinstance(other, MailAddress):
-            if other is None:
-                return False
-            return str(self).lower() == str(other).lower()
-
-        if not self.user:
-            if other.user:
-                return False
-            if not self.domain:
-                if other.domain:
-                    return False
-                return True
-            if not other.domain:
-                return False
-            if self.domain.lower() == other.domain.lower():
-                return True
-            return False
-
-        if not self.domain:
-            if other.domain:
-                return False
-            if not other.user:
-                return False
-            if self.user.lower() == other.user.lower():
-                return True
-            return False
-
-        if not other.user:
-            return False
-        if not other.domain:
-            return False
-        if self.domain.lower() != other.domain.lower():
-            return False
-        if self.user.lower() != other.user.lower():
-            return False
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def __ne__(self, other):
-
-        if self == other:
-            return False
-        return True
-
-    # -------------------------------------------------------------------------
-    def __lt__(self, other):
-
-        if not isinstance(other, MailAddress):
-            if other is None:
-                return False
-            return str(self).lower() < str(other).lower()
-
-        if not self.user:
-            if not self.domain:
-                if other.domain:
-                    return False
-                return True
-            if not other.domain:
-                return False
-            if self.domain.lower() != other.domain.lower():
-                return self.domain.lower() < other.domain.lower()
-            if other.user:
-                return False
-            return True
-
-        if not self.domain:
-            if other.domain:
-                return True
-            if not other.user:
-                return False
-            if self.user.lower() != other.user.lower():
-                return self.user.lower() < other.user.lower()
-            return False
-
-        if not other.domain:
-            return False
-        if not other.user:
-            return False
-
-        if self.domain.lower() != other.domain.lower():
-            return self.domain.lower() < other.domain.lower()
-        if self.user.lower() != other.user.lower():
-            return self.user.lower() < other.user.lower()
-
-        return False
-
-    # -------------------------------------------------------------------------
-    def __gt__(self, other):
-
-        if not isinstance(other, MailAddress):
-            return NotImplemented
-
-        if self < other:
-            return False
-        return True
-
-    # -------------------------------------------------------------------------
-    def __copy__(self):
-        "Implementing a wrapper for copy.copy()."
-
-        addr = MailAddress()
-        addr._user = self.user
-        addr._domain = self.domain
-        return addr
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
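
The MailAddress class above parses addresses and compares them case-insensitively. A short usage sketch with illustrative values; the import path assumes the old pp_lib package layout:

    from pp_lib.mailaddress import MailAddress

    addr = MailAddress('frank.brehm@pixelpark.com')
    addr.user      # 'frank.brehm'
    addr.domain    # 'pixelpark.com'
    str(addr)      # 'frank.brehm@pixelpark.com'

    # Comparison lowercases both sides, so case does not matter.
    addr == MailAddress(user='Frank.Brehm', domain='PIXELPARK.COM')   # True

    # Validation without raising an exception:
    MailAddress.valid_address('not an address')                       # False
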
diff --git a/pp_lib/merge.py b/pp_lib/merge.py
deleted file mode 100644 (file)
index 7daadc7..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-"""
-
-import itertools
-
-# =============================================================================
-class ZipExhausted(Exception):
-    pass
-
-
-# =============================================================================
-def izip_longest(*args, **kwds):
-    '''
-    Function izip_longest() does not exist anymore in Python 3's itertools.
-    Taken from https://docs.python.org/2/library/itertools.html#itertools.izip_longest
-    '''
-    # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
-
-    fillvalue = kwds.get('fillvalue')
-    counter = [len(args) - 1]
-
-    # ------------------
-    def sentinel():
-        if not counter[0]:
-            raise ZipExhausted
-        counter[0] -= 1
-        yield fillvalue
-
-    # ------------------
-    fillers = itertools.repeat(fillvalue)
-    iterators = [itertools.chain(it, sentinel(), fillers) for it in args]
-    try:
-        while iterators:
-            yield tuple(map(next, iterators))
-    except ZipExhausted:
-        pass
-
-
-# =============================================================================
-def merge_structure(a, b):
-    '''
-    Taken from https://gist.github.com/saurabh-hirani/6f3f5d119076df70e0da
-    '''
-    if isinstance(a, dict) and isinstance(b, dict):
-        d = dict(a)
-        d.update({k: merge_structure(a.get(k, None), b[k]) for k in b})
-        return d
-
-    if isinstance(a, list) and isinstance(b, list):
-        is_a_nested = any(x for x in a if isinstance(x, list) or isinstance(x, dict))
-        is_b_nested = any(x for x in b if isinstance(x, list) or isinstance(x, dict))
-        if is_a_nested or is_b_nested:
-            return [merge_structure(x, y) for x, y in izip_longest(a, b)]
-        else:
-            return a + b
-
-    return a if b is None else b
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
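
merge_structure() merges nested dicts recursively, concatenates flat lists, zips nested lists element-wise, and lets the second argument win for scalars. A small illustration with made-up data:

    a = {'ports': [80], 'opts': {'tls': False}}
    b = {'ports': [443], 'opts': {'tls': True, 'hsts': True}}

    merge_structure(a, b)
    # -> {'ports': [80, 443], 'opts': {'tls': True, 'hsts': True}}
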
diff --git a/pp_lib/mk_home_app.py b/pp_lib/mk_home_app.py
deleted file mode 100644 (file)
index ce8b05c..0000000
+++ /dev/null
@@ -1,380 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the mk-home application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import logging
-import logging.config
-import textwrap
-import shutil
-import stat
-
-# Third party modules
-# from ldap3 import ObjectDef, AttrDef, Reader, Writer
-from ldap3 import ObjectDef
-
-# from ldap3.core.exceptions import LDAPKeyError
-
-# Own modules
-from .common import pp
-
-from .ldap_app import PpLdapAppError, PpLdapApplication
-
-__version__ = '0.5.1'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpMkHomeError(PpLdapAppError):
-    pass
-
-
-# =============================================================================
-class PpMkHomeApp(PpLdapApplication):
-    """Class for the 'mk-home' application to ensure:
-        * existence of HOME directories for all users in LDAP
-        * all LDAP users have a valid numeric UID (different from 999999999)
-    """
-
-    default_initial_uid = 999999999
-    # /mnt/nfs
-    default_chroot_homedir = os.sep + os.path.join('mnt', 'nfs')
-    # /home
-    default_home_root = os.sep + 'home'
-    # /etc/skel
-    default_skel_dir = os.sep + os.path.join('etc', 'skel')
-    default_dn_counter = 'uid=uidNumber,ou=ldapTool,ou=Applications,o=Pixelpark,o=isp'
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.initial_uid = self.default_initial_uid
-        self.chroot_homedir = self.default_chroot_homedir
-        self.home_root_abs = self.default_home_root
-        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
-        self.show_simulate_opt = True
-        self.user_entries = []
-        self.users = {}
-        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
-        self.skel_dir = self.default_skel_dir
-        self.dn_counter = self.default_dn_counter
-        self.el_printed = False
-
-        description = textwrap.dedent('''\
-            Home directory and UIDNumber generation - this script searches for
-            Unix accounts in LDAP and creates the home directory for users if it
-            doesn't exist. It also looks for accounts with the special
-            UIDNumber {} and generates a new one for them.
-            ''').strip().format(self.default_initial_uid)
-
-        super(PpMkHomeApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='mk-home'
-        )
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpMkHomeApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 2:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            if section_name.lower() not in ('mk-home', 'mk_home', 'mkhome'):
-                continue
-
-            section = self.cfg[section_name]
-            if self.verbose > 2:
-                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                    n=section_name, s=pp(section)))
-
-            if 'initial_uid' in section:
-                v = section['initial_uid']
-                try:
-                    uid = int(v)
-                except (ValueError, TypeError):
-                    msg = (
-                        "Invalid initial numeric user Id ([{s}]/initial_uid "
-                        "=> {v!r}) found in configuration.").format(s=section_name, v=v)
-                    raise PpMkHomeError(msg)
-                if uid <= 0:
-                    msg = (
-                        "Invalid initial numeric user Id ([{s}]/initial_uid "
-                        "=> {v!r}) found in configuration.").format(s=section_name, v=v)
-                    raise PpMkHomeError(msg)
-                self.initial_uid = uid
-
-            if 'chroot_homedir' in section:
-                v = section['chroot_homedir']
-                if not os.path.isabs(v):
-                    msg = (
-                        "The chrooted path of the home directories must be an "
-                        "absolute pathname (found [{s}]/chroot_homedir "
-                        "=> {v!r} in configuration.").format(s=section_name, v=v)
-                    raise PpMkHomeError(msg)
-                self.chroot_homedir = v
-
-            if 'home_root' in section:
-                v = section['home_root']
-                if not os.path.isabs(v):
-                    msg = (
-                        "The root path of the home directories must be an "
-                        "absolute pathname (found [{s}]/home_root "
-                        "=> {v!r} in configuration.").format(s=section_name, v=v)
-                    raise PpMkHomeError(msg)
-                self.home_root_abs = v
-
-            if 'skel_dir' in section:
-                v = section['skel_dir']
-                if not os.path.isabs(v):
-                    msg = (
-                        "The skeleton directory must be an "
-                        "absolute pathname (found [{s}]/skel_dir "
-                        "=> {v!r} in configuration.").format(s=section_name, v=v)
-                    raise PpMkHomeError(msg)
-                self.skel_dir = v
-
-            if 'dn_counter' in section:
-                self.dn_counter = section['dn_counter'].strip()
-
-        self.home_root_rel = os.path.relpath(self.home_root_abs, os.sep)
-        self.home_root_real = os.path.join(self.chroot_homedir, self.home_root_rel)
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if os.geteuid():
-            msg = "Only root may execute this application."
-            LOG.error(msg)
-            self.exit(1)
-
-        if not os.path.exists(self.chroot_homedir):
-            msg = "The chrooted path of the home directories {!r} does not exists.".format(
-                self.chroot_homedir)
-            LOG.error(msg)
-            self.exit(1)
-
-        if not os.path.isdir(self.chroot_homedir):
-            msg = "The chrooted path of the home directories {!r} is not a directory.".format(
-                self.chroot_homedir)
-            LOG.error(msg)
-            self.exit(1)
-
-        if not os.path.isdir(self.skel_dir):
-            msg = "The skeleton directory {!r} does not exists or is not a directory.".format(
-                self.skel_dir)
-            LOG.error(msg)
-            self.exit(1)
-
-        super(PpMkHomeApp, self).pre_run()
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        self.get_all_user_entries()
-        self.check_numeric_uids()
-        self.check_home_dirs()
-
-    # -------------------------------------------------------------------------
-    def get_all_user_entries(self):
-
-        LOG.info("Reading Accounts from LDAP ...")
-
-        query_filter = '(&(objectclass=posixAccount)(objectclass=shadowAccount))'
-        # attributes = ["uid", "uidNumber", "homeDirectory", "gidNumber"]
-
-        person = ObjectDef(['posixAccount', 'shadowAccount'])
-        person += ["uid", "uidNumber", "homeDirectory", "gidNumber"]
-
-        # self.user_entries = self.ldap_search(query_filter, attributes=attributes)
-        self.user_entries = self.ldap_search_subtree(person, query_filter)
-        LOG.debug("Found {} LDAP entries.".format(len(self.user_entries)))
-
-        for entry in self.user_entries:
-            dn = entry.entry_dn
-            self.users[dn] = {
-                'uid': entry['uid'][0],
-                'uidNumber': entry['uidNumber'][0],
-                'gidNumber': entry['gidNumber'][0],
-                'homeDirectory': entry['homeDirectory'][0],
-            }
-
-        if self.verbose > 2:
-            LOG.debug("All found user entries from LDAP:\n{}".format(pp(self.users)))
-
-    # -------------------------------------------------------------------------
-    def set_new_counter(self, new_uid):
-
-        return self.set_numeric_uid(self.dn_counter, new_uid, simulate=self.simulate)
-
-    # -------------------------------------------------------------------------
-    def check_numeric_uids(self):
-
-        LOG.info("Checking UID's for new Users ...")
-
-        uid_counter = self.get_numeric_uid(self.dn_counter)
-        if uid_counter is None:
-            LOG.error("Did not found current numeric UID of the counter.")
-            self.exit(5)
-        LOG.debug("Current UID counter: {}".format(uid_counter))
-
-        i = 0
-
-        for dn in self.users.keys():
-
-            user = self.users[dn]
-
-            uid = user['uidNumber']
-            # gid = user['gidNumber']
-            user_name = user['uid']
-            # home = user['homeDirectory']
-
-            if uid == self.initial_uid:
-
-                i += 1
-                new_uid = uid_counter + 1
-                LOG.info("Setting numeric UID of user {n!r} to {u}...".format(
-                    n=user_name, u=new_uid))
-
-                uid_counter = self.get_numeric_uid(self.dn_counter)
-                new_uid = uid_counter + 1
-                # Setting uid of user itself
-                self.set_numeric_uid(dn, new_uid, simulate=self.simulate)
-                # Setting uid of the counter
-                self.set_new_counter(new_uid)
-
-                user['uidNumber'] = new_uid
-
-        if self.verbose:
-            print('')
-        if i:
-            if i > 1:
-                LOG.info("Total {} numeric user Ids set.".format(i))
-            else:
-                LOG.info("Total one numeric user Id set.")
-        else:
-            LOG.info("No numeric user Ids set.")
-
-        if self.verbose:
-            print('')
-
-    # -------------------------------------------------------------------------
-    def _check_home_dir(self, dn, upper_dir, home_mode=stat.S_IRWXU):
-
-        user = self.users[dn]
-
-        uid = user['uidNumber']
-        gid = user['gidNumber']
-        user_name = user['uid']
-        home = user['homeDirectory']
-
-        LOG.debug("Checking home directory {h!r} of {d!r} ...".format(h=home, d=dn))
-        if not os.path.isabs(home):
-            LOG.warn("Home directory {h!r} of user {u!r} is not absolute.".format(
-                h=home, u=dn))
-            self.el_printed = False
-            return False
-
-        home_relative = os.path.relpath(home, self.home_root_abs)
-        if home_relative.startswith(upper_dir):
-            if self.verbose > 1:
-                LOG.debug("Home directory {h!r} outside {r!r} is not considered.".format(
-                    h=home, r=self.home_root_abs))
-            self.el_printed = False
-            return False
-
-        chroot_dir = os.path.join(self.chroot_homedir, os.path.relpath(home, os.sep))
-        if self.verbose > 1:
-            LOG.debug("Checking existence of {!r} ...".format(chroot_dir))
-        if os.path.exists(chroot_dir):
-            if os.path.isdir(chroot_dir):
-                if self.verbose > 2:
-                    LOG.debug("Directory {!r} is already existing.".format(chroot_dir))
-            else:
-                LOG.error("Directory {!r} exists, but is NOT a directory.".format(chroot_dir))
-            self.el_printed = False
-            return False
-
-        if not self.el_printed:
-            if self.verbose:
-                print("")
-            self.el_printed = True
-
-        LOG.info("Creating home directory {!r} ....".format(chroot_dir))
-        LOG.debug("Copying recursive {s!r} to {c!r} ....".format(s=self.skel_dir, c=chroot_dir))
-
-        if not self.simulate:
-            shutil.copytree(self.skel_dir, chroot_dir, symlinks=True)
-
-        LOG.debug("Chowning recursive {c!r} to {u}:{g} (user {n!r}) ...".format(
-            c=chroot_dir, u=uid, g=gid, n=user_name))
-
-        if not self.simulate:
-            for root, dirs, files in os.walk(chroot_dir):
-                if self.verbose > 1:
-                    LOG.debug("Chowning {!r} ...".format(root))
-                os.chown(root, uid, gid, follow_symlinks=False)
-                for file_name in files:
-                    fname_abs = os.path.join(root, file_name)
-                    if self.verbose > 1:
-                        LOG.debug("Chowning {!r} ...".format(fname_abs))
-                    os.chown(fname_abs, uid, gid, follow_symlinks=False)
-
-        LOG.debug("Setting permissions of {h!r} to {p:04o} ...".format(h=chroot_dir, p=home_mode))
-        if not self.simulate:
-            os.chmod(chroot_dir, home_mode)
-
-        if self.verbose:
-            print("")
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def check_home_dirs(self):
-
-        LOG.info("Checking home directories ...")
-        upper_dir = os.pardir + os.sep
-        home_mode = stat.S_IRWXU
-        self.el_printed = False
-
-        created = 0
-
-        for dn in sorted(self.users.keys(), key=str.lower):
-            if self._check_home_dir(dn, upper_dir, home_mode):
-                created += 1
-
-        if self.verbose:
-            print('')
-        if created:
-            if created > 1:
-                LOG.info("Total {} home directories created.".format(created))
-            else:
-                LOG.info("Total one home directory created.")
-        else:
-            LOG.info("No home directories created.")
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
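
The home-directory creation in _check_home_dir() boils down to a copytree/chown/chmod sequence. A condensed sketch of that sequence with a hypothetical helper name, using the same standard-library calls as above:

    import os
    import shutil
    import stat

    def make_home(skel_dir, home_dir, uid, gid):
        # Copy the skeleton tree, keeping symlinks as symlinks.
        shutil.copytree(skel_dir, home_dir, symlinks=True)
        # Hand all copied files and directories over to the user.
        for root, dirs, files in os.walk(home_dir):
            os.chown(root, uid, gid, follow_symlinks=False)
            for name in files:
                os.chown(os.path.join(root, name), uid, gid, follow_symlinks=False)
        # 0700 - only the owner may enter the home directory.
        os.chmod(home_dir, stat.S_IRWXU)
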
diff --git a/pp_lib/obj.py b/pp_lib/obj.py
deleted file mode 100644 (file)
index 8208c98..0000000
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicis Pixelpark GmbH, Berlin
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import sys
-import os
-import logging
-import datetime
-import traceback
-
-# Third party modules
-
-# Own modules
-from .common import pp, to_bytes
-
-from .errors import PpError
-
-__version__ = '0.2.4'
-
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpBaseObjectError(PpError):
-    """
-    Base error class usable by all descendant objects.
-    """
-
-    pass
-
-
-# =============================================================================
-class PpBaseObject(object):
-    """
-    Base class for all objects.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=False):
-        """
-        Initialisation of the base object.
-
-        Raises an exception on an unrecoverable error.
-        """
-
-        self._appname = None
-        """
-        @ivar: name of the current running application
-        @type: str
-        """
-        if appname:
-            v = str(appname).strip()
-            if v:
-                self._appname = v
-        if not self._appname:
-            self._appname = os.path.basename(sys.argv[0])
-
-        self._version = version
-        """
-        @ivar: version string of the current object or application
-        @type: str
-        """
-
-        self._verbose = int(verbose)
-        """
-        @ivar: verbosity level (0 - 9)
-        @type: int
-        """
-        if self._verbose < 0:
-            msg = "Wrong verbose level {!r}, must be >= 0".format(verbose)
-            raise ValueError(msg)
-
-        self._initialized = False
-        """
-        @ivar: initialisation of this object is complete
-               after __init__() of this object
-        @type: bool
-        """
-
-        self._base_dir = base_dir
-        """
-        @ivar: base directory used for different purposes, must be an existent
-               directory. Defaults to the directory of the current script.
-        @type: str
-        """
-        if base_dir:
-            if not os.path.exists(base_dir):
-                msg = "Base directory {!r} does not exists.".format(base_dir)
-                self.handle_error(msg)
-                self._base_dir = None
-            elif not os.path.isdir(base_dir):
-                msg = "Base directory {!r} is not a directory.".format(base_dir)
-                self.handle_error(msg)
-                self._base_dir = None
-        if not self._base_dir:
-            self._base_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
-
-        self._initialized = bool(initialized)
-
-    # -----------------------------------------------------------
-    @property
-    def appname(self):
-        """The name of the current running application."""
-        if hasattr(self, '_appname'):
-            return self._appname
-        return os.path.basename(sys.argv[0])
-
-    @appname.setter
-    def appname(self, value):
-        if value:
-            v = str(value).strip()
-            if v:
-                self._appname = v
-
-    # -----------------------------------------------------------
-    @property
-    def version(self):
-        """The version string of the current object or application."""
-        return getattr(self, '_version', __version__)
-
-    # -----------------------------------------------------------
-    @property
-    def verbose(self):
-        """The verbosity level."""
-        return getattr(self, '_verbose', 0)
-
-    @verbose.setter
-    def verbose(self, value):
-        v = int(value)
-        if v >= 0:
-            self._verbose = v
-        else:
-            LOG.warn("Wrong verbose level {!r}, must be >= 0".format(value))
-
-    # -----------------------------------------------------------
-    @property
-    def initialized(self):
-        """The initialisation of this object is complete."""
-        return getattr(self, '_initialized', False)
-
-    @initialized.setter
-    def initialized(self, value):
-        self._initialized = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def base_dir(self):
-        """The base directory used for different purposes."""
-        return self._base_dir
-
-    @base_dir.setter
-    def base_dir(self, value):
-        if value.startswith('~'):
-            value = os.path.expanduser(value)
-        if not os.path.exists(value):
-            msg = "Base directory {!r} does not exists.".format(value)
-            LOG.error(msg)
-        elif not os.path.isdir(value):
-            msg = "Base directory {!r} is not a directory.".format(value)
-            LOG.error(msg)
-        else:
-            self._base_dir = value
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting function for translating object structure
-        into a string
-
-        @return: structure as string
-        @rtype:  str
-        """
-
-        return pp(self.as_dict(short=True))
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("appname={!r}".format(self.appname))
-        fields.append("verbose={!r}".format(self.verbose))
-        fields.append("version={!r}".format(self.version))
-        fields.append("base_dir={!r}".format(self.base_dir))
-        fields.append("initialized={!r}".format(self.initialized))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = {}
-        for key in self.__dict__:
-            if short and key.startswith('_') and not key.startswith('__'):
-                continue
-            val = self.__dict__[key]
-            if isinstance(val, PpBaseObject):
-                res[key] = val.as_dict(short=short)
-            else:
-                res[key] = val
-        res['__class_name__'] = self.__class__.__name__
-        res['appname'] = self.appname
-        res['version'] = self.version
-        res['verbose'] = self.verbose
-        res['initialized'] = self.initialized
-        res['base_dir'] = self.base_dir
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def handle_error(
-            self, error_message=None, exception_name=None, do_traceback=False):
-        """
-        Handle an error gracefully.
-
-        Print a traceback and continue.
-
-        @param error_message: the error message to display
-        @type error_message: str
-        @param exception_name: name of the exception class
-        @type exception_name: str
-        @param do_traceback: always show a traceback
-        @type do_traceback: bool
-
-        """
-
-        msg = 'Exception happened: '
-        if exception_name is not None:
-            exception_name = exception_name.strip()
-            if exception_name:
-                msg = exception_name + ': '
-            else:
-                msg = ''
-        if error_message:
-            msg += str(error_message)
-        else:
-            msg += 'undefined error.'
-
-        root_log = logging.getLogger()
-        has_handlers = False
-        if root_log.handlers:
-            has_handlers = True
-
-        if has_handlers:
-            LOG.error(msg)
-            if do_traceback:
-                LOG.error(traceback.format_exc())
-        else:
-            curdate = datetime.datetime.now()
-            curdate_str = "[" + curdate.isoformat(' ') + "]: "
-            msg = curdate_str + msg + "\n"
-            if hasattr(sys.stderr, 'buffer'):
-                sys.stderr.buffer.write(to_bytes(msg))
-            else:
-                sys.stderr.write(msg)
-            if do_traceback:
-                traceback.print_exc()
-
-        return
-
-    # -------------------------------------------------------------------------
-    def handle_info(self, message, info_name=None):
-        """
-        Shows an information message, either via the initialized log
-        handlers or, if none exist, on STDERR.
-
-        @param message: the info message to display
-        @type message: str
-        @param info_name: Title of information
-        @type info_name: str
-
-        """
-
-        msg = ''
-        if info_name is not None:
-            info_name = info_name.strip()
-            if info_name:
-                msg = info_name + ': '
-        msg += str(message).strip()
-
-        root_log = logging.getLogger()
-        has_handlers = False
-        if root_log.handlers:
-            has_handlers = True
-
-        if has_handlers:
-            LOG.info(msg)
-        else:
-            curdate = datetime.datetime.now()
-            curdate_str = "[" + curdate.isoformat(' ') + "]: "
-            msg = curdate_str + msg + "\n"
-            if hasattr(sys.stderr, 'buffer'):
-                sys.stderr.buffer.write(to_bytes(msg))
-            else:
-                sys.stderr.write(msg)
-
-        return
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
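
PpBaseObject is meant to be subclassed; descendants get appname/verbose/base_dir handling, as_dict() and the error/info helpers for free. A hypothetical subclass as a usage sketch:

    class MyTool(PpBaseObject):

        def __init__(self, appname=None, verbose=0):
            super(MyTool, self).__init__(appname=appname, verbose=verbose)
            self.initialized = True

    tool = MyTool(appname='my-tool', verbose=2)
    print(tool)                                      # pretty-printed as_dict() output
    tool.handle_info('started', info_name='MyTool')  # logged, or written to STDERR
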
diff --git a/pp_lib/pdns_app.py b/pp_lib/pdns_app.py
deleted file mode 100644 (file)
index 1cda9a9..0000000
+++ /dev/null
@@ -1,855 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for a application object related to PowerDNS.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import re
-import copy
-import json
-import os
-import ipaddress
-import socket
-import getpass
-import time
-
-# Third party modules
-import requests
-import psutil
-
-# Own modules
-from .common import pp
-
-from .cfg_app import PpCfgAppError, PpConfigApplication
-from .pdns_zone import PdnsApiZone
-from .pdns_record import PdnsSoaData
-
-__version__ = '0.6.5'
-LOG = logging.getLogger(__name__)
-_LIBRARY_NAME = "pp-pdns-api-client"
-
-
-# =============================================================================
-class PpPDNSAppError(PpCfgAppError):
-    """Base error class for all exceptions happened during
-    execution this configured application"""
-    pass
-
-
-# =============================================================================
-class PDNSApiError(PpPDNSAppError):
-    """Base class for more complex exceptions"""
-    def __init__(self, resp, content, uri=None):
-        self.resp = resp
-        self.content = content
-        self.uri = uri
-
-
-# =============================================================================
-class PDNSApiNotAuthorizedError(PDNSApiError):
-    """The authorization information provided is not correct"""
-    pass
-
-
-# =============================================================================
-class PDNSApiNotFoundError(PDNSApiError):
-    """The ProfitBricks entity was not found"""
-    pass
-
-
-# =============================================================================
-class PDNSApiValidationError(PDNSApiError):
-    """The HTTP data provided is not valid"""
-    pass
-
-
-# =============================================================================
-class PDNSApiRateLimitExceededError(PDNSApiError):
-    """The number of requests sent have exceeded the allowed API rate limit"""
-    pass
-
-
-# =============================================================================
-class PDNSApiRequestError(PDNSApiError):
-    """Base error for request failures"""
-    pass
-
-
-# =============================================================================
-class PDNSApiTimeoutError(PDNSApiRequestError):
-    """Raised when a request does not finish in the given time span."""
-    pass
-
-
-# =============================================================================
-class PpPDNSApplication(PpConfigApplication):
-    """
-    Class for configured application objects related to PowerDNS.
-    """
-
-    api_keys = {
-        'global': "6d1b08e2-59c6-49e7-9e48-039ade102016",
-        'public': "cf0fb928-2a73-49ec-86c2-36e85c9672ff",
-        'local': "d94b183a-c50d-47f7-b338-496090af1577"
-    }
-
-    api_hosts = {
-        'global': "dnsmaster.pp-dns.com",
-        'public': "dnsmaster-public.pixelpark.com",
-        'local': "dnsmaster-local.pixelpark.com"
-    }
-
-    default_api_port = 8081
-    default_api_servername = "localhost"
-    default_timeout = 20
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None,
-            initialized=None, usage=None, description=None,
-            argparse_epilog=None, argparse_prefix_chars='-', env_prefix=None,
-            cfg_dir=None, cfg_stems=None, cfg_encoding='utf-8', need_config_file=False,
-            environment='global'):
-
-        self._api_key = self.api_keys['global']
-        self._api_host = self.api_hosts['global']
-        self._api_port = self.default_api_port
-        self._api_servername = self.default_api_servername
-        self._api_server_version = 'unknown'
-        self._user_agent = '{}/{}'.format(_LIBRARY_NAME, self.version)
-        self._timeout = self.default_timeout
-
-        self.local_addresses = []
-
-        self._environment = 'global'
-        if environment != 'global':
-            self.environment = environment
-
-        stems = []
-        if cfg_stems:
-            if isinstance(cfg_stems, list):
-                for stem in cfg_stems:
-                    s = str(stem).strip()
-                    if not s:
-                        msg = "Invalid configuration stem {!r} given.".format(stem)
-                        raise PpPDNSAppError(msg)
-                    stems.append(s)
-            else:
-                s = str(cfg_stems).strip()
-                if not s:
-                    msg = "Invalid configuration stem {!r} given.".format(cfg_stems)
-                    raise PpPDNSAppError(msg)
-                stems.append(s)
-        else:
-            stems = [self.appname]
-        if 'pdns-api' not in stems:
-            stems.insert(0, 'pdns-api')
-
-        super(PpPDNSApplication, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
-            initialized=False, usage=usage, description=description,
-            argparse_epilog=argparse_epilog, argparse_prefix_chars=argparse_prefix_chars,
-            env_prefix=env_prefix, cfg_dir=cfg_dir, cfg_stems=stems,
-            cfg_encoding=cfg_encoding, need_config_file=need_config_file,
-        )
-
-        for interface, snics in psutil.net_if_addrs().items():
-            for snic in snics:
-                if snic.family == socket.AF_INET or snic.family == socket.AF_INET6:
-                    addr = str(ipaddress.ip_address(re.sub(r'%.*', '', snic.address)))
-                    if addr not in self.local_addresses:
-                        self.local_addresses.append(addr)
-
-        self._user_agent = '{}/{}'.format(_LIBRARY_NAME, self.version)
-
-    # -----------------------------------------------------------
-    @property
-    def api_key(self):
-        "The API key to use the PowerDNS API"
-        return self._api_key
-
-    @api_key.setter
-    def api_key(self, value):
-        if value is None or str(value).strip() == '':
-            raise PpPDNSAppError("Invalid API key {!r} given.".format(value))
-        self._api_key = str(value).strip()
-
-    # -----------------------------------------------------------
-    @property
-    def api_host(self):
-        "The host name or address providing the PowerDNS API."
-        return self._api_host
-
-    @api_host.setter
-    def api_host(self, value):
-        if value is None or str(value).strip() == '':
-            raise PpPDNSAppError("Invalid API host {!r} given.".format(value))
-        self._api_host = str(value).strip().lower()
-
-    # -----------------------------------------------------------
-    @property
-    def api_port(self):
-        "The TCP port number of the PowerDNS API."
-        return self._api_port
-
-    @api_port.setter
-    def api_port(self, value):
-        v = int(value)
-        if v < 1:
-            raise PpPDNSAppError("Invalid API port {!r} given.".format(value))
-        self._api_port = v
-
-    # -----------------------------------------------------------
-    @property
-    def api_servername(self):
-        "The (virtual) name of the PowerDNS server used in API calls."
-        return self._api_servername
-
-    @api_servername.setter
-    def api_servername(self, value):
-        if value is None or str(value).strip() == '':
-            raise PpPDNSAppError("Invalid API server name {!r} given.".format(value))
-        self._api_servername = str(value).strip()
-
-    # -----------------------------------------------------------
-    @property
-    def api_server_version(self):
-        "The version of the PowerDNS server, how provided by API."
-        return self._api_server_version
-
-    # -----------------------------------------------------------
-    @property
-    def user_agent(self):
-        "The name of the user agent used in API calls."
-        return self._user_agent
-
-    @user_agent.setter
-    def user_agent(self, value):
-        if value is None or str(value).strip() == '':
-            raise PpPDNSAppError("Invalid user agent {!r} given.".format(value))
-        self._user_agent = str(value).strip()
-
-    # -----------------------------------------------------------
-    @property
-    def timeout(self):
-        "The timeout in seconds on requesting the PowerDNS API."
-        return self._timeout
-
-    @timeout.setter
-    def timeout(self, value):
-        v = int(value)
-        if v < 1:
-            raise PpPDNSAppError("Invalid timeout {!r} given.".format(value))
-        self._timeout = v
-
-    # -----------------------------------------------------------
-    @property
-    def environment(self):
-        "The name of the PowerDNS environment."
-        return self._environment
-
-    @environment.setter
-    def environment(self, value):
-        if value is None:
-            raise PpPDNSAppError("Invalid environment None given.")
-        v = str(value).strip().lower()
-        if v not in self.api_keys.keys():
-            raise PpPDNSAppError("Invalid environment {!r} given.".format(value))
-        self._environment = v
-        self._api_host = self.api_hosts[v]
-        self._api_key = self.api_keys[v]
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PpPDNSApplication, self).as_dict(short=short)
-        res['api_host'] = self.api_host
-        res['api_hosts'] = copy.copy(self.api_hosts)
-        res['api_key'] = self.api_key
-        res['api_keys'] = copy.copy(self.api_keys)
-        res['api_port'] = self.api_port
-        res['api_servername'] = self.api_servername
-        res['default_api_port'] = self.default_api_port
-        res['default_api_servername'] = self.default_api_servername
-        res['default_timeout'] = self.default_timeout
-        res['environment'] = self.environment
-        res['timeout'] = self.timeout
-        res['user_agent'] = self.user_agent
-        res['api_server_version'] = self.api_server_version
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initiate the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        super(PpPDNSApplication, self).init_arg_parser()
-
-        pdns_group = self.arg_parser.add_argument_group('PowerDNS API options')
-        env_group = pdns_group.add_mutually_exclusive_group()
-
-        envs = []
-        for env in self.api_keys.keys():
-            envs.append(str(env))
-        envs.sort()
-
-        env_group.add_argument(
-            '-E', '--env', '--environment',
-            metavar="ENVIRONMENT", choices=envs, dest="env",
-            help=(
-                "Select, which PowerDNS environment to use. "
-                "Valid values: {v}, default: {d!r}.".format(
-                    v=', '.join(map(lambda x: repr(x), envs)),
-                    d='global'))
-        )
-
-        env_group.add_argument(
-            '-G', '--global',
-            action='store_true', dest="env_global",
-            help=("Using the 'global' PowerDNS environment."),
-        )
-
-        env_group.add_argument(
-            '-L', '--local',
-            action='store_true', dest="env_local",
-            help=("Using the 'local' PowerDNS environment."),
-        )
-
-        env_group.add_argument(
-            '-P', '--public',
-            action='store_true', dest="env_public",
-            help=("Using the 'public' PowerDNS environment."),
-        )
-
-        pdns_group.add_argument(
-            '-p', '--port',
-            metavar="PORT", type=int, dest='api_port', default=self.default_api_port,
-            help=("Which port to connect to PowerDNS API, default: {}.".format(
-                self.default_api_port)),
-        )
-
-        pdns_group.add_argument(
-            '-t', '--timeout',
-            metavar="SECS", type=int, dest='timeout', default=self.default_timeout,
-            help=("The timeout in seconds to request the PowerDNS API, default: {}.".format(
-                self.default_timeout)),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Public available method to execute some actions after parsing
-        the command line parameters.
-        """
-
-        if self.args.env:
-            self.environment = self.args.env
-        elif self.args.env_global:
-            self.environment = 'global'
-        elif self.args.env_local:
-            self.environment = 'local'
-        elif self.args.env_public:
-            self.environment = 'public'
-
-        if self.args.api_port:
-            self.api_port = self.args.api_port
-
-        if self.args.timeout:
-            self.timeout = self.args.timeout
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpPDNSApplication, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 3:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            section = self.cfg[section_name]
-
-            if section_name.lower() in (
-                    'powerdns-api', 'powerdns_api', 'powerdnsapi',
-                    'pdns-api', 'pdns_api', 'pdnsapi'):
-                self.set_cfg_api_options(section, section_name)
-
-    # -------------------------------------------------------------------------
-    def set_cfg_api_options(self, section, section_name):
-
-        if self.verbose > 2:
-            LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                n=section_name, s=pp(section)))
-
-        if 'environment' in section:
-            v = section['environment'].strip().lower()
-            if v not in self.api_hosts:
-                LOG.error("Wrong environment {!r} found in configuration.".format(
-                    section['environment']))
-                self.config_has_errors = True
-            else:
-                self.environment = v
-
-        if 'host' in section:
-            v = section['host']
-            host = v.lower().strip()
-            if host:
-                self.api_host = host
-
-        if 'port' in section:
-            try:
-                port = int(section['port'])
-                if port <= 0 or port >= 2**16:
-                    raise ValueError(
-                        "a port must be greater than 0 and less than {}.".format(2**16))
-            except (TypeError, ValueError) as e:
-                LOG.error("Wrong port number {!r} in configuration section {!r}: {}".format(
-                    section['port'], section_name, e))
-                self.config_has_errors = True
-            else:
-                self.api_port = port
-
-        if 'server_id' in section and section['server_id'].strip():
-            self.api_servername = section['server_id'].strip().lower()
-
-        if 'key' in section:
-            key = section['key'].strip()
-            self.api_key = key
-
-    # -------------------------------------------------------------------------
-    def _check_path_config(self, section, section_name, key, class_prop, absolute=True, desc=None):
-
-        if key not in section:
-            return
-
-        d = ''
-        if desc:
-            d = ' ' + str(desc).strip()
-
-        path = section[key].strip()
-        if not path:
-            msg = "No path given for{} [{}]/{} in configuration.".format(
-                d, section_name, key)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        if absolute and not os.path.isabs(path):
-            msg = "Path {!r} for{} [{}]/{} in configuration must be an absolute path.".format(
-                path, d, section_name, key)
-            LOG.error(msg)
-            self.config_has_errors = True
-            return
-
-        setattr(self, class_prop, path)
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if self.verbose > 1:
-            LOG.debug("executing pre_run() ...")
-
-        LOG.debug("Setting Loglevel of the requests module to WARNING")
-        logging.getLogger("requests").setLevel(logging.WARNING)
-
-        super(PpPDNSApplication, self).pre_run()
-        self.get_api_server_version()
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-        """
-        Dummy function as main routine.
-
-        MUST be overwritten by descendant classes.
-
-        """
-        LOG.debug("Executing nothing ...")
-
-    # -------------------------------------------------------------------------
-    def post_run(self):
-        """
-        Dummy function to run after the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if self.verbose > 1:
-            LOG.debug("executing post_run() ...")
-
-    # -------------------------------------------------------------------------
-    def get_api_server_version(self):
-
-        path = "/servers/{}".format(self.api_servername)
-        try:
-            json_response = self.perform_request(path)
-        except (PDNSApiNotFoundError, PDNSApiValidationError):
-            LOG.error("Could not found server info.")
-            return None
-        if self.verbose > 2:
-            LOG.debug("Got a response:\n{}".format(pp(json_response)))
-
-        if 'version' in json_response:
-            self._api_server_version = json_response['version']
-            LOG.info("PowerDNS server version {!r}.".format(self.api_server_version))
-            return self.api_server_version
-        LOG.error("Did not found version info in server info:\n{}".format(pp(json_response)))
-        return None
-
-    # -------------------------------------------------------------------------
-    def _build_url(self, path):
-
-        url = 'http://{}'.format(self.api_host)
-        if self.api_port != 80:
-            url += ':{}'.format(self.api_port)
-
-        url += '/api/v1' + path
-        LOG.debug("Used URL: {!r}".format(url))
-        return url
-
-    # -------------------------------------------------------------------------
-    def perform_request(self, path, method='GET', data=None, headers=None, may_simulate=False):
-        """Performing the underlying API request."""
-
-        if headers is None:
-            headers = dict()
-        headers['X-API-Key'] = self.api_key
-
-        url = self._build_url(path)
-        if self.verbose > 1:
-            LOG.debug("Request method: {!r}".format(method))
-        if data and self.verbose > 2:
-            data_out = "{!r}".format(data)
-            try:
-                data_out = json.loads(data)
-            except ValueError:
-                pass
-            else:
-                data_out = pp(data_out)
-            LOG.debug("Data:\n{}".format(data_out))
-            LOG.debug("RAW data:\n{}".format(data))
-
-        headers.update({'User-Agent': self.user_agent})
-        headers.update({'Content-Type': 'application/json'})
-        if self.verbose > 1:
-            LOG.debug("Headers:\n%s", pp(headers))
-
-        if may_simulate and self.simulate:
-            LOG.debug("Simulation mode, Request will not be sent.")
-            return ''
-
-        session = requests.Session()
-        response = session.request(method, url, data=data, headers=headers, timeout=self.timeout)
-
-        try:
-            if not response.ok:
-                err = response.json()
-                code = response.status_code
-                msg = err['error']
-                if response.status_code == 401:
-                    raise PDNSApiNotAuthorizedError(code, msg, url)
-                if response.status_code == 404:
-                    raise PDNSApiNotFoundError(code, msg, url)
-                if response.status_code == 422:
-                    raise PDNSApiValidationError(code, msg, url)
-                if response.status_code == 429:
-                    raise PDNSApiRateLimitExceededError(code, msg, url)
-                else:
-                    raise PDNSApiError(code, msg, url)
-
-        except ValueError:
-            raise PpPDNSAppError('Failed to parse the response', response.text)
-
-        if self.verbose > 3:
-            LOG.debug("RAW response: {!r}.".format(response.text))
-        if not response.text:
-            return ''
-
-        json_response = response.json()
-
-        if 'location' in response.headers:
-            json_response['requestId'] = self._request_id(response.headers)
-
-        return json_response
-
-    # -------------------------------------------------------------------------
-    def get_api_zones(self):
-
-        LOG.debug("Trying to get all zones from PDNS API ...")
-
-        path = "/servers/{}/zones".format(self.api_servername)
-        json_response = self.perform_request(path)
-        if self.verbose > 3:
-            LOG.debug("Got a response:\n{}".format(pp(json_response)))
-
-        zone_list = []
-
-        for data in json_response:
-            zone = PdnsApiZone.init_from_dict(
-                data, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
-            zone_list.append(zone)
-            if self.verbose > 2:
-                print("{!r}".format(zone))
-
-        if self.verbose > 1:
-            LOG.debug("Found {} zones.".format(len(zone_list)))
-
-        return zone_list
-
-    # -------------------------------------------------------------------------
-    def get_api_zone(self, zone_name):
-
-        zone_unicode = zone_name
-        json_response = None
-        zout = "{!r}".format(zone_name)
-        if 'xn--' in zone_name:
-            zone_unicode = zone_name.encode('idna').decode('idna')
-            zout = "{!r} ({})".format(zone_name, zone_unicode)
-        LOG.debug("Trying to get complete information about zone {!r} ...".format(zone_name))
-
-        path = "/servers/{}/zones/{}".format(self.api_servername, zone_name)
-        try:
-            json_response = self.perform_request(path)
-        except (PDNSApiNotFoundError, PDNSApiValidationError):
-            LOG.error("The given zone {} was not found.".format(zout))
-            return None
-        if self.verbose > 2:
-            LOG.debug("Got a response:\n{}".format(pp(json_response)))
-
-        zone = PdnsApiZone.init_from_dict(
-            json_response, appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
-        if self.verbose > 2:
-            LOG.debug("Zone object:\n{}".format(pp(zone.as_dict())))
-
-        return zone
-
-    # -------------------------------------------------------------------------
-    def patch_zone(self, zone, payload):
-
-        if self.verbose > 1:
-            LOG.debug("Patching zone {!r} ...".format(zone.name))
-
-        path = "/servers/{}/zones/{}".format(self.api_servername, zone.name)
-        return self.perform_request(path, 'PATCH', json.dumps(payload), may_simulate=True)
-
-    # -------------------------------------------------------------------------
-    def update_soa(self, zone, new_soa, comment=None, ttl=None):
-
-        if not isinstance(new_soa, PdnsSoaData):
-            msg = "New SOA must by of type PdnsSoaData, given {t}: {s!r}".format(
-                t=new_soa.__class__.__name__, s=new_soa)
-            raise TypeError(msg)
-
-        if ttl:
-            ttl = int(ttl)
-        else:
-            cur_soa_rrset = zone.get_soa_rrset()
-            ttl = cur_soa_rrset.ttl
-
-        if comment is not None:
-            comment = str(comment).strip()
-            if comment == '':
-                comment = None
-
-        rrset = {
-            'name': zone.name,
-            'type': 'SOA',
-            'ttl': ttl,
-            'changetype': 'REPLACE',
-            'records': [],
-            'comments': [],
-        }
-
-#        if comment:
-#            comment_rec = {
-#                'content': comment,
-#                'account': getpass.getuser(),
-#                'modified_at': int(time.time() + 0.5),
-#            }
-#            rrset['comments'] = [comment_rec]
-
-        record = {
-            'content': new_soa.data,
-            'disabled': False,
-            'name': zone.name,
-            'set-ptr': False,
-            'type': 'SOA',
-        }
-        rrset['records'].append(record)
-        payload = {"rrsets": [rrset]}
-
-        if self.verbose > 1:
-            LOG.debug("Setting new SOA {s!r} for zone {z!r}, TTL {t} ...".format(
-                s=new_soa.data, z=zone.name, t=ttl))
-
-        self.patch_zone(zone, payload)
-
-    # -------------------------------------------------------------------------
-    def increase_serial(self, zone_name, comment=None):
-
-        zone = self.get_api_zone(zone_name)
-        if not zone:
-            raise PpPDNSAppError("Did not found zone for {!r}.".format(zone_name))
-
-        LOG.info("Increasing serial in SOA of zone {!r} ....".format(zone_name))
-
-        api_host_address = None
-        for addr_info in socket.getaddrinfo(self.api_host, 53, family=socket.AF_INET):
-            api_host_address = addr_info[4][0]
-            break
-
-        api_soa = zone.get_soa()
-        if not api_soa:
-            raise PpPDNSAppError("Could not find SOA for zone {!r}.".format(zone_name))
-        if self.verbose > 2:
-            LOG.debug("Got SOA for zone {z!r} by API:\n{s}".format(
-                z=zone_name, s=api_soa))
-
-        dns_soa = zone.get_soa_by_dns(api_host_address)
-        if self.verbose > 2:
-            LOG.debug("Got SOA for zone {z!r} from DNS by {h!r}:\n{s}".format(
-                h=self.api_host, z=zone_name, s=dns_soa))
-
-        new_serial = zone.get_new_serial(dns_soa.serial)
-        LOG.debug("Got new serial number for zone {z!r}: {s}.".format(
-            z=zone_name, s=new_serial))
-
-        api_soa.serial = new_serial
-        return self.update_soa(zone, api_soa, comment)
-
-    # -------------------------------------------------------------------------
-    def set_nameservers(
-        self, zone, new_nameservers, for_zone=None, comment=None, new_ttl=None,
-            do_serial=True, do_notify=True):
-
-        current_nameservers = zone.get_zone_nameservers(for_zone=for_zone)
-        if for_zone:
-            LOG.debug("Current nameservers of {f!r} in zone {z!r}:\n{ns}".format(
-                f=for_zone, z=zone.name, ns=pp(current_nameservers)))
-        else:
-            LOG.debug("Current nameservers of zone {z!r}:\n{ns}".format(
-                z=zone.name, ns=pp(current_nameservers)))
-
-        ns2remove = []
-        ns2add = []
-
-        for ns in current_nameservers:
-            if ns not in new_nameservers:
-                ns2remove.append(ns)
-        for ns in new_nameservers:
-            if ns not in current_nameservers:
-                ns2add.append(ns)
-
-        if not ns2remove and not ns2add:
-            if for_zone:
-                msg = "Subzone {f!r} has already the expected nameservers in zone {z!r}."
-            else:
-                msg = "Zone {z!r} has already the expected nameservers."
-            LOG.info(msg.format(f=for_zone, z=zone.name))
-            return False
-
-        LOG.debug("Nameservers to remove from zone {z!r}:\n{ns}".format(
-            z=zone.name, ns=pp(ns2remove)))
-        LOG.debug("Nameservers to add to zone {z!r}:\n{ns}".format(
-            z=zone.name, ns=pp(ns2add)))
-
-        ns_ttl = None
-        if not new_ttl:
-            cur_rrset = zone.get_ns_rrset(for_zone=for_zone)
-            if cur_rrset:
-                ns_ttl = cur_rrset.ttl
-            else:
-                soa = zone.get_soa()
-                ns_ttl = soa.ttl
-                del soa
-        else:
-            ns_ttl = int(new_ttl)
-        if ns_ttl <= 0:
-            ns_ttl = 3600
-        LOG.debug("TTL for NS records: {}.".format(ns_ttl))
-
-        rrset_name = zone.name.lower()
-        if for_zone:
-            rrset_name = for_zone.lower()
-
-        records = []
-        for ns in new_nameservers:
-            record = {
-                "name": rrset_name,
-                "type": "NS",
-                "content": ns,
-                "disabled": False,
-                "set-ptr": False,
-            }
-            records.append(record)
-        rrset = {
-            "name": rrset_name,
-            "type": "NS",
-            "ttl": ns_ttl,
-            "changetype": "REPLACE",
-            "records": records,
-        }
-
-        if comment:
-            comment_rec = {
-                'content': comment,
-                'account': getpass.getuser(),
-                'modified_at': int(time.time() + 0.5),
-            }
-            rrset['comments'] = [comment_rec]
-
-        payload = {"rrsets": [rrset]}
-
-        self.patch_zone(zone, payload)
-
-        if do_serial:
-            self.increase_serial(zone.name)
-
-        if do_notify:
-            self.notify_zone(zone)
-
-        return True
-
-    # -------------------------------------------------------------------------
-    def notify_zone(self, zone):
-
-        LOG.info("Notifying slaves of zone {!r} ...".format(zone.name))
-
-        path = "/servers/{}/zones/{}/notify".format(self.api_servername, zone.name)
-        return self.perform_request(path, 'PUT', '', may_simulate=True)
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
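
The perform_request() and _build_url() methods above implement the usual PowerDNS HTTP API calling convention: an 'X-API-Key' header, a base path of '/api/v1' and JSON request and response bodies. The following minimal sketch shows the same pattern outside the application framework; host, port, API key and server id are placeholder assumptions, not values taken from this repository.

#!/usr/bin/env python3
# Minimal sketch of the PowerDNS API request pattern used by perform_request()
# above.  Host, port, key and server id below are made-up placeholders.
import requests

API_HOST = 'pdns.example.com'   # placeholder
API_PORT = 8081                 # placeholder (default port of the PowerDNS webserver)
API_KEY = 'very-secret'         # placeholder
SERVER_ID = 'localhost'         # the usual server id of a PowerDNS instance


def perform_request(path, method='GET', data=None, timeout=20):
    """Send a request to the PowerDNS API and return the decoded JSON body."""
    url = 'http://{}:{}/api/v1{}'.format(API_HOST, API_PORT, path)
    headers = {
        'X-API-Key': API_KEY,
        'User-Agent': 'pdns-api-sketch/0.1',
        'Content-Type': 'application/json',
    }
    response = requests.request(method, url, data=data, headers=headers, timeout=timeout)
    response.raise_for_status()
    if not response.text:
        return None
    return response.json()


if __name__ == '__main__':
    # List all zones known to the server, analogous to get_api_zones() above.
    for zone in perform_request('/servers/{}/zones'.format(SERVER_ID)):
        print(zone['name'], zone['kind'], zone['serial'])
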
diff --git a/pp_lib/pdns_list_zones.py b/pp_lib/pdns_list_zones.py
deleted file mode 100644 (file)
index be91a52..0000000
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the pdns-list-zones application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import textwrap
-
-from functools import cmp_to_key
-
-# Own modules
-from .common import compare_fqdn
-
-from .pdns_app import PpPDNSAppError, PpPDNSApplication
-from .pdns_zone import PdnsApiZone
-
-__version__ = '0.5.1'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpPDNSListZonesError(PpPDNSAppError):
-    pass
-
-
-# =============================================================================
-class PpPDNSListZonesApp(PpPDNSApplication):
-    """Class for the 'pdns-list-zones' application get a list of all available
-       zones from PowerDNS
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.no_header = False
-        self.no_summary = False
-        self.show_numbers = False
-        self.minimal = False
-
-        description = textwrap.dedent('''\
-            Lists all available zones from given PowerDNS API.
-            ''')
-
-        super(PpPDNSListZonesApp, self).__init__(
-            appname=appname, version=version, description=description,
-        )
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initialize the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        super(PpPDNSListZonesApp, self).init_arg_parser()
-
-        self.arg_parser.add_argument(
-            '-H', '--no-header', action='store_true', dest='no_header',
-            help="Don't show header lines at the beginning of the list."
-        )
-
-        self.arg_parser.add_argument(
-            '-n', '--no-summary', action='store_true', dest='no_summary',
-            help="Don't show summary at the end of the list."
-        )
-
-        col_group = self.arg_parser.add_mutually_exclusive_group()
-
-        col_group.add_argument(
-            '-M', '--minimal', action='store_true', dest='minimal',
-            help=(
-                "Minimal output, includes --no-header and --no-summary. "
-                "Mutually exclusive to --numbers.")
-        )
-
-        col_group.add_argument(
-            '-N', '--numbers', action='store_true', dest='show_numbers',
-            help="Show number of Ressource Record Sets and Records for each zone",
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Publicly available method to execute some actions after parsing
-        the command line parameters.
-        """
-
-        super(PpPDNSListZonesApp, self).perform_arg_parser()
-
-        if self.args.no_header:
-            self.no_header = True
-
-        if self.args.no_summary:
-            self.no_summary = True
-
-        if self.args.show_numbers:
-            self.show_numbers = True
-
-        if self.args.minimal:
-            self.no_header = True
-            self.no_summary = True
-            self.minimal = True
-            self.show_numbers = False
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        LOG.info("Listing all available zones from PowerDNS environment {!r}.".format(
-            self.environment))
-
-        zone_list = self.get_api_zones()
-
-        len_zone = 10
-        for zone in zone_list:
-            if len(zone.name_unicode) > len_zone:
-                len_zone = len(zone.name_unicode)
-
-        if self.minimal:
-            tpl = PdnsApiZone.get_list_template(minimal=True)
-        elif self.show_numbers:
-            tpl = PdnsApiZone.get_list_template(show_numbers=True)
-        else:
-            tpl = PdnsApiZone.get_list_template(show_numbers=False)
-
-        if self.verbose > 2:
-            LOG.debug("Used template for line: {!r}".format(tpl))
-
-        line = tpl.format(
-            name="Zone", len_zone=len_zone, kind="Type", serial="Serial",
-            dnssec="DNSSEC", nr_rrsets='RR Sets', nr_records='Records',
-            account="Account information")
-        if not self.no_header:
-            print(line)
-            print('-' * len(line))
-
-        nr_zones = 0
-        nr_rrsets = 0
-        nr_records = 0
-        for zone in sorted(zone_list, key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode)):
-            if self.show_numbers:
-                nr_zones += 1
-                zone_complete = self.get_api_zone(zone.name)
-                for rrset in zone_complete.rrsets:
-                    nr_rrsets += 1
-                    for record in rrset.records:
-                        nr_records += 1
-                print(zone.get_line(len_zone, zone_complete.rrsets))
-            else:
-                print(zone.get_line(len_zone, minimal=self.minimal))
-
-        if not self.no_summary:
-            print('-' * len(line))
-            line = tpl.format(
-                name="Total:", len_zone=len_zone, kind="", serial=nr_zones,
-                dnssec="Zones", nr_rrsets=nr_rrsets,  nr_records=nr_records, account="")
-            print(line)
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
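
The listing above sorts zones with functools.cmp_to_key() wrapped around compare_fqdn() from pp_lib.common, which is not part of this diff. Below is a small self-contained sketch of the same sorting pattern, using a simple stand-in comparison instead of the real compare_fqdn():

# Sketch of the cmp_to_key() sorting pattern used in _run() above.  The real
# compare_fqdn() lives in pp_lib.common; this stand-in simply compares the
# reversed label lists of two fully qualified domain names.
from functools import cmp_to_key


def compare_fqdn_stub(x, y):
    """Order FQDNs by their labels, starting at the top-level domain."""
    xl = list(reversed(x.strip('.').lower().split('.')))
    yl = list(reversed(y.strip('.').lower().split('.')))
    if xl < yl:
        return -1
    if xl > yl:
        return 1
    return 0


zones = ['www.example.org.', 'example.com.', 'mail.example.com.']
for name in sorted(zones, key=cmp_to_key(compare_fqdn_stub)):
    print(name)
# -> example.com., mail.example.com., www.example.org.
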
diff --git a/pp_lib/pdns_migrate_ns.py b/pp_lib/pdns_migrate_ns.py
deleted file mode 100644 (file)
index 595d341..0000000
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the migration of nameservers.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import textwrap
-import copy
-import time
-
-from functools import cmp_to_key
-
-# Own modules
-from .common import pp, compare_fqdn, to_str
-from .common import RE_DOT_AT_END
-
-from .pdns_app import PpPDNSAppError, PpPDNSApplication
-
-__version__ = '0.2.9'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PDNSMigrateNsError(PpPDNSAppError):
-    pass
-
-
-# =============================================================================
-class PDNSMigrateNsApp(PpPDNSApplication):
-    """Class for the 'pdns-migrate-nameservers' application to migrate the nameservers
-       of all zones of PowerDNS from the old nameservers to the new ones.
-    """
-
-    new_public_nameservers = [
-        'ns1.pp-dns.com.',
-        'ns2.pp-dns.com.',
-        'ns3.pp-dns.com.',
-        'ns4.pp-dns.com.',
-    ]
-
-    new_local_nameservers = [
-        'ns1-local.pixelpark.com.',
-        'ns2-local.pixelpark.com.',
-        'ns3-local.pixelpark.com.',
-    ]
-
-    address_hostmaster_local = 'hostmaster.pixelpark.com.'
-    address_hostmaster_public = 'hostmaster.pixelpark.net.'
-
-    default_pause = 3
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.zones = []
-        self.zone_names = []
-        self.oneshot = False
-        self.pause = self.default_pause
-
-        description = textwrap.dedent('''\
-            Substituting NS records in all zones by the new ones.
-            ''')
-
-        self._show_simulate_opt = True
-
-        super(PDNSMigrateNsApp, self).__init__(
-            appname=appname, version=version, description=description,
-        )
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initialize the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        super(PDNSMigrateNsApp, self).init_arg_parser()
-
-        self.arg_parser.add_argument(
-            "-1", '--oneshot', action="store_true", dest="oneshot",
-            help="Stop execution after first successful migration."
-        )
-
-        self.arg_parser.add_argument(
-            '--pause', dest='pause', type=int,
-            default=self.default_pause, metavar='SECS',
-            help=(
-                "Pause in seconds between mangling the particular zones. "
-                "(Default: %(default)r)."),
-        )
-
-        self.arg_parser.add_argument(
-            'zones', metavar='ZONE', nargs='*',
-            help=(
-                "All zones, for which the migration should be executed. "
-                "If not given, the migration will be executed for all zones."),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Publicly available method to execute some actions after parsing
-        the command line parameters.
-        """
-
-        super(PDNSMigrateNsApp, self).perform_arg_parser()
-
-        if self.args.oneshot:
-            self.oneshot = True
-
-        if self.args.pause and self.args.pause > 0:
-            self.pause = self.args.pause
-
-        for zone in self.args.zones:
-            zone_idna = zone
-            if 'xn--' not in zone:
-                zone_idna = to_str(zone.encode('idna'))
-            zone_idna = RE_DOT_AT_END.sub('.', zone_idna).lower()
-            self.zone_names.append(zone_idna)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        LOG.info("Substituting NS records in all zones by the new ones.")
-
-        self.zones = self.get_api_zones()
-
-        if not self.zone_names:
-            for zone in sorted(self.zones, key=lambda x: cmp_to_key(compare_fqdn)(x.name_unicode)):
-                self.zone_names.append(zone.name)
-
-        idx = 0
-        print('')
-        print('')
-        for zone_name in self.zone_names:
-            if idx:
-                print('')
-                print('')
-                print('Sleeping {} seconds...'.format(self.pause))
-                print('')
-                time.sleep(self.pause)
-            migrated = self.migrate_zone(zone_name)
-            if migrated:
-                idx += 1
-            if self.oneshot and migrated:
-                break
-
-    # -------------------------------------------------------------------------
-    def migrate_zone(self, zone_name):
-
-        LOG.info("Migrating zone {!r} ...".format(zone_name))
-        zone = self.get_api_zone(zone_name)
-        if not zone:
-            return False
-
-        new_nameservers = []
-        hm_address = self.address_hostmaster_public
-
-        is_local = self.is_local_domain(zone_name)
-
-        if is_local:
-            LOG.debug("Using local nameservers for substituting.")
-            new_nameservers = sorted(self.new_local_nameservers)
-            hm_address = self.address_hostmaster_local
-        else:
-            LOG.debug("Using public nameservers for substituting.")
-            new_nameservers = sorted(self.new_public_nameservers)
-        if self.verbose > 1:
-            LOG.debug("Expected nameservers of zone:\n{}".format(pp(new_nameservers)))
-
-        soa = zone.get_soa()
-        if not soa:
-            LOG.error("Could not find SOA for zone {!r}.".format(zone_name))
-            return False
-        if self.verbose > 2:
-            LOG.debug("Current SOA of zone {!r}:\n{}".format(zone_name, soa))
-
-        new_soa = copy.copy(soa)
-        new_soa.primary = new_nameservers[0]
-        new_soa.email = hm_address
-        if self.verbose > 2:
-            LOG.debug("New SOA of zone {!r}:\n{}".format(zone_name, new_soa))
-
-        if new_soa != soa:
-            LOG.info("Update SOA of zone {!r} ...".format(zone_name))
-            self.update_soa(zone, new_soa, "Update SOA on great NS- and SOA-Migration.")
-        else:
-            LOG.debug("Update SOA of zone is not necessary.".format(zone_name))
-
-        LOG.info("Setting nameservers for zone {!r} ...".format(zone_name))
-        if not self.set_nameservers(zone, new_nameservers, do_serial=False):
-            return False
-
-        zone_parts = zone_name.split('.')
-        top_zone_name = '.'.join(zone_parts[1:])
-        LOG.debug("Top zone of {z!r} is {t!r}.".format(z=zone_name, t=top_zone_name))
-
-        have_top_zone = False
-        for t_zone in self.zones:
-            if t_zone.name == top_zone_name:
-                have_top_zone = True
-                break
-
-        if have_top_zone:
-            LOG.info("Setting nameservers for zone {z!r} in zone {t!r}.".format(
-                z=zone_name, t=top_zone_name))
-            top_zone = self.get_api_zone(top_zone_name)
-            return self.set_nameservers(
-                top_zone, new_nameservers, zone_name, do_serial=False)
-        else:
-            LOG.debug("Top zone {!r} is not in our responsibility.".format(top_zone_name))
-        return True
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
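
perform_arg_parser() above normalizes user-supplied zone names to their IDNA ('xn--') form with a trailing dot, so that they can be compared with the zone names returned by the API. A minimal sketch of that normalization, using str.rstrip() as a stand-in for RE_DOT_AT_END from pp_lib.common:

# Sketch of the IDNA normalization done in perform_arg_parser() above.
def normalize_zone_name(zone):
    """Return the zone name in punycode, lower case and with a trailing dot."""
    zone_idna = zone
    if 'xn--' not in zone:
        zone_idna = zone.encode('idna').decode('ascii')
    return zone_idna.rstrip('.').lower() + '.'


print(normalize_zone_name('bücher.de'))        # -> xn--bcher-kva.de.
print(normalize_zone_name('example.com.'))     # -> example.com.
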
diff --git a/pp_lib/pdns_record.py b/pp_lib/pdns_record.py
deleted file mode 100644 (file)
index 17a29fc..0000000
+++ /dev/null
@@ -1,614 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicies Pixelpark GmbH, Berlin
-@summary: An encapsulation class for a DNS record object by PowerDNS API
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import copy
-import re
-import datetime
-
-# Third party modules
-
-# Own modules
-from .common import pp, compare_fqdn, to_utf8, to_str
-
-from .obj import PpBaseObjectError, PpBaseObject
-
-__version__ = '0.4.6'
-
-LOG = logging.getLogger(__name__)
-
-
-TYPE_ORDER = {
-    'SOA': 0,
-    'NS': 1,
-    'MX': 2,
-    'A': 3,
-    'AAAA': 4,
-    'CNAME': 5,
-    'SRV': 6,
-    'TXT': 7,
-    'SPF': 8,
-    'PTR': 9,
-}
-
-# =============================================================================
-class PdnsApiRrsetError(PpBaseObjectError):
-    pass
-
-
-# =============================================================================
-class PdnsWrongSoaDataError(PdnsApiRrsetError):
-
-    # -------------------------------------------------------------------------
-    def __init__(self, data):
-        self.data = str(data)
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-
-        msg = "Could not interprete SOA data: {!r}.".format(self.data)
-        return msg
-
-
-# =============================================================================
-def compare_rrsets(x, y):
-
-    if not isinstance(x, PdnsApiRrset):
-        raise TypeError("Argument x {!r} must be a PdnsApiRrset object.".format(x))
-
-    if not isinstance(y, PdnsApiRrset):
-        raise TypeError("Argument y {!r} must be a PdnsApiRrset object.".format(y))
-
-    ret = compare_fqdn(x.name, y.name)
-    if ret:
-        return ret
-
-    xt = 99
-    yt = 99
-    if x.type.upper() in TYPE_ORDER:
-        xt = TYPE_ORDER[x.type.upper()]
-    if y.type.upper() in TYPE_ORDER:
-        yt = TYPE_ORDER[y.type.upper()]
-
-    if xt < yt:
-        return -1
-    if xt > yt:
-        return 1
-    return 0
-
-# =============================================================================
-class PdnsApiRecord(PpBaseObject):
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None, initialized=None,
-            content=None, disabled=False):
-
-        self._content = content
-        self._disabled = False
-        self.disabled = disabled
-
-        super(PdnsApiRecord, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
-
-        if initialized is not None:
-            self.initialized = initialized
-
-    # -----------------------------------------------------------
-    @property
-    def content(self):
-        "The underlying content of this record."
-        return self._content
-
-    # -----------------------------------------------------------
-    @property
-    def disabled(self):
-        "Flag, whether the record is disabled or not."
-        return self._disabled
-
-    @disabled.setter
-    def disabled(self, value):
-        self._disabled = bool(value)
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PdnsApiRecord, self).as_dict(short=short)
-        res['content'] = self.content
-        res['disabled'] = self.disabled
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def __copy__(self):
-
-        return PdnsApiRecord(
-            appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
-            initialized=self.initialized, content=self.content, disabled=self.disabled)
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting function for translating object structure
-        into a string
-
-        @return: structure as string
-        @rtype:  str
-        """
-
-        return pp(self.as_dict(short=True))
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("content={!r}".format(self.content))
-        fields.append("disabled={!r}".format(self.disabled))
-        fields.append("appname={!r}".format(self.appname))
-        fields.append("verbose={!r}".format(self.verbose))
-        fields.append("version={!r}".format(self.version))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-
-# =============================================================================
-class PdnsSoaData(PpBaseObject):
-
-    re_soa_data = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s*$')
-    re_ws = re.compile(r'\s+')
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, primary=None, email=None, serial=None, refresh=None, retry=None, expire=None,
-            ttl=None, appname=None, verbose=0, version=__version__,
-            base_dir=None):
-
-        self._primary = None
-        self._email = None
-        self._serial = None
-        self._refresh = None
-        self._retry = None
-        self._expire = None
-        self._ttl = None
-
-        super(PdnsSoaData, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir,
-            initialized=False)
-
-        self.primary = primary
-        self.email = email
-        self.serial = serial
-        self.refresh = refresh
-        self.retry = retry
-        self.expire = expire
-        self.ttl = ttl
-
-        if (self.primary and self.email and self.serial is not None and self.refresh and
-                self.retry and self.expire and self.ttl):
-            self.initialized = True
-        else:
-            self.initialized = False
-
-    # -----------------------------------------------------------
-    @property
-    def primary(self):
-        "The primary name server of this SOA"
-        return self._primary
-
-    @primary.setter
-    def primary(self, value):
-        if value is None:
-            self._primary = None
-            return
-        self._primary = str(value).strip()
-
-    # -----------------------------------------------------------
-    @property
-    def email(self):
-        "The E-Mail-address of the hostmaster of this zone."
-        return self._email
-
-    @email.setter
-    def email(self, value):
-        if value is None:
-            self._email = None
-            return
-        self._email = str(value).strip()
-
-    # -----------------------------------------------------------
-    @property
-    def serial(self):
-        "The serial number of this SOA."
-        return self._serial
-
-    @serial.setter
-    def serial(self, value):
-        if value is None:
-            self._serial = None
-            return
-        self._serial = int(value)
-
-    # -----------------------------------------------------------
-    @property
-    def refresh(self):
-        "The time in seconds when slaves should ask master for changes."
-        return self._refresh
-
-    @refresh.setter
-    def refresh(self, value):
-        if value is None:
-            self._refresh = None
-            return
-        self._refresh = int(value)
-
-    # -----------------------------------------------------------
-    @property
-    def retry(self):
-        """The time in seconds when slaves should retry getting changes from master,
-            if an attemt to get it was not successful."""
-        return self._retry
-
-    @retry.setter
-    def retry(self, value):
-        if value is None:
-            self._retry = None
-            return
-        self._retry = int(value)
-
-    # -----------------------------------------------------------
-    @property
-    def expire(self):
-        """The time in seconds when slaves should expiring the zone,
-            if an attemt to get it was not successful."""
-        return self._expire
-
-    @expire.setter
-    def expire(self, value):
-        if value is None:
-            self._expire = None
-            return
-        self._expire = int(value)
-
-    # -----------------------------------------------------------
-    @property
-    def ttl(self):
-        "The defaul TTL of this zone."
-        return self._ttl
-
-    @ttl.setter
-    def ttl(self, value):
-        if value is None:
-            self._ttl = None
-            return
-        self._ttl = int(value)
-
-    # -----------------------------------------------------------
-    @property
-    def data(self):
-        "String representation of SOA data."
-        if (self.primary and self.email and self.serial is not None and self.refresh and
-                self.retry and self.expire and self.ttl):
-            return "{_primary} {_email} {_serial} {_refresh} {_retry} {_expire} {_ttl}".format(
-                **self.__dict__)
-        else:
-            return None
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PdnsSoaData, self).as_dict(short=short)
-        res['primary'] = self.primary
-        res['email'] = self.email
-        res['serial'] = self.serial
-        res['refresh'] = self.refresh
-        res['retry'] = self.retry
-        res['expire'] = self.expire
-        res['ttl'] = self.ttl
-        res['data'] = self.data
-
-        return res
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def init_from_data(cls, data, appname=None, verbose=0, base_dir=None):
-
-        line = cls.re_ws.sub(' ', to_str(data))
-        match = cls.re_soa_data.match(line)
-        if not match:
-            raise PdnsWrongSoaDataError(data)
-
-        soa = cls(
-            primary=match.group(1), email=match.group(2), serial=match.group(3),
-            refresh=match.group(4), retry=match.group(5), expire=match.group(6),
-            ttl=match.group(7), appname=appname, verbose=verbose, base_dir=base_dir)
-        return soa
-
-    # -------------------------------------------------------------------------
-    def __copy__(self):
-
-        if self.verbose > 4:
-            LOG.debug("Copying current {}-object in a new one.".format(self.__class__.__name__))
-
-        soa = PdnsSoaData(
-            primary=self.primary, email=self.email, serial=self.serial, refresh=self.refresh,
-            retry=self.retry, expire=self.expire, ttl=self.ttl, appname=self.appname,
-            version=self.version, base_dir=self.base_dir)
-        return soa
-
-    # -------------------------------------------------------------------------
-    def __eq__(self, other):
-
-        if self.verbose > 4:
-            LOG.debug("Comparing {}-objects ...".format(self.__class__.__name__))
-
-        if not isinstance(other, PdnsSoaData):
-            return False
-
-        if self.primary != other.primary:
-            return False
-        if self.email != other.email:
-            return False
-        if self.serial != other.serial:
-            return False
-        if self.refresh != other.refresh:
-            return False
-        if self.retry != other.retry:
-            return False
-        if self.expire != other.expire:
-            return False
-        if self.ttl != other.ttl:
-            return False
-
-        return True
-
-# =============================================================================
-class PdnsApiRrset(PpBaseObject):
-
-    default_ttl = 3600
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__,
-            base_dir=None, initialized=None):
-
-        # {   'comments': [],
-        #     'name': 'www.bmwi.tv.',
-        #     'records': [{'content': '77.74.236.5', 'disabled': False}],
-        #     'ttl': 3600,
-        #     'type': 'A'},
-
-        self.comments = []
-        self._name = None
-        self.ttl = self.default_ttl
-        self._type = None
-        self.records = []
-
-        super(PdnsApiRrset, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
-
-        if initialized is not None:
-            self.initialized = initialized
-
-    # -----------------------------------------------------------
-    @property
-    def name(self):
-        "The name of this record set."
-        return self._name
-
-    # -----------------------------------------------------------
-    @property
-    def name_unicode(self):
-        """The name of the resource record set in unicode, if it is an IDNA encoded zone."""
-        n = getattr(self, '_name', None)
-        if n is None:
-            return None
-        if 'xn--' in n:
-            return to_utf8(n).decode('idna')
-        return n
-
-    # -----------------------------------------------------------
-    @property
-    def type(self):
-        "The type of this record set."
-        return self._type
-
-    # -----------------------------------------------------------
-    @property
-    def ttl(self):
-        "The TTL of this record set."
-        return self._ttl
-
-    @ttl.setter
-    def ttl(self, value):
-        self._ttl = int(value)
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def init_from_dict(
-        cls, data, appname=None, verbose=0, version=__version__,
-            base_dir=None, initialized=None):
-
-        if not isinstance(data, dict):
-            raise PdnsApiRrsetError("Given data {!r} is not a dict object.".format(data))
-
-        params = {
-            'appname': appname,
-            'verbose': verbose,
-            'version': version,
-            'base_dir': base_dir
-        }
-        if initialized is not None:
-            params['initialized'] = initialized
-
-        rrset = cls(**params)
-
-        if 'comments' in data and data['comments']:
-            for comment in data['comments']:
-                rrset.comments.append(str(comment))
-
-        rrset._name = str(data['name'])
-        rrset._type = str(data['type']).upper()
-        if 'ttl' in data:
-            rrset._ttl = int(data['ttl'])
-
-        if 'records' in data:
-            for single_record in data['records']:
-                record = PdnsApiRecord(
-                    content=single_record['content'], disabled=single_record['disabled'],
-                    **params)
-                record.initialized = True
-                rrset.records.append(record)
-
-        rrset.initialized = True
-
-        return rrset
-
-    # -------------------------------------------------------------------------
-    def name_relative(self, reference):
-
-        # current name must be an absolute name
-        if not self.name.endswith('.'):
-            return self.name
-
-        # reference name must be an absolute name
-        if not reference.endswith('.'):
-            return self.name
-
-        ref_escaped = r'\.' + re.escape(reference) + r'$'
-        rel_name = re.sub(ref_escaped, '', self.name)
-        return rel_name
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PdnsApiRrset, self).as_dict(short=short)
-        res['name'] = self.name
-        res['type'] = self.type
-        res['ttl'] = self.ttl
-        res['name_unicode'] = self.name_unicode
-        res['comments'] = copy.copy(self.comments)
-        res['records'] = []
-
-        for record in self.records:
-            res['records'].append(record.as_dict(short))
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting function for translating object structure
-        into a string
-
-        @return: structure as string
-        @rtype:  str
-        """
-
-        return pp(self.as_dict(short=True))
-
-    # -------------------------------------------------------------------------
-    def __copy__(self):
-
-        rrset = PdnsApiRrset(
-            appname=self.appname, verbose=self.verbose, base_dir=self.base_dir,
-            initialized=self.initialized)
-
-        rrset._name = self.name
-        rrset._type = self.type
-        rrset._ttl = self.ttl
-        rrset.comments = copy.copy(self.comments)
-        rrset.records = copy.copy(self.records)
-
-        return rrset
-
-    # -------------------------------------------------------------------------
-    def get_zone_lines(self, rrname_len=12, reference=None, default_ttl=None):
-
-        lines = ''
-        for comment in self.comments:
-
-            if self.verbose > 3:
-                LOG.debug("Formatting comment: {}".format(comment))
-
-            try:
-                cmt = eval(comment)
-                mtime = datetime.datetime.utcfromtimestamp(cmt['modified_at'])
-                if cmt['content']:
-                    line = "; {} {}: {}\n".format(
-                        mtime.isoformat(' '), cmt['account'], cmt['content'])
-                else:
-                    line = "; {} {}\n".format(mtime.isoformat(' '), cmt['account'])
-            except Exception as e:
-                LOG.warn("Could not decode comment {!r}: {}".format(comment, e))
-                line = '; {}\n'.format(comment)
-
-            lines += line
-
-        i = 0
-        for record in self.records:
-            show_name = ''
-            if not i:
-                if reference:
-                    show_name = self.name_relative(reference)
-                else:
-                    show_name = self.name
-            i += 1
-            if record.disabled:
-                show_name = '; ' + show_name
-            ttl = self.ttl
-            if default_ttl and default_ttl == self.ttl:
-                ttl = ''
-            tpl = "{name:<{name_len}}  {ttl:>8}  {type:<6}  {content}\n"
-            line = tpl.format(
-                name=show_name, name_len=rrname_len, ttl=ttl,
-                type=self.type, content=record.content)
-            lines += line
-
-        return lines
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
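
PdnsSoaData.init_from_data() above parses the content of an SOA record, which the API delivers as a single whitespace-separated string with seven fields. Below is a standalone sketch of the same parsing step, applied to a made-up sample record:

# Sketch of the SOA content parsing done by PdnsSoaData.init_from_data() above.
# The sample content is invented for illustration.
import re

RE_SOA_DATA = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s*$')

content = 'ns1.example.com. hostmaster.example.com. 2018080404 10800 3600 604800 3600'
match = RE_SOA_DATA.match(content)
if not match:
    raise ValueError('Could not interpret SOA data: {!r}'.format(content))

primary, email, serial, refresh, retry, expire, ttl = match.groups()
print('primary: ', primary)
print('email:   ', email)
print('serial:  ', int(serial))
print('refresh: ', int(refresh))
print('retry:   ', int(retry))
print('expire:  ', int(expire))
print('ttl:     ', int(ttl))
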
diff --git a/pp_lib/pdns_show_zone.py b/pp_lib/pdns_show_zone.py
deleted file mode 100644 (file)
index f859083..0000000
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the pdns-show-zone application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import textwrap
-
-from functools import cmp_to_key
-
-# Own modules
-from .common import to_str
-from .common import RE_DOT_AT_END
-
-from .pdns_app import PpPDNSAppError, PpPDNSApplication
-from .pdns_record import compare_rrsets
-
-__version__ = '0.4.3'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpPDNSShowZoneError(PpPDNSAppError):
-    pass
-
-
-# =============================================================================
-class PpPDNSShowZoneApp(PpPDNSApplication):
-    """Class for the 'pdns-show-zone' application to get all information about
-       a given zone from PowerDNS.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.zones = []
-
-        description = textwrap.dedent('''\
-            Shows all available information about the given zones from the PowerDNS API.
-            ''')
-
-        super(PpPDNSShowZoneApp, self).__init__(
-            appname=appname, version=version, description=description,
-        )
-
-        self.initialized = True
-        self.default_ttl = 3600
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initialize the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        super(PpPDNSShowZoneApp, self).init_arg_parser()
-
-        self.arg_parser.add_argument(
-            'zones', metavar='ZONE', nargs='+',
-            help="All zones, for which the complete information should shown",
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_arg_parser(self):
-        """
-        Publicly available method to execute some actions after parsing
-        the command line parameters.
-        """
-
-        super(PpPDNSShowZoneApp, self).perform_arg_parser()
-
-        for zone in self.args.zones:
-            zone_idna = zone
-            if 'xn--' not in zone:
-                zone_idna = to_str(zone.encode('idna'))
-            zone_idna = RE_DOT_AT_END.sub('.', zone_idna).lower()
-            self.zones.append(zone_idna)
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        success = True
-        self.get_default_ttl()
-
-        for zone in self.zones:
-            if not self.show_zone(zone):
-                success = False
-
-        if not success:
-            self.exit(1)
-
-    # -------------------------------------------------------------------------
-    def get_default_ttl(self):
-
-        LOG.debug("Retrieving defaul TTL from server ...")
-        path = "/servers/{}/config".format(self.api_servername)
-        json_response = self.perform_request(path)
-        ttl = None
-
-        for cfg in json_response:
-            if cfg['name'] == 'default-ttl':
-                try:
-                    ttl = int(cfg['value'])
-                except ValueError as e:
-                    LOG.error("Found invalid TTL {!r} from server: {}".format(
-                        cfg['value'], e))
-                break
-        if ttl:
-            LOG.debug("Got a default TTL {} from server.".format(ttl))
-            self.default_ttl = ttl
-
-    # -------------------------------------------------------------------------
-    def show_zone(self, zone_name):
-
-        zone_unicode = zone_name
-        zout = "{!r}".format(zone_name)
-        if 'xn--' in zone_name:
-            zone_unicode = zone_name.encode('idna').decode('idna')
-            zout = "{!r} ({})".format(zone_name, zone_unicode)
-
-        LOG.info("Show all information about zone {} from PowerDNS environment {!r}.".format(
-            zout, self.environment))
-        zone = self.get_api_zone(zone_name)
-
-        msg = "All information about zone {}:".format(zout)
-        print("\n{}".format(msg))
-        print('-' * len(msg))
-
-        params = {
-            'name': zone.name,
-            'name_unicode': zone.name_unicode,
-            'kind': zone.kind,
-            'serial': zone.serial,
-            'soa_edit': zone.soa_edit,
-            'dnssec': 'no',
-            'account': zone.account,
-            'default_ttl': self.default_ttl,
-        }
-        if zone.dnssec:
-            params['dnssec'] = 'yes'
-
-        msg = textwrap.dedent("""\
-        Name (Punycode): {name}
-        Name (UTF-8):    {name_unicode}
-        Kind:            {kind}
-        Serial:          {serial}
-        SOA edit:        {soa_edit}
-        DNSSEC enabled:  {dnssec}
-        Default TTL:     {default_ttl}
-        Account info:    {account}
-        """).strip().format(**params)
-
-        if zone.masters:
-            i = 0
-            for master in zone.masters:
-                if i:
-                    msg += "                 {!r}".format(master)
-                else:
-                    msg += "Masters:         {!r}".format(master)
-                i += 1
-        print(msg)
-
-        enabled = 0
-        disabled = 0
-
-        msg = "All Resource Records:"
-        print("\n{}".format(msg))
-        print('-' * len(msg))
-
-        rrname_len = 1
-        for rrset in zone.rrsets:
-            name = rrset.name_relative(zone.name)
-            if len(name) > rrname_len:
-                rrname_len = len(name)
-            for record in rrset.records:
-                if record.disabled:
-                    disabled += 1
-                else:
-                    enabled += 1
-        rrname_len += 2
-        if self.verbose > 2:
-            LOG.debug("Length of longest rrset name: {}".format(rrname_len))
-
-        for rrset in sorted(zone.rrsets, key=lambda x: cmp_to_key(compare_rrsets)(x)):
-            msg = rrset.get_zone_lines(
-                rrname_len=rrname_len, reference=zone.name,
-                default_ttl=self.default_ttl).rstrip()
-            print(msg)
-
-        msg = "\nFound {} enabled and {} disabled records.".format(
-            enabled, disabled)
-        print(msg)
-
-        return True
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
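
get_default_ttl() above reads the server's 'default-ttl' setting from the PowerDNS config endpoint, so that records carrying that TTL can later be printed without an explicit TTL value. A self-contained sketch of the same lookup; the URL and API key are placeholder assumptions:

# Sketch of the default-TTL lookup done in get_default_ttl() above.
import requests

API_URL = 'http://pdns.example.com:8081/api/v1'   # placeholder
API_KEY = 'very-secret'                           # placeholder


def get_default_ttl(server_id='localhost', fallback=3600):
    """Return the 'default-ttl' value of the PowerDNS server, or the fallback."""
    url = '{}/servers/{}/config'.format(API_URL, server_id)
    response = requests.get(url, headers={'X-API-Key': API_KEY}, timeout=20)
    response.raise_for_status()
    for cfg in response.json():
        if cfg['name'] == 'default-ttl':
            try:
                return int(cfg['value'])
            except ValueError:
                break
    return fallback


if __name__ == '__main__':
    print('Default TTL:', get_default_ttl())
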
diff --git a/pp_lib/pdns_zone.py b/pp_lib/pdns_zone.py
deleted file mode 100644 (file)
index 3188d1b..0000000
+++ /dev/null
@@ -1,559 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Publicies Pixelpark GmbH, Berlin
-@summary: An encapsulation class for zone objects by PowerDNS API
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import copy
-import datetime
-
-# Third party modules
-from dns.resolver import Resolver, NoAnswer
-
-# Own modules
-from .common import pp, to_utf8, to_bool
-from .common import RE_DOT_AT_END
-
-from .obj import PpBaseObjectError, PpBaseObject
-from .pdns_record import PdnsApiRrset, PdnsSoaData
-
-__version__ = '0.5.6'
-
-LOG = logging.getLogger(__name__)
-
-# =============================================================================
-class PdnsApiZoneError(PpBaseObjectError):
-    pass
-
-
-# =============================================================================
-class PdnsApiZone(PpBaseObject):
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, appname=None, verbose=0, version=__version__, base_dir=None, initialized=None,
-            account=None, dnssec=False, id=None, kind=None, last_check=None,
-            masters=None, name=None, notified_serial=None, serial=None, url=None,
-            rrsets=None, soa_edit=None, soa_edit_api=None, nsec3narrow=None, nsec3param=None,
-            presigned=None, api_rectify=None):
-
-        # {   'account': 'local',
-        #     'dnssec': False,
-        #     'id': 'bla.ai.',
-        #     'kind': 'Master',
-        #     'last_check': 0,
-        #     'masters': [],
-        #     'name': 'bla.ai.',
-        #     'notified_serial': 2018080404,
-        #     'serial': 2018080404,
-        #     'url': 'api/v1/servers/localhost/zones/bla.ai.'},
-        self._account = account
-        self._dnssec = dnssec
-        self._id = id
-        self._kind = kind
-        self._last_check = last_check
-        self.masters = []
-        if masters:
-            self.masters = copy.copy(masters)
-        self._name = name
-        self._notified_serial = notified_serial
-        self._serial = serial
-        self._url = url
-        self._nsec3narrow = None
-        if nsec3narrow is not None:
-            self._nsec3narrow = to_bool(nsec3narrow)
-        self._nsec3param = None
-        if nsec3param is not None and str(nsec3param).strip() != '':
-            self._nsec3param = str(nsec3param).strip()
-        self._presigned = None
-        if presigned is not None:
-            self._presigned = to_bool(presigned)
-        self._api_rectify = None
-        if api_rectify is not None:
-            self._api_rectify = to_bool(api_rectify)
-
-        self.rrsets = []
-        self._soa_edit = soa_edit
-        self._soa_edit_api = soa_edit_api
-
-        super(PdnsApiZone, self).__init__(
-            appname=appname, verbose=verbose, version=version, base_dir=base_dir)
-
-        if initialized is not None:
-            self.initialized = initialized
-
-    # -----------------------------------------------------------
-    @classmethod
-    def init_from_dict(
-        cls, data,
-            appname=None, verbose=0, version=__version__, base_dir=None, initialized=None):
-
-        if not isinstance(data, dict):
-            raise PdnsApiZoneError("Given data {!r} is not a dict object.".format(data))
-
-        params = {
-            'appname': appname,
-            'verbose': verbose,
-            'version': version,
-            'base_dir': base_dir
-        }
-        if initialized is not None:
-            params['initialized'] = initialized
-
-        rrsets = None
-        if 'rrsets' in data:
-            if data['rrsets']:
-                rrsets = data['rrsets']
-            data['rrsets'] = None
-
-        params.update(data)
-        zone = cls(**params)
-
-        if rrsets:
-            for single_rrset in rrsets:
-                rrset = PdnsApiRrset.init_from_dict(
-                    single_rrset, appname=appname, verbose=verbose, base_dir=base_dir)
-                zone.rrsets.append(rrset)
-
-        zone.initialized = True
-
-        return zone
-
-    # -----------------------------------------------------------
-    @property
-    def account(self):
-        """The name of the owning account of the zone, internal used
-            to differ local visible zones from all other zones."""
-        return getattr(self, '_account', None)
-
-    @account.setter
-    def account(self, value):
-        if value:
-            v = str(value).strip()
-            if v:
-                self._account = v
-            else:
-                self._account = None
-        else:
-            self._account = None
-
-    # -----------------------------------------------------------
-    @property
-    def dnssec(self):
-        """Is the zone under control of DNSSEC."""
-        return getattr(self, '_dnssec', False)
-
-    @dnssec.setter
-    def dnssec(self, value):
-        self._dnssec = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def id(self):
-        """The unique idendity of the zone."""
-        return getattr(self, '_id', None)
-
-    @id.setter
-    def id(self, value):
-        if value:
-            v = str(value).strip()
-            if v:
-                self._id = v
-            else:
-                self._id = None
-        else:
-            self._id = None
-
-    # -----------------------------------------------------------
-    @property
-    def kind(self):
-        """The kind or type of the zone."""
-        return getattr(self, '_kind', None)
-
-    @kind.setter
-    def kind(self, value):
-        if value:
-            v = str(value).strip()
-            if v:
-                self._kind = v
-            else:
-                self._kind = None
-        else:
-            self._kind = None
-
-    # -----------------------------------------------------------
-    @property
-    def last_check(self):
-        """The timestamp of the last check of the zone"""
-        return getattr(self, '_last_check', None)
-
-    # -----------------------------------------------------------
-    @property
-    def name(self):
-        """The name of the zone."""
-        return getattr(self, '_name', None)
-
-    @name.setter
-    def name(self, value):
-        if value:
-            v = str(value).strip()
-            if v:
-                self._name = v
-            else:
-                self._name = None
-        else:
-            self._name = None
-
-    # -----------------------------------------------------------
-    @property
-    def name_unicode(self):
-        """The name of the zone in unicode, if it is an IDNA encoded zone."""
-        n = getattr(self, '_name', None)
-        if n is None:
-            return None
-        if 'xn--' in n:
-            return to_utf8(n).decode('idna')
-        return n
-
-    # -----------------------------------------------------------
-    @property
-    def notified_serial(self):
-        """The notified serial number of the zone"""
-        return getattr(self, '_notified_serial', None)
-
-    # -----------------------------------------------------------
-    @property
-    def serial(self):
-        """The serial number of the zone"""
-        return getattr(self, '_serial', None)
-
-    # -----------------------------------------------------------
-    @property
-    def url(self):
-        """The URL in the API to get the zone object."""
-        return getattr(self, '_url', None)
-
-    # -----------------------------------------------------------
-    @property
-    def soa_edit(self):
-        """The SOA edit property of the zone object."""
-        return getattr(self, '_soa_edit', None)
-
-    # -----------------------------------------------------------
-    @property
-    def soa_edit_api(self):
-        """The SOA edit property (API) of the zone object."""
-        return getattr(self, '_soa_edit_api', None)
-
-    # -----------------------------------------------------------
-    @property
-    def nsec3narrow(self):
-        """Some stuff belonging to DNSSEC."""
-        return getattr(self, '_nsec3narrow', None)
-
-    # -----------------------------------------------------------
-    @property
-    def nsec3param(self):
-        """Some stuff belonging to DNSSEC."""
-        return getattr(self, '_nsec3param', None)
-
-    # -----------------------------------------------------------
-    @property
-    def presigned(self):
-        """Some stuff belonging to PowerDNS >= 4.1."""
-        return getattr(self, '_presigned', None)
-
-    # -----------------------------------------------------------
-    @property
-    def api_rectify(self):
-        """Some stuff belonging to PowerDNS >= 4.1."""
-        return getattr(self, '_api_rectify', None)
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PdnsApiZone, self).as_dict(short=short)
-        res['account'] = self.account
-        res['dnssec'] = copy.copy(self.dnssec)
-        res['id'] = self.id
-        res['kind'] = self.kind
-        res['last_check'] = self.last_check
-        res['masters'] = copy.copy(self.masters)
-        res['name'] = self.name
-        res['name_unicode'] = self.name_unicode
-        res['notified_serial'] = self.notified_serial
-        res['serial'] = self.serial
-        res['url'] = self.url
-        res['rrsets'] = []
-        res['soa_edit'] = self.soa_edit
-        res['soa_edit_api'] = self.soa_edit_api
-        res['nsec3narrow'] = self.nsec3narrow
-        res['nsec3param'] = self.nsec3param
-        res['presigned'] = self.presigned
-        res['api_rectify'] = self.api_rectify
-
-        for rrset in self.rrsets:
-            if isinstance(rrset, PpBaseObject):
-                res['rrsets'].append(rrset.as_dict(short))
-            else:
-                res['rrsets'].append(rrset)
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """
-        Typecasting function for translating object structure
-        into a string
-
-        @return: structure as string
-        @rtype:  str
-        """
-
-        return pp(self.as_dict(short=True))
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("name={!r}".format(self.name))
-        fields.append("kind={!r}".format(self.kind))
-        fields.append("serial={!r}".format(self.serial))
-        fields.append("dnssec={!r}".format(self.dnssec))
-        fields.append("account={!r}".format(self.account))
-        fields.append("appname={!r}".format(self.appname))
-        fields.append("verbose={!r}".format(self.verbose))
-        fields.append("version={!r}".format(self.version))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    @classmethod
-    def get_list_template(cls, show_numbers=False, minimal=False):
-
-        if minimal:
-            return "{name}"
-
-        tpl = "{name:<{len_zone}}  {kind:<8} {serial:>10}  {dnssec:<6}"
-        if show_numbers:
-            tpl += '  {nr_rrsets:>8}  {nr_records:>8} '
-        tpl += ' {account}'
-        return tpl
-
-    # -------------------------------------------------------------------------
-    def get_line(self, len_zone=20, rrsets=None, minimal=False):
-
-        if minimal:
-            tpl = self.get_list_template(minimal=True)
-        elif rrsets:
-            tpl = self.get_list_template(show_numbers=True)
-        else:
-            tpl = self.get_list_template(show_numbers=False)
-
-        params = {
-            'name': self.name_unicode,
-            'len_zone': len_zone,
-            'kind': self.kind,
-            'serial': self.serial,
-            'dnssec': 'no',
-            'account': '',
-            'nr_rrsets': '',
-            'nr_records': '',
-        }
-        if self.dnssec:
-            params['dnssec'] = 'yes'
-        if self.account:
-            params['account'] = self.account
-
-        if rrsets:
-            params['nr_rrsets'] = 0
-            params['nr_records'] = 0
-            for rrset in rrsets:
-                params['nr_rrsets'] += 1
-                for record in rrset.records:
-                    params['nr_records'] += 1
-
-        return tpl.format(**params)
-
-    # -------------------------------------------------------------------------
-    def get_soa_rrset(self):
-
-        for rrset in self.rrsets:
-            if rrset.type == 'SOA':
-                return rrset
-        return None
-
-    # -------------------------------------------------------------------------
-    def get_soa_record(self):
-
-        rrset = self.get_soa_rrset()
-        if self.verbose > 3:
-            LOG.debug("Got SOA RRset:\n{}".format(rrset))
-        if not rrset:
-            return None
-        if not rrset.records:
-            return None
-        return rrset.records[0]
-
-    # -------------------------------------------------------------------------
-    def get_soa(self):
-
-        soa = None
-        record = self.get_soa_record()
-        if self.verbose > 3:
-            LOG.debug("Got SOA record:\n{}".format(record))
-        if not record:
-            return None
-
-        soa = PdnsSoaData.init_from_data(
-            data=record.content, appname=self.appname,
-            verbose=self.verbose, base_dir=self.base_dir)
-
-        return soa
-
-    # -------------------------------------------------------------------------
-    def get_soa_by_dns(self, *nameservers):
-
-        soa = None
-
-        resolver = Resolver()
-
-        if nameservers:
-            resolver.nameservers = []
-            for ns in nameservers:
-                LOG.debug("Adding nameserver to use: {!r}.".format(ns))
-                resolver.nameservers.append(ns)
-
-        try:
-            answers = resolver.query(self.name, 'SOA', raise_on_no_answer=False)
-        except NoAnswer as e:
-            LOG.error("Got no answer from nameservers: {}".format(e))
-            return None
-
-        for rdata in answers:
-            soa = PdnsSoaData(
-                primary=rdata.mname, email=rdata.rname, serial=rdata.serial, refresh=rdata.refresh,
-                retry=rdata.retry, expire=rdata.expire, ttl=rdata.minimum,
-                appname=self.appname, verbose=self.verbose, base_dir=self.base_dir)
-            return soa
-
-        return None
-
-    # -------------------------------------------------------------------------
-    def get_new_serial(self, *compare_serials):
-
-        i = 0
-        today = datetime.date.today()
-        new_serial = today.year * 1000000 + today.month * 10000 + today.day * 100 + i
-        one_day = datetime.timedelta(1)
-
-        compare = []
-        compare.append(self.serial)
-        compare.append(self.notified_serial)
-        if compare_serials:
-            for serial in compare_serials:
-                compare.append(serial)
-        if self.verbose > 3:
-            LOG.debug("Compare serials: {}".format(pp(compare)))
-
-        found = False
-        while not found:
-            if self.verbose > 3:
-                LOG.debug("Trying new serial {} ...".format(new_serial))
-            found = True
-            for serial in compare:
-                if serial is not None and serial >= new_serial:
-                    found = False
-                    break
-            if found:
-                return new_serial
-            i += 1
-            if i > 99:
-                today += one_day
-                i = 0
-            new_serial = today.year * 1000000 + today.month * 10000 + today.day * 100 + i
-
-        return new_serial
-
-    # -------------------------------------------------------------------------
-    def get_ns_rrset(self, for_zone=None):
-
-        if self.verbose > 3:
-            rrrr = []
-            for rrset in self.rrsets:
-                rrrr.append(rrset.as_dict())
-            LOG.debug("Searching NS record set in:\n{}".format(pp(rrrr)))
-
-        for rrset in self.rrsets:
-            if rrset.type == 'NS':
-                if for_zone:
-                    if for_zone.lower() != rrset.name.lower():
-                        continue
-                else:
-                    if self.name.lower() != rrset.name.lower():
-                        continue
-                if self.verbose > 3:
-                    LOG.debug("Found NS RRSet:\n{}".format(pp(rrset.as_dict())))
-                return rrset
-        return None
-
-    # -------------------------------------------------------------------------
-    def get_ns_records(self, for_zone=None):
-
-        rrset = self.get_ns_rrset(for_zone=for_zone)
-        if self.verbose > 2:
-            LOG.debug("Got NS RRset:\n{}".format(rrset))
-        if not rrset:
-            return None
-        if not rrset.records:
-            return None
-
-        ns_records = []
-        for record in rrset.records:
-            ns_records.append(record)
-        return ns_records
-
-    # -------------------------------------------------------------------------
-    def get_zone_nameservers(self, for_zone=None):
-
-        nameservers = []
-
-        ns_records = self.get_ns_records(for_zone=for_zone)
-        if not ns_records:
-            return nameservers
-
-        for record in ns_records:
-            ns = RE_DOT_AT_END.sub('.', record.content).lower()
-            nameservers.append(ns)
-        nameservers.sort()
-        return nameservers
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
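The get_new_serial() method above builds date-based zone serials of the form YYYYMMDDnn and bumps the two-digit counter (and, beyond 99 changes per day, the date) until the candidate is larger than every serial it is compared with. A minimal standalone sketch of the same scheme, independent of the PdnsApiZone class (the function name next_serial is made up for illustration):

    import datetime

    def next_serial(*known_serials):
        """Return a date-based serial (YYYYMMDDnn) greater than all known serials."""
        today = datetime.date.today()
        counter = 0
        while True:
            candidate = today.year * 1000000 + today.month * 10000 + today.day * 100 + counter
            if all(s is None or s < candidate for s in known_serials):
                return candidate
            counter += 1
            if counter > 99:
                # more than 100 increments on one day: roll over to the next date
                today += datetime.timedelta(days=1)
                counter = 0

    # next_serial(2021011903, 2021011907)  ->  2021011908 (on 2021-01-19)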
diff --git a/pp_lib/pidfile.py b/pp_lib/pidfile.py
deleted file mode 100644 (file)
index b938c2a..0000000
+++ /dev/null
@@ -1,527 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: A module for a pidfile object.
-          It provides methods to define, check, create
-          and remove a pidfile.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import sys
-import logging
-
-import re
-import signal
-import errno
-
-# Third party modules
-import six
-from six import reraise
-
-# Own modules
-
-from .errors import ReadTimeoutError
-
-from .obj import PpBaseObjectError
-from .obj import PpBaseObject
-
-from .common import to_utf8
-
-__version__ = '0.2.5'
-
-LOG = logging.getLogger(__name__)
-
-# =============================================================================
-class PidFileError(PpBaseObjectError):
-    """Base error class for all exceptions happened during
-    handling a pidfile."""
-
-    pass
-
-
-# =============================================================================
-class InvalidPidFileError(PidFileError):
-    """An error class indicating, that the given pidfile is unusable"""
-
-    def __init__(self, pidfile, reason=None):
-        """
-        Constructor.
-
-        @param pidfile: the filename of the invalid pidfile.
-        @type pidfile: str
-        @param reason: the reason, why the pidfile is invalid.
-        @type reason: str
-
-        """
-
-        self.pidfile = pidfile
-        self.reason = reason
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """Typecasting into a string for error output."""
-
-        msg = None
-        if self.reason:
-            msg = "Invalid pidfile {!r} given: {}".format(self.pidfile, self.reason)
-        else:
-            msg = "Invalid pidfile {!r} given.".format(self.pidfile)
-
-        return msg
-
-# =============================================================================
-class PidFileInUseError(PidFileError):
-    """
-    An error class indicating, that the given pidfile is in use
-    by another application.
-    """
-
-    def __init__(self, pidfile, pid):
-        """
-        Constructor.
-
-        @param pidfile: the filename of the pidfile.
-        @type pidfile: str
-        @param pid: the PID of the process owning the pidfile
-        @type pid: int
-
-        """
-
-        self.pidfile = pidfile
-        self.pid = pid
-
-    # -------------------------------------------------------------------------
-    def __str__(self):
-        """Typecasting into a string for error output."""
-
-        msg = "The pidfile {!r} is currently in use by the application with the PID {}.".format(
-            self.pidfile, self.pid)
-
-        return msg
-
-
-# =============================================================================
-class PidFile(PpBaseObject):
-    """
-    Base class for a pidfile object.
-    """
-
-    open_args = {}
-    if six.PY3:
-        open_args = {
-            'encoding': 'utf-8',
-            'errors': 'surrogateescape',
-        }
-
-    # -------------------------------------------------------------------------
-    def __init__(
-        self, filename, auto_remove=True, appname=None, verbose=0,
-            version=__version__, base_dir=None,
-            initialized=False, simulate=False, timeout=10):
-        """
-        Initialisation of the pidfile object.
-
-        @raise ValueError: no filename was given
-        @raise PidFileError: on some errors.
-
-        @param filename: the filename of the pidfile
-        @type filename: str
-        @param auto_remove: Remove the self created pidfile on destroying
-                            the current object
-        @type auto_remove: bool
-        @param appname: name of the current running application
-        @type appname: str
-        @param verbose: verbose level
-        @type verbose: int
-        @param version: the version string of the current object or application
-        @type version: str
-        @param base_dir: the base directory of all operations
-        @type base_dir: str
-        @param initialized: initialisation is complete after __init__()
-                            of this object
-        @type initialized: bool
-        @param simulate: simulation mode
-        @type simulate: bool
-        @param timeout: timeout in seconds for IO operations on pidfile
-        @type timeout: int
-
-        @return: None
-        """
-
-        self._created = False
-        """
-        @ivar: the pidfile was created by this current object
-        @type: bool
-        """
-
-        super(PidFile, self).__init__(
-            appname=appname,
-            verbose=verbose,
-            version=version,
-            base_dir=base_dir,
-            initialized=False,
-        )
-
-        if not filename:
-            raise ValueError('No filename given on initializing PidFile object.')
-
-        self._filename = os.path.abspath(str(filename))
-        """
-        @ivar: The filename of the pidfile
-        @type: str
-        """
-
-        self._auto_remove = bool(auto_remove)
-        """
-        @ivar: Remove the self created pidfile on destroying the current object
-        @type: bool
-        """
-
-        self._simulate = bool(simulate)
-        """
-        @ivar: Simulation mode
-        @type: bool
-        """
-
-        self._timeout = int(timeout)
-        """
-        @ivar: timeout in seconds for IO operations on pidfile
-        @type: int
-        """
-
-    # -----------------------------------------------------------
-    @property
-    def filename(self):
-        """The filename of the pidfile."""
-        return self._filename
-
-    # -----------------------------------------------------------
-    @property
-    def auto_remove(self):
-        """Remove the self created pidfile on destroying the current object."""
-        return self._auto_remove
-
-    @auto_remove.setter
-    def auto_remove(self, value):
-        self._auto_remove = bool(value)
-
-    # -----------------------------------------------------------
-    @property
-    def simulate(self):
-        """Simulation mode."""
-        return self._simulate
-
-    # -----------------------------------------------------------
-    @property
-    def created(self):
-        """The pidfile was created by this current object."""
-        return self._created
-
-    # -----------------------------------------------------------
-    @property
-    def timeout(self):
-        """The timeout in seconds for IO operations on pidfile."""
-        return self._timeout
-
-    # -----------------------------------------------------------
-    @property
-    def parent_dir(self):
-        """The directory containing the pidfile."""
-        return os.path.dirname(self.filename)
-
-    # -------------------------------------------------------------------------
-    def as_dict(self, short=True):
-        """
-        Transforms the elements of the object into a dict
-
-        @param short: don't include local properties in resulting dict.
-        @type short: bool
-
-        @return: structure as dict
-        @rtype:  dict
-        """
-
-        res = super(PidFile, self).as_dict(short=short)
-        res['filename'] = self.filename
-        res['auto_remove'] = self.auto_remove
-        res['simulate'] = self.simulate
-        res['created'] = self.created
-        res['timeout'] = self.timeout
-        res['parent_dir'] = self.parent_dir
-        res['open_args'] = self.open_args
-
-        return res
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        """Typecasting into a string for reproduction."""
-
-        out = "<%s(" % (self.__class__.__name__)
-
-        fields = []
-        fields.append("filename=%r" % (self.filename))
-        fields.append("auto_remove=%r" % (self.auto_remove))
-        fields.append("appname=%r" % (self.appname))
-        fields.append("verbose=%r" % (self.verbose))
-        fields.append("base_dir=%r" % (self.base_dir))
-        fields.append("initialized=%r" % (self.initialized))
-        fields.append("simulate=%r" % (self.simulate))
-        fields.append("timeout=%r" % (self.timeout))
-
-        out += ", ".join(fields) + ")>"
-        return out
-
-    # -------------------------------------------------------------------------
-    def __del__(self):
-        """Destructor. Removes the pidfile, if it was created by ourselfes."""
-
-        if not self.created:
-            return
-
-        if not os.path.exists(self.filename):
-            if self.verbose > 3:
-                LOG.debug("Pidfile {!r} doesn't exists, not removing.".format(self.filename))
-            return
-
-        if not self.auto_remove:
-            if self.verbose > 3:
-                LOG.debug("Auto removing disabled, don't deleting {!r}.".format(self.filename))
-            return
-
-        if self.verbose > 1:
-            LOG.debug("Removing pidfile {!r} ...".format(self.filename))
-        if self.simulate:
-            if self.verbose > 1:
-                LOG.debug("Just kidding ..")
-            return
-        try:
-            os.remove(self.filename)
-        except OSError as e:
-            LOG.error("Could not delete pidfile {!r}: {}".format(self.filename, e))
-        except Exception as e:
-            self.handle_error(str(e), e.__class__.__name__, True)
-
-    # -------------------------------------------------------------------------
-    def create(self, pid=None):
-        """
-        The main method of this class. It tries to write the PID of the process
-        into the pidfile.
-
-        @param pid: the pid to write into the pidfile. If not given, the PID of
-                    the current process will be taken.
-        @type pid: int
-
-        """
-
-        if pid:
-            pid = int(pid)
-            if pid <= 0:
-                msg = "Invalid PID {} for creating pidfile {!r} given.".format(pid, self.filename)
-                raise PidFileError(msg)
-        else:
-            pid = os.getpid()
-
-        if self.check():
-
-            LOG.info("Deleting pidfile {!r} ...".format(self.filename))
-            if self.simulate:
-                LOG.debug("Just kidding ..")
-            else:
-                try:
-                    os.remove(self.filename)
-                except OSError as e:
-                    raise InvalidPidFileError(self.filename, str(e))
-
-        if self.verbose > 1:
-            LOG.debug("Trying opening {!r} exclusive ...".format(self.filename))
-
-        if self.simulate:
-            LOG.debug("Simulation mode - don't real writing in {!r}.".format(self.filename))
-            self._created = True
-            return
-
-        fd = None
-        try:
-            fd = os.open(
-                self.filename, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
-        except OSError as e:
-            error_tuple = sys.exc_info()
-            msg = "Error on creating pidfile {!r}: {}".format(self.filename, e)
-            reraise(PidFileError, msg, error_tuple[2])
-
-        if self.verbose > 2:
-            LOG.debug("Writing {} into {!r} ...".format(pid, self.filename))
-
-        out = to_utf8("%d\n" % (pid))
-        try:
-            os.write(fd, out)
-        finally:
-            os.close(fd)
-
-        self._created = True
-
-    # -------------------------------------------------------------------------
-    def recreate(self, pid=None):
-        """
-        Rewrites an already created pidfile with the current PID.
-
-        @param pid: the pid to write into the pidfile. If not given, the PID of
-                    the current process will be taken.
-        @type pid: int
-
-        """
-
-        if not self.created:
-            msg = "Calling recreate() on a not self created pidfile."
-            raise PidFileError(msg)
-
-        if pid:
-            pid = int(pid)
-            if pid <= 0:
-                msg = "Invalid PID {} for creating pidfile {!r} given.".format(pid, self.filename)
-                raise PidFileError(msg)
-        else:
-            pid = os.getpid()
-
-        if self.verbose > 1:
-            LOG.debug("Trying opening {!r} for recreate ...".format(self.filename))
-
-        if self.simulate:
-            LOG.debug("Simulation mode - don't real writing in {!r}.".format(self.filename))
-            return
-
-        fh = None
-        try:
-            fh = open(self.filename, 'w', **self.open_args)
-        except OSError as e:
-            error_tuple = sys.exc_info()
-            msg = "Error on recreating pidfile {!r}: {}".format(self.filename, e)
-            reraise(PidFileError, msg, error_tuple[2])
-
-        if self.verbose > 2:
-            LOG.debug("Writing {} into {!r} ...".format(pid, self.filename))
-
-        try:
-            fh.write("%d\n" % (pid))
-        finally:
-            fh.close()
-
-    # -------------------------------------------------------------------------
-    def check(self):
-        """
-        This methods checks the usability of the pidfile.
-        If the method doesn't raise an exception, the pidfile is usable.
-
-        It returns whether the pidfile exists and can be deleted or not.
-
-        @raise InvalidPidFileError: if the pidfile is unusable
-        @raise PidFileInUseError: if the pidfile is in use by another application
-        @raise ReadTimeoutError: on timeout reading an existing pidfile
-        @raise OSError: on some other reasons, why the existing pidfile
-                        couldn't be read
-
-        @return: the pidfile exists, but can be deleted - or it doesn't
-                 exist.
-        @rtype: bool
-
-        """
-
-        if not os.path.exists(self.filename):
-            if not os.path.exists(self.parent_dir):
-                reason = "Pidfile parent directory {!r} doesn't exists.".format(
-                    self.parent_dir)
-                raise InvalidPidFileError(self.filename, reason)
-            if not os.path.isdir(self.parent_dir):
-                reason = "Pidfile parent directory {!r} is not a directory.".format(
-                    self.parent_dir)
-                raise InvalidPidFileError(self.filename, reason)
-            if not os.access(self.parent_dir, os.W_OK | os.X_OK):
-                reason = "No write access to pidfile parent directory {!r}.".format(
-                    self.parent_dir)
-                raise InvalidPidFileError(self.filename, reason)
-
-            return False
-
-        if not os.path.isfile(self.filename):
-            reason = "It is not a regular file."
-            raise InvalidPidFileError(self.filename, reason)
-
-        # ---------
-        def pidfile_read_alarm_caller(signum, sigframe):
-            """
-            This nested function will be called in event of a timeout.
-
-            @param signum: the signal number (POSIX) which happened
-            @type signum: int
-            @param sigframe: the frame of the signal
-            @type sigframe: object
-            """
-
-            raise ReadTimeoutError(self.timeout, self.filename)
-
-        if self.verbose > 1:
-            LOG.debug("Reading content of pidfile {!r} ...".format(self.filename))
-
-        signal.signal(signal.SIGALRM, pidfile_read_alarm_caller)
-        signal.alarm(self.timeout)
-
-        content = ''
-        fh = None
-
-        try:
-            fh = open(self.filename, 'r')
-            for line in fh.readlines():
-                content += line
-        finally:
-            if fh:
-                fh.close()
-            signal.alarm(0)
-
-        # Parsing the content of the pidfile
-
-        pid = None
-        line = content.strip()
-        match = re.search(r'^\s*(\d+)\s*$', line)
-        if match:
-            pid = int(match.group(1))
-        else:
-            msg = "No useful information found in pidfile {!r}: {!r}".format(self.filename, line)
-            return True
-
-        if self.verbose > 1:
-            LOG.debug("Trying check for process with PID {} ...".format(pid))
-
-        try:
-            os.kill(pid, 0)
-        except OSError as err:
-            if err.errno == errno.ESRCH:
-                LOG.info("Process with PID {} anonymous died.".format(pid))
-                return True
-            elif err.errno == errno.EPERM:
-                error_tuple = sys.exc_info()
-                msg = "No permission to signal the process {} ...".format(pid)
-                reraise(PidFileError, msg, error_tuple[2])
-            else:
-                error_tuple = sys.exc_info()
-                msg = "Got a {}: {}.".format(err.__class__.__name__, err)
-                reraise(PidFileError, msg, error_tuple[2])
-        else:
-            raise PidFileInUseError(self.filename, pid)
-
-        return False
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
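A hedged usage sketch for the PidFile class above - the pidfile path and application name are invented, and the import path assumes the package layout after this commit:

    import sys

    from pp_lib.pidfile import PidFile, PidFileInUseError

    pidfile = PidFile('/run/my-app/my-app.pid', appname='my-app', verbose=1)
    try:
        # Writes the current PID, or raises PidFileInUseError if a living
        # process still owns the file.
        pidfile.create()
    except PidFileInUseError as e:
        sys.stderr.write(str(e) + "\n")
        sys.exit(1)

    # ... actual work of the application ...

    # With auto_remove=True (the default) the pidfile is removed again
    # in PidFile.__del__() when the object is garbage collected.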
diff --git a/pp_lib/quota_check.py b/pp_lib/quota_check.py
deleted file mode 100644 (file)
index 9ad82be..0000000
+++ /dev/null
@@ -1,710 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the quota-check application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import os
-import datetime
-import logging
-import logging.config
-import re
-import textwrap
-import pwd
-import glob
-import stat
-import pipes
-import gzip
-import shutil
-import time
-import locale
-import sys
-
-from subprocess import Popen, PIPE
-
-# Third party modules
-import six
-import yaml
-
-# Own modules
-from .common import pp, to_str
-
-from .homes_admin import PpHomesAdminError, PpHomesAdminApp
-
-__version__ = '0.6.3'
-LOG = logging.getLogger(__name__)
-ZERO = datetime.timedelta(0)
-
-# A Utc class.
-
-class Utc(datetime.tzinfo):
-    """Utc"""
-
-    def utcoffset(self, dt):
-        return ZERO
-
-    def tzname(self, dt):
-        return "UTC"
-
-    def dst(self, dt):
-        return ZERO
-
-UTC = Utc()
-# UTC = datetime.timezone.utc
-
-
-# =============================================================================
-class PpQuotaCheckError(PpHomesAdminError):
-    pass
-
-
-# =============================================================================
-class PpQuotaCheckApp(PpHomesAdminApp):
-    """
-    Class for the 'quota-check' application to check the utilization
-    of the home share on the NFS server.
-    """
-
-    default_quota_kb = 300 * 1024
-
-    # /var/lib/quota-check
-    default_status_dir = os.sep + os.path.join('var', 'lib', 'quota-check')
-    # /var/lib/quota-check/quota-check.yaml
-    default_statusfile_base = 'quota-check.yaml'
-
-    du_line_re = re.compile(r'^\s*(\d+)\s+(.*)')
-
-    default_max_age = 365.25 * 4 * 24 * 60 * 60
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.default_reply_to = 'noreply@pixelpark.com'
-
-        self.quota_kb = self.default_quota_kb
-
-        self.status_dir = self.default_status_dir
-        self.statusfile_base = self.default_statusfile_base
-        self.statusfile = os.path.join(self.status_dir, self.statusfile_base)
-        self.status_data = {}
-        self.max_age = self.default_max_age
-
-        self.passwd_data = {}
-        self.map_uid = {}
-        self.now = datetime.datetime.utcnow()
-        self.du_cmd = self.get_command('du', quiet=True)
-        self.do_statistics = False
-
-        self.show_simulate_opt = True
-        self._simulate_opt_help = textwrap.dedent('''\
-            Retrieving all utilization values, but not writing them
-            into the status file.
-            ''').strip()
-
-        description = textwrap.dedent('''\
-            This checks the utilization of the home directories on the NFS server
-            and sends a mail on request about all home directories which are
-            exceeding the given quota (default {} MB).
-            ''').strip().format(self.default_quota_kb // 1024)
-
-        super(PpQuotaCheckApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='quota-check'
-        )
-
-        if not self.du_cmd:
-            LOG.error("Command {!r} not found.".format('du'))
-            self.exit(7)
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def init_arg_parser(self):
-        """
-        Method to initiate the argument parser.
-
-        This method should be explicitly called by all init_arg_parser()
-        methods in descendant classes.
-        """
-
-        super(PpQuotaCheckApp, self).init_arg_parser()
-
-        def_mb = self.quota_kb / 1024
-
-        self.arg_parser.add_argument(
-            '-Q', '--quota',
-            metavar="MB", type=int, dest='quota_mb',
-            help="Quota value in MB (default: {} MB).".format(def_mb),
-        )
-
-        self.arg_parser.add_argument(
-            '--status-file',
-            metavar='FILE', dest='status_file',
-            help=(
-                "The YAML file containing the statistics of the current week "
-                "(default: {!r}).").format(self.statusfile)
-        )
-
-        self.arg_parser.add_argument(
-            '-S', '--stats',
-            action="store_true", dest="stats",
-            help=(
-                "Generate statistics, mail them to the administrators and "
-                "rotate the status data file. Without this option the current "
-                "utilization is determined and saved in the status data file."),
-        )
-
-    # -------------------------------------------------------------------------
-    def perform_config(self):
-
-        super(PpQuotaCheckApp, self).perform_config()
-
-        for section_name in self.cfg.keys():
-
-            if self.verbose > 3:
-                LOG.debug("Checking config section {!r} ...".format(section_name))
-
-            if section_name.lower() not in ('quota-check', 'quota_check', 'quotacheck'):
-                continue
-
-            section = self.cfg[section_name]
-            if self.verbose > 2:
-                LOG.debug("Evaluating config section {n!r}:\n{s}".format(
-                    n=section_name, s=pp(section)))
-
-            if 'quota_mb' in section:
-                v = section['quota_mb']
-                quota = self.quota_kb / 1024
-                try:
-                    quota = int(v)
-                except (ValueError, TypeError):
-                    msg = "Found invalid quota MB {!r} in configuration.".format(v)
-                    LOG.error(msg)
-                else:
-                    if quota < 0:
-                        msg = "Found invalid quota MB {!r} in configuration.".format(quota)
-                        LOG.error(msg)
-                    else:
-                        self.quota_kb = quota * 1024
-
-            if 'quota_kb' in section:
-                v = section['quota_kb']
-                quota = self.quota_kb
-                try:
-                    quota = int(v)
-                except (ValueError, TypeError):
-                    msg = "Found invalid quota KB {!r} in configuration.".format(v)
-                    LOG.error(msg)
-                else:
-                    if quota < 0:
-                        msg = "Found invalid quota KB {!r} in configuration.".format(quota)
-                        LOG.error(msg)
-                    else:
-                        self.quota_kb = quota
-
-            if 'status_file' in section:
-                v = section['status_file']
-                if os.path.isabs(v):
-                    self.status_dir = os.path.normpath(os.path.dirname(v))
-                    self.statusfile_base = os.path.basename(v)
-                    self.statusfile = os.path.normpath(v)
-                else:
-                    self.statusfile = os.path.normpath(
-                        os.path.join(self.status_dir, v))
-                    self.status_dir = os.path.dirname(self.statusfile)
-                    self.statusfile_base = os.path.basename(self.statusfile)
-
-        cmdline_quota = getattr(self.args, 'quota_mb', None)
-        if cmdline_quota is not None:
-            self.quota_kb = cmdline_quota * 1024
-
-        sfile = getattr(self.args, 'status_file')
-        if sfile:
-            if os.path.isabs(sfile):
-                self.status_dir = os.path.normpath(os.path.dirname(sfile))
-                self.statusfile_base = os.path.basename(sfile)
-                self.statusfile = os.path.normpath(sfile)
-            else:
-                self.statusfile = os.path.normpath(os.path.join(self.status_dir, sfile))
-                self.status_dir = os.path.dirname(self.statusfile)
-                self.statusfile_base = os.path.basename(self.statusfile)
-
-        self.do_statistics = bool(getattr(self.args, 'stats', False))
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        self.status_data = self.read_status_data()
-        self.status_data['last_check'] = self.now
-        self.read_passwd_data()
-        self.check_home_utilization()
-
-        self.write_status_data()
-
-        if self.do_statistics:
-            self.perform_statistics()
-            self.compress_old_status_files()
-
-    # -------------------------------------------------------------------------
-    def pre_run(self):
-        """
-        Dummy function to run before the main routine.
-        Could be overwritten by descendant classes.
-
-        """
-
-        if os.geteuid():
-            msg = "Only root may execute this application."
-            LOG.error(msg)
-            self.exit(1)
-
-        super(PpQuotaCheckApp, self).pre_run()
-
-    # -------------------------------------------------------------------------
-    def read_status_data(self):
-
-        LOG.info("Reading status data from {!r} ...".format(self.statusfile))
-
-        if not os.path.exists(self.statusfile):
-            LOG.debug("Status file {!r} does not exists.".format(self.statusfile))
-            return {}
-
-        if not os.path.isfile(self.statusfile):
-            msg = "Status file {!r} is not a regular file.".format(self.statusfile)
-            LOG.error(msg)
-            self.exit(5)
-
-        if not os.access(self.statusfile, os.R_OK):
-            msg = "No read access to status file {!r}.".format(self.statusfile)
-            LOG.error(msg)
-            self.exit(6)
-
-        open_args = {}
-        if six.PY3:
-            open_args['encoding'] = 'utf-8'
-            open_args['errors'] = 'surrogateescape'
-
-        status = {}
-
-        with open(self.statusfile, 'r', **open_args) as fh:
-            try:
-                status = yaml.safe_load(fh)
-            except yaml.YAMLError as e:
-                msg = "YAML error in status file {f!r}: {e}".format(
-                    f=self.statusfile, e=e)
-                LOG.error(msg)
-                return {}
-
-        if not isinstance(status, dict):
-            status = {}
-
-        if self.verbose > 2:
-            LOG.debug("Status from {f!r}:\n{s}".format(
-                f=self.statusfile, s=pp(status)))
-
-        return status
-
-    # -------------------------------------------------------------------------
-    def rotate_status_file(self, date=None):
-
-        if not os.path.isfile(self.statusfile):
-            LOG.debug("File {!r} to rotate does not exists.".format(self.statusfile))
-            return
-
-        if not date:
-            file_stat = os.stat(self.statusfile)
-            date = datetime.datetime.utcfromtimestamp(file_stat.st_mtime)
-        (stem, ext) = os.path.splitext(self.statusfile)
-
-        new_fname = "{s}.{d}{e}".format(
-            s=stem, d=date.strftime('%Y-%m-%d_%H:%M:%S'), e=ext)
-        LOG.info("Renaming {o!r} -> {n!r}.".format(o=self.statusfile, n=new_fname))
-        if self.simulate:
-            LOG.info("Simulation mode, status file will not be renamed.")
-            return
-        os.rename(self.statusfile, new_fname)
-
-    # -------------------------------------------------------------------------
-    def compress_old_status_files(self):
-
-        if self.simulate:
-            LOG.info("Simulation mode, status file rotation will not be executed.")
-            return
-
-        (stem, ext) = os.path.splitext(self.statusfile_base)
-        search_base = "{s}.20*{e}".format(s=stem, e=ext)
-        search_pattern = os.path.join(self.status_dir, search_base)
-        files = glob.glob(search_pattern)
-        if len(files) <= 1:
-            return
-
-        files.sort()
-        for filename in files[:-1]:
-            file_stat = os.stat(filename)
-            if not file_stat.st_size:
-                LOG.debug("Not compressing {!r} because of zero size.".format(filename))
-                continue
-            LOG.info("Compressing {!r} ...".format(filename))
-            new_name = filename + '.gz'
-            with open(filename, 'rb') as f_in:
-                with gzip.open(new_name, 'wb') as f_out:
-                    shutil.copyfileobj(f_in, f_out)
-            shutil.copystat(filename, new_name)
-            LOG.debug("Removing {!r} ...".format(filename))
-            os.remove(filename)
-
-        files_to_remove = []
-        files = glob.glob(search_pattern)
-        search_base = "{s}.20*{e}.gz".format(s=stem, e=ext)
-        search_pattern = os.path.join(self.status_dir, search_base)
-        files += glob.glob(search_pattern)
-        files.sort()
-        # Removing all files older than 4 years
-        limit_age = time.time() - self.max_age
-        limit_age_dt = datetime.datetime.utcfromtimestamp(limit_age)
-        LOG.info("Removing all status files older than {!r} ...".format(
-            limit_age_dt.isoformat(' ')))
-
-        for filename in files[:-1]:
-            if not os.path.isfile(filename):
-                continue
-            file_stat = os.stat(filename)
-            if file_stat.st_mtime < limit_age:
-                files_to_remove.append(filename)
-
-        for filename in files_to_remove:
-            LOG.info("Removing {!r} ...".format(filename))
-            os.remove(filename)
-
-    # -------------------------------------------------------------------------
-    def write_status_data(self):
-
-        LOG.info("Writing status data from {!r} ...".format(self.statusfile))
-
-        if self.verbose > 2:
-            # LOG.debug("Status to write:\n{!r}".format(self.status_data))
-            LOG.debug("Status to write:\n{}".format(pp(self.status_data)))
-
-        if self.simulate:
-            LOG.info("Simulation mode, status file will not be really written.")
-            return
-
-        open_args = {}
-        if six.PY3:
-            open_args['encoding'] = 'utf-8'
-            open_args['errors'] = 'surrogateescape'
-
-        if not os.path.exists(self.status_dir):
-            LOG.info("Creating {!r} ...".format(self.status_dir))
-            mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IXOTH
-            try:
-                os.makedirs(self.status_dir, mode)
-            except os.error as e:
-                LOG.error("Could not create {!r}: {}".format(self.status_dir, e))
-                sys.exit(9)
-        elif not os.path.isdir(self.status_dir):
-            msg = "Status directory {!r} exists, but is not a directory.".format(self.status_dir)
-            LOG.error(msg)
-            return
-
-        status_dump = yaml.dump(self.status_data, default_flow_style=False)
-        if self.verbose > 2:
-            LOG.debug("Writing YAML data:\n{}".format(status_dump))
-
-        with open(self.statusfile, 'w', **open_args) as fh:
-            fh.write(status_dump)
-
-    # -------------------------------------------------------------------------
-    def read_passwd_data(self):
-
-        LOG.info("Reading all necessary data from 'getent passwd' ...")
-
-        entries = pwd.getpwall()
-
-        for entry in entries:
-            user_name = entry.pw_name
-            uid = entry.pw_uid
-            if user_name not in self.passwd_data:
-                self.passwd_data[user_name] = entry
-            if uid not in self.map_uid:
-                self.map_uid[uid] = user_name
-
-        LOG.debug("Found {} appropriate user entries in passwd.".format(
-            len(self.passwd_data.keys())))
-        if self.verbose > 2:
-            LOG.debug("User data in passwd:\n{}".format(pp(self.passwd_data)))
-
-    # -------------------------------------------------------------------------
-    def get_util_dir_kb(self, directory):
-
-        if not os.path.isdir(directory):
-            return 0
-
-        cmd = [self.du_cmd, '-sk', directory]
-        cmd_str = ' '.join(map(lambda x: pipes.quote(x), cmd))
-        if self.verbose > 2:
-            LOG.debug("Executing: {}".format(cmd_str))
-
-        p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
-        (stdoutdata, stderrdata) = p.communicate()
-        ret = p.wait()
-        if stdoutdata:
-            stdoutdata = to_str(stdoutdata)
-        if stderrdata:
-            stderrdata = to_str(stderrdata)
-
-        if ret:
-            msg = "Return value of \"{c}\": {r}.".format(c=cmd_str, r=ret)
-            if stderrdata:
-                msg += "\nError message: {}".format(stderrdata)
-            LOG.error(msg)
-            return 0
-
-        if not stdoutdata:
-            return 0
-
-        result = 0
-        for line in stdoutdata.splitlines():
-            line = line.strip()
-            match = self.du_line_re.search(line)
-            if not match:
-                continue
-            result = int(match.group(1))
-            break
-
-        return result
-
-    # -------------------------------------------------------------------------
-    def _silly_open_homedirs(self):
-
-        # Senseless opening of all user home directories to activate automounter
-        for user_name in self.passwd_data:
-            entry = self.passwd_data[user_name]
-            pwd_home_dir = entry.pw_dir
-            if not pwd_home_dir:
-                continue
-            if not pwd_home_dir.startswith(self.home_root_abs):
-                if self.verbose > 2:
-                    LOG.debug("Home dir {!r} is not below {!r}.".format(
-                        pwd_home_dir, self.home_root_abs))
-                continue
-            abs_home_dir = os.path.join(self.chroot_homedir, os.path.relpath(pwd_home_dir, os.sep))
-            LOG.debug("Trying to open {!r} ...".format(abs_home_dir))
-            try:
-                os.listdir(abs_home_dir)
-                if self.verbose > 2:
-                    LOG.debug("Found home directory {!r} ...".format(abs_home_dir))
-            except OSError:
-                LOG.warn("Directory {!r} does not exists.".format(abs_home_dir))
-
-    # -------------------------------------------------------------------------
-    def check_home_utilization(self):
-
-        LOG.info("Checking utilization of home directories ...")
-
-        self._silly_open_homedirs()
-
-        glob_pattern = os.path.join(self.home_root_real, '*')
-        all_home_entries = glob.glob(glob_pattern)
-
-        if 'checks' not in self.status_data:
-            self.status_data['checks'] = {}
-        self.status_data['checks'][self.now] = {}
-        check = self.status_data['checks'][self.now]
-        check['data'] = {}
-        check['stats'] = {
-            'begin': self.now,
-            'end': None,
-            'duration': None,
-            'total_kb': 0,
-            'total_mb': 0,
-            'total_gb': 0.0,
-            'number_dirs': 0,
-        }
-
-        total_kb = 0
-        number_dirs = 0
-
-        i = 0
-
-        for path in all_home_entries:
-
-            LOG.debug("Searching for {!r} ...".format(path))
-
-            if not os.path.isdir(path):
-                continue
-            number_dirs += 1
-            i += 1
-            home_rel = os.sep + os.path.relpath(path, self.chroot_homedir)
-            if self.verbose > 2:
-                LOG.debug("Checking {p!r} ({h!r}) ...".format(
-                    p=path, h=home_rel))
-            dir_stat = os.stat(path)
-            dir_uid = dir_stat.st_uid
-            dir_owner = str(dir_uid)
-            username = dir_owner
-            if dir_uid == 0:
-                username = 'root'
-                dir_owner = 'root'
-            elif dir_uid in self.map_uid:
-                dir_owner = self.map_uid[dir_uid]
-                username = dir_owner
-                if dir_owner in self.passwd_data and self.passwd_data[dir_owner].pw_gecos:
-                    dir_owner = self.passwd_data[dir_owner].pw_gecos
-            util = self.get_util_dir_kb(path)
-            total_kb += util
-            result = {
-                'checked': datetime.datetime.utcnow(),
-                'util_kb': util,
-                'uid': dir_uid,
-                'gid': dir_stat.st_gid,
-                'user': username,
-                'gecos': dir_owner,
-            }
-            check['data'][home_rel] = result
-
-        end_ts = datetime.datetime.utcnow()
-        duration = end_ts - self.now
-        dur_days = duration.days
-        dur_secs = duration.seconds
-        if duration.microseconds >= 500000:
-            dur_secs += 1
-        dur_mins = 0
-        dur_hours = 0
-        if dur_secs >= 60:
-            dur_mins = int(dur_secs / 60)
-            dur_secs = dur_secs % 60
-            if dur_mins >= 60:
-                dur_hours = int(dur_mins / 60)
-                dur_mins = dur_mins % 60
-        dur_parts = []
-        if dur_days:
-            dur_parts.append("{} days".format(dur_days))
-        if dur_days or dur_hours:
-            dur_parts.append("{} hours".format(dur_hours))
-        if dur_days or dur_hours or dur_mins:
-            dur_parts.append("{} minutes".format(dur_mins))
-        dur_parts.append("{} seconds".format(dur_secs))
-        check['stats']['end'] = end_ts
-        check['stats']['duration'] = ', '.join(dur_parts)
-        check['stats']['number_dirs'] = number_dirs
-        check['stats']['total_kb'] = total_kb
-        check['stats']['total_mb'] = total_kb // 1024
-        check['stats']['total_gb'] = float(total_kb) / 1024.0 / 1024.0
-
-    # -------------------------------------------------------------------------
-    def send_results(self, total_dirs_top):
-
-        locale_conv = locale.localeconv()
-        dp = ','
-        ts = '.'
-        if 'decimal_point' in locale_conv and locale_conv['decimal_point'] != '.':
-            dp = locale_conv['decimal_point']
-        if 'thousands_sep' in locale_conv:
-            ts = locale_conv['thousands_sep']
-
-        subject = "Quota weekly summary (>= {:.0f} MB)".format(self.quota_kb / 1024)
-
-        body = "Hallo Berlin dudes!\n\n"
-
-        if total_dirs_top.keys():
-
-            max_len_home = 2
-            max_len_size = 4
-            for home in total_dirs_top.keys():
-                if len(home) > max_len_home:
-                    max_len_home = len(home)
-                size = total_dirs_top[home]['util_kb_avg'] / 1024
-                size_out = "{:,.0f} MB".format(size)
-                size_out = size_out.replace('.', ';').replace(',', ts).replace(';', dp)
-                if len(size_out) > max_len_size:
-                    max_len_size = len(size_out)
-
-            for home in sorted(
-                    total_dirs_top.keys(),
-                    key=lambda x: total_dirs_top[x]['util_kb_avg'],
-                    reverse=True):
-                size = total_dirs_top[home]['util_kb_avg'] / 1024
-                user = total_dirs_top[home]['user']
-                gecos = total_dirs_top[home]['gecos']
-                size_out = "{:,.0f} MB".format(size)
-                size_out = size_out.replace('.', ';').replace(',', ts).replace(';', dp)
-                line = " * {h:<{wh}} - {s:>{ws}} ({u} -> {g})\n".format(
-                    h=home, wh=max_len_home, s=size_out, ws=max_len_size, u=user, g=gecos)
-                body += line
-
-        else:
-
-            body += (
-                "No home directory found with a recursive size "
-                "greater or equal than {:.f} MB.").format(self.quota_kb / 1024)
-
-        body += "\n\nCheers\n\n" + self.mail_from + '\n'
-
-        LOG.debug("Subject: {!r}".format(subject))
-        LOG.debug("Body:\n{}".format(body))
-
-        self.send_mail(subject, body)
-
-    # -------------------------------------------------------------------------
-    def perform_statistics(self):
-
-        if 'checks' in self.status_data and len(self.status_data['checks'].keys()):
-            total_dirs = {}
-            for check_date in self.status_data['checks'].keys():
-                check = self.status_data['checks'][check_date]
-                if 'data' not in check or not check['data'].keys():
-                    continue
-                # Consolidating data ...
-                for home in check['data'].keys():
-                    pdata = check['data'][home]
-                    old_kb = 0
-                    nr_checks = 0
-                    if home in total_dirs:
-                        old_kb = total_dirs[home]['util_kb']
-                        nr_checks = total_dirs[home]['nr_checks']
-                    nr_checks += 1
-                    util_kb = old_kb + pdata['util_kb']
-                    total_dirs[home] = {
-                        'gecos': pdata['gecos'],
-                        'gid': pdata['gid'],
-                        'uid': pdata['uid'],
-                        'user': pdata['user'],
-                        'util_kb': util_kb,
-                        'nr_checks': nr_checks,
-                        'util_kb_avg': util_kb / nr_checks,
-                    }
-
-            total_dirs_top = {}
-            for home in total_dirs.keys():
-                if total_dirs[home]['util_kb_avg'] < self.quota_kb:
-                    continue
-                total_dirs_top[home] = total_dirs[home]
-            del total_dirs
-
-            if self.verbose > 1:
-                LOG.debug("Got top home directories:\n{}".format(pp(total_dirs_top)))
-
-            self.send_results(total_dirs_top)
-
-        # Rotate status file and rewrite an empty status file
-        self.rotate_status_file(self.now)
-        self.status_data = {}
-        self.status_data['last_check'] = self.now
-        self.write_status_data()
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
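PpQuotaCheckApp.get_util_dir_kb() above measures a home directory by running "du -sk &lt;directory&gt;" and taking the leading kilobyte count from the output. A reduced sketch of that measurement, using subprocess.run instead of the Popen plumbing of the original (the function name dir_size_kb is made up):

    import re
    import subprocess

    DU_LINE_RE = re.compile(r'^\s*(\d+)\s+(.*)')

    def dir_size_kb(directory):
        """Return the recursive size of a directory in KiB, or 0 on any error."""
        proc = subprocess.run(
            ['du', '-sk', directory],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
        if proc.returncode != 0:
            return 0
        for line in proc.stdout.splitlines():
            match = DU_LINE_RE.search(line.strip())
            if match:
                return int(match.group(1))
        return 0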
diff --git a/pp_lib/rec_dict.py b/pp_lib/rec_dict.py
deleted file mode 100644 (file)
index 4060a89..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@summary: The module provides an object class with a dict, which can
-          be updated in a recursive way.
-          It is originated by Jannis Andrija Schnitzer::
-            https://gist.github.com/114831
-"""
-
-# Standard modules
-# import sys
-# import os
-import logging
-
-__author__ = 'jannis@itisme.org (Jannis Andrija Schnitzer)'
-__copyright__ = '(c) 2009 Jannis Andrija Schnitzer'
-__contact__ = 'jannis@itisme.org'
-__version__ = '0.2.1'
-__license__ = 'GPL3'
-
-log = logging.getLogger(__name__)
-
-
-# =============================================================================
-class RecursiveDictionary(dict):
-    """RecursiveDictionary provides the methods rec_update and iter_rec_update
-    that can be used to update member dictionaries rather than overwriting
-    them."""
-
-    # -------------------------------------------------------------------------
-    def rec_update(self, other, **third):
-        """Recursively update the dictionary with the contents of other and
-        third like dict.update() does - but don't overwrite sub-dictionaries.
-        Example:
-        >>> d = RecursiveDictionary({'foo': {'bar': 42}})
-        >>> d.rec_update({'foo': {'baz': 36}})
-        >>> d
-        {'foo': {'baz': 36, 'bar': 42}}
-        """
-
-        try:
-            iterator = iter(other.items())
-        except AttributeError:
-            iterator = other
-
-        self.iter_rec_update(iterator)
-        self.iter_rec_update(iter(third.items()))
-
-    # -------------------------------------------------------------------------
-    def iter_rec_update(self, iterator):
-        for (key, value) in iterator:
-            if key in self and \
-                    isinstance(self[key], dict) and isinstance(value, dict):
-                self[key] = RecursiveDictionary(self[key])
-                self[key].rec_update(value)
-            else:
-                self[key] = value
-
-    # -------------------------------------------------------------------------
-    def __repr__(self):
-        return super(RecursiveDictionary, self).__repr__()
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
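As a quick illustration of the recursive update behaviour described in the
docstrings above, here is a small usage sketch; the nested keys are made-up
example data, and the import assumes the (new) lib/pp_lib location is on
sys.path:

    # Usage sketch for RecursiveDictionary; the config keys are invented.
    from pp_lib.rec_dict import RecursiveDictionary

    cfg = RecursiveDictionary({'mail': {'from': 'root', 'smtp': 'localhost'}})
    cfg.rec_update({'mail': {'from': 'frank'}, 'verbose': 2})
    # The 'mail' sub-dict is merged instead of being replaced:
    # {'mail': {'from': 'frank', 'smtp': 'localhost'}, 'verbose': 2}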
diff --git a/pp_lib/test_home_app.py b/pp_lib/test_home_app.py
deleted file mode 100644 (file)
index 99ac907..0000000
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-@author: Frank Brehm
-@contact: frank.brehm@pixelpark.com
-@copyright: © 2018 by Frank Brehm, Berlin
-@summary: The module for the mk-home application object.
-"""
-from __future__ import absolute_import
-
-# Standard modules
-import logging
-import logging.config
-import textwrap
-
-# Third party modules
-
-# Own modules
-from .homes_admin import PpHomesAdminError, PpHomesAdminApp
-
-__version__ = '0.5.3'
-LOG = logging.getLogger(__name__)
-
-
-# =============================================================================
-class PpTestHomeError(PpHomesAdminError):
-    pass
-
-
-# =============================================================================
-class PpTestHomeApp(PpHomesAdminApp):
-    """
-    Class for the 'test-home' application to check for unnecessary home directories.
-    """
-
-    # -------------------------------------------------------------------------
-    def __init__(self, appname=None, version=__version__):
-
-        self.default_reply_to = 'noreply@pixelpark.com'
-
-        description = textwrap.dedent('''\
-            This script detects unnecessary home directories - those without an
-            appropriate entry in the passwd database and not excluded
-            in {!r}.
-            ''').strip().format(self.default_exclude_file)
-
-        super(PpTestHomeApp, self).__init__(
-            appname=appname, version=version, description=description,
-            cfg_stems='test-home',
-        )
-
-        if not self.mail_recipients:
-            self.exit(5)
-
-        self.initialized = True
-
-    # -------------------------------------------------------------------------
-    def _run(self):
-
-        self.read_exclude_dirs()
-        self.read_passwd_homes()
-        self.check_homes()
-        self.send_results()
-
-    # -------------------------------------------------------------------------
-    def send_results(self):
-
-        if not self.unnecessary_dirs:
-            LOG.debug("No unnecessary home directories, nothing to inform.")
-            return
-
-        subject = 'Unneeded home directories'
-        body = textwrap.dedent('''\
-            The following home directories are present neither in the local
-            passwd file, nor in LDAP, nor in the exclude list.
-            They can therefore be archived and deleted.''')
-        body += '\n\n'
-        for home in self.unnecessary_dirs:
-            body += ' - ' + home + '\n'
-
-        self.send_mail(subject, body)
-
-
-# =============================================================================
-
-if __name__ == "__main__":
-
-    pass
-
-# =============================================================================
-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 list
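The check that the description above refers to is implemented in the
homes_admin module (read_exclude_dirs(), read_passwd_homes(), check_homes()),
which is not part of this hunk. Below is a rough, standalone sketch of the
idea, checking only the local passwd database (the real application also
consults LDAP); the helper name and the default path are illustrative:

    # Sketch only: lists directories under home_root that are no user's home
    # directory according to the local passwd database and are not excluded.
    import os
    import pwd

    def find_unnecessary_homes(home_root='/home', excludes=frozenset()):
        passwd_homes = {entry.pw_dir for entry in pwd.getpwall()}
        unnecessary = []
        for name in sorted(os.listdir(home_root)):
            path = os.path.join(home_root, name)
            if not os.path.isdir(path):
                continue
            if path in passwd_homes or path in excludes:
                continue
            unnecessary.append(path)
        return unnecessary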