--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+Guest utility to wrap arnied related functionality through python calls.
+
+Copyright: Intra2net AG
+
+
+There are three types of setting some cnfvar configuration:
+
1) static (:py:func:`set_cnf`) - oldest method using a static preprocessed
   config file without modifying its content in any way
2) semi-dynamic (:py:func:`set_cnf_semidynamic`) - old method also using a
   static file but rather as a template, replacing regex-matched values to
   adapt it to different configurations
3) dynamic (:py:func:`set_cnf_dynamic`) - new method using dictionaries
   and custom cnfvar classes and writing them into config files of a desired
   format (json, cnf, or raw)
+
+The `*_verify` functions are not available on guests as well as any functions
+without a `vm` argument and the `*_reconnect` functions are not available on
+hosts as well as any functions without an optional `vm` argument.
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+import os
+import sys
+import time
+import re
+import subprocess
+import shutil
+import tempfile
+import logging
+log = logging.getLogger('arnied_wrapper')
+
+from cnfline import build_cnfvar
+import cnfvar
+import sysmisc
+
+#: default set_cnf binary
+BIN_SET_CNF = "/usr/intranator/bin/set_cnf"
+#: default location for template configuration files
+SRC_CONFIG_DIR = "."
+#: default location for dumped configuration files
+DUMP_CONFIG_DIR = "."
+
+
class ConfigError(Exception):
    """Raised when applying an arnied configuration (e.g. via set_cnf) fails."""
    pass
+
+
def run_cmd(cmd="", ignore_errors=False, vm=None):
    """
    Universal command run wrapper.

    :param str cmd: command to run
    :param bool ignore_errors: whether not to raise error on command failure
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :returns: command result output
    :rtype: subprocess.CompletedProcess
    :raises: :py:class:`subprocess.CalledProcessError` if command failed and
        cannot be ignored
    """
    if vm is not None:
        status, stdout = vm.session.cmd_status_output(cmd)
        # normalize the remote session output to bytes like subprocess does
        stdout = stdout.encode()
        stderr = b""
        if status != 0:
            stderr = stdout
            stdout = b""
            if not ignore_errors:
                raise subprocess.CalledProcessError(status, cmd, stderr=stderr)
        return subprocess.CompletedProcess(cmd, status, stdout=stdout, stderr=stderr)
    else:
        # BUG FIX: `check` must be the inverse of `ignore_errors` (previously
        # errors were raised exactly when they should have been ignored);
        # also capture the output since callers inspect `result.stdout`
        return subprocess.run(cmd, check=not ignore_errors, shell=True,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+
def verify_running(process='arnied', timeout=60, vm=None):
    """
    Verify if a given process is running via 'pgrep'.
    Normally this is used to check if arnied is running.

    :param str process: process to verify if running
    :param int timeout: run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :raises: :py:class:`RuntimeError` if process is not running
    """
    platform_str = ""
    if vm is not None:
        vm.verify_alive()
        platform_str = " on %s" % vm.name
    for i in range(timeout):
        # BUG FIX: the format used "%i\%i" which contains an invalid string
        # escape and would render a stray backslash; use a plain slash
        log.info("Checking whether %s is running%s (%i/%i)",
                 process, platform_str, i, timeout)
        result = run_cmd(cmd="pgrep -l -x %s" % process,
                         ignore_errors=True, vm=vm)
        if result.returncode == 0:
            log.debug(result)
            return
        time.sleep(1)
    raise RuntimeError("Process %s does not seem to be running" % process)
+
+
+# Basic functionality
+
+
def accept_licence(vm=None):
    """
    Accept the Intra2net license.

    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None

    This is mostly useful for simplified webpage access.
    """
    accept_result = run_cmd(cmd='echo "LICENSE_ACCEPTED,0: \\"1\\"" | set_cnf',
                            ignore_errors=True, vm=vm)
    log.debug(accept_result)
    # block until the triggered GENERATE run has finished
    run_cmd(cmd="/usr/intranator/bin/arnied_helper --wait-for-program-end GENERATE",
            vm=vm)
+
+
def go_online(provider_id, wait_online=True, timeout=60, vm=None):
    """
    Go online with the given provider id.

    :param provider_id: provider to go online with
    :type provider_id: int
    :param wait_online: whether to wait until online
    :type wait_online: bool
    :param int timeout: maximum timeout for waiting (used when `wait_online`)
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None

    .. seealso:: :py:func:`go_offline`, :py:func:`wait_for_online`
    """
    log.info("Switching to online mode with provider %d", provider_id)

    get_cnf_res = run_cmd(cmd='get_cnf PROVIDER %d' % provider_id, vm=vm)
    if b'PROVIDER,' not in get_cnf_res.stdout:
        # FIX: log.warn is a deprecated alias of log.warning
        log.warning('There is no PROVIDER %d on the vm. Skipping go_online.',
                    provider_id)
        return

    cmd = 'tell-connd --online P%i' % provider_id
    result = run_cmd(cmd=cmd, vm=vm)
    log.debug(result)

    if wait_online:
        wait_for_online(provider_id, timeout=timeout, vm=vm)
+
+
def go_offline(wait_offline=True, vm=None):
    """
    Go offline.

    :param wait_offline: whether to wait until offline; a non-boolean truthy
        value is passed on as a custom waiting timeout
    :type wait_offline: bool or int
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None

    .. seealso:: :py:func:`go_online`, :py:func:`wait_for_offline`
    """
    result = run_cmd(cmd='tell-connd --offline', vm=vm)
    log.debug(result)

    if not wait_offline:
        return
    if wait_offline is True:
        wait_for_offline(vm=vm)
    else:
        wait_for_offline(wait_offline, vm=vm)
+
+
def wait_for_offline(timeout=60, vm=None):
    """
    Wait for arnied to signal we are offline.

    :param int timeout: maximum timeout for waiting
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    _wait_for_online_status(status='offline', provider_id=None,
                            timeout=timeout, vm=vm)
+
+
def wait_for_online(provider_id, timeout=60, vm=None):
    """
    Wait for arnied to signal we are online.

    :param provider_id: provider to go online with
    :type provider_id: int
    :param int timeout: maximum timeout for waiting
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    _wait_for_online_status(status='online', provider_id=provider_id,
                            timeout=timeout, vm=vm)
+
+
def _wait_for_online_status(status, provider_id, timeout, vm):
    """
    Poll arnied's ONLINE variable until the desired status is reported.

    :param str status: either "online" or "offline"
    :param provider_id: provider to go online with (unused when offline)
    :type provider_id: int or None
    :param int timeout: maximum timeout for waiting
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :raises: :py:class:`ValueError` if `status` is not an acceptable value
    :raises: :py:class:`RuntimeError` if the status is not reached in time
    """
    # Don't use tell-connd --status here since the actual
    # ONLINE signal to arnied is transmitted
    # asynchronously via arnieclient_muxer.

    if status == 'online':
        expected_output = 'DEFAULT: 2'
        # BUG FIX: vm was previously passed positionally as the `timeout`
        # argument of go_online; pass it by keyword instead
        set_status_func = lambda: go_online(provider_id, False, vm=vm)
    elif status == 'offline':
        expected_output = 'DEFAULT: 0'
        set_status_func = lambda: go_offline(False, vm=vm)
    else:
        raise ValueError('expect status "online" or "offline", not "{0}"!'
                         .format(status))

    log.info("Waiting for arnied to be {0} within {1} seconds"
             .format(status, timeout))

    for i in range(timeout):
        # arnied might invalidate the connd "connection barrier"
        # after generate was running and switch to OFFLINE (race condition).
        # -> tell arnied every ten seconds to go online again
        if i % 10 == 0 and i != 0:
            set_status_func()

        cmd = '/usr/intranator/bin/get_var ONLINE'
        result = run_cmd(cmd=cmd, ignore_errors=True, vm=vm)
        log.debug(result)

        if expected_output in result.stdout.decode():
            log.info("arnied is {0}. Continuing.".format(status))
            return

        time.sleep(1)

    raise RuntimeError("We didn't manage to go {0} within {1} seconds\n"
                       .format(status, timeout))
+
+
def disable_virscan(vm=None):
    """
    Disable virscan that could block GENERATE and thus all configurations.

    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    log.info("Disabling virus database update")
    unset_cnf("VIRSCAN_UPDATE_CRON", vm=vm)

    push_result = run_cmd(cmd="echo 'VIRSCAN_UPDATE_DNS_PUSH,0:\"0\"' |set_cnf",
                          vm=vm)
    log.debug(push_result)

    # TODO: this intervention should be solved in later arnied_helper tool
    cleanup_result = run_cmd(cmd="rm -f /var/intranator/schedule/UPDATE_VIRSCAN_NODIAL*",
                             vm=vm)
    log.debug(cleanup_result)
    log.info("Virus database update disabled")
+
+
def email_transfer(vm=None):
    """
    Transfer all the emails using the guest tool arnied_helper.

    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    transfer_result = run_cmd(cmd="/usr/intranator/bin/arnied_helper --transfer-mail",
                              vm=vm)
    log.debug(transfer_result)
+
+
def wait_for_email_transfer(timeout=300, vm=None):
    """
    Wait until the mail queue is empty and all emails are sent.

    :param int timeout: email transfer timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    for i in range(timeout):
        if i % 10 == 0:
            # Retrigger mail queue in case something is deferred
            # by an amavisd-new reconfiguration
            run_cmd(cmd='postqueue -f', vm=vm)
        log.info('Waiting for SMTP queue to be empty (%i/%i sec)',
                 i, timeout)
        if b'Mail queue is empty' in run_cmd(cmd='mailq', vm=vm).stdout:
            log.debug('SMTP queue is empty')
            return
        time.sleep(1)
    # BUG FIX: previously a trailing debug message claimed the queue was
    # empty even when the loop ran out of time
    log.warning('SMTP queue not empty after %i sec', timeout)
+
+
def schedule(program, exec_time=0, optional_args="", vm=None):
    """
    Schedule a program to be executed at a given unix time stamp.

    :param str program: program whose execution is scheduled
    :param int exec_time: scheduled time of program's execution
    :param str optional_args: optional command line arguments
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    log.info("Scheduling %s to be executed at %i", program, exec_time)
    tmp_file = tempfile.NamedTemporaryFile(mode="w+",
                                           prefix=program.upper() + "_",
                                           dir=DUMP_CONFIG_DIR,
                                           delete=False)
    log.debug("Created temporary file %s", tmp_file.name)
    tmp_file.write("%i\n%s\n" % (exec_time, optional_args))
    tmp_file.close()
    schedule_dir = "/var/intranator/schedule"
    # clean from already scheduled programs of the same type
    files = vm.session.cmd("ls " + schedule_dir).split() if vm else os.listdir(schedule_dir)
    for file_name in files:
        if file_name.startswith(program.upper()):
            log.debug("Removing previous scheduled %s", file_name)
            if vm:
                # BUG FIX: remove the file inside the schedule directory
                # (previously only the bare file name was passed to rm,
                # which resolves relative to the session's cwd)
                vm.session.cmd("rm " + os.path.join(schedule_dir, file_name))
            else:
                os.unlink(os.path.join(schedule_dir, file_name))
    moved_tmp_file = os.path.join(schedule_dir,
                                  os.path.basename(tmp_file.name))
    if vm:
        # NOTE(review): this assumes the temporary file path also exists on
        # the vm (e.g. via a shared directory) — confirm against callers
        vm.session.cmd("mv " + tmp_file.name + " " + moved_tmp_file)
    else:
        shutil.move(tmp_file.name, moved_tmp_file)
    log.debug("Moved temporary file to %s", moved_tmp_file)
+
+
def wait_for_run(program, timeout=300, retries=10, vm=None):
    """
    Wait for a program using the guest arnied_helper tool.

    :param str program: scheduled or running program to wait for
    :param int timeout: program run timeout
    :param int retries: number of tries to verify that the program is scheduled or running
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    log.info("Waiting for program %s to finish with timeout %i",
             program, timeout)
    check_cmd = ("/usr/intranator/bin/arnied_helper --is-scheduled-or-running "
                 + program.upper())
    for attempt in range(retries):
        check_scheduled = run_cmd(cmd=check_cmd, ignore_errors=True, vm=vm)
        if check_scheduled.returncode == 0:
            break
        time.sleep(1)
        if attempt == retries - 1:
            log.warning("The program %s was not scheduled and is not running", program)
    wait_cmd = ("/usr/intranator/bin/arnied_helper --wait-for-program-end "
                + program.upper() + " --wait-for-program-timeout " + str(timeout))
    result = run_cmd(cmd=wait_cmd, vm=vm)
    log.debug(result.stdout)
+
+
+# Configuration functionality
+
def get_cnf(cnf_key, cnf_index=1, regex=".*", compact=False, timeout=30, vm=None):
    """
    Query arnied for a `cnf_key` and extract some information via regex.

    :param str cnf_key: queried cnf key
    :param int cnf_index: index of the cnf key
    :param str regex: regex to apply on the queried cnf key data
    :param bool compact: whether to retrieve compact version of the matched cnf keys
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :returns: extracted information via the regex
    :rtype: Match object

    If `cnf_index` is set to -1, retrieve and perform regex matching on all instances.
    """
    verify_running(timeout=timeout, vm=vm)
    platform_str = " from %s" % vm.name if vm is not None else ""
    log.info("Extracting arnied value %s for %s%s using pattern %s",
             cnf_index, cnf_key, platform_str, regex)
    compact_flag = " -c " if compact else ""
    index_arg = " %s" % cnf_index if cnf_index != -1 else ""
    cmd = "get_cnf%s %s%s" % (compact_flag, cnf_key, index_arg)
    output = run_cmd(cmd=cmd, vm=vm).stdout.decode()
    return re.search(regex, output, flags=re.DOTALL)
+
+
def get_cnf_id(cnf_key, value, timeout=30, vm=None):
    """
    Get the id of a configuration of type `cnf_key` and name `value`.

    :param str cnf_key: queried cnf key
    :param str value: cnf value of the cnf key
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :returns: the cnf id or -1 if no such cnf variable
    :rtype: int
    """
    verify_running(timeout=timeout, vm=vm)
    # raw string avoids the invalid "\d" escape of a plain literal
    regex = r'%s,(\d+): "%s"' % (cnf_key, value)
    match = get_cnf(cnf_key, cnf_index=-1, regex=regex, compact=True, vm=vm)
    cnf_id = -1 if match is None else int(match.group(1))
    log.info("Retrieved id \"%s\" for %s is %i", value, cnf_key, cnf_id)
    return cnf_id
+
+
def get_cnfvar(varname=None, instance=None, data=None, timeout=30, vm=None):
    """
    Invoke get_cnf and return a nested CNF structure.

    :param str varname: "varname" field of the CNF_VAR to look up
    :param instance: "instance" of that variable to return
    :type instance: int
    :param str data: "data" field by which the resulting CNF_VAR list should be filtered
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :returns: the resulting "cnfvar" structure or None if the lookup fails or the result could not be parsed
    :rtype: cnfvar option
    """
    verify_running(timeout=timeout, vm=vm)
    # firstly, build argv for get_cnf
    cmd = ["get_cnf", "-j"]
    if varname is not None:
        cmd.append("%s" % varname)
    if instance:
        cmd.append("%d" % instance)
    cmd_line = " ".join(cmd)

    # now invoke get_cnf
    result = run_cmd(cmd=cmd_line, vm=vm)
    (status, raw) = result.returncode, result.stdout
    if status != 0:
        # NOTE(review): on the host path run_cmd may raise on failure before
        # reaching this branch — confirm the vm path is the intended consumer
        log.info("error %d executing \"%s\"", status, cmd_line)
        log.debug(raw)
        return None

    # reading was successful, attempt to parse what we got
    try:
        cnf = cnfvar.read_cnf_json(raw)
    except TypeError as exn:
        log.info("error \"%s\" parsing result of \"%s\"", exn, cmd_line)
        return None
    except cnfvar.InvalidCNF as exn:
        log.info("error \"%s\" validating result of \"%s\"", exn, cmd_line)
        return None

    # optionally filter the parsed structure by the "data" field
    if data is not None:
        return cnfvar.get_vars(cnf, data=data)

    return cnf
+
+
def get_cnfvar_id(varname, data, timeout=30, vm=None):
    """
    Similar to :py:func:`get_cnf_id` but uses :py:func:`get_cnfvar`.

    :param str varname: "varname" field of the CNF_VAR to look up
    :param str data: "data" field by which the resulting CNF_VAR list should be filtered
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :returns: the cnf id or -1 if no such cnf variable
    :rtype: int
    """
    verify_running(timeout=timeout, vm=vm)
    log.info("Extracting from arnied CNF_VAR %s with data %s",
             varname, data)
    cnf = get_cnfvar(varname=varname, data=data, vm=vm)
    # BUG FIX: get_cnfvar returns None on lookup/parse failure, which
    # previously crashed on the subscription below
    if cnf is None:
        log.info("CNF_VAR lookup failed, defaulting to -1")
        return -1
    variables = cnf["cnf"]
    if len(variables) == 0:
        log.info("CNF_VAR extraction unsuccessful, defaulting to -1")
        # preserve behavior
        return -1
    first_instance = int(variables[0]["instance"])
    log.info("CNF_VAR instance lookup yielded %d results, returning first value (%d)",
             len(variables), first_instance)
    return first_instance
+
+
def wait_for_generate(vm=None):
    """
    Wait for the 'generate' program to complete.

    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    ahelper = "/usr/intranator/bin/arnied_helper"
    # check (and possibly wait for) both the online and the offline job
    for job in ("GENERATE", "GENERATE_OFFLINE"):
        check_cmd = ahelper + " --is-scheduled-or-running " + job
        if run_cmd(cmd=check_cmd, ignore_errors=True, vm=vm).returncode == 0:
            wait_cmd = ahelper + " --wait-for-program-end " + job
            result = run_cmd(cmd=wait_cmd, vm=vm)
            log.debug(result)
+
+
def unset_cnf(varname="", instance="", vm=None):
    """
    Remove configuration from arnied.

    :param str varname: "varname" field of the CNF_VAR to unset
    :param instance: "instance" of that variable to unset
    :type instance: int or str
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    """
    run_cmd(cmd="get_cnf %s %s | set_cnf -x" % (varname, instance), vm=vm)
    # removal triggers a generate run that has to finish first
    wait_for_generate(vm)
+
+
def set_cnf(config_files, kind="cnf", timeout=30, vm=None):
    """
    Perform static arnied configuration through a set of config files.

    :param config_files: config files to use for the configuration
    :type config_files: [str]
    :param str kind: "json" or "cnf"
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :raises: :py:class:`ConfigError` if cannot apply file

    The config files must be provided and are always expected to be found on
    the host. If these are absolute paths, they will be kept as is or
    otherwise will be searched for in `SRC_CONFIG_DIR`. If a vm is provided,
    the config files will be copied there as temporary files before applying.
    """
    log.info("Setting arnied configuration")
    verify_running(timeout=timeout, vm=vm)

    config_paths = prep_config_paths(config_files)
    for config_path in config_paths:
        with open(config_path, "rt", errors='replace') as config:
            log.debug("Contents of applied %s:\n%s", config_path, config.read())
        if vm is not None:
            new_config_path = generate_config_path()
            vm.copy_files_to(config_path, new_config_path)
            config_path = new_config_path
        argv = ["set_cnf", "-j" if kind == "json" else "", config_path]

        result = run_cmd(" ".join(argv), ignore_errors=True, vm=vm)
        # CONSISTENCY FIX: use the module logger instead of the root logger
        log.debug(result)
        if result.returncode != 0:
            raise ConfigError("Failed to apply config %s%s, set_cnf returned %d"
                              % (config_path,
                                 " on %s" % vm.name if vm is not None else "",
                                 result.returncode))

    try:
        wait_for_generate(vm)
    except Exception as ex:
        # handle cases of remote configuration that leads to connection meltdown
        if vm is not None and isinstance(ex, sys.modules["aexpect"].ShellProcessTerminatedError):
            log.info("Resetting connection to %s", vm.name)
            vm.session = vm.wait_for_login(timeout=10)
            log.debug("Connection reset via remote error: %s", ex)
        else:
            # BUG FIX: bare `raise` preserves the original traceback
            # (previously `raise ex` re-raised from this frame)
            raise
+
+
def set_cnf_semidynamic(config_files, params_dict, regex_dict=None,
                        kind="cnf", timeout=30, vm=None):
    """
    Perform semi-dynamic arnied configuration from an updated version of the
    config files.

    :param config_files: config files to use for the configuration
    :type config_files: [str]
    :param params_dict: parameters to override the defaults in the config files
    :type params_dict: {str, str}
    :param regex_dict: regular expressions to use for matching the overriden parameters
    :type regex_dict: {str, str} or None
    :param str kind: "json" or "cnf"
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None

    The config files must be provided and are always expected to be found on
    the host. If these are absolute paths, they will be kept as is or
    otherwise will be searched for in `SRC_CONFIG_DIR`. If a vm is provided,
    the config files will be copied there as temporary files before applying.
    """
    log.info("Performing semi-dynamic arnied configuration")
    # render the templates first, then apply them like static configs
    prepared_paths = prep_cnf(config_files, params_dict, regex_dict)
    set_cnf(prepared_paths, kind=kind, timeout=timeout, vm=vm)
    log.info("Semi-dynamic arnied configuration successful!")
+
+
def set_cnf_dynamic(cnf, config_file=None, kind="cnf", timeout=30, vm=None):
    """
    Perform dynamic arnied configuration from fully generated config files.

    :param cnf: one key with the same value as *kind* and a list of cnfvars as value
    :type cnf: {str, str}
    :param config_file: optional user supplied filename
    :type config_file: str or None
    :param str kind: "json", "cnf", or "raw"
    :param int timeout: arnied run verification timeout
    :param vm: vm to run on if running on a guest instead of the host
    :type vm: VM object or None
    :raises: :py:class:`ValueError` if `kind` is not an acceptable value
    :raises: :py:class:`ConfigError` if cannot apply file

    The config file might not be provided in which case a temporary file will
    be generated and saved on the host's `DUMP_CONFIG_DIR` if not provided as
    an absolute path. If a vm is provided, the config file will be copied there
    as a temporary file before applying.
    """
    if config_file is None:
        config_path = generate_config_path(dumped=True)
    elif os.path.isabs(config_file):
        config_path = config_file
    else:
        config_path = os.path.join(os.path.abspath(DUMP_CONFIG_DIR), config_file)
    generated = config_file is None
    config_file = os.path.basename(config_path)
    log.info("Using %s cnf file %s%s",
             "generated" if generated else "user-supplied",
             config_file, " on %s" % vm.name if vm is not None else "")

    set_cnf_methods = {
        "raw": cnfvar.write_cnf_raw,
        "json": cnfvar.write_cnf_json,
        "cnf": cnfvar.write_cnf
    }
    try:
        write_cnf = set_cnf_methods[kind]
    except KeyError:
        # BUG FIX: the message previously omitted the supported "raw" kind;
        # validate before creating the output file
        raise ValueError("Invalid set_cnf method \"%s\"; expected \"raw\", "
                         "\"json\" or \"cnf\"" % kind) from None
    # context manager guarantees the handle is closed even if writing fails
    with open(config_path, "w") as fd:
        write_cnf(cnf, out=fd)
    log.info("Generated config file %s", config_path)

    # the applying backend only distinguishes json from cnf input
    kind = "cnf" if kind != "json" else kind
    set_cnf([config_path], kind=kind, timeout=timeout, vm=vm)
+
+
def set_cnf_pipe(cnf, timeout=30, block=False):
    """
    Set local configuration by talking to arnied via ``set_cnf``.

    :param cnf: one key with the same value as *kind* and a list of cnfvars as value
    :type cnf: {str, str}
    :param int timeout: arnied run verification timeout
    :param bool block: whether to wait for generate to complete the
        configuration change
    :returns: whether ``set_cnf`` succeeded or not
    :rtype: bool

    This is obviously not generic but supposed to be run on the guest.
    """
    log.info("Setting arnied configuration through local pipe")
    verify_running(timeout=timeout)

    # FIX: do not shadow the `exit` builtin with the pipe exit status
    ok, out, exit_status = sysmisc.run_cmd_with_pipe([BIN_SET_CNF, "-j"],
                                                     inp=str(cnf))

    if ok is False:
        # use lazy %-style logger arguments instead of eager formatting
        log.error("Error applying configuration; status=%r", exit_status)
        log.error("and stderr:\n%s", out)
        return False
    log.debug("Configuration successfully passed to set_cnf, "
              "read %d B from pipe", len(out))

    if block is True:
        log.debug("Waiting for config job to complete")
        wait_for_generate()

    # BUG FIX: corrected the "sucessfully" typo in the log message
    log.debug("Exiting successfully")
    return True
+
+
def prep_config_paths(config_files, config_dir=None):
    """
    Prepare absolute paths for all configs at an expected location.

    :param config_files: config files to use for the configuration
    :type config_files: [str]
    :param config_dir: config directory to prepend to the filepaths
    :type config_dir: str or None
    :returns: list of the full config paths
    :rtype: [str]
    """
    if config_dir is None:
        config_dir = SRC_CONFIG_DIR
    config_paths = []
    for config_file in config_files:
        if os.path.isabs(config_file):
            # Absolute path: The user requested a specific file
            # f.e. needed for dynamic arnied config update
            config_path = config_file
        else:
            config_path = os.path.join(os.path.abspath(config_dir),
                                       config_file)
        # CONSISTENCY FIX: use the module logger instead of the root logger
        log.debug("Using %s for original path %s", config_path, config_file)
        config_paths.append(config_path)
    return config_paths
+
+
def prep_cnf_value(config_file, value,
                   regex=None, template_key=None, ignore_fail=False):
    r"""
    Replace value in a provided arnied config file.

    :param str config_file: file to use for the replacement
    :param str value: value to replace the first matched group with
    :param regex: regular expression to use when replacing a cnf value
    :type regex: str or None
    :param template_key: key of a quick template to use for the regex
    :type template_key: str or None
    :param bool ignore_fail: whether to ignore regex mismatching
    :raises: :py:class:`ValueError` if (also default) `regex` doesn't have a match

    In order to ensure better matching capabilities you are supposed to
    provide a regex pattern with at least one subgroup to match your value.
    What this means is that the value you like to replace is not directly
    searched into the config text but matched within a larger regex in
    in order to avoid any mismatch.

    Example:
    provider.cnf, 'PROVIDER_LOCALIP,0: "(\d+)"', 127.0.0.1
    """
    if template_key is None:
        # custom regex supplied by the caller; matching is done on bytes
        pattern = regex.encode()
    else:
        # ready-made patterns for common replacements
        samples = {"provider": 'PROVIDER_LOCALIP,\d+: "(\d+\.\d+\.\d+\.\d+)"',
                   "global_destination_addr": 'SPAMFILTER_GLOBAL_DESTINATION_ADDR,0: "bounce_target@(.*)"'}
        pattern = samples[template_key].encode()

    with open(config_file, "rb") as file_handle:
        text = file_handle.read()
    match_line = re.search(pattern, text)

    if match_line is None and not ignore_fail:
        raise ValueError("Pattern %s not found in %s" % (pattern, config_file))
    elif match_line is not None:
        old_line = match_line.group(0)
        # splice the new value into the span of the first capture group only,
        # keeping the rest of the matched line intact
        text = text[:match_line.start(1)] + value.encode() + text[match_line.end(1):]
        line = re.search(pattern, text).group(0)
        log.debug("Updating %s to %s in %s", old_line, line, config_file)
        with open(config_file, "wb") as file_handle:
            file_handle.write(text)
+
+
def prep_cnf(config_files, params_dict, regex_dict=None):
    """
    Update all config files with the most current autotest parameters,
    i.e. override the values hard-coded in those config files.

    :param config_files: config files to use for the configuration
    :type config_files: [str]
    :param params_dict: parameters to override the defaults in the config files
    :type params_dict: {str, str}
    :param regex_dict: regular expressions to use for matching the overriden parameters
    :type regex_dict: {str, str} or None
    :returns: list of prepared (modified) config paths
    :rtype: [str]
    """
    log.info("Preparing %s template config files", len(config_files))

    # copy each template to a fresh dump location before editing it
    new_config_paths = []
    for src_path in prep_config_paths(config_files):
        dst_path = generate_config_path(dumped=True)
        shutil.copy(src_path, dst_path)
        new_config_paths.append(dst_path)

    for config_path in new_config_paths:
        for param_key, param_value in params_dict.items():
            if regex_dict is None:
                regex_val = r'\s+%s,\d+: "(.*)"' % param_key.upper()
            elif param_key in regex_dict:
                regex_val = regex_dict[param_key] % param_key.upper()
            elif re.match(r'\w*_\d+$', param_key):
                final_parameter, parent_id = \
                    re.match(r'(\w*)_(\d+)$', param_key).group(1, 2)
                regex_val = r'\(%s\) %s,\d+: "(.*)"' \
                    % (parent_id, final_parameter.upper())
                log.debug("Requested regex for %s is '%s'",
                          param_key, regex_val)
            else:
                regex_val = r'\s+%s,\d+: "(.*)"' % param_key.upper()
            prep_cnf_value(config_path, param_value,
                           regex=regex_val, ignore_fail=True)
        log.info("Prepared template config file %s", config_path)

    return new_config_paths
+
+
def generate_config_path(dumped=False):
    """
    Generate path for a temporary config name.

    :param bool dumped: whether the file should be in the dump
                        directory or in temporary directory
    :returns: generated config file path
    :rtype: str

    .. note:: the reserved file is removed immediately, so the returned
        path is only unique in a best-effort (inherently racy) way
    """
    # FIX: do not shadow the `dir` builtin
    target_dir = os.path.abspath(DUMP_CONFIG_DIR) if dumped else None
    fd, filename = tempfile.mkstemp(suffix=".cnf", dir=target_dir)
    os.close(fd)
    os.unlink(filename)
    return filename
+
+
def translate_cnf_provider(params):
    """
    Extend the parameters with the minimal arnied PROVIDER parameters.

    :param params: provider configuration
    :type params: {str, str}
    """
    local_ip = params["ip_inic"]
    remote_ip = params["ip_provider_inic"]
    # the provider's address doubles as its DNS server
    params.update(provider_localip=local_ip,
                  provider_ip=remote_ip,
                  provider_dns=remote_ip)
+
+
def translate_cnf_dns_forwarding(params):
    """
    Extend the parameters with the minimal arnied DNS_FORWARDING parameters.

    :param params: dns forwarding configuration
    :type params: {str, str}
    """
    params.update(dns_forwarding=params["domain_provider_inic"],
                  dns_forwarding_server1=params["ip_provider_inic"])
+
+
# enum
# Action codes used by batch_update_cnf: each entry of its `vars` argument
# is tagged with one of these to select the cnf operation to perform.
Delete = 0
Update = 1
Add = 2
Child = 3
+
+
def batch_update_cnf(cnf, vars):
    """
    Perform a batch update of multiple cnf variables.

    :param cnf: CNF variable to update
    :type cnf: BuildCnfVar object
    :param vars: tuples of enumerated action and subtuple with data
    :type vars: [(int, (str, int, str))]
    :returns: updated CNF variable
    :rtype: BuildCnfVar object

    The actions are indexed in the same order: delete, update, add, child.
    """
    last = 0
    for action, data in vars:
        if action == Update:
            last = cnf.update_cnf(*data)
        elif action == Add:
            last = cnf.add_cnf(*data)
        elif action == Delete:
            last = cnf.del_cnf(data)
        elif action == Child:
            # only one depth supported; `last` is deliberately left pointing
            # at the previous top-level line so children attach to it
            cnf.add_cnf(*data, different_parent_line_no=last)
    return cnf
+
+
def build_cnf(kind, instance=0, vals=None, data="", filename=None):
    """
    Build a CNF variable and save it in a config file.

    :param str kind: name of the CNF variable
    :param int instance: instance number of the CNF variable
    :param vals: tuples of enumerated action and subtuple with data
    :type vals: [(int, (str, int, str))] or None
    :param str data: data for the CNF variable
    :param filename: optional custom name of the config file
    :type filename: str or None
    :returns: name of the saved config file
    :rtype: str
    """
    builder = build_cnfvar.BuildCnfVar(kind, instance=instance, data=data)
    # BUG FIX: `vals=[]` was a mutable default argument shared between calls
    batch_update_cnf(builder, vals if vals is not None else [])
    filename = generate_config_path(dumped=True) if filename is None else filename
    [filename] = prep_config_paths([filename], DUMP_CONFIG_DIR)
    builder.save(filename)
    return filename
+
--- /dev/null
+"""Basic functions for on-the-fly arnied cnf-var generator"""
+from .cnfline import CnfLine
+import os
+import tempfile
+
+
class BuildCnfVar(object):
    """Basic on-the-fly arnied cnfvar generator.

    Wraps a "parent" :py:class:`CnfLine` plus a set of child lines keyed
    by line number; unknown attribute access is delegated to the parent.
    """

    def __init__(self, name, instance=0, data='', line_no=1):
        self._parent = CnfLine(name, instance, data, line_no)
        # maps line number -> CnfLine for every child entry
        self._children = {}

    def __getattr__(self, name):
        # delegate all unknown attributes to the "parent" cnfline
        return getattr(self._parent, name)

    def find_child_line_no(self, name):
        """Return the line number of the first child named NAME, or 0."""
        for line_no, child in self._children.items():
            if child.name == name:
                return line_no
        return 0

    def find_free_line_no(self):
        """Return the first line number above all currently used ones."""
        return max([self.line_no] + list(self._children)) + 1

    def find_free_child_instance(self, name):
        """Return the next free child instance number of type NAME."""
        used = [child.instance for child in self._children.values()
                if child.name == name]
        return max(used, default=-1) + 1

    def update_cnf(self, name, instance, data,
                   different_parent_line_no=0, force_append=False):
        """Update an existing cnfline or create a new one; return its line number."""
        child_line_no = 0 if force_append else self.find_child_line_no(name)
        if child_line_no == 0:
            child_line_no = self.find_free_line_no()

        # instance -1 requests automatic numbering
        if instance == -1:
            instance = self.find_free_child_instance(name)

        parent_line_no = different_parent_line_no or self._parent.line_no

        self._children[child_line_no] = CnfLine(name, instance, data,
                                                child_line_no, parent_line_no)
        return child_line_no

    def mark_as_own_parent(self, child_line_no):
        """Remove parent <-> child relationship for a given cnf line.

        We use this heavily for the *configure_xxx.py* files.
        """
        self._children[child_line_no].parent_line_no = 0

    def add_cnf(self, name, instance, data, different_parent_line_no=0):
        """Always append a new child line instead of updating in place."""
        return self.update_cnf(name, instance, data,
                               different_parent_line_no, force_append=True)

    def del_cnf(self, name):
        """Delete every cnfline with the given name."""
        self._children = {line_no: child
                          for line_no, child in self._children.items()
                          if child.name != name}

    def add_defaults(self, defaults=None):
        """Add default values from a simple dictionary."""
        for name, value in (defaults or {}).items():
            self.update_cnf(name, 0, value)

    def __str__(self):
        lines = [self._parent] + list(self._children.values())
        return ''.join(str(line) + '\n' for line in lines)

    def save(self, filename):
        """Save string representation to disk."""
        with open(filename, 'w') as out:
            out.write(str(self))
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to create group objects on the fly"""
+
+
class BuildGroup(BuildCnfVar):
    """Create GROUP cnfvar objects on the fly (fluent interface)."""

    def __init__(self, data='', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'GROUP', instance, data, line_no)

        # the bare defaults the UI adds upon
        # creation of new groups
        defaults = {
            'GROUP_COMMENT': '',
        }

        self.add_defaults(defaults)

    def comment(self, comment):
        """Set the group comment."""
        self.update_cnf('GROUP_COMMENT', 0, comment)
        return self

    def enable_activesync(self):
        self.update_cnf('GROUP_ACTIVESYNC_ENABLE', 0, '1')
        return self

    def enable_dialin(self):
        self.update_cnf('GROUP_DIALIN_ENABLE', 0, '1')
        return self

    def enable_xauth(self):
        self.update_cnf('GROUP_XAUTH_ENABLE', 0, '1')
        return self

    def enable_go_online(self):
        self.update_cnf('GROUP_ACCESS_GO_ONLINE_ALLOWED', 0, '1')
        return self

    def enable_remote_administration(self):
        self.update_cnf('GROUP_ACCESS_REMOTE_ADMINISTRATION_ALLOWED', 0, '1')
        return self

    def email_quota(self, quota):
        """Set the email quota (in MB, presumably -- confirm with UI)."""
        self.update_cnf('GROUP_EMAIL_QUOTA', 0, quota)
        return self

    def email_relay_rights_block_relay(self):
        self.update_cnf('GROUP_EMAIL_RELAY_RIGHTS', 0, 'BLOCK_RELAY')
        return self

    def email_relay_rights_from_intranet(self):
        self.update_cnf('GROUP_EMAIL_RELAY_RIGHTS', 0, 'RELAY_FROM_INTRANET')
        return self

    def email_relay_rights_from_everywhere(self):
        self.update_cnf('GROUP_EMAIL_RELAY_RIGHTS', 0, 'RELAY_FROM_EVERYWHERE')
        return self

    def emailfitler_ban_filterlist_ref(self, filterlist_ref):
        self.update_cnf('GROUP_EMAILFILTER_BAN_FILTERLIST_REF', 0,
                        filterlist_ref)
        return self

    def emailfilter_ban_filterlist_ref(self, filterlist_ref):
        """Correctly spelled alias for the historically misspelled
        :py:meth:`emailfitler_ban_filterlist_ref`."""
        return self.emailfitler_ban_filterlist_ref(filterlist_ref)

    def proxy_profile_ref(self, profile_ref):
        self.update_cnf('GROUP_PROXY_PROFILE_REF', 0, profile_ref)
        return self
--- /dev/null
+"""
+Create intraclients.
+"""
+
+from . import build_cnfvar
+
#: name used for the intraclient cnf data field by default
default_intraclient_name = "autotest-client"
#: default cnfvar instance number
default_intraclient_instance = 1
#: bare defaults applied to every built intraclient; the None placeholders
#: for IP and MAC are overwritten unconditionally in BuildIntraclient.__init__
default_cnfvars = {
    "INTRACLIENT_COMMENT": default_intraclient_name + " comment",
    "INTRACLIENT_DNS_RELAYING_ALLOWED": "1",
    "INTRACLIENT_EMAIL_RELAYING_ALLOWED": "1",
    "INTRACLIENT_FIREWALL_RULESET_REF": "5",
    "INTRACLIENT_IP": None,
    "INTRACLIENT_MAC": None,
    "INTRACLIENT_PROXY_PROFILE_REF": "-1",
}
+
+
class BuildIntraclient(build_cnfvar.BuildCnfVar):
    """Build an INTRACLIENT cnfvar preloaded with sensible defaults."""

    def __init__(self,
                 data=default_intraclient_name,
                 instance=default_intraclient_instance,
                 line_no=1,
                 ip="192.168.0.42",
                 mac=None,
                 alias=None):
        super(BuildIntraclient, self).__init__("INTRACLIENT", instance,
                                               data, line_no)
        self.add_defaults(default_cnfvars)

        # fill the per-client values in on top of the defaults
        self.update_cnf("INTRACLIENT_IP", 0, ip)
        self.update_cnf("INTRACLIENT_MAC", 0, mac or "")

        if alias is not None:
            self.update_cnf("INTRACLIENT_ALIAS", 0, alias)
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to create own keys cnfvar on the fly """
+
+
class BuildKey(BuildCnfVar):
    """Create KEY_OWN cnfvar objects on the fly (fluent interface).

    Each setter updates one KEY_OWN_* child cnfvar and returns ``self``
    so calls can be chained.
    """

    def __init__(self, data='autotest key', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'KEY_OWN', instance, data, line_no)

        # set some dummy data for cnf_check
        defaults = {
            'KEY_OWN_FINGERPRINT_MD5': '76:3B:CF:8E:CB:BF:A5:7D:CC:87:39:FA:CE:99:2E:96',
            'KEY_OWN_FINGERPRINT_SHA1': 'ED:5A:C6:D9:5B:BE:47:1F:B9:4F:CF:A3:80:3B:42:08:F4:00:16:96',
            'KEY_OWN_ID_X509': 'CN=some.net.lan',
            'KEY_OWN_ISSUER': 'CN=ab, C=fd, L=ab, ST=ab, O=ab, OU=ab/emailAddress=ab@ab.com',
            'KEY_OWN_KEYSIZE': '2048',
            'KEY_OWN_HASH_ALGO': 'SHA2_256',
            'KEY_OWN_PRIVATE_KEY': '-----BEGIN PRIVATE KEY-----\\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKk' +
                                   'ZTzqHXg41RZMiY+ywRZ037pBq8J3BkH\\n-----END PRIVATE KEY-----\\n',
            'KEY_OWN_PUBLIC_KEY': '-----BEGIN CERTIFICATE-----\\nMIIFCTCCAvGgAwIBAgIEVBGDuTANBgkqhkiG' +
                                  '9w0BAQsFADCBgTEPMA0GA1UEAwwG\\n-----END CERTIFICATE-----\\n',
            'KEY_OWN_REQUEST': '-----BEGIN CERTIFICATE REQUEST-----\\nMIIDCzCCAfMCAQAwIjEgMB4GA1UEAww' +
                               'XaW50cmFkZXYtYWllc\\n-----END CERTIFICATE REQUEST-----\\n',
            'KEY_OWN_SUBJECT': 'CN=some.net.lan',
            'KEY_OWN_VALIDFROM': '20140911T111257',
            'KEY_OWN_VALIDTILL': '20160731T134608',
            'KEY_OWN_TYPE': 'SELF',

            # the ones below should be set when using 'generate' to create the key
            'KEY_OWN_CREATE_CN': 'somehost',
            'KEY_OWN_CREATE_EMAIL': 'default@intra2net.com'
        }

        self.add_defaults(defaults)

    def country(self, country):
        self.update_cnf('KEY_OWN_CREATE_C', 0, country)
        return self

    def state(self, state):
        self.update_cnf('KEY_OWN_CREATE_ST', 0, state)
        return self

    def city(self, city):
        self.update_cnf('KEY_OWN_CREATE_L', 0, city)
        return self

    def company(self, company):
        self.update_cnf('KEY_OWN_CREATE_O', 0, company)
        return self

    def department(self, department):
        self.update_cnf('KEY_OWN_CREATE_OU', 0, department)
        return self

    def computer_name(self, computer_name):
        self.update_cnf('KEY_OWN_CREATE_CN', 0, computer_name)
        return self

    def email(self, email):
        self.update_cnf('KEY_OWN_CREATE_EMAIL', 0, email)
        return self

    def days(self, days):
        self.update_cnf('KEY_OWN_CREATE_DAYS', 0, days)
        return self

    def keysize(self, keysize):
        self.update_cnf('KEY_OWN_KEYSIZE', 0, keysize)
        return self

    def hash_algo(self, hash_algo):
        self.update_cnf('KEY_OWN_HASH_ALGO', 0, hash_algo)
        return self

    def certchain(self, certchain):
        self.update_cnf('KEY_OWN_CERTCHAIN', 0, certchain)
        return self

    def cerchain_count(self, cerchain_count):
        self.update_cnf('KEY_OWN_CERTCHAIN_CERTCOUNT', 0, cerchain_count)
        return self

    def certchain_count(self, certchain_count):
        """Correctly spelled alias for the historically misspelled
        :py:meth:`cerchain_count`."""
        return self.cerchain_count(certchain_count)

    def create_subjalt(self, create_subjalt):
        self.update_cnf('KEY_OWN_CREATE_SUBJALT', 0, create_subjalt)
        return self

    def create_subjalt_type(self, create_subjalt_type):
        self.update_cnf('KEY_OWN_CREATE_SUBJALT_TYPE', 0, create_subjalt_type)
        return self

    def fingerprint_md5(self, fingerprint_md5):
        self.update_cnf('KEY_OWN_FINGERPRINT_MD5', 0, fingerprint_md5)
        return self

    def fingerprint_sha1(self, fingerprint_sha1):
        self.update_cnf('KEY_OWN_FINGERPRINT_SHA1', 0, fingerprint_sha1)
        return self

    def id_x509(self, id_x509):
        self.update_cnf('KEY_OWN_ID_X509', 0, id_x509)
        return self

    def issuer(self, issuer):
        self.update_cnf('KEY_OWN_ISSUER', 0, issuer)
        return self

    def private_key(self, private_key):
        self.update_cnf('KEY_OWN_PRIVATE_KEY', 0, private_key)
        return self

    def public_key(self, public_key):
        self.update_cnf('KEY_OWN_PUBLIC_KEY', 0, public_key)
        return self

    def request(self, request):
        self.update_cnf('KEY_OWN_REQUEST', 0, request)
        return self

    def subject(self, subject):
        self.update_cnf('KEY_OWN_SUBJECT', 0, subject)
        return self

    def subject_alt(self, subject_alt):
        self.update_cnf('KEY_OWN_SUBJECT_ALT', 0, subject_alt)
        return self

    def key_type(self, key_type):
        self.update_cnf('KEY_OWN_TYPE', 0, key_type)
        return self

    def valid_from(self, valid_from):
        self.update_cnf('KEY_OWN_VALIDFROM', 0, valid_from)
        return self

    def valid_till(self, valid_till):
        self.update_cnf('KEY_OWN_VALIDTILL', 0, valid_till)
        return self
--- /dev/null
+from .build_cnfvar import BuildCnfVar
"""Class to create NIC cnfvar objects on the fly"""
+
+
class BuildNIC(BuildCnfVar):
    """Create NIC cnfvar objects on the fly (fluent interface)."""

    def __init__(self, data='', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'NIC', instance, data, line_no)

        # the bare defaults the UI adds upon
        # creation of new NICs
        defaults = {
            'NIC_DRIVER': 'virtio_net',
            'NIC_LAN_DNS_RELAYING_ALLOWED': "0",
            'NIC_LAN_EMAIL_RELAYING_ALLOWED': "0",
            'NIC_LAN_FIREWALL_RULESET_REF': "1",
            'NIC_LAN_IP': "192.168.1.1",
            'NIC_LAN_NAT_INTO': "0",
            'NIC_LAN_NETMASK': "255.255.255.0",
            'NIC_LAN_PROXY_PROFILE_REF': "-1",
            'NIC_MAC': '02:00:00:00:20:00',
            'NIC_TYPE': 'DSLROUTER',
        }

        self.add_defaults(defaults)

    def nic_type(self, nic_type):
        """Set the NIC type (e.g. 'DSLROUTER')."""
        self.update_cnf('NIC_TYPE', 0, nic_type)
        return self

    def lan_ip(self, lan_ip):
        """Set the LAN IP address of the NIC."""
        self.update_cnf('NIC_LAN_IP', 0, lan_ip)
        return self

    def add_group_member_ref(self, group_ref):
        # NOTE(review): this adds a USER_* cnfvar although the builder
        # creates a NIC -- looks copy-pasted from BuildUser; confirm the
        # intended cnfvar name against callers before relying on it.
        self.add_cnf('USER_GROUP_MEMBER_REF', -1, group_ref)
        return self
--- /dev/null
+"""
+Create provider profiles.
+"""
+
+from . import build_cnfvar
+
# Defaults are extracted from data/shared_arnied/provider.cnf.
#: provider name used for the cnf data field by default
default_provider_name = "autotest_provider"
#: default cnfvar instance number
default_provider_instance = 1
#: bare defaults applied to every built provider; mode/dns/ip/localip can
#: be overridden through the BuildProvider constructor arguments
default_cnfvars = {
    "PROVIDER_PROXY_SERVER": "",
    "PROVIDER_PROXY_PORT": "",
    "PROVIDER_PROXY_PASSWORD": "",
    "PROVIDER_PROXY_LOGIN": "",
    "PROVIDER_NIC_REF": "1",
    "PROVIDER_NETMASK": "255.255.0.0",
    "PROVIDER_MTU_SIZE": "1500",
    "PROVIDER_MODE": "ROUTER",
    "PROVIDER_MAILTRANSFER_MODE": "IMMEDIATE",
    "PROVIDER_LOCALIP": "",
    "PROVIDER_IP": "",
    "PROVIDER_FIREWALL_RULESET_REF": "7",
    "PROVIDER_FALLBACK_TIMEOUT": "60",
    "PROVIDER_FALLBACK_PROVIDER_REF": "-1",
    "PROVIDER_EMAIL_RELAY_REF": "-1",
    "PROVIDER_DYNDNS_WEBCHECKIP": "0",
    "PROVIDER_DYNDNS_ENABLE": "1",
    "PROVIDER_DNS_MODE": "IP",
    "PROVIDER_DNS": "",
    "PROVIDER_BWIDTH_MANAGEMENT_UPSTREAM_SPEED": "",
    "PROVIDER_BWIDTH_MANAGEMENT_ENABLE": "0",
    "PROVIDER_BWIDTH_MANAGEMENT_DOWNSTREAM_SPEED": "",
    "PROVIDER_PINGCHECK_SERVERLIST_REF": "-2",
}
+
+
class BuildProvider(build_cnfvar.BuildCnfVar):
    """Build a PROVIDER cnfvar preloaded with sensible defaults."""

    def __init__(self,
                 data=default_provider_name,
                 instance=default_provider_instance,
                 line_no=1,
                 mode="ROUTER",
                 dns=None,
                 ip=None,
                 localip=None):
        super(BuildProvider, self).__init__("PROVIDER", instance, data,
                                            line_no)
        self.add_defaults(default_cnfvars)
        self.update_cnf("PROVIDER_MODE", 0, mode)

        # only override the defaults for the values the caller supplied
        optional_values = (("PROVIDER_DNS", dns),
                           ("PROVIDER_IP", ip),
                           ("PROVIDER_LOCALIP", localip))
        for varname, value in optional_values:
            if value is not None:
                self.update_cnf(varname, 0, value)
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to create proxy accesslists on the fly"""
+
+
class BuildProxyAccesslist(BuildCnfVar):
    """Create PROXY_ACCESSLIST cnfvars on the fly (fluent interface)."""

    def __init__(self, data='', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'PROXY_ACCESSLIST', instance, data, line_no)

        # consistency: cnf data values are strings everywhere else in this
        # package, so SIZETYPE/TYPE are quoted too (rendered output of
        # CnfLine.__str__ is unchanged)
        defaults = {'PROXY_ACCESSLIST_ENTRY_COUNT': '123',
                    'PROXY_ACCESSLIST_MODE': '1',
                    'PROXY_ACCESSLIST_SIZETYPE': '1',
                    'PROXY_ACCESSLIST_TYPE': '0'}

        self.add_defaults(defaults)

    def mode_whitelist(self):
        """Switch the accesslist to whitelist mode."""
        self.update_cnf('PROXY_ACCESSLIST_MODE', 0, '0')
        return self

    def mode_blacklist(self):
        """Switch the accesslist to blacklist mode."""
        self.update_cnf('PROXY_ACCESSLIST_MODE', 0, '1')
        return self

    def add_url(self, url):
        """Append URL as a new accesslist entry."""
        self.add_cnf('PROXY_ACCESSLIST_URL', -1, url)
        return self
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to create proxy profiles on the fly"""
+
+
class BuildProxyProfile(BuildCnfVar):
    """Create PROXY_PROFILE cnfvars on the fly (fluent interface)."""

    def __init__(self, data='', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'PROXY_PROFILE', instance, data, line_no)

    def add_accesslist_ref(self, accesslist_instance):
        """Reference the accesslist with the given instance number."""
        self.add_cnf('PROXY_PROFILE_ACCESSLIST_REF', -1, accesslist_instance)
        return self
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to create user cnfvar objects on the fly"""
+
+
class BuildUser(BuildCnfVar):
    """Create USER cnfvar objects on the fly (fluent interface)."""

    def __init__(self, data='', instance=0, line_no=1):
        BuildCnfVar.__init__(self, 'USER', instance, data, line_no)

        # the bare defaults the UI adds upon
        # creation of new users
        defaults = {
            'USER_DISABLED': '0',
            'USER_FULLNAME': '',
            'USER_GROUPWARE_FOLDER_CALENDAR': 'INBOX/Calendar',
            'USER_GROUPWARE_FOLDER_CONTACTS': 'INBOX/Contacts',
            'USER_GROUPWARE_FOLDER_DRAFTS': 'INBOX/Drafts',
            'USER_GROUPWARE_FOLDER_NOTES': 'INBOX/Notes',
            'USER_GROUPWARE_FOLDER_OUTBOX': 'INBOX/Sent Items',
            'USER_GROUPWARE_FOLDER_TASKS': 'INBOX/Tasks',
            'USER_GROUPWARE_FOLDER_TRASH': 'INBOX/Deleted Items',
            # always a member of the 'Alle' group
            'USER_GROUP_MEMBER_REF': '2',
            'USER_LOCALE': '',
            'USER_PASSWORD': 'test1234',
            'USER_TRASH_DELETEDAYS': '30',
            'USER_WEBMAIL_MESSAGES_PER_PAGE': '25',
            'USER_WEBMAIL_SIGNATURE': '',
        }

        self.add_defaults(defaults)

    def disabled(self, disabled='1'):
        """Disable (default) or enable the user account."""
        self.update_cnf('USER_DISABLED', 0, disabled)
        return self

    def fullname(self, fullname):
        self.update_cnf('USER_FULLNAME', 0, fullname)
        return self

    def password(self, password):
        self.update_cnf('USER_PASSWORD', 0, password)
        return self

    def _set_forward(self, mode, email):
        """Set forward mode and target address (shared by forward/copy)."""
        self.update_cnf('USER_EMAIL_FORWARD_ENABLE', 0, mode)

        addr = self.update_cnf('USER_EMAIL_FORWARD_ADDRESS', 0, '')
        self.update_cnf('USER_EMAIL_FORWARD_ADDRESS_ADDR', 0, email, addr)
        return self

    def normal_email(self):
        """Deliver mail to the local mailbox only (no forwarding)."""
        self.update_cnf('USER_EMAIL_FORWARD_ENABLE', 0, 'NONE')
        return self

    def forward_email(self, email):
        """Forward incoming mail to EMAIL."""
        return self._set_forward('FORWARD', email)

    def copy_email(self, email):
        """Deliver locally and send a copy to EMAIL."""
        return self._set_forward('COPY', email)

    def add_group_member_ref(self, group_ref):
        """Add membership in the group with the given instance number."""
        self.add_cnf('USER_GROUP_MEMBER_REF', -1, group_ref)
        return self
--- /dev/null
+"""
+Create vpn connections.
+"""
+
+from . import build_cnfvar
+
+# Defaults are extracted from data/shared_arnied/vpnconn.cnf.
#: vpn connection name used for the cnf data field by default
default_vpnconn_name = "autotest_vpn"
#: default cnfvar instance number
default_vpnconn_instance = 1
#: bare defaults applied to every built vpn connection; the None placeholder
#: for VPNCONN_PEER_IP is overwritten unconditionally in BuildVPNConn.__init__
default_cnfvars = {
    "VPNCONN_ACTIVATION": "ALWAYS",
    "VPNCONN_DISABLED": "0",
    "VPNCONN_DNS_RELAYING_ALLOWED": "1",
    "VPNCONN_EMAIL_RELAYING_ALLOWED": "1",
    "VPNCONN_ENCRYPTION_PROFILE_REF": "0",
    "VPNCONN_FIREWALL_RULESET_REF": "5",
    "VPNCONN_IKE_VERSION": "1",
    "VPNCONN_KEY_FOREIGN_REF": "1",
    "VPNCONN_KEY_OWN_REF": "1",
    "VPNCONN_KEY_TYPE": "PUBLIC",
    "VPNCONN_LAN_NAT_IP": "",
    "VPNCONN_LAN_NAT_MODE": "UNMODIFIED",
    "VPNCONN_LAN_NAT_NETWORK": "",
    "VPNCONN_LAN_NIC_REF": "2",
    "VPNCONN_LAN_NET": "172.17.0.0",
    "VPNCONN_LAN_NETMASK": "255.255.0.0",
    "VPNCONN_LAN_TYPE": "NIC",
    "VPNCONN_LIFETIME_IKE": "480",
    "VPNCONN_LIFETIME_IPSECSA": "60",
    "VPNCONN_OFFLINE_DETECTION_SEC": "60",
    "VPNCONN_PEER_DNS": "",
    "VPNCONN_PEER_IP": None,
    "VPNCONN_PEER_TYPE": "IP",
    "VPNCONN_PROXY_PROFILE_REF": "-2",
    "VPNCONN_PSK": "",
    "VPNCONN_PSK_FOREIGN_ID": "",
    "VPNCONN_PSK_FOREIGN_ID_TYPE": "IP",
    "VPNCONN_PSK_OWN_ID": "",
    "VPNCONN_PSK_OWN_ID_TYPE": "IP",
    "VPNCONN_REMOTE_INET_NAT": "1",
    "VPNCONN_REMOTE_MODECONFIG_IP": "192.168.99.1",
    "VPNCONN_REMOTE_NAT_ENABLE": "0",
    "VPNCONN_REMOTE_NAT_NETWORK": "",
    "VPNCONN_REMOTE_NET": "172.18.0.0",
    "VPNCONN_REMOTE_NETMASK": "255.255.0.0",
    "VPNCONN_REMOTE_TYPE": "CUSTOM",
    "VPNCONN_RETRIES": "3",
    "VPNCONN_SECURED": "ESP",
    "VPNCONN_XAUTH_SERVER_ENABLE": "0"
}
+
+
class BuildVPNConn(build_cnfvar.BuildCnfVar):
    """Build a VPNCONN cnfvar preloaded with sensible defaults."""

    def __init__(self,
                 data=default_vpnconn_name,
                 instance=default_vpnconn_instance,
                 line_no=1,
                 peer_ip="172.16.1.172"):
        super(BuildVPNConn, self).__init__("VPNCONN", instance, data, line_no)
        self.add_defaults(default_cnfvars)

        # replace the None placeholder from the defaults
        self.update_cnf("VPNCONN_PEER_IP", 0, peer_ip)
--- /dev/null
class CnfLine(object):
    """Represents a single arnied cnfline.

    A line has a cnfvar name, an instance number, a data payload, its own
    line number and an optional parent line number (0 meaning top-level).
    """

    def __init__(self,
                 name='',
                 instance=0,
                 data='',
                 line_no=1,
                 parent_line_no=0):
        self.name = name
        self.instance = instance
        self.data = data
        self.line_no = line_no
        self.parent_line_no = parent_line_no

        # validate after assignment so attributes are always present
        if len(self.name) == 0:
            raise ValueError("You can't leave the cnfvar name empty")
        if line_no == 0:
            raise ValueError('Zero is not a valid line number')

    def __str__(self):
        """Build cnfline string representation."""

        # Sanity checks
        if len(self.name) == 0:
            raise ValueError("Can't display empty cnfvar name")
        if self.line_no == 0:
            raise ValueError('Zero is not a valid line number')

        if self.parent_line_no:
            prefix = '{0} ({1})'.format(self.line_no, self.parent_line_no)
        else:
            prefix = '{0}'.format(self.line_no)

        return prefix + ' {0},{1}: "{2}"'.format(self.name, self.instance,
                                                 self.data)
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to configure the proxy antivirus cnfvars on the fly"""
+
+
class ConfigureProxyAntivirus(BuildCnfVar):
    """Configure the proxy antivirus cnfvars on the fly (fluent)."""

    def __init__(self, enabled='1', line_no=1):
        BuildCnfVar.__init__(self, 'VIRSCAN_PROXY_ENABLE', 0, enabled, line_no)

    def automatic_unblock(self, block_minutes):
        """Automatically unblock after the given number of minutes."""
        self.mark_as_own_parent(
            self.update_cnf('VIRSCAN_PROXY_AUTOMATIC_UNBLOCK', 0,
                            block_minutes))
        return self

    def debug_log(self, enabled='1'):
        self.mark_as_own_parent(
            self.update_cnf('VIRSCAN_PROXY_DEBUG_LOG', 0, enabled))
        return self

    def add_pass_site(self, url):
        """Exempt URL from virus scanning."""
        sites_parent = self.update_cnf('VIRSCAN_PROXY_PASS_SITES', 0, '')
        self.mark_as_own_parent(sites_parent)
        self.add_cnf('VIRSCAN_PROXY_PASS_SITES_ADDR', -1, url, sites_parent)
        return self

    def clear_pass_sites(self):
        """Drop all exempted sites."""
        self.del_cnf('VIRSCAN_PROXY_PASS_SITES_ADDR')
        self.del_cnf('VIRSCAN_PROXY_PASS_SITES')
        return self

    def warn_admin(self, enabled='1'):
        self.mark_as_own_parent(
            self.update_cnf('VIRSCAN_PROXY_WARN_ADMIN', 0, enabled))
        return self
--- /dev/null
+from .build_cnfvar import BuildCnfVar
+"""Class to configure the webfilter cnfvars on the fly"""
+
+
class ConfigureWebfilter(BuildCnfVar):
    """Configure the webfilter cnfvars on the fly (fluent interface)."""

    def __init__(self, enabled='1', line_no=1):
        BuildCnfVar.__init__(self, 'PROXY_WEBFILTER_ENABLE', 0, enabled,
                             line_no)

    def _set_toplevel(self, name, value):
        """Set a parentless cnfvar and return self for chaining."""
        self.mark_as_own_parent(self.update_cnf(name, 0, value))
        return self

    def block_drugs(self, enabled='1'):
        return self._set_toplevel('PROXY_WEBFILTER_BLOCK_DRUGS', enabled)

    def block_gambling(self, enabled='1'):
        return self._set_toplevel('PROXY_WEBFILTER_BLOCK_GAMBLING', enabled)

    def block_pornography(self, enabled='1'):
        return self._set_toplevel('PROXY_WEBFILTER_BLOCK_PORNOGRAPHY',
                                  enabled)

    def block_violence(self, enabled='1'):
        return self._set_toplevel('PROXY_WEBFILTER_BLOCK_VIOLENCE', enabled)

    def block_warez(self, enabled='1'):
        return self._set_toplevel('PROXY_WEBFILTER_BLOCK_WAREZ', enabled)

    def add_pass_site(self, url):
        """Exempt URL from the webfilter."""
        sites_parent = self.update_cnf('PROXY_WEBFILTER_PASS_SITES', 0, '')
        self.mark_as_own_parent(sites_parent)
        self.add_cnf('PROXY_WEBFILTER_PASS_SITES_URL', -1, url, sites_parent)
        return self

    def clear_pass_sites(self):
        """Drop all exempted sites."""
        self.del_cnf('PROXY_WEBFILTER_PASS_SITES_URL')
        self.del_cnf('PROXY_WEBFILTER_PASS_SITES')
        return self
--- /dev/null
+#!/usr/bin/env python
+
+"""
+
+SUMMARY
+------------------------------------------------------
+Representation for connd state as returned by "tell-connd --status".
+
+Copyright: Intra2net AG
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+from __future__ import print_function
+import subprocess
+from re import match as regexp
+from os import EX_OK
+
+# constants
#: default path of the tell-connd control binary
DEFAULT_TELL_CONND_BINARY = '/usr/intranator/bin/tell-connd'
# TIMEOUT = 1 can only be used with python3

# online mode strings exactly as printed by "tell-connd --status"
ONLINE_MODE_ALWAYS_ONLINE = 'always online'
ONLINE_MODE_ALWAYS_OFFLINE = 'always offline'
ONLINE_MODE_DIAL_ON_COMMAND = 'dial on command'
ONLINE_MODE_DIAL_ON_DEMAND = 'dial on demand'

# connd subsystem identifiers as they appear in the status output
SUBSYS_DNS = 'dns'
SUBSYS_DYNDNS = 'dyndns'
SUBSYS_MAIL = 'mail'
SUBSYS_NTP = 'ntp'
SUBSYS_SOCKS = 'socks'
SUBSYS_VPN = 'vpn'
SUBSYS_WEBPROXY = 'webproxy'
SUBSYS_PINGCHECK = 'pingcheck'
SUBSYS_IPONLINE = 'iponline'
#: all subsystems that may appear in the status output
ALL_SUBSYS = (SUBSYS_DNS, SUBSYS_DYNDNS, SUBSYS_MAIL, SUBSYS_NTP,
              SUBSYS_SOCKS, SUBSYS_VPN, SUBSYS_WEBPROXY, SUBSYS_PINGCHECK,
              SUBSYS_IPONLINE)

#: all recognized online modes
ALL_MODES = (ONLINE_MODE_DIAL_ON_DEMAND, ONLINE_MODE_DIAL_ON_COMMAND,
             ONLINE_MODE_ALWAYS_OFFLINE, ONLINE_MODE_ALWAYS_ONLINE)
+
+
+class ConndState(object):
+ """Representation of connd's status as returned by tell-connd --status."""
+
+ online_mode = None
+ default_provider = None
+ subsys_online = None
+ subsys_offline = None
+ subsys_disabled = None
+ connections = None
+ actions = None
+ online_ips = None
+ connected_vpns = None
+ log_level = None
+ log_file = None
+
+ def __str__(self):
+ return \
+ '[ConndState: {0} (default {1}), {2} conn\'s, {3} ips, {4} vpns ]'\
+ .format(self.online_mode, self.default_provider,
+ len(self.connections), len(self.online_ips),
+ len(self.connected_vpns))
+
+ def complete_str(self):
+ """Return a string representating the complete state."""
+
+ # general
+ parts = [
+ 'ConndState: online mode = "{0}" (default provider: {1})\n'
+ .format(self.online_mode, self.default_provider), ]
+
+ # subsys
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' subsys: online: ')
+ if self.subsys_online:
+ for subsys in self.subsys_online:
+ parts.append(subsys + ' ')
+ else:
+ parts.append('None ')
+ parts.append('; offline: ')
+ if self.subsys_offline:
+ for subsys in self.subsys_offline:
+ parts.append(subsys + ' ')
+ else:
+ parts.append('None ')
+ parts.append('; disabled: ')
+ if self.subsys_disabled:
+ for subsys in self.subsys_disabled:
+ parts.append(subsys + ' ')
+ else:
+ parts.append('None')
+ parts.append('\n')
+
+ # connections
+ parts.append(' conns: ')
+ if self.connections:
+ name, info, actions = self.connections[0]
+ parts.append('{0}: {1}, {2}\n'.format(name, info, actions))
+ else:
+ parts.append('None\n')
+ for name, info, actions in self.connections[1:]:
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' {0}: {1}, {2}\n'.format(name, info,
+ actions))
+
+ # actions
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' actions: ')
+ if self.actions:
+ parts.append(self.actions[0] + '\n')
+ else:
+ parts.append('None\n')
+ for action in self.actions[1:]:
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' {0}\n'.format(action))
+
+ # online IPs
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' IPs: ')
+ if self.online_ips:
+ parts.append(self.online_ips[0])
+ for curr_ip in self.online_ips[1:]:
+ parts.append(', {0}'.format(curr_ip))
+ else:
+ parts.append('None')
+ parts.append('\n')
+
+ # VPNs
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' VPNs: ')
+ if self.connected_vpns:
+ parts.append(self.connected_vpns[0])
+ for vpn in self.connected_vpns[1:]:
+ parts.append(', {0}'.format(vpn))
+ else:
+ parts.append('None')
+ parts.append('\n')
+
+ # log level and target:
+ # ' connctns: (repeated here for correct aligning)
+ parts.append(' Log: level {0}'.format(self.log_level))
+ if self.log_file:
+ parts.append(' to {0}'.format(self.log_file))
+ parts.append('\n')
+
+ return ''.join(parts)
+
+ @staticmethod
+ def run_tell_connd(tell_connd_binary=DEFAULT_TELL_CONND_BINARY, *args):
+ """
+ Run "tell-connd --status", return output iterator and return code.
+
+ Catches all it can, so should usually return (output, return_code)
+ where output = [line1, line2, ...]
+
+ If return_code != 0, output's first line(s) is error message.
+
+ .. todo:: Use reST parameter description here.
+ """
+ try:
+ cmd_parts = [tell_connd_binary, ]
+ cmd_parts.extend(*args)
+ output = subprocess.check_output(cmd_parts,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True, shell=False) # py3:, timeout=TIMEOUT)
+ return EX_OK, output.splitlines()
+ except subprocess.CalledProcessError as cpe: # non-zero return status
+ output = [
+ 'tell-connd exited with status {0}'.format(cpe.returncode), ]
+ output.extend(cpe.output.splitlines())
+ return cpe.returncode, output
+ # not python-2-compatible:
+ # except subprocess.TimeoutExpired as texp:
+ # output = [
+ # 'tell-connd timed out after {0}s. Returning -1'.format(
+ # texp.timeout), ]
+ # output.extend(te.output.splitlines())
+ # return -1, output
+ except Exception as exp:
+ output = [str(exp), ]
+ return -1, output
+
+ @staticmethod
+ def get_state(tell_connd_binary=DEFAULT_TELL_CONND_BINARY):
+ """
+ Get actual state from "tell-connd --status".
+
+ Returns (err_code, output_lines) if something goes wrong running
+ binary; raises assertion if output from tell-connd does not match
+ expected format.
+
+ .. todo:: Use reST parameter description here.
+ """
+
+ state = ConndState()
+
+ err_code, all_lines = ConndState.run_tell_connd(tell_connd_binary,
+ ['--status', ])
+ if err_code != EX_OK:
+ return err_code, all_lines
+
+ output = iter(all_lines)
+
+ # first section
+ line = next(output).strip()
+ state.online_mode = regexp('online mode\s*:\s*(.+)$', line).groups()[0]
+ assert state.online_mode in ALL_MODES, \
+ 'unexpected online mode: {0}'.format(state.online_mode)
+
+ line = next(output).strip()
+ state.default_provider = regexp('default provider\s*:\s*(.*)$',
+ line).groups()[0]
+ if len(state.default_provider) == 0:
+ state.default_provider = None
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # subsys
+ line = next(output).strip()
+ assert line == 'subsys', 'expected subsys but got {0}'.format(line)
+ line = next(output).strip()
+ state.subsys_online = regexp('online\s*:\s*(.*)$', line)\
+ .groups()[0].split()
+ for subsys in state.subsys_online:
+ assert subsys in ALL_SUBSYS, \
+ 'unexpected subsys: {0}'.format(subsys)
+ line = next(output).strip()
+ state.subsys_offline = regexp('offline\s*:\s*(.*)$', line)\
+ .groups()[0].split()
+ for subsys in state.subsys_offline:
+ assert subsys in ALL_SUBSYS, \
+ 'unexpected subsys: {0}'.format(subsys)
+ line = next(output).strip()
+ state.subsys_disabled = regexp('disabled\s*:\s*(.*)$', line)\
+ .groups()[0].split()
+ for subsys in state.subsys_disabled:
+ assert subsys in ALL_SUBSYS, \
+ 'unexpected subsys: {0}'.format(subsys)
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # connection map
+ state.connections = []
+ line = next(output).strip()
+ assert line == 'connection map:', \
+ 'expected connection map but got {0}'.format(line)
+ expect_new = True
+ for line in output:
+ line = line.strip()
+ if len(line) == 0:
+ continue
+ if expect_new:
+ if line == 'end of connection map':
+ break
+ conn_name, conn_info = regexp(
+ '\[\s*(.+)\s*\]\s*:\s*\(\s*(.*)\s*\)', line).groups()
+ expect_new = False
+ else:
+ conn_actions = regexp('actions\s*:\s*\[\s*(.+)\s*\]', line)\
+ .groups()
+ state.connections.append((conn_name, conn_info, conn_actions))
+ expect_new = True
+ assert expect_new
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # actions
+ line = next(output).strip()
+ state.actions = regexp('actions\s*:\s*(.*)', line).groups()[0].split()
+ if len(state.actions) == 1 and state.actions[0].strip() == '-':
+ state.actions = []
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # online IPs
+ line = next(output).strip()
+ state.online_ips = regexp('list of online ips\s*:\s*(.*)', line)\
+ .groups()[0].split()
+ if len(state.online_ips) == 1 \
+ and state.online_ips[0].strip() == 'NONE':
+ state.online_ips = []
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # VPNs
+ state.connected_vpns = []
+ line = next(output).strip()
+ assert line == 'vpns connected:', \
+ 'expected vpns connected, got {0}'.format(line)
+ for line in output:
+ line = line.strip()
+ if len(line) == 0:
+ continue
+ elif line == 'end of list of connected vpns':
+ break
+ else:
+ state.connected_vpns.append(line)
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+
+ # log level
+ line = next(output).strip()
+ state.log_level, state.log_file = \
+ regexp('Logging with level (.+)(?:\s+to\s+(.+))?', line).groups()
+
+ # done
+ line = next(output).strip()
+ assert len(line) == 0, 'expected empty line, but got {0}'.format(line)
+ line = next(output).strip()
+ assert line == 'Done.', 'expect Done but got {0}'.format(line)
+
+ return state
+
+ @staticmethod
+ def set_online_mode(state, provider=None,
+ tell_connd_binary=DEFAULT_TELL_CONND_BINARY):
+ """
+ Change online state with optional provider.
+
+ Provider is silently ignored for ONLINE_MODE_ALWAYS_OFFLINE and
+ otherwise required.
+
+ Returns result of :py:func:`run_tell_connd`: (error_code, output_lines).
+ """
+
+ # check args
+ need_provider = True
+ if state == ONLINE_MODE_DIAL_ON_DEMAND:
+ args = ['--dial-on-demand', provider]
+ elif state == ONLINE_MODE_DIAL_ON_COMMAND:
+ args = ['--dial-on-command', provider]
+ elif state == ONLINE_MODE_ALWAYS_ONLINE:
+ args = ['--online', provider]
+ elif state == ONLINE_MODE_ALWAYS_OFFLINE:
+ args = ['--offline', ]
+ need_provider = False
+ else:
+ raise ValueError('unknown state: {0}!'.format(state))
+ if need_provider and not provider:
+ raise ValueError('Given state {0} requires a provider!'.format(
+ state))
+
+ # run binary
+ return ConndState.run_tell_connd(tell_connd_binary, args)
+
+
def test():
    """Get state and print it."""
    state = ConndState.get_state()
    if isinstance(state, ConndState):
        print(state)
        print(state.complete_str())
        return
    # error case: get_state handed back (err_code, output_lines)
    err_code, output_lines = state
    print('tell-connd failed with error code {0} and output:'.format(
        err_code))
    for line in output_lines:
        print('tell-connd: {0}'.format(line))
    print('(end of tell-connd output)')
+
+
def main():
    """Script entry point; simply runs :py:func:`test`."""
    test()


if __name__ == '__main__':
    main()
--- /dev/null
+# This Python file uses the following encoding: utf-8
+"""
+
+SUMMARY
+------------------------------------------------------
+Dialing, hangup, general provider online/offline state control.
+
+Copyright: 2017 Intra2net AG
+
This used to be part of the sysmisc utility initially, which caused an import
cycle with the arnied wrapper.
+
+
+CONTENTS
+------------------------------------------------------
+dialout
+ Generic access to the system’s dialing mode. Allows for requesting manual
+ or permanently online dial state.
+
+arnied_dial_permanent
    Enter permanently online dialing state.
+
+arnied_dial_do
+ Enter dial on command state.
+
+arnied_dial_hangup
+ Terminate uplink unconditionally.
+
+All API methods come with the optional argument ``block`` (bool) to request the
+call to block until the system completes the state transition successfully. The
+completion timeout is currently 10 seconds (see the definition of
+``DIALTOOLS_TIMEOUT`` below).
+
+
+IMPLEMENTATION
+------------------------------------------------------
+
+"""
+
+
+import re
+import io
+import time
+import logging
+log = logging.getLogger('dial')
+
+import arnied_wrapper
+import simple_cnf
+import sysmisc
+
+HAVE_IPADDRESS = True
+try:
+ import ipaddress
+except ImportError: # guest
+ HAVE_IPADDRESS = False
+
# NOTE(review): "arnied_dial_doc" looks like a typo for "arnied_dial_do"
# (the CONTENTS section above documents arnied_dial_do) -- confirm that
# every name listed here actually exists in this module.
__all__ = (
    "arnied_dial_hangup", "arnied_dial_doc", "arnied_dial_permanent", "dialout", "get_wan_address", "DIALOUT_MODE_PERMANENT", "DIALOUT_MODE_MANUAL", "DIALOUT_MODE_DEFAULT", "DIALOUT_MODE_BY_NAME"
)
+
+TELL_CONND_BIN = "/usr/intranator/bin/tell-connd"
+
+DIALOUT_MODE_PERMANENT = 0
+DIALOUT_MODE_MANUAL = 1
+DIALOUT_MODE_DEFAULT = DIALOUT_MODE_PERMANENT
+DIALOUT_MODE_BY_NAME = {"permanent": DIALOUT_MODE_PERMANENT, "manual": DIALOUT_MODE_MANUAL}
+DIALOUT_MODE_CNF = {DIALOUT_MODE_PERMANENT: "ONLINE", DIALOUT_MODE_MANUAL: "MANUAL"}
+
# compiling this regex needs the provider id and is postponed due to
# the peculiar implementation of the connd online condition
# (raw strings: the patterns contain \[ and backslash escapes that are
# invalid escape sequences in ordinary string literals)
NEEDLE_MEMO = r" \[%s\] :(.*connected online fsm<online>.*)"
NEEDLE_OFFLINE = re.compile(r"connection map:\nend of connection map")

DIALTOOLS_HANGUP_BIN = "/usr/intranator/bin/hangup"
DIALTOOLS_DOC_BIN = "/usr/intranator/bin/doc"
DIALTOOLS_TIMEOUT = 10  # s
# NOTE: a duplicate TELL_CONND_BIN assignment was removed here; the
# constant is already defined once near the top of the module
+
+
def _connd_online(prid="P1"):
    """Return True when connd reports provider `prid` as being online."""
    ok, output, _ = sysmisc.run_cmd_with_pipe([TELL_CONND_BIN, "--status"])
    if ok is False:
        return False
    match = re.search(NEEDLE_MEMO % prid, output)
    return match is not None
+
+
def _connd_offline():
    """Return True when connd reports an empty connection map (offline)."""
    ok, output, _ = sysmisc.run_cmd_with_pipe([TELL_CONND_BIN, "--status"])
    if not ok:
        # preserve the falsy status value, mirroring `succ and ...`
        return ok
    return NEEDLE_OFFLINE.search(output) is not None
+
+
def arnied_dial_hangup(block=False):
    """
    Take down any currently active provider. This leverages arnied to
    accomplish the disconnect which is apparently more appropriate than
    having connd do it directly.

    :param bool block: whether to wait until the system is actually offline
    :returns: Whether the ``hangup`` command succeeded.
    :rtype: int (dial result as above)
    """
    log.debug("requested arnied_dial_hangup%s",
              " (blocking)" if block else "")
    if block is not False:
        # blocking variant: poll until connd reports an empty connection map
        res, err = sysmisc.cmd_block_till([DIALTOOLS_HANGUP_BIN],
                                          DIALTOOLS_TIMEOUT, _connd_offline)
        log.debug("arnied_dial_hangup → (%d, %r)", res, err)
        return res
    ok = sysmisc.run_cmd_with_pipe([DIALTOOLS_HANGUP_BIN])[0]
    if ok is True:
        return sysmisc.RUN_RESULT_OK
    return sysmisc.RUN_RESULT_FAIL
+
+
def arnied_dial_doc(prid="P1", block=False):
    """
    Bring provider up via arnied manual dial.

    :param prid: Provider id, default *P1*. It is up to the caller to ensure
                 this is a valid provider id.
    :type prid: str
    :param bool block: whether to wait until the provider is actually online
    :returns: Whether the ``doc`` command succeeded.
    :rtype: int (dial result as above)
    """
    log.debug("requested arnied_dial_doc%s", " (blocking)" if block else "")
    if block is not False:
        # blocking variant: poll until connd reports this provider online
        res, err = sysmisc.cmd_block_till([DIALTOOLS_DOC_BIN, prid],
                                          DIALTOOLS_TIMEOUT, _connd_online,
                                          prid=prid)
        log.debug("arnied_dial_doc → (%d, %r)", res, err)
        return res
    ok = sysmisc.run_cmd_with_pipe([DIALTOOLS_DOC_BIN, prid])[0]
    if ok is True:
        return sysmisc.RUN_RESULT_OK
    return sysmisc.RUN_RESULT_FAIL
+
+
def arnied_dial_permanent(prid="P1", block=False):
    """
    Set permanent online state. Since the arnied dial helpers cannot initiate a
    permanent online state, achieve this via arnied.

    :param prid: Provider id, default *P1*. It is up to the caller to ensure
                 this is a valid provider id.
    :type prid: str
    :param bool block: whether to wait until the provider is actually online
    :returns: Whether the ``tell-connd`` command succeeded.
    :rtype: int (dial result as above)
    """
    # fix: the conditional used to wrap the whole format string (including
    # the unfilled %s), so the non-blocking case logged an empty message
    log.debug("requested connd_dial_online%s", " (blocking)" if block else "")

    cnf = simple_cnf.SimpleCnf()
    cnf.add("DIALOUT_MODE", DIALOUT_MODE_CNF[DIALOUT_MODE_PERMANENT])
    cnf.add("DIALOUT_DEFAULTPROVIDER_REF", "1")

    def aux():
        # mimic the (success, stdout, error) triple cmd_block_till expects
        return arnied_wrapper.set_cnf_pipe(cnf, block=block), "", None

    if block is False:
        # fix: aux() returns a triple; the original compared the whole tuple
        # against True and therefore always reported failure
        succ, _, _ = aux()
        return sysmisc.RUN_RESULT_OK if succ is True else sysmisc.RUN_RESULT_FAIL

    res, err = sysmisc.cmd_block_till(aux, DIALTOOLS_TIMEOUT, _connd_online,
                                      prid=prid)
    log.debug("arnied_dial_permanent: result (%d, %r)", res, err)
    return res
+
+
def dialout(mode=DIALOUT_MODE_DEFAULT, prid="P1", block=True):
    """
    Go online with the given provider using the requested dial mode.

    :param mode: How to dial (permanent vs. manual).
    :type mode: int (``DIALOUT_MODE``) | string
    :param prid: Provider id, default *P1*. It is up to the caller to ensure
        this is a valid provider id.
    :type prid: str (constrained by available providers, obviously).
    :param block: Whether to block until completion of the command.
    :type block: bool
    :returns: Whether the command succeeded.
    :rtype: int (dial result)
    :raises: :py:class:`ValueError` if invalid dial mode was selected
    """
    log.info("go online with provider")

    dmode = None
    if isinstance(mode, int):
        dmode = mode
    elif isinstance(mode, str):
        try:
            dmode = DIALOUT_MODE_BY_NAME[mode]
        except KeyError:
            # fix: was a bare except; fall through to the ValueError below
            # with dmode still None
            log.error("invalid online mode name “%s” requested" % mode)

    if dmode is None:
        raise ValueError("exiting due to invalid online mode %r" % mode)

    log.debug("go online, mode=%d(%s), id=%r", dmode, mode, prid)

    if dmode == DIALOUT_MODE_PERMANENT:
        return arnied_dial_permanent(prid, block=block)

    if dmode == DIALOUT_MODE_MANUAL:
        return arnied_dial_doc(prid, block=block)

    raise ValueError("invalid dialout mode %r/%r requested" % (mode, dmode))
+
+
def get_wan_address(vm=None):
    """
    Retrieve the current WAN IP address of client ``vm`` or localhost.

    :param vm: Guest (client) to query; will ask local connd if left unspecified.
    :type vm: virttest.qemu_vm.VM | None

    :returns: The IPv4 address. For correctness, it will use the
              ipaddress module if available. Otherwise it falls back
              on untyped data.
    :rtype: None | (ipaddress.IPv4Address | str)
    """
    log.info("query current lease")
    if vm is None:
        ok, status_output, _ = sysmisc.run_cmd_with_pipe([TELL_CONND_BIN, "--status"])
        if ok is False:
            return None
    else:
        status_output = vm.session.cmd_output("%s --status" % TELL_CONND_BIN)

    # scan the status dump line by line for the online provider entry;
    # iterating the StringIO yields lines including their trailing newline,
    # which the " )\n" slice below relies on
    for line in io.StringIO(status_output):
        if line.find("connected online fsm<online> IP:") == -1:
            continue
        address = line[line.find("IP:") + 3:line.find(" )\n")]  # beurk
        if HAVE_IPADDRESS is True:
            return ipaddress.IPv4Address(str(address))
        return address
    return None
--- /dev/null
+# This Python file uses the following encoding: utf-8
+"""
+
+SUMMARY
+------------------------------------------------------
+Guest utility for fetchmail, spamassassin, horde and other email functionality tests.
+
+Copyright: Intra2net AG
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+import time
+import os
+import difflib
+import socket
+from inspect import currentframe
+import re
+import subprocess
+import logging
+log = logging.getLogger('mail_utils')
+
+import smtplib
+from email.mime.audio import MIMEAudio
+from email.mime.base import MIMEBase
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.encoders import encode_base64
+from email.utils import formatdate
+from email.parser import Parser
+import mimetypes
+
+import arnied_wrapper
+
+
class EmailException(Exception):
    """Base class for all email validation errors in this module."""
    pass
+
+
class EmailNotFound(EmailException):
    """An expected email could not be found at its location."""
    pass
+
+
class InvalidEmailHeader(EmailException):
    """An email header failed the present/absent value checks."""
    pass
+
+
class InvalidEmailContent(EmailException):
    """An email body part failed the present/absent value checks."""
    pass
+
+
class EmailIDError(EmailException):
    """Base class for errors related to Autotest-Message-ID handling."""
    pass
+
+
class MismatchedEmailID(EmailIDError):
    """A message id could not be matched against any candidate message."""
    pass
+
+
class MissingEmailID(EmailIDError):
    """A message contains no Autotest-Message-ID header to match on."""
    pass
+
+
class EmailMismatch(EmailException):
    """Source and target email differ beyond the allowed tolerance."""
    pass
+
+
class MailValidator():
    """Class for validation of emails."""

    def target_path(self, v=None):
        """Get or set the path where target (received) emails are located."""
        if v is not None:
            self._target_path = v
        else:
            return self._target_path
    target_path = property(target_path, target_path)

    def source_path(self, v=None):
        """Get or set the path where source emails (not sent) are located."""
        if v is not None:
            self._source_path = v
        else:
            return self._source_path
    source_path = property(source_path, source_path)

    def smtp_sender(self, v=None):
        """Get or set the sender address used when injecting via SMTP."""
        if v is not None:
            self._smtp_sender = v
        else:
            return self._smtp_sender
    smtp_sender = property(smtp_sender, smtp_sender)

    def compare_emails_method(self, method="basic"):
        """
        Set email comparison method for validation.

        :param str method: one of "basic", "headers", "existence"
        :raises: :py:class:`ValueError` if chosen method is invalid
        """
        if method == "basic":
            self._compare_emails_method = self._default_compare_emails
        elif method == "headers":
            self._compare_emails_method = self._compare_emails_by_basic_headers
        elif method == "existence":
            self._compare_emails_method = self._compare_emails_by_existence
        else:
            raise ValueError("Invalid email comparison method %s" % method)
    compare_emails_method = property(fset=compare_emails_method)

    def __init__(self, source_path, target_path):
        """
        Construct a validator instance.

        :param str source_path: path to find source emails (not sent)
        :param str target_path: path to find target emails (received)

        .. note:: The comparison method can be redefined using the variety of
            private method implementations.
        """
        self._target_path = target_path
        self._source_path = source_path
        self._smtp_sender = "no_source@inject.smtp"
        self._compare_emails_method = self._default_compare_emails

    def inject_emails(self, username, original_user):
        """
        Inject emails from `source_path` to `target_path`.

        :param str username: username for the mail injection script
        :param str original_user: original username for the mail injection script

        In order to restore acl rights as well put a mailbox.dump file in the source path.
        """
        log.info("Injecting emails for user %s", username)

        # inject emails from test data
        # NOTE(review): the command is interpolated into a shell string
        # (shell=True); usernames and paths come from test configuration and
        # must stay trusted input
        cmd = "/usr/intranator/bin/restore_mail_inject.pl -u " + username + " -s " + self.source_path
        if original_user != "":
            cmd += " -m " + os.path.join(self.source_path, "mailboxes.dump") + " -o " + original_user

        result = subprocess.check_output(cmd, shell=True)
        log.debug(result)

    def inject_smtp(self, usernames, emails):
        """
        Inject emails from `source_path` using python's SMTP library.

        :param usernames: usernames of the localhost receivers for each email
        :type usernames: [str]
        :param emails: emails to be sent to each user
        :type emails: [str]
        """
        usernames_string = ",".join(usernames)
        log.info("Sending emails to %s", usernames_string)
        server = smtplib.SMTP('localhost')
        hostname = socket.gethostname()
        users = [username + "@" + hostname for username in usernames]

        for email in emails:
            log.info("Sending email %s", email)
            with open(os.path.join(self.source_path, email), 'rb') as f:
                email_content = f.read()
            server.sendmail(self.smtp_sender, users, email_content)
        server.quit()

        # Wait till SMTP queue is processed
        arnied_wrapper.wait_for_email_transfer()

    def verify_email_id(self, email, emails_list, timeout, in_target=True):
        """
        Verify that the id of an email is present in a list and return that email's
        match in this list.

        :param str email: email filename
        :param emails_list: email among which the first email has to be found
        :type emails_list: [str]
        :param int timeout: timeout for extracting the source and target emails
        :param bool in_target: whether the verified email is on the target side

        If `in_target` is set to True we are getting the target id from the target list
        of a source email. Otherwise we assume a target email from a source list.
        """
        if in_target:
            email = self._extract_email_paths(self.source_path, [email], timeout)[0]
            emails_list = self._extract_email_paths(self.target_path, emails_list, timeout)
        else:
            email = self._extract_email_paths(self.target_path, [email], timeout)[0]
            emails_list = self._extract_email_paths(self.source_path, emails_list, timeout)

        email_id = self._extract_message_id(email)
        match = self._find_message_with_id(email_id, emails_list)
        return os.path.basename(match)

    def verify_emails(self, source_emails, target_emails, timeout):
        """
        Check injected e-mails for a user.

        :param source_emails: emails at the source location
        :type source_emails: [str]
        :param target_emails: emails at the target (server) location
        :type target_emails: [str]
        :param int timeout: timeout for extracting the source and target emails
        :raises: :py:class:`EmailNotFound` if target email is not found on server
        """
        source_paths = self._extract_email_paths(self.source_path, source_emails, timeout)
        target_paths = self._extract_email_paths(self.target_path, target_emails, timeout)

        log.info("Verifying emails at %s with %s", self.target_path, self.source_path)
        for target in target_paths:
            log.info("Verifying email %s", target)
            target_id = self._extract_message_id(target)
            source = self._find_message_with_id(target_id, source_paths)
            source_paths.remove(source)
            self._compare_emails_method(target, source, 1)

        # any leftover source emails were never matched by a target email
        if len(source_paths) > 0:
            raise EmailNotFound("%s target mails could not be found on server.\n%s"
                                % (len(source_paths), "\n".join(source_paths)))
        else:
            log.info("All e-mails at %s verified!", self.target_path)

    def _validate_values(self, text, present_values, absent_values):
        """
        Evaluate alternative present/absent value requirements against text.

        :param str text: text to scan for the given values
        :param present_values: strings of which at least one must be contained
        :type present_values: [str] or None
        :param absent_values: strings of which at least one must be missing
        :type absent_values: [str] or None
        :returns: pair of booleans telling whether the present and the absent
                  requirement are satisfied (an empty or unset value list
                  always counts as satisfied)
        :rtype: (bool, bool)
        """
        if not present_values:
            present_values = []
        else:
            log.info("for present '%s'", "', '".join(present_values))
        if not absent_values:
            absent_values = []
        else:
            log.info("for absent '%s'", "', '".join(absent_values))
        present_valid = any(value in text for value in present_values)
        absent_valid = any(value not in text for value in absent_values)
        return (present_valid or len(present_values) == 0,
                absent_valid or len(absent_values) == 0)

    def assert_header(self, emails, header, present_values=None, absent_values=None, timeout=30):
        """
        Check headers for contained and not contained strings in a list of messages.

        :param emails: emails whose headers will be checked
        :type emails: [str]
        :param str header: header that will be validated for each email
        :param present_values: strings that have to be present in the header
        :type present_values: [str] or None
        :param absent_values: strings that have to be absent in the header
        :type absent_values: [str] or None
        :param int timeout: timeout for extracting the source and target emails
        :raises: :py:class:`InvalidEmailHeader` if email header is not valid

        Every list of present and respectively absent values contains alternative values.
        At least one of present and one of absent should be satisfied.
        """
        target_paths = self._extract_email_paths(self.target_path, emails, timeout)
        for email_path in target_paths:
            # context manager so the handle is closed even on parse errors
            with open(email_path, "r") as email_file:
                verified_email = Parser().parse(email_file, headersonly=True)
            log.debug("Extracted email headers:\n%s", verified_email)

            log.info("Checking header '%s' in %s", header, email_path)
            header_value = verified_email[header]
            if header_value is None:
                # robustness fix: a missing header now fails the value checks
                # below instead of raising TypeError on the membership test
                header_value = ""
            present_ok, absent_ok = self._validate_values(
                header_value, present_values, absent_values)
            if not present_ok or not absent_ok:
                raise InvalidEmailHeader("Message header '%s' in %s is not valid:\n%s"
                                         % (header, email_path, verified_email[header]))
            log.info("Message header '%s' in %s is valid!", header, email_path)

    def assert_content(self, emails, content_type, present_values=None, absent_values=None, timeout=30):
        """
        Check contents for contained and not contained strings in a list of messages.

        :param emails: emails whose content will be checked
        :type emails: [str]
        :param str content_type: type of the content that will be checked for values
        :param present_values: strings that have to be present in the content
        :type present_values: [str] or None
        :param absent_values: strings that have to be absent in the content
        :type absent_values: [str] or None
        :param int timeout: timeout for extracting the source and target emails
        :raises: :py:class:`InvalidEmailContent` if email content is not valid

        Every list of present and respectively absent values contains alternative values.
        At least one of present and one of absent should be satisfied.
        """
        target_paths = self._extract_email_paths(self.target_path, emails, timeout)
        for email_path in target_paths:
            # context manager so the handle is closed even on parse errors
            with open(email_path, "r") as email_file:
                verified_email = Parser().parse(email_file)
            log.debug("Extracted email content:\n%s", verified_email)
            content = ""
            for part in verified_email.walk():
                log.debug("Extracted %s part while looking for %s",
                          part.get_content_type(), content_type)
                if part.get_content_type() == content_type:
                    content = part.get_payload(decode=True)
                    if isinstance(content, bytes):
                        content = content.decode()
                    # NOTE: only one such element is expected
                    break

            log.info("Checking content '%s' in %s", content_type, email_path)
            present_ok, absent_ok = self._validate_values(
                content, present_values, absent_values)
            if not present_ok or not absent_ok:
                raise InvalidEmailContent("Message content '%s' in %s is not valid:\n%s"
                                          % (content_type, email_path, content))
            log.info("Message content '%s' in %s is valid!", content_type, email_path)

    def send_email_with_files(self, username, file_list,
                              wait_for_transfer=True,
                              autotest_signature=None,
                              subject="my subject"):
        """
        Send a generated email with attachments instead of an .eml file
        containing attachments.

        :param str username: username of a localhost receiver of the email
        :param file_list: files attached to an email
        :type file_list: [str]
        :param wait_for_transfer: specify whether to wait until arnied_wrapper
                                  confirms email transfer; you can also specify
                                  a fixed timeout (seconds)
        :type wait_for_transfer: bool or int
        :param autotest_signature: text to insert as value for header
                                   X-Autotest-Signature for simpler recognition
                                   of mail (if None do not add header)
        :type autotest_signature: str or None
        """
        text = 'This is an autogenerated email.\n'

        server = smtplib.SMTP('localhost')
        hostname = socket.gethostname()
        user = username + "@" + hostname

        if file_list:  # empty or None or so
            msg = MIMEMultipart()  # pylint: disable=redefined-variable-type
            msg.attach(MIMEText(text, _charset='utf-8'))
        else:
            msg = MIMEText(text, _charset='utf-8')  # pylint: disable=redefined-variable-type
        msg['From'] = self.smtp_sender
        msg['To'] = user
        msg['Subject'] = subject
        msg['Date'] = formatdate(localtime=True)
        msg.preamble = 'This is a multi-part message in MIME format.\n'
        msg.add_header('X-Autotest-Creator',
                       self.__class__.__module__ + '.' +
                       self.__class__.__name__ + '.' +
                       currentframe().f_code.co_name)
        # (with help from http://stackoverflow.com/questions/5067604/determine-
        # function-name-from-within-that-function-without-using-traceback)
        if autotest_signature:
            msg.add_header('X-Autotest-Signature', autotest_signature)

        # attach files
        for filename in file_list:
            fullpath = os.path.join(self.source_path, filename)

            # Guess the content type based on the file's extension. Encoding
            # will be ignored, although we should check for simple things like
            # gzip'd or compressed files.
            ctype, encoding = mimetypes.guess_type(fullpath)
            if ctype is None or encoding is not None:
                # No guess could be made, or the file is encoded (compressed), so
                # use a generic bag-of-bits type.
                ctype = 'application/octet-stream'

            maintype, subtype = ctype.split('/', 1)
            log.debug("Creating message containing file {} of mime type {}"
                      .format(filename, ctype))
            part = None
            if maintype == 'text':
                with open(fullpath, 'rt') as file_handle:
                    # Note: we should handle calculating the charset
                    part = MIMEText(file_handle.read(), _subtype=subtype)  # pylint:disable=redefined-variable-type
            elif maintype == 'image':
                with open(fullpath, 'rb') as file_handle:
                    part = MIMEImage(file_handle.read(), _subtype=subtype)  # pylint:disable=redefined-variable-type
            elif maintype == 'audio':
                with open(fullpath, 'rb') as file_handle:
                    part = MIMEAudio(file_handle.read(), _subtype=subtype)  # pylint:disable=redefined-variable-type
            else:
                part = MIMEBase(maintype, subtype)  # pylint:disable=redefined-variable-type
                with open(fullpath, 'rb') as file_handle:
                    part.set_payload(file_handle.read())
                # Encode the payload using Base64
                encode_base64(part)
            # Set the filename parameter
            part.add_header('Content-Disposition', 'attachment',
                            filename=filename)
            msg.attach(part)

        log.debug("Message successfully created")
        # send via SMTP

        log.debug("Sending message from %s to %s" %
                  (self.smtp_sender, user))
        server.sendmail(self.smtp_sender, user, msg.as_string())
        server.close()

        if isinstance(wait_for_transfer, int):
            arnied_wrapper.wait_for_email_transfer(timeout=wait_for_transfer)
        elif wait_for_transfer:
            arnied_wrapper.wait_for_email_transfer()

    def _extract_email_paths(self, path, emails, timeout):
        """Check and return the absolute paths of a list of emails."""
        log.debug("Extracting messages %s", emails)
        if len(emails) == 0:
            emails = os.listdir(path)
        email_paths = []
        for expected_email in emails:
            # TODO: this can be improved by matching the emails themselves
            if expected_email in ["cyrus.cache", "cyrus.header", "cyrus.index",
                                  "Entw&APw-rfe", "Gesendete Objekte", "Gel&APY-schte Elemente",
                                  "mailboxes.dump", "tmp"]:
                continue
            email_path = os.path.join(path, expected_email)
            for i in range(timeout):
                if os.path.isfile(email_path):
                    email_paths.append(email_path)
                    break
                elif i == timeout - 1:
                    raise EmailNotFound("Target message %s could not be found on server at %s within %ss"
                                        % (expected_email, path, timeout))
                time.sleep(1)
        log.debug("%s mails extracted at %s.", len(email_paths), path)
        return email_paths

    def _find_message_with_id(self, message_id, message_paths):
        """Find message with id among a list of message paths."""
        log.debug("Looking for a match for the message with id %s", message_id)
        for message_path in message_paths:
            extracted_id = self._extract_message_id(message_path)
            log.debug("Extracted id %s from candidate %s", extracted_id, message_path)
            if message_id == extracted_id:
                log.debug("Found match at %s", message_path)
                return message_path
        raise MismatchedEmailID("The message with id %s could not be matched or wasn't expected among %s"
                                % (message_id, ", ".join(message_paths)))

    def _extract_message_id(self, message_path):
        """
        Given a message file path extract the Message-ID and raise error if
        none was found.
        """
        message_id = ""
        with open(message_path, errors='ignore') as f:
            content = f.read()
        for line in content.split("\n"):
            match_id = re.match("Autotest-Message-ID: (.+)", line)
            if match_id is not None:
                message_id = match_id.group(1).rstrip('\r\n')
        if message_id == "":
            raise MissingEmailID("No id was found in target message %s so it cannot be properly matched"
                                 % (message_path))
        return message_id

    def _default_compare_emails(self, source_email_path, target_email_path, tolerance=1):
        """Use python provided diff functionality to compare target emails with source ones."""
        # fix: the file handles used to be left open (leak)
        with open(source_email_path, "r") as source_email_file:
            source_email = source_email_file.read()
        with open(target_email_path, "r") as target_email_file:
            target_email = target_email_file.read()
        s = difflib.SequenceMatcher(None, source_email, target_email)
        diffratio = s.ratio()
        log.debug("Target message comparison ratio is %s.", diffratio)
        if diffratio < tolerance:
            # fix: the values used to be passed logging-style as extra args,
            # leaving the %s placeholders unexpanded in the exception message
            raise EmailMismatch("Target message is too different from the source (difference %s < tolerance %s)."
                                % (diffratio, tolerance))

    def _compare_emails_by_basic_headers(self, source_email_path, target_email_path, tolerance=1):
        """Compare target emails with source ones by their basic headers and body."""
        with open(source_email_path, errors="ignore") as f:
            source_email = Parser().parse(f)
        source_body = ""
        for part in source_email.walk():
            if part.get_content_type() in ["text/plain", "text/html"]:
                source_body = part.get_payload()
                break

        with open(target_email_path, errors="ignore") as f:
            target_email = Parser().parse(f)
        target_body = ""
        for part in target_email.walk():
            if part.get_content_type() in ["text/plain", "text/html"]:
                target_body = part.get_payload()
                break

        if source_email['From'] != target_email['From']:
            raise EmailMismatch("Target message sender %s is too different from the source one %s" %
                                (target_email['From'], source_email['From']))
        if source_email['To'] != target_email['To']:
            raise EmailMismatch("Target message recipient %s is too different from the source one %s" %
                                (target_email['To'], source_email['To']))
        if source_email['Subject'] != target_email['Subject']:
            raise EmailMismatch("Target message subject '%s' is too different from the source one '%s'" %
                                (target_email['Subject'], source_email['Subject']))
        if source_email['Date'] != target_email['Date']:
            raise EmailMismatch("Target message date %s is too different from the source one %s" %
                                (target_email['Date'], source_email['Date']))
        if source_body != target_body:
            raise EmailMismatch("Target message body '%s' is too different from the source one '%s'" %
                                (target_body, source_body))

    def _compare_emails_by_existence(self, source_email_path, target_email_path, tolerance=1):
        """Weak email validation based only on presence of file"""
        return True
+
+
def prep_email_header(email_file, value, regex=None, criterion="envelopeto"):
    """
    Replace value in a provided email file.

    :param str email_file: file to use for the replacement
    :param str value: value to replace the first matched group with
    :param regex: regular expression to use when replacing a header value
    :type regex: str or None
    :param str criterion: criterion to use for replacement, one
                          of 'envelopeto' or 'received'
    :raises: :py:class:`ValueError` if the choice of criterion is invalid

    In some cases this function is reusing arnied wrapper's cnf value
    preparation but for email headers.
    """
    if criterion == "envelopeto":
        # consistency fix: use the module logger, not the root logger
        log.debug("Updating test emails' EnvelopeTo header")
        arnied_wrapper.prep_cnf_value(email_file, value, regex=regex)
    elif criterion == "received":
        log.debug("Updating test emails' Received header")
        with open(email_file, "r") as f:
            email_text = f.read()
        # fix: the substitution was accidentally performed twice;
        # re.sub already replaces all occurrences in one pass
        email_text = re.sub(regex, value, email_text)
        with open(email_file, "w") as f:
            f.write(email_text)
    else:
        raise ValueError("Invalid header preparation criterion '%s'" % criterion)
+
+
def create_users(usernames, config_file, params):
    """
    Create cyrus users from an absolute path to a user configuration file.

    :param usernames: usernames of the created users
    :type usernames: [str]
    :param str config_file: template config file to use for each user configuration
    :param params: parameters to use for each user configuration
    :type params: {str, str}
    :raises: :py:class:`RuntimeError` if the user is already or cannot be created
    """
    log.info("Creating new cyrus users %s", ", ".join(usernames))
    cyrus_user_path = params.get("cyrus_user_path", "/datastore/imap-mails/user/")

    # check for existence round (cyrus stores dots as carets on disk)
    for username in usernames:
        if os.path.exists(os.path.join(cyrus_user_path,
                                       username.replace(".", "^"))):
            raise RuntimeError("The user %s was already created" % username)

    for username in usernames:
        params["user"] = '%i: "%s"' % (-1, username)
        params["user_fullname"] = username
        # raw string: fix invalid \d escape in the pattern
        params_regex = {"user": r'%s,(-?\d+: ".*")'}
        arnied_wrapper.set_cnf_semidynamic([config_file],
                                           params, params_regex)

    for username in usernames:
        if not os.path.exists(os.path.join(cyrus_user_path, username.replace(".", "^"))):
            raise RuntimeError("The user %s could not be created" % username)
        else:
            log.info("Added new user %s", username)
    log.info("%s users successfully created!", len(usernames))
+
+
def parse_mail_file(file_name, headers_only=True):
    """
    Parse given email file (e.g. a banned message).

    :param str file_name: file name for the email
    :param bool headers_only: whether to parse only the email headers; set this
                              to False, e.g. if you want to check for
                              attachments using message.walk()
    :returns: parsed email
    :rtype: root message object (of class :py:class:`email.message.Message`)

    Removes the SMTP envelope surrounding the email if present. Only left-over
    might be a line with a '.' at end of non-multipart messages if `headers_only`
    is False.
    """
    with open(file_name, 'r') as read_handle:
        line = read_handle.readline()
        if line.startswith('EHLO'):
            # there is a smtp header. skip to its end
            # robustness fix: readline() returns '' at EOF, so also stop
            # there instead of looping forever when no DATA line exists
            while line and line.strip() != 'DATA':
                line = read_handle.readline()
            # the rest is the email plus a trailing '.' (ignored by parser if
            # multipart)
        else:
            read_handle.seek(0)  # forget we read the first line already
        return Parser().parse(read_handle, headersonly=headers_only)
+
+
def get_user_mail(user, mailbox='INBOX', **kwargs):
    """
    Iterate over mails in given folder of given user; yields parsed mails

    :param str user: name of the user whose mailbox is scanned
    :param str mailbox: name of mailbox to use, INBOX (default) for base folder;
                        name is modified using :py:func:`cyrus_escape`
    :param dict kwargs: all other args are forwarded to
                        :py:func:`parse_mail_file`
    :returns: nothing; but yields 2-tuples (path, email_msg) where first is the
              full path to the message on disc, and the latter is the outcome
              of :py:func:`parse_mail_file` for that file
    """

    folder = os.path.join('/datastore', 'imap-mails', 'user', user)
    if mailbox != 'INBOX':
        folder = os.path.join(folder, cyrus_escape(mailbox))
    for filename in os.listdir(folder):
        # mail files are named "<uid>." by cyrus; skip anything else
        # (raw string: fix invalid \d escape in the pattern)
        if not re.match(r'\d+\.', filename):
            continue
        full_path = os.path.join(folder, filename)
        yield full_path, parse_mail_file(full_path, **kwargs)
+
+
def cyrus_escape(user_or_folder, keep_path=False, regex=False):
    """
    Converts names of users or mailbox folders to cyrus format.

    quite a hack, just does the following hard-coded replacements:

    * . --> ^
    * / --> . (except if keep_path is True)
    * "u --> &APw- , "o --> &APY- , "a --> &AOQ-
      (if need more: this is modified utf-7)
    * inbox --> (the empty string)

    Would like to use a general modified utf-7-encoder/decoder but python has
    non built-in (see https://bugs.python.org/issue5305) and an extra lib like
    https://bitbucket.org/mjs0/imapclient/ would be overkill. After all, we
    control the input to this function via params and this is enough umlaut-
    testing I think...

    :param str user_or_folder: name of the user or folder string to escape
    :param bool keep_path: do not replace '/' with '.' so can still use result
                           as path name
    :param bool regex: result is used in grep or other regex, so ^, . and & are
                       escaped again with a backslash
    :returns: escaped user or folder string
    :rtype: str

    .. seealso:: :py:func:`cyrus_unescape`
    """
    # single-character substitutions done in one pass
    char_map = str.maketrans({'.': '^', 'ü': '&APw-', 'ä': '&AOQ-', 'ö': '&APY-'})
    result = user_or_folder.translate(char_map)
    # drop any spelling of the inbox folder
    for inbox_variant in ('inbox', 'INBOX', 'Inbox'):
        result = result.replace(inbox_variant, '')
    if not keep_path:
        result = result.replace('/', '.')
    if not regex:
        return result
    # escape the regex metacharacters the result may now contain
    for meta in ('^', '&', '.', '$'):
        result = result.replace(meta, '\\' + meta)
    return result
+
+
def cyrus_unescape(user_or_folder):
    """
    Undo effects of :py:func:`cyrus_escape` (but not all of them).

    :param str user_or_folder: name of the user or folder string to unescape
    :returns: unescaped user or folder string
    :rtype: str
    """
    # the empty string is what cyrus_escape produces for the inbox
    if user_or_folder == '':
        return 'inbox'
    result = user_or_folder.replace('.', '/')
    result = result.replace(r'\^', '.')
    return result.replace('^', '.')
--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+Utility for one-step dynamic cnfvar configuration.
+
+Copyright: Intra2net AG
+
+
+CONTENTS
+------------------------------------------------------
+Contains general as well as specialized versions of some of the main
+configurations performed by our tests.
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+import time
+import logging
+log = logging.getLogger('mk_config')
+
+# custom imports
+import arnied_wrapper as aw
+from arnied_wrapper import Delete, Update, Add, Child, batch_update_cnf, build_cnf
+from cnfline import build_cnfvar, build_group, build_intraclient
+from cnfline import build_nic, build_provider, build_user
+
+
+###############################################################################
+# MINOR CONFIGURATION
+###############################################################################
+
def simple(varname, data, filename):
    """
    Generate and save a single-variable configuration file.

    :param str varname: cnf variable name
    :param str data: cnf variable data
    :param str filename: config name
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create single-variable configuration file")
    tmp = build_cnfvar.BuildCnfVar(name=varname, data=data)
    [filename] = aw.prep_config_paths([filename], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving simple configuration to %s", filename)
    tmp.save(filename)
    return filename
+
+
def user(username="admin", instance=1, suffix="host"):
    """
    Generate and save a user configuration file.

    :param str username: username for the user variable
    :param int instance: instance number (for multiple users, -1 for next available)
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied user configuration")
    # local renamed to avoid shadowing this function's own name
    user_var = batch_update_cnf(
        build_user.BuildUser(data=username, instance=instance, line_no=1),
        [(Update, ("USER_FULLNAME", 0, username)),
         (Update, ("USER_GROUP_MEMBER_REF", 0, "1")),
         (Add, ("USER_GROUP_MEMBER_REF", 1, "2")),
         (Delete, "USER_WEBMAIL_MESSAGES_PER_PAGE"),
         (Delete, "USER_LOCALE"),
         (Delete, "USER_TRASH_DELETEDAYS"),
         (Delete, "USER_WEBMAIL_SIGNATURE")])
    user_cnf = "user-%d-%s.cnf" % (time.time(), suffix)
    [user_cnf] = aw.prep_config_paths([user_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving user configuration to %s", user_cnf)
    user_var.save(user_cnf)
    return user_cnf
+
+
def group_admins(activesync_enable=False, xauth_enable=False, suffix="host"):
    """
    Generate and save an Administrators group configuration file.

    :param bool activesync_enable: whether to enable ActiveSync for the group
    :param bool xauth_enable: whether to enable XAUTH for the group
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied admin group configuration")
    group = batch_update_cnf(
        build_group.BuildGroup(data="Administratoren", instance=1),
        [(Update, ("GROUP_ACCESS_REMOTE_ADMINISTRATION_ALLOWED", 0, "1")),
         (Update, ("GROUP_EMAILFILTER_BAN_FILTERLIST_REF", 0, "-1")),
         (Update, ("GROUP_PROXY_PROFILE_REF", 0, "1")),
         (Update, ("GROUP_ACCESS_GO_ONLINE_ALLOWED", 0, "1")),
         (Update, ("GROUP_EMAIL_RELAY_RIGHTS", 0, "RELAY_FROM_INTRANET")),
         (Update, ("GROUP_ACTIVESYNC_ENABLE", 0, "1" if activesync_enable else "0")),
         (Update, ("GROUP_XAUTH_ENABLE", 0, "1" if xauth_enable else "0")),
         (Delete, ("GROUP_COMMENT"))])
    group_cnf = "group-%d-%s.cnf" % (time.time(), suffix)
    [group_cnf] = aw.prep_config_paths([group_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving group configuration to %s", group_cnf)
    group.save(group_cnf)
    return group_cnf
+
+
def group_all(proxy_profile="1", suffix="host"):
    """
    Generate and save an All group configuration file.

    :param str proxy_profile: proxy profile instance reference
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied all group configuration")
    group = batch_update_cnf(
        build_group.BuildGroup(data="Alle", instance=2),
        [(Update, ("GROUP_ACCESS_GO_ONLINE_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_INFORMATION_VERSION_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_MAINPAGE_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_FORWARDING_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_GROUPWARE_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_SETTINGS_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_SORTING_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_SPAMFILTER_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_USERMANAGER_OWN_PROFILE_VACATION_ALLOWED", 0, "1")),
         (Update, ("GROUP_ACCESS_GROUPWARE_ALLOWED", 0, "1")),
         (Update, ("GROUP_EMAILFILTER_BAN_FILTERLIST_REF", 0, "-1")),
         (Update, ("GROUP_EMAIL_RELAY_RIGHTS", 0, "RELAY_FROM_EVERYWHERE")),
         (Update, ("GROUP_PROXY_PROFILE_REF", 0, proxy_profile)),
         (Delete, ("GROUP_COMMENT"))])

    group_cnf = "group-%d-%s.cnf" % (time.time(), suffix)
    [group_cnf] = aw.prep_config_paths([group_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving group configuration to %s", group_cnf)
    group.save(group_cnf)
    return group_cnf
+
+
def nic(ip="1.2.3.4", netmask="255.255.0.0", mac="00:00:00:00:00:00", suffix="host"):
    """
    Generate and save a nic configuration file.

    :param str ip: IP address of the nic
    :param str netmask: network mask of the nic
    :param str mac: MAC address of the nic
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied nic configuration")
    nic_var = batch_update_cnf(
        build_nic.BuildNIC(data="", instance=2, line_no=1),
        [(Update, ("NIC_TYPE", 0, "PROXYARP")),
         (Update, ("NIC_LAN_IP", 0, ip)),
         (Update, ("NIC_LAN_NETMASK", 0, netmask)),
         (Update, ("NIC_MAC", 0, mac))])
    nic_cnf = "nic-%d-%s.cnf" % (time.time(), suffix)
    [nic_cnf] = aw.prep_config_paths([nic_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving nic configuration to %s", nic_cnf)
    nic_var.save(nic_cnf)
    return nic_cnf
+
+
def intraclient(ip="1.2.3.4", mac="00:00:00:00:00:00", fwrules=5, suffix="host"):
    """
    Generate and save an intraclient configuration file.

    :param str ip: IP address of the intraclient
    :param str mac: MAC address of the intraclient
    :param int fwrules: instance of the firewall rules to use
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied intraclient configuration")
    intraclient_var = batch_update_cnf(
        build_intraclient.BuildIntraclient(instance=1, data="dmzserver"),
        [(Update, ("INTRACLIENT_IP", 0, ip)),
         (Update, ("INTRACLIENT_MAC", 0, mac)),
         # pass the ruleset reference as string like the other generators do
         (Update, ("INTRACLIENT_FIREWALL_RULESET_REF", 0, str(fwrules)))])

    intraclient_cnf = "intraclient-%d-%s.cnf" % (time.time(), suffix)
    [intraclient_cnf] = aw.prep_config_paths([intraclient_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving intraclient configuration to %s", intraclient_cnf)
    intraclient_var.save(intraclient_cnf)
    return intraclient_cnf
+
+
def provider(mode="ROUTER", ip="1.2.3.4", localip=None,
             netmask="255.255.0.0", dnsmode="IP", dns="1.2.3.4", fwrules=5,
             dialretry=None, name=None, timeout="", mtumode="AUTO",
             vlanid=None, mtusize=None, login=None, password=None,
             modemip=None, providerid=None, localdhcp=None,
             suffix="host"):
    """
    Generate and save a provider configuration file.

    :param str mode: provider mode
    :param str ip: IP address of the provider (variable deleted if empty)
    :param localip: IP address of the configured machine (valid for some configurations)
    :type localip: str or None
    :param str netmask: network mask of the provider (variable deleted if empty)
    :param str dnsmode: DNS mode; DNS IP only set when this is "IP"
    :param str dns: IP address of the DNS server
    :param int fwrules: instance of the firewall rules to use
    :param dialretry: dial retry count or None to delete the variable
    :param name: provider name used as cnf data
    :param str timeout: provider timeout value
    :param str mtumode: MTU mode; MTU size only set when this is not "AUTO"
    :param vlanid: VLAN id or None to delete the variable
    :param mtusize: MTU size value
    :param login: provider login or None to delete the variable
    :param password: provider password or None to delete the variable
    :param modemip: modem IP or None to delete the variable
    :param providerid: provider id or None to delete the variable
    :param localdhcp: local DHCP setting or None to delete the variable
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied provider configuration")

    def add_or_del(var, field):
        """Add the field with the given value or delete it if value is None."""
        if var is not None:
            return (Add, (field, 0, str(var)))
        return (Delete, field)

    def update_or_del(value, field):
        """Update the field with the given value or delete it if falsy."""
        # replaces the fragile `x and A or B` idiom with an explicit branch
        if value:
            return (Update, (field, 0, value))
        return (Delete, field)

    provider_var = batch_update_cnf(
        build_provider.BuildProvider(data=name),
        [(Update, ("PROVIDER_MODE", 0, mode)),
         update_or_del(ip, "PROVIDER_IP"),
         update_or_del(localip, "PROVIDER_LOCALIP"),
         update_or_del(netmask, "PROVIDER_NETMASK"),
         (Update, ("PROVIDER_TIMEOUT", 0, timeout)),
         (Update, ("PROVIDER_DNS_MODE", 0, dnsmode)),
         (Update, ("PROVIDER_DNS", 0, dns if dnsmode == "IP" else "")),
         (Update, ("PROVIDER_MTU_MODE", 0, mtumode)),
         (Update, ("PROVIDER_MTU_SIZE", 0,
                   mtusize if mtumode != "AUTO" else "")),
         (Update, ("PROVIDER_FIREWALL_RULESET_REF", 0, str(fwrules))),
         add_or_del(vlanid, "PROVIDER_VLAN_ID"),
         add_or_del(dialretry, "PROVIDER_DIAL_RETRY"),
         add_or_del(login, "PROVIDER_LOGIN"),
         add_or_del(password, "PROVIDER_PASSWORD"),
         add_or_del(modemip, "PROVIDER_MODEM_IP"),
         add_or_del(providerid, "PROVIDER_PROVIDERID"),
         add_or_del(localdhcp, "PROVIDER_LOCAL_DHCP")])
    provider_cnf = "provider-%d-%s.cnf" % (time.time(), suffix)
    [provider_cnf] = aw.prep_config_paths([provider_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving provider configuration to %s", provider_cnf)
    provider_var.save(provider_cnf)
    return provider_cnf
+
+
def provider_proxy(mode="ROUTER", ip="1.2.3.4", localip=None, proxy_port=3128, fwrules=7, suffix="host"):
    """
    Generate and save a provider configuration file for proxy.

    :param str mode: provider mode
    :param str ip: IP address of the provider (and DNS server)
    :param localip: IP address of the configured machine (valid for some configurations)
    :type localip: str or None
    :param int proxy_port: port for the provider proxy
    :param int fwrules: instance of the firewall rules to use
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create arnied provider configuration.")
    # replaces the fragile `x and A or B` idiom with an explicit conditional
    localip_update = ((Update, ("PROVIDER_LOCALIP", 0, localip))
                      if localip else (Delete, "PROVIDER_LOCALIP"))
    provider_var = batch_update_cnf(
        build_provider.BuildProvider(),
        [(Update, ("PROVIDER_MODE", 0, mode)),
         (Update, ("PROVIDER_DNS", 0, ip)),
         (Update, ("PROVIDER_DYNDNS_ENABLE", 0, "0")),
         (Update, ("PROVIDER_IP", 0, ip)),
         (Update, ("PROVIDER_PROXY_SERVER", 0, ip)),
         (Update, ("PROVIDER_PROXY_PORT", 0, str(proxy_port))),
         localip_update,
         (Update, ("PROVIDER_DNS_MODE", 0, "IP")),
         (Update, ("PROVIDER_FIREWALL_RULESET_REF", 0, str(fwrules)))])
    provider_cnf = "provider-%d-%s.cnf" % (time.time(), suffix)
    [provider_cnf] = aw.prep_config_paths([provider_cnf], aw.DUMP_CONFIG_DIR)
    # use the module logger, not the root logger, for consistent log routing
    log.info("Saving provider configuration to %s", provider_cnf)
    provider_var.save(provider_cnf)
    return provider_cnf
+
+
def port_forwarding(dst_port_end="", protocol_type="TCP", src_port_end="", suffix="host"):
    """
    Generate and save a port forwarding configuration file.

    :param str dst_port_end: port forwarding destination port end
    :param str protocol_type: port forwarding protocol type
    :param str src_port_end: port forwarding source port end
    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create port forwarding configuration")
    filename = "portforward-%d-%s.cnf" % (time.time(), suffix)
    children = [(Child, ("PORT_FORWARDING_DST_IP_REF", 0, "1")),
                (Child, ("PORT_FORWARDING_DST_PORT", 0, "123")),
                (Child, ("PORT_FORWARDING_DST_PORT_END", 0, dst_port_end)),
                (Child, ("PORT_FORWARDING_PROTOCOL_TYPE", 0, protocol_type)),
                (Child, ("PORT_FORWARDING_SRC_PORT", 0, "123")),
                (Child, ("PORT_FORWARDING_SRC_PORT_END", 0, src_port_end))]
    return build_cnf("PORT_FORWARDING",
                     data="test",
                     filename=filename,
                     vals=children)
+
+
def firewall_ruleset_simple(suffix="host"):
    """
    Generate and save a simple firewall ruleset configuration file.

    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create firewall ruleset")
    filename = "fw-%d-%s.cnf" % (time.time(), suffix)
    updates = [(Update, ("FIREWALL_RULESET_PROFILE_TYPE", 0, "SIMPLE_PROVIDER")),
               (Update, ("FIREWALL_RULESET_PROVIDER_HTTPS_OPEN", 0, "0")),
               (Update, ("FIREWALL_RULESET_PROVIDER_POP3SIMAPS_OPEN", 0, "0")),
               (Update, ("FIREWALL_RULESET_PROVIDER_PORT_FORWARDING_ENABLE", 0, "1")),
               (Update, ("FIREWALL_RULESET_PROVIDER_SMTP_OPEN", 0, "0")),
               (Update, ("FIREWALL_RULESET_PROVIDER_SSH_OPEN", 0, "0")),
               (Update, ("FIREWALL_RULESET_PROVIDER_VPN_OPEN", 0, "0"))]
    return build_cnf("FIREWALL_RULESET",
                     instance=101,
                     data="Port Forwarding libfirewall test",
                     filename=filename,
                     vals=updates)
+
+
def firewall_ruleset_port(suffix="host"):
    """
    Generate and save a firewall ruleset configuration file for port forwarding.

    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create firewall ruleset")
    filename = "fw-portforward-%d-%s.cnf" % (time.time(), suffix)
    updates = [(Update, ("FIREWALL_RULESET_AUTOMATIC_ANSWER_RULE", 0, "1")),
               (Update, ("FIREWALL_RULESET_PROFILE_TYPE", 0, "FULL")),
               (Add, ("FIREWALL_RULESET_RULE", 1, "")),
               (Child, ("FIREWALL_RULESET_RULE_ACTION", 0, "ACCEPT")),
               (Child, ("FIREWALL_RULESET_RULE_CHECK_CONNECTION_STATUS", 0, "PORTFORWARDING")),
               (Child, ("FIREWALL_RULESET_RULE_CHECK_TCP_FLAGS", 0, "DISABLED")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_FOR_ACTION_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_FOR_LOG_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_PACKETS_AVERAGE_COUNT", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_PACKETS_AVERAGE_PERIOD", 0, "SEC")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_PACKETS_PEAK_COUNT", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_LOG_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LOG_MESSAGE", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_TIME_INCLUDE_TIME_REF", 0, "-1")),
               (Update, ("FIREWALL_RULESET_USAGE", 0, "PROVIDER"))]
    return build_cnf("FIREWALL_RULESET",
                     instance=100,
                     data="Port forwarding only",
                     filename=filename,
                     vals=updates)
+
+
def firewall_ruleset_dmz(suffix="host"):
    """
    Generate and save a firewall ruleset configuration file for DMZ.

    :param str suffix: optional suffix to use for config identification
    :returns: generated config filename
    :rtype: str
    """
    log.info("Create firewall ruleset")
    filename = "fw-dmz-%d-%s.cnf" % (time.time(), suffix)
    updates = [(Update, ("FIREWALL_RULESET_AUTOMATIC_ANSWER_RULE", 0, "1")),
               (Update, ("FIREWALL_RULESET_PROFILE_TYPE", 0, "FULL")),
               (Add, ("FIREWALL_RULESET_RULE", 1, "")),
               (Child, ("FIREWALL_RULESET_RULE_ACTION", 0, "ACCEPT")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_FOR_ACTION_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_FOR_LOG_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_PACKETS_AVERAGE_COUNT", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_LIMIT_PACKETS_PEAK_COUNT", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_LOG_ENABLE", 0, "0")),
               (Child, ("FIREWALL_RULESET_RULE_LOG_MESSAGE", 0, "")),
               (Child, ("FIREWALL_RULESET_RULE_SERVICE_INCLUDE_SERVICEGROUP_REF", 0, "6")),
               (Child, ("FIREWALL_RULESET_RULE_DST_INCLUDE_CLIENT_REF", 0, "2")),
               (Update, ("FIREWALL_RULESET_USAGE", 0, "LANVPN"))]
    return build_cnf("FIREWALL_RULESET",
                     instance=100,
                     data="DMZ firewall rules",
                     filename=filename,
                     vals=updates)
--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+Read / write / merge guest cnf var sets, even on host.
+
+Copyright: Intra2net AG
+
+
+CONTENTS
+------------------------------------------------------
+
+This module can be viewed as a convenience-wrapper around module
+:py:mod:`cnfvar`. It uses many of its function but provides some higher-level
+interfaces, most of all class :py:class:`SimpleCnf`. It is completely
+independent of the :py:mod:`cnfline` package and its included subclasses
+(modules in `shared.cnfline`, starting with ``build_`` and ``configure_``).
+
+Class :py:class:`SimpleCnf` represents a hierarchical set of conf vars and
+provides functions to deal with that hierarchy. Under the hood, all functions
+here (and probably also in :py:mod:`cnfvar`) work with conf vars represented as
+dictionaries and lists thereof. Conf var dicts have keys `number`, `varname`,
+`instance`, `data`, `comment` and possibly `parent` and/or `children`.
+`varname` is a regular upper-case string, `data` is a (utf8) string, `comment`
+is usually None, `number`, `parent` and `instance` are int. If a conf var has
+children, then this is a list of conf var dicts. `parent` is only present if a
+conf-var is a child. Several conf vars, if not wrapped in a
+:py:class:`SimpleCnf`, appear either as simple list of conf var dicts or as a
+dict with a single key `cnf` whose value is a list of conf var dicts. (Function
+:py:func:`get_cnf` returns the former given the latter).
+
+.. todo:: Exceptions Invalid[Json]Cnf are used inconsistently (e.g. check type
+ of function arguments `-->` should be ValueError) and difference
+ between them is unclear. Also name differs only in case from
+ :py:class:`cnfvar.InvalidCNF`
+
+INTERFACE
+------------------------------------------------------
+"""
+
+import os
+import tempfile
+import time
+import logging
+log = logging.getLogger('simple_cnf')
+
+import arnied_wrapper
+import cnfvar
+import sysmisc
+import json
+
+###############################################################################
+# constants
+###############################################################################
+
+#: timeout for copying temporary config files to VM objects (seconds)
+COPY_FILES_TIMEOUT = 15
+
+#: additional location of configuration files
+ADD_CNFFILE_PATH = "/tmp/configs"
+
+
+###############################################################################
+# EXCEPTIONS
+###############################################################################
+
+
class InvalidCnf(Exception):
    """Exception that indicates a general problem with conf var processing."""

    def __init__(self, m):
        """Create an invalid config exception."""
        message = "Invalid CNF_VAR: %s" % m
        super(InvalidCnf, self).__init__(message)
        # keep message and prefix around so __str__ can combine them
        self.msg = message
        self.pfx = "[CNF]"

    def __str__(self):
        """Get a string version of the exception message."""
        return "{0} {1}".format(self.pfx, self.msg)
+
+
class InvalidJsonCnf(InvalidCnf):
    """Exception that indicates a problem with JSON conf var processing."""

    def __init__(self, m):
        """Create an invalid JSON config exception."""
        super(InvalidJsonCnf, self).__init__(m)
        # override the prefix so __str__ flags the JSON origin
        self.pfx = "[CNF:JSON]"
+
+
+###############################################################################
+# auxiliary functions
+###############################################################################
+
+
def get_cnf(cnf):
    """
    "Convert" a config dict to a list of conf var dicts.

    This just removes the top-level 'cnf' key and returns its value.

    :param cnf: config dictionary
    :type cnf: {str, [dict]}
    :returns: list of cnf var dicts
    :rtype: [{str, int or str or None}]
    :raises: :py:class:`InvalidJsonCnf` if there is no `cnf` field found
    """
    try:
        cnf_vars = cnf["cnf"]
    except KeyError:
        cnf_vars = None
    # a missing key and an explicit None value are treated the same
    if cnf_vars is None:
        raise InvalidJsonCnf("toplevel \"cnf\" field required")
    return cnf_vars
+
+
def gen_tmpname():
    """
    Get a (quite) safe temporary file name for config file.

    :returns: temporary file name
    :rtype: str
    """
    prefix = "simple_%d_" % int(time.time())
    handle, name = tempfile.mkstemp(prefix=prefix, suffix=".cnf")
    # only the unique name is wanted, so drop the actual file right away
    os.close(handle)
    os.unlink(name)
    return name
+
+
def set_values(cnf_vars, replacements):
    """
    Recursively replace values in configuration.

    Works in-place, meaning that no new configuration is created and returned
    but instead argument `cnf_vars` is modified (and nothing returned).

    Variable name matching is case-insensitive.

    :param cnf_vars: config where replacements are to be made
    :type cnf_vars: [{str, int or str or None}] or {str, [dict]}
    :param replacements: what to replace and what to replace it with
    :type replacements: {str, str} or [(str, str)]
    :raises: :py:class:`InvalidJsonCnf` if cnf_vars is neither dict nor list
    :raises: :py:class:`TypeError` if replacements is neither dict nor list
    """
    # build a lower-case keyed lookup table; for key-value lists the first
    # occurrence of a key wins (same as the previous linear-search semantics)
    if isinstance(replacements, dict):
        pairs = replacements.items()
    elif isinstance(replacements, list):
        pairs = replacements
    else:
        raise TypeError("replacements must be dictionary or key-value list")
    lookup = {}
    for key, value in pairs:
        lookup.setdefault(key.lower(), str(value))

    # check type of arg "cnf_vars"
    if isinstance(cnf_vars, dict):
        cnf_vars = cnf_vars["cnf"]  # operate on the var list
    if not isinstance(cnf_vars, list):
        raise InvalidJsonCnf("ill-formed CNF_VAR: expected list, got %s (%s)"
                             % (type(cnf_vars), cnf_vars))

    def aux(varlist):
        """Internal recursive function to replace values."""
        for var in varlist:
            varname = var["varname"].lower()
            if varname in lookup:
                # BUGFIX: the previous code tested membership against
                # lower-cased keys but then looked the value up with the
                # lower-cased name against the original (typically
                # upper-case) keys, silently replacing data with "";
                # the lower-cased lookup table fixes that
                var["data"] = lookup[varname]
            children = var.get("children", None)
            if children is not None:
                aux(children)

    # apply function on complete cnf_vars
    aux(cnf_vars)
+
+
def lookup_cnf_file(fname):
    """
    Search for a config file with the given name in the default locations.

    :param str fname: file name of config file (without path)
    :returns: first existing config file found in default locations
    :rtype: str
    :raises: :py:class:`IOError` if no such config file was found
    """
    locations = [arnied_wrapper.SRC_CONFIG_DIR, ADD_CNFFILE_PATH]
    for location in locations:
        candidate = os.path.join(location, fname)
        if os.path.isfile(candidate):
            return candidate
    raise IOError("config file %s does not exist in any of the readable "
                  "locations %s" % (fname, locations))
+
+
+###############################################################################
+# primary class
+###############################################################################
+
+
+class SimpleCnf(object):
+ """
+ Representation of hierarchical configuration of variables.
+
+ Based on C++ `cnf_vars` as visualized by *get_cnf*.
+
+ Internal data representation: see module doc
+ """
+
+ def __init__(self, cnf=None):
+ """
+ Creates a simple configuration.
+
+ :param cnf: initial set of conf var data (default: None = empty conf)
+ :type cnf: list or tuple or anything that :py:func:`get_cnf` can read
+ """
+ if isinstance(cnf, (list, tuple)):
+ self.__cnfvars = cnf
+ elif cnf is not None:
+ self.__cnfvars = get_cnf(cnf)
+ else:
+ self.__cnfvars = []
+
+ def _find_new_number(self, cnf_vars):
+ """Recursive helper function to find new unique (line) number."""
+ if not cnf_vars:
+ return 1
+ new_numbers = [1, ] # in case cnf_vars is empty
+ for cnf_var in cnf_vars:
+ new_numbers.append(cnf_var['number'] + 1)
+ try:
+ new_numbers.append(self._find_new_number(cnf_var['children']))
+ except KeyError:
+ pass
+ return max(new_numbers) # this is max(all numbers) + 1
+
+ def _find_new_instance(self, varname):
+ """
+ Find an instance number for variable with non-unique varname.
+
+ Will only check on top level, is not recursive.
+
+ :param str varname: name of conf var; will be converted to upper-case
+ :returns: instance number for which there is no other conf var of same
+ name (0 if there is not other conf var with that name)
+ :rtype: int
+ """
+ result = 0
+ varname = varname.upper()
+ for entry in self.__cnfvars:
+ if entry['varname'] == varname:
+ result = max(result, entry['number']+1)
+ return result
+
+ def add(self, varname, data='', number=None, instance=None, children=None):
+ """
+ Add a cnf var to config on top level.
+
+ :param str varname: name of conf var; only required arg; upper-case
+ :param str data: conf var's value
+ :param int number: line number of that conf var; if given as None
+ (default) the function looks through config to find
+ a new number that is not taken; must be positive!
+ Value will be ignored if children are given.
+ :param int instance: Instance of the new conf var or None (default).
+ If None, then function looks through config to
+ find a new unique instance number
+ :param children: child confs for given conf var. Children's parent
+ and line attributes will be set in this function
+ :type children: :py:class:`SimpleCnf`
+ """
+ if instance is None:
+ instance = self._find_new_instance(varname)
+ if children:
+ number = self._find_new_number(self.__cnfvars) # need top number
+ new_children = []
+ for child in children:
+ new_dict = child.get_single_dict()
+ new_dict['parent'] = number
+ new_children.append(new_dict)
+ cnfvar.renumber_vars({'cnf':new_children}, number)
+ children = new_children
+ elif number is None:
+ number = self._find_new_number(self.__cnfvars)
+
+ new_var = dict(varname=varname.upper(), data=data,
+ number=number, comment=None, instance=instance)
+ if children:
+ new_var['children'] = children # only add if non-empty
+ self.__cnfvars.append(new_var)
+
+ def append_file_generic(self, reader, cnf, replacements=None):
+ """
+ Append conf var data from file.
+
+ If `replacements` are given, calls :py:meth:`set_values` with these
+ before adding values to config.
+
+ :param cnf: file name or dictionary of conf vars
+ :type cnf: str or {str, int or str or None}
+ :param replacements: see help in :py:meth:`set_values`
+ """
+ log.info("append CNF_VARs from file")
+ new_vars = None
+ if callable(reader) is False:
+ raise TypeError("append_file_generic: reader must be callable, "
+ "not %s" % type(reader))
+ if isinstance(cnf, dict):
+ new_vars = get_cnf(cnf)
+ elif isinstance(cnf, str):
+ fullpath = lookup_cnf_file(cnf)
+ with open(fullpath, "rb") as chan:
+ cnfdata = chan.read()
+ tmp = reader(cnfdata)
+ new_vars = get_cnf(tmp)
+ if new_vars is None:
+ raise InvalidCnf("Cannot append object \"%s\" of type \"%s\"."
+ % (cnf, type(cnf)))
+
+ if replacements is not None:
+ set_values(new_vars, replacements)
+
+ current = self.__cnfvars
+ current.extend(new_vars)
+
    def append_file(self, cnf, replacements=None):
        """
        Append conf var data from a file in legacy cnf format.

        Delegates to :py:meth:`append_file_generic` with
        :py:func:`cnfvar.read_cnf` as the parser.

        :param cnf: file name or dictionary of conf vars
        :type cnf: str or {str, int or str or None}
        :param replacements: see help in :py:meth:`set_values`
        """
        return self.append_file_generic(cnfvar.read_cnf, cnf,
                                        replacements=replacements)
+
    def append_file_json(self, cnf, replacements=None):
        """
        Append conf var data from a file in json format.

        Delegates to :py:meth:`append_file_generic` with
        :py:func:`cnfvar.read_cnf_json` as the parser.

        :param cnf: file name or dictionary of conf vars
        :type cnf: str or {str, int or str or None}
        :param replacements: see help in :py:meth:`set_values`
        """
        return self.append_file_generic(cnfvar.read_cnf_json, cnf,
                                        replacements=replacements)
+
+ def append_guest_vars(self, vm=None, varname=None, replacements=None):
+ """
+ Append content from machine's "real" config to this object.
+
+ Runs `get_cnf -j [varname]` on local host or VM (depending on arg
+ `vm`), converts output and appends it to this objects' conf var set.
+ If replacements are given, runs :py:meth:`set_values`, first.
+
+ :param vm: a guest vm or None to run on local host
+ :type vm: VM object or None
+ :param str varname: optional root of conf vars to append. If given as
+ None (default), append complete conf
+ :param replacements: see help in :py:meth:`set_values`
+ """
+ cnf = arnied_wrapper.get_cnfvar(varname=varname, vm=vm)
+ new_vars = get_cnf(cnf)
+
+ log.info("apply substitutions to extracted CNF_VARs")
+ if replacements is not None:
+ set_values(new_vars, replacements)
+
+ current = self.__cnfvars
+ current.extend(new_vars)
+
+ def save(self, filename=None):
+ """
+ Saves this object's configuration data to a file.
+
+ The output file's content can be interpreted by `set_cnf -j`.
+
+ :param str filename: name of file to write config to; if None (default)
+ the config will be written to a temporary file
+ :returns: filename that was written to
+ :rtype: str
+ """
+ log.info("save configuration")
+ current = self.__cnfvars
+ if not current:
+ raise InvalidCnf("No variables to write.")
+
+ if filename is None:
+ # create temporary filename
+ filename = arnied_wrapper.generate_config_path(dumped=True)
+
+ with open(filename, 'w') as out:
+ cnfvar.output_json({"cnf": current}, out, renumber=True)
+
+ return filename
+
+ def apply(self, vm=None, renumber=True):
+ """
+ Apply object's config on VM or local host.
+
+ Runs a `set_cnf` with complete internal config data, possibly waits for
+ generate to finish afterwards.
+
+ :param vm: a guest vm or None to apply on local host
+ :type vm: VM object or None
+ :param bool renumber: re-number conf vars before application
+ """
+ current = self.__cnfvars
+ if renumber:
+ log.info("enforce consistent CNF_LINE numbering")
+ cnfvar.renumber_vars(current)
+ log.info("inject configuration %s" % "into guest" if vm else "in place")
+ arnied_wrapper.set_cnf_dynamic({"cnf": current},
+ config_file=gen_tmpname(), vm=vm)
+
    def __str__(self):
        """
        Get a config in json format, ready for `set_cnf -j`.

        The dump is requested with ``renumber=True``, so numbering is
        normalized by :py:mod:`cnfvar` before serialization.

        :returns: config in json format
        :rtype: str
        """
        return cnfvar.dump_json_string({"cnf": self.__cnfvars}, renumber=True)
+
+ def pretty_print(self, print_func=None):
+ """
+ Get a string representation of this simple_cnf that is human-readable
+
+ Result is valid json with nice line breaks and indentation but not
+ renumbered (so may not be fit for parsing)
+ """
+ for line in json.dumps({"cnf": self.__cnfvars}, check_circular=False,
+ indent=4, sort_keys=True).splitlines():
+ if print_func is None:
+ print(line)
+ else:
+ print_func(line)
+
+ def __iter__(self):
+ """
+ Return an iterator over the contents of this simple cnf.
+
+ The iteration might not be ordered by line number nor entry nor
+ anything else. No guarantees made!
+
+ The entries returned by the iterator are :py:class:`SimpleCnf`.
+
+ Example::
+
+ for cnf_list in iter(my_cnf['PROXY_ACCESSLIST']):
+ print('checking proxy list {0} with {1} children'
+ .format(cnf_list.get_value(), len(cnf_list)))
+ """
+ # self.__cnfvars is a list of dicts, each with the same fields
+ for dict_entry in self.__cnfvars:
+ yield SimpleCnf([dict_entry, ])
+
+ def __getitem__(self, item):
+ """
+ Called by `cnf['key']` or `cnf[line_number]`; returns subset of cnf.
+
+ Processing time is O(n) where n is the number of top-level entries in
+ simple cnf.
+
+ Examples (on VM)::
+
+ all = SimpleCnf()
+ all.append_guest_vars()
+ len(all) # --> probably huge
+ len(all['user']) # should give the number of users
+
+ # should result in the same as all['user']:
+ users = SimpleCnf()
+ users.append_guest_vars(varname='user')
+
+ :param item: line number or value to specify a cnf subset;
+ if string value, will be converted to upper case
+ :type item: int or str
+ :returns: another simple cnf that contains a subset of this simple cnf
+ :rtype: :py:class:`SimpleCnf`
+
+ .. seealso:: method :py:func:`get` (more general than this)
+ """
+ # determine whether arg 'item' is a key name or a line number
+ if isinstance(item, int): # is line number
+ dict_key = 'number'
+ else: # assume key name
+ dict_key = 'varname'
+ item = item.upper()
+
+ # search all entries for matches
+ results = [dict_entry for dict_entry in self.__cnfvars
+ if dict_entry[dict_key] == item]
+
+ # convert result to a simple cnf
+ return SimpleCnf(results)
+
    def __len__(self):
        """
        Get the number of top-level entries in cnf.

        Children are not counted; only the length of the top-level list
        matters here.

        :returns: number of top-level entries in cnf
        :rtype: int
        """
        return len(self.__cnfvars)
+
+ def get(self, name=None, value=None, instance=None, line=None):
+ """
+ Get a subset of this config that matches ALL of given criteria.
+
+ For example, if :py:func:`get_cnf` contains the line
+ '1121 USER,1: "admin"', all of these examples will result in the same
+ simple cnf::
+
+ cnf.get(name='user', value='admin')
+ cnf.get(name='user', instance=1)
+ cnf.get(name='user').get(value='admin')
+ cnf.get(line=1121)
+
+ :param str name: conf var name (key) or None to not use this criterion;
+ will be converted to upper case
+ :param str value: value of conf var or None to not use this criterion
+ :param int instance: instance number of value in a list (e.g. USERS)
+ or None to not use this criterion
+ :param int line: line number of None to not use this criterion
+ :returns: a simple cnf that contains only entries that match ALL of the
+ given criteria. If nothing matches the given criteria, an
+ empty simple cnf will be returned
+ :rtype: :py:class:`SimpleCnf`
+
+ .. seealso:: method :py:func:`__getitem__` (less general than this)
+ """
+ if name is None:
+ name_test = lambda test_val: True
+ else:
+ name = name.upper()
+ name_test = lambda test_val: name == test_val['varname']
+
+ if value is None:
+ value_test = lambda test_val: True
+ else:
+ value = str(value)
+ value_test = lambda test_val: test_val['data'] == value
+
+ if instance is None:
+ instance_test = lambda test_val: True
+ elif not isinstance(instance, int):
+ raise ValueError('expect int value for instance!')
+ else:
+ instance_test = lambda test_val: instance == test_val['instance']
+
+ if line is None:
+ line_test = lambda test_val: True
+ elif not isinstance(line, int):
+ raise ValueError('expect int value for line number!')
+ else:
+ line_test = lambda test_val: test_val['number'] == line
+
+ return SimpleCnf(tuple(entry for entry in self.__cnfvars
+ if name_test(entry) and value_test(entry)
+ and instance_test(entry) and line_test(entry)))
+
+ def get_children(self):
+ """
+ Get children of simple cnf of just 1 entry.
+
+ :returns: simple cnf children or an empty simple cnf if entry has
+ no children
+ :rtype: :py:class:`SimpleCnf`
+ :raises: :py:class:`ValueError` if this simple cnf has more
+ than 1 entry
+ """
+ if len(self) != 1:
+ raise ValueError('get_children only possible if len == 1 (is {0})!'
+ .format(len(self)))
+ try:
+ result = self.__cnfvars[0]['children']
+ except KeyError:
+ return SimpleCnf()
+
+ for entry in result:
+ try:
+ del entry['parent']
+ except KeyError:
+ pass
+ return SimpleCnf(result)
+
+ def get_value(self):
+ """
+ Get a value of a simple cnf of just 1 entry.
+
+ :returns: str cnf value/data
+ :rtype: str
+ :raises: :py:class:`ValueError` if this simple cnf has more
+ than 1 entry
+ """
+ if len(self) != 1:
+ raise ValueError('get_value only possible if len == 1 (is {0})!'
+ .format(len(self)))
+ return self.__cnfvars[0]['data']
+
+ def get_single_dict(self):
+ """
+ Get a dictionary of a simple cnf of just 1 entry.
+
+ :returns: dictionary of a simple cnf
+ :rtype: {str, int or str or None}
+ """
+ if len(self) != 1:
+ raise ValueError('get_value only possible if len == 1 (is {0})!'
+ .format(len(self)))
+ return self.__cnfvars[0]
+
+ def __eq__(self, other_cnf):
+ """
+ Determine wether `self` == `other_cnf`.
+
+ :param other_cnf: cnf to compare with
+ :type other_cnf: :py:class:`SimpleCnf`
+ :returns: whether all cnf var entries are equal
+ :rtype: bool
+ """
+ key_func = lambda cnf_var_entry: cnf_var_entry['number']
+
+ return sorted(self.__cnfvars, key=key_func) \
+ == sorted(other_cnf.__cnfvars, key=key_func) # pylint: disable=protected-access
--- /dev/null
+# This Python file uses the following encoding: utf-8
+
+"""
+
+SUMMARY
+------------------------------------------------------
+Miscellaneous system utility: Collection of various common system stuff / idioms.
+
+Copyright: 2015 Intra2net AG
+
+
+CONTENTS
+------------------------------------------------------
+The library exports the symbols below and some custom logging functions.
+
+run_cmd_with_pipe
+ Wrapper for the default use case of the cumbersome "subprocess" library.
+ Acceps a list of arguments that describe the command invocation. Returns
+ ``True`` and the contents of ``stdout`` if the pipe returned sucessfully,
+ ``False`` plus ``stderr`` and the exit status otherwise. For example::
+
+ import sysmisc
+ (success, output, _ret) = sysmisc.run_cmd_with_pipe([ "/usr/bin/date", "+%F" ])
+ if success is True:
+ print("Today is %s" % output)
+ else:
+ print("Failed to read date from pipe.")
+
+get_mountpoints_by_type
+ Extract mount points for the given file system type from */proc/mounts*.
+ Returns ``None`` if the file system is not mounted, a list of mount points
+ otherwise. Raises a test error if */proc/mounts* cannot be accessed.
+
+read_linewise
+ Similar to run_cmd_with_pipe but allows processing of output line-by-line
+ as it becomes available. This may be necessary when the underlying binary
+ creates lots of output that cannot be buffered until the process finishes.
+ Example::
+
+ import re
+ import sysmisc
+ def parse(line):
+ if re.match('\d', line):
+ print('found digits in line!')
+ sysmisc.read_linewise('dump_db', parse)
+
+hash_file
 Return a hash of a file.
+
+cheat_reboot
+ Replace the reboot binary with a fake one.
+
+cmd_block_till
+ Run a command and wait until a condition evaluates to True.
+
+The logging functions either use the format capability or play
+the simple role of providing shorter names.
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+from __future__ import print_function
+
+import re
+import subprocess
+import hashlib
+import os
+import stat
+import time
+import types
+import uuid
+import logging
+llog = logging.getLogger('sysmisc')
+
+
+__all__ = ("inf", "run_cmd_with_pipe", "get_mountpoints_by_type", "read_linewise", "hash_file", "cheat_reboot", "RUN_RESULT_OK", "RUN_RESULT_TIMEDOUT", "RUN_RESULT_FAIL", "RUN_RESULT_NAME", "cmd_block_till")
+
+
+###############################################################################
+# HELPERS
+###############################################################################
+
def run_cmd_with_pipe(argv, inp=None):
    """
    Read from a process pipe.

    :param argv: arguments to use for creating a process
    :type argv: [str]
    :param inp: text to be piped into the program's standard input
    :type inp: str
    :returns: a processes' stdout along with a status info
    :rtype: bool * str * (int option)

    Executes a binary and reads its output from a pipe. Returns a triple
    encoding the program's success, its output either from stdout or stderr,
    as well as the exit status if non-zero.

    If your process creates a lot of output, consider using
    :py:func:`read_linewise` instead.
    """
    llog.debug("About to execute \"" + " ".join(argv) + "\"")
    stdin = subprocess.PIPE if isinstance(inp, str) else None
    p = subprocess.Popen(argv,
                         stdin=stdin,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    # Feed stdin through communicate() instead of a manual p.stdin.write():
    # a manual write can deadlock once the child fills its output pipe
    # before consuming all input, and it also never closed stdin.
    (stdout, stderr) = p.communicate(inp.encode() if stdin else None)
    exit_status = p.returncode
    if exit_status != 0:
        return False, stderr.decode(), exit_status
    return True, stdout.decode(), None
+
+
+procmounts = "/proc/mounts"
+
+
def get_mountpoints_by_type(fstype):
    """
    Determine where some filesystem is mounted by reading the list
    of mountpoints from */proc/mounts*.

    :param str fstype: filesystem type
    :returns: any mountpoints found, or None if the filesystem is not mounted
    :rtype: [str] or None
    :raises: :py:class:`IOError` if failed to read the process mounts
    """
    llog.debug("Determine mountpoints of %s." % fstype)
    try:
        with open(procmounts, "r") as m:
            lines = list(m)
    except IOError as e:
        # chain the original error so the root cause is not lost
        raise IOError("Failed to read %s." % procmounts) from e
    # second whitespace-separated field of a /proc/mounts line is the
    # mountpoint; escape fstype so regex metacharacters cannot leak in
    pat = re.compile(r"^\S+\s+(\S+)\s+" + re.escape(fstype) + r"\s+.*$")
    mps = [match.group(1)
           for match in (pat.match(line) for line in lines)
           if match]
    return mps or None
+
+
def read_linewise(cmd, func, **kwargs):
    """
    Run `cmd` using subprocess, applying `func` to each line of stdout/stderr.

    :param str cmd: command to read linewise
    :param func: function to apply on each stdout line
    :type func: function
    :param kwargs: extra arguments for the subprocess initiation
    :returns: the process' returncode from :py:meth:`subprocess.Popen.wait`
    :rtype: int

    Creates a subprocess.Popen object with given `cmd`, `bufsize`=1,
    `stdout`=PIPE, `stderr`=STDOUT, `universal_newlines`=True and the
    given `kwargs`.

    As opposed to :py:func:`run_cmd_with_pipe`, output is not gathered and
    returned but processed as it becomes available and then discarded. This
    allows to process output even if there is much of it.
    """
    proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True,
                            **kwargs)

    # Iterate the pipe directly; mixing this with communicate() would raise
    # "Mixing iteration and read methods would lose data", hence no polling
    # inside the loop.
    for line in proc.stdout:
        func(line)

    return proc.wait()
+
+
def hash_file(fname, new=hashlib.sha512, bs=4096):
    """
    Compute the digest of a file's entire contents.

    :param str fname: name of the file to hash
    :param new: hash constructor, e.g. ``hashlib.sha512``
    :type new: builtin_function_or_method
    :param int bs: chunk size used when reading the file
    :returns: hexadecimal digest of the strings read from the file
    :rtype: str
    """
    digest = new()
    with open(fname, "rb") as handle:
        # read in fixed-size chunks so arbitrarily large files fit in memory
        while True:
            chunk = handle.read(bs)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
+
+
# Shell script template installed in place of the real reboot binary by
# cheat_reboot(); interpolated with (path, backup path, sha512 checksum,
# logfile). On its first invocation it restores the original executable,
# so only the *next* reboot call actually reboots.
cheat_tpl = """\
#!/bin/sh
set -u

path="%s" # <- location of original executable, == location of script
backup="%s" # <- location of backup
chksum="%s" # <- sha512(orig_reboot)
log="%s" # <- stdout

msg () {
    echo "[$(date '+%%F %%T')] $*" &>> "${log}"
}

msg "Fake reboot invoked. Restoring executable from ${backup}."

if [ "${chksum}" = "$(sha512sum ${path} | cut -d ' ' -f 1)" ]; then
    msg "Real reboot executable already in place at ${path}. Aborting."
    exit 1
fi

if [ ! -x "${backup}" ]; then
    msg "No backup executable at ${backup}!."
    exit 1
fi

if [ "${chksum}" = "$(sha512sum ${backup} | cut -d ' ' -f 1)" ]
then
    msg "Installing backup executable from ${backup} as ${path}."
    if ! mv -f "${backup}" "${path}"; then
        msg "Failed to replace ${path}."
    fi
else
    msg "Checksum mismatch of ${backup}: Expected ${chksum}."
    exit 1
fi

msg "Fake reboot successful -- next invocation will reboot indeed."

"""

#: infix used in the names of backed-up reboot executables
backup_infix = "backup"
#: backup name format: <original path>.<infix>_<uuid>
backup_fmt = "%s.%s_%s"
#: destination of the fake reboot script's log messages
logfile = "/var/log/cheat_reboot.log"
+
+
def cheat_reboot():
    """
    Skip one reboot.

    :raises: :py:class:`exceptions.OSError` if backup target already exists

    This replaces the ``reboot`` executable by script which replaces itself
    by the backed up executable upon the next invocation.
    """
    path = "/usr/intranator/bin/reboot-intranator"
    backup = backup_fmt % (path, backup_infix, uuid.uuid1())

    if os.path.exists(backup):
        raise OSError("Target %s already exists." % backup)

    checksum = hash_file(path)
    llog.debug("Found reboot at %s, hash %s; dst %s." % (path, checksum, backup))
    # move the real executable aside, then drop the fake script in its place
    subprocess.check_call(["mv", "-f", path, backup])
    with open(path, "w") as fp:
        fp.write(cheat_tpl % (path, backup, checksum, logfile))
    # ug+x so the fake script is executable like the original binary
    os.chmod(path, os.stat(path).st_mode | stat.S_IXUSR | stat.S_IXGRP)
+
+
# result codes returned by cmd_block_till()
RUN_RESULT_OK = 0  # → success, cond returned True within timeout
RUN_RESULT_TIMEDOUT = 1  # → success, but timeout elapsed
RUN_RESULT_FAIL = 2  # → fail

#: mapping of result codes to their symbolic names (for diagnostics)
RUN_RESULT_NAME = (
    {RUN_RESULT_OK: "RUN_RESULT_OK", RUN_RESULT_TIMEDOUT: "RUN_RESULT_TIMEDOUT", RUN_RESULT_FAIL: "RUN_RESULT_FAIL"}
)
+
+
def cmd_block_till(cmd, timeout, cond, interval=1, *userdata, **kwuserdata):
    """
    Run ``cmd`` and wait until :py:func:`cond` evaluates to True.

    :param cmd: Command line or callback to execute. Function arguments must
                have the same signature as :py:func:`run_cmd_with_pipe`.
    :type cmd: [str] | types.FunctionType
    :param int timeout: Blocking timeout in seconds (nonnegative)
    :param cond: predicate polled with ``*userdata``/``**kwuserdata`` until
                 it stops returning False
    :type cond: function
    :param int interval: seconds to sleep between polls of ``cond``
    :returns: Pair of result and error message if appropriate or None.
    :rtype: (run_result, str | None)
    :raises: :py:class:`TypeError` if ``cmd`` is neither a function nor a list
    """
    llog.debug("cmd_block_till: %r, %d s, %r", cmd, timeout, cond)
    # validate the timeout before doing any work; the original only checked
    # it after the command had already been executed
    if timeout < 0:
        return RUN_RESULT_FAIL, "cmd_block_till: invalid timeout; nonnegative " \
                                "integer expected"

    if isinstance(cmd, types.FunctionType):
        succ, out, _ = cmd()
    elif isinstance(cmd, list):
        succ, out, _ = run_cmd_with_pipe(cmd)  # caution: never pass further arguments!
    else:
        # the %-placeholder must be interpolated; passing cmd as a second
        # argument to TypeError left the message unformatted
        raise TypeError("cmd_block_till: invalid type (cmd=%r); expected "
                        "function or argv" % cmd)

    if succ is False:
        return RUN_RESULT_FAIL, "cmd_block_till: command %r failed (%s)" \
                                % (cmd, str(out))
    t_0 = time.time()  # brr; cf. PEP 418 as to why
    while cond(*userdata, **kwuserdata) is False:
        dt = time.time() - t_0
        if dt > timeout:
            return RUN_RESULT_TIMEDOUT, "cmd_block_till: command %r exceeded " \
                                        "%d s timeout" % (cmd, timeout)
        llog.debug("cmd_block_till: condition not satisfied after %d s, "
                   "retrying for another %d s" % (dt, timeout - dt))
        time.sleep(interval)
    return RUN_RESULT_OK, None
+
+
+###############################################################################
+# LOGGING
+###############################################################################
+
#: current test stage, set by enter_test_stage() and used by progress()
CURRENT_TEST_STAGE = None
#: current test name, used to label progress() output
CURRENT_TEST_NAME = None
#: tag combining this module's file name with the local host name
LOG_TAG = "%s/%s" % (os.path.basename(__file__), os.uname()[1])
#: indentation unit for structured log output
LOG_INDENT = " "
+
+
def enter_test_stage(s):
    """Group events into stages for status updates.

    :param str s: name of the test stage being entered; stored in the
                  module-global CURRENT_TEST_STAGE read by progress()
    """
    global CURRENT_TEST_STAGE
    CURRENT_TEST_STAGE = s
    llog.info("Transitioning to test stage %s", s)
+
+
def progress(fmt, *args):
    """Status updates that stand out among the log noise.

    Prefixes the message with the current test name and stage (when set).
    """
    label = "/%s" % CURRENT_TEST_STAGE if isinstance(CURRENT_TEST_STAGE, str) else ""
    name = CURRENT_TEST_NAME if isinstance(CURRENT_TEST_NAME, str) else ""
    # coerce defensively: the globals may have been set to non-strings
    fmt, label = str(fmt), str(label)
    llog.info("[%s%s] %s" % (name, label, fmt), *args)
+
+
# these methods serve as shorter names
def inf(fmt, *args):
    """Short name for INFO logging."""
    llog.info(fmt, *args)


def dbg(fmt, *args):
    """Short name for DEBUG logging."""
    llog.debug(fmt, *args)


def err(fmt, *args):
    """Short name for ERROR logging."""
    llog.error(fmt, *args)


def wrn(fmt, *args):
    """Short name for WARN logging."""
    # bug fix: this used to call llog.error, silently promoting every
    # warning to ERROR level
    llog.warning(fmt, *args)
+
+
# these methods use the format capability
def log(level, text, *args, **kwargs):
    """Log at any level using format capability."""
    # interpolate with str.format before handing off to the logger;
    # kwargs (e.g. exc_info) are passed through to logging untouched
    message = text.format(*args)
    llog.log(level, message, **kwargs)


def info(text, *args, **kwargs):
    """Log at INFO level using format capability."""
    log(logging.INFO, text, *args, **kwargs)


def debug(text, *args, **kwargs):
    """Log at DEBUG level using format capability."""
    log(logging.DEBUG, text, *args, **kwargs)


def error(text, *args, **kwargs):
    """Log at ERROR level using format capability."""
    log(logging.ERROR, text, *args, **kwargs)


def warn(text, *args, **kwargs):
    """Log at WARN level using format capability."""
    log(logging.WARN, text, *args, **kwargs)
--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+Utility to customize email and other test data.
+
+Copyright: Intra2net AG
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+import os
+import re
+import logging
+log = logging.getLogger('test_data_sync')
+
+
def append_email_id_header(data_dir):
    """
    Use to append unique autotest id header to emails.

    :param str data_dir: directory containing the emails

    Walks `data_dir` recursively; for every nonempty file whose name is a
    digit sequence followed by a dot (maildir style), inserts or replaces
    an "Autotest-Message-ID" header derived from the file's walk index.
    """
    email_id = 10000
    files = []
    for main_dir, _, files in os.walk(data_dir):
        for i, file_name in enumerate(files):
            file_path = os.path.join(main_dir, file_name)
            if i % 100 == 0:
                log.info("%i done\n", i)

            if not re.match(r"^[0-9]+\.$", file_name):
                continue
            if os.path.getsize(file_path) == 0:
                log.warning("Skipping empty file %s", file_path)
                continue

            log.info("Adding header to email %s", file_path)
            # find the original Message-Id header line (if any); the file is
            # read via context managers so no handle is leaked when the
            # header is missing (the original left the file open in that case)
            id_line = ""
            with open(file_path, "r") as message_file:
                for line in message_file:
                    if re.match("^Message-Id:", line, re.IGNORECASE):
                        id_line = line
                        break
                message_file.seek(0)
                content = message_file.read()

            autotest_id_line = "Autotest-Message-ID: <" + str(email_id + i) + ".63D49232CC@gwt-intranator.m.i2n>\r\n"
            if id_line == "":
                final = autotest_id_line + content
            else:
                final = content.replace(id_line, autotest_id_line)
                log.info("%s >> %s", id_line, autotest_id_line)
            with open(file_path, "w") as out_file:
                out_file.write(final)
    log.info("%i mails replaced.", len(files))
--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+V4_addr_range class
+
+Copyright: Intra2net AG
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
#: default lower bound for address allocation
default_addr_lo = 42
#: default allocation block size
default_addr_range = 42
#: largest value representable as an IPv4 address (2**32 - 1)
uint32_max = 0xffffffff


class V4_addr_range:
    """
    Allocator for a contiguous range of integer (IPv4 host) addresses.

    Maintains a bool allocation map over [lo, lo + range) and hands out
    the lowest free value on request.
    """

    def __init__(self, lo=None, addr_range=None):
        """
        :param int lo: initial offset for allocation (default 42)
        :param int addr_range: allocation block size (default 42)
        :raises: :py:class:`TypeError` if an argument is not an int or the
                 range size is not positive
        :raises: :py:class:`ValueError` if the resulting range is invalid
        """
        self.val_lo = default_addr_lo        # int, initial offset for allocation
        self.val_range = default_addr_range  # int, allocation block size
        self.val_hi = 0                      # int, lo + range (exclusive bound)
        self.var_alloc = {}                  # {int: bool}, allocation map

        if lo is not None:
            if not isinstance(lo, int):
                raise TypeError("Expected value of integer type, got \"%s\" : %s."
                                % (lo, type(lo)))
            self.val_lo = lo
        if addr_range is not None:
            if not isinstance(addr_range, int):
                # report the offending argument; the original interpolated
                # `lo` here by copy-paste mistake
                raise TypeError("Expected value of integer type, got \"%s\" : %s."
                                % (addr_range, type(addr_range)))
            self.val_range = addr_range
        self.fix_range()
        for val in range(self.val_lo, self.val_hi):
            # we use ints as keys since the types are checked elsewhere
            self.var_alloc[val] = False

    def __len__(self):
        """Return the number of currently allocated addresses."""
        return sum(1 for allocated in self.var_alloc.values()
                   if allocated is True)

    def __eq__(self, other):
        """Compare by allocation count against an int or another range."""
        if isinstance(other, int):
            return other == len(self)
        if isinstance(other, self.__class__):
            return len(self) == len(other)
        raise TypeError("Equality comparison of %s with type %s is undefined."
                        % (self.__class__.__name__, type(other)))

    def __getitem__(self, k):
        """Return whether address `k` is allocated (False if out of range)."""
        try:
            return self.var_alloc[k]
        except KeyError:
            return False

    def fix_range(self):
        """Compute ``val_hi`` from ``val_lo``/``val_range``, validating both.

        :raises: :py:class:`TypeError` for a non-positive range size
        :raises: :py:class:`ValueError` for overflow beyond uint32
        """
        if self.val_range <= 0:
            # the original message lacked %-placeholders yet applied % args,
            # raising a formatting TypeError instead of this intended one
            raise TypeError("IP address ranges need to be natural numbers > 0: %d+%d."
                            % (self.val_lo, self.val_range))
        hi = self.val_lo + self.val_range
        if hi <= self.val_lo or hi > uint32_max:
            raise ValueError("Invalid IP address range: %d+%d."
                             % (self.val_lo, self.val_range))
        self.val_hi = hi

    def lo(self):
        """Return the inclusive lower bound of the range."""
        return self.val_lo

    def range(self):
        """Return the size of the range."""
        return self.val_range

    def hi(self):
        """Return the exclusive upper bound of the range."""
        return self.val_hi

    def get(self):
        """
        .get -- Return lowest unallocated number in range and insert as a
        Python integer.

        :raises: :py:class:`IndexError` if the range is exhausted
        """
        if len(self) == self.val_range:
            raise IndexError("Address range (%d) exhausted."
                             % self.val_range)
        for val in range(self.val_lo, self.val_hi):
            if self.var_alloc[val] is False:
                self.var_alloc[val] = True
                return val

    def rm(self, val):
        """
        Remove allocated number from range.

        :param int val: address to deallocate
        :raises: :py:class:`TypeError`/:py:class:`IndexError`/:py:class:`ValueError`
        """
        if not isinstance(val, int):
            raise TypeError("Expected int or short, got %s." % type(val))
        # val_hi is an exclusive bound, so val == val_hi is also out of range
        if val < self.val_lo or val >= self.val_hi:
            raise IndexError("Address %d out of bounds ([%d, %d])."
                             % (val, self.val_lo, self.val_hi))
        if self.var_alloc[val] is False:
            raise ValueError("Address %d was never allocated." % val)
        # mark as free again; the original set True here, which made
        # deallocation a silent no-op
        self.var_alloc[val] = False

    def __str__(self):
        # report the current allocation count, not the (constant) size of
        # the allocation map which would always read as 100%
        return "v4 address range: [%d, %d] = %d+%d, at %d (%.2f%%)" \
               % (self.val_lo,
                  self.val_hi,
                  self.val_lo,
                  self.val_range,
                  len(self),
                  (100.0 * (float(len(self)) / float(self.val_range))))

    def __repr__(self):
        return "(%d, %d, %s)" \
               % (self.val_lo,
                  self.val_range,
                  self.var_alloc.__repr__())
--- /dev/null
+"""
+
+SUMMARY
+------------------------------------------------------
+Utility for HTTP based interaction with the arnied web page.
+
+Copyright: Intra2net AG
+
+
+INTERFACE
+------------------------------------------------------
+
+"""
+
+import re
+import http.client as client
+import urllib.parse as parse
+import logging
+log = logging.getLogger('web_interface')
+
+from arnied_wrapper import accept_licence
+
+
def find_in_form(regex, form="status", escape=False):
    """
    Find a regex in I2N web page's status frame.

    :param str regex: regular expression to find
    :param str form: name of the web form to fetch
    :param bool escape: whether to escape the regex
    :returns: whether the regex was found
    :rtype: bool
    """
    accept_licence()
    data = web_page_request(method="GET", url="/arnie?form=" + form)
    pattern = re.escape(regex) if escape else regex
    if re.search(pattern, data):
        return True
    log.debug("'%s' could not be found in:\n%s", pattern, data)
    return False
+
+
def web_page_request(method="GET", url="/", body=None):
    """
    Send an HTTPS request and return any response data.

    :param str method: GET or POST method for the request
    :param str url: url location within the remote host
    :param body: dictionary to be parsed and added to the url
    :type body: {str, str} or None
    :returns: data from the response if any
    :rtype: str
    :raises: :py:class:`RuntimeError` if the response status is not 200
    """
    body = parse.urlencode(body) if body is not None else ""
    headers = {"Content-Type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}

    conn = client.HTTPSConnection("localhost")
    try:
        conn.request(method, url, body, headers)
        resp = conn.getresponse()
        # use the module logger for consistency with the rest of the file
        log.info("Request status %s and response %s",
                 resp.status, resp.reason)
        if resp.status != 200:
            # the original raised exceptions.TestError, an undefined name
            # (NameError at runtime), and always claimed "POST" failed
            raise RuntimeError("%s request failed." % method)
        data = resp.read().decode()
    finally:
        # close the connection even when the request or check fails
        conn.close()

    return data
--- /dev/null
+#!/usr/bin/env python
+
+"""
.. note:: These unit tests cannot be run as a standalone module from the command line
    because they have internal autotest dependencies. They can however be easily run
    from the test suite command line as a "setup" manual step.
+"""
+import unittest
+import unittest.mock as mock
+import subprocess
+
+import arnied_wrapper
+
+
class DummyCmdOutputMapping(object):
    """
    Mock stand-in for :py:func:`subprocess.run`.

    An instance is constructed with the command line that would have been
    executed and looks up its canned return code and stdout in the
    class-level `asserted_cmds` table.
    """

    # when True, every constructed result reports returncode 1 (failure)
    fail_switch = False
    # full database of known commands and their canned results
    cmds = [
        {"cmd": "pgrep -l -x arnied", "stdout": b"", "returncode": 0},
        {"cmd": 'echo "LICENSE_ACCEPTED,0: \\"1\\"" | set_cnf', "stdout": b"", "returncode": 0},
        {"cmd": '/usr/intranator/bin/arnied_helper --wait-for-program-end GENERATE', "stdout": b"", "returncode": 0},
        # NOTE(review): "PRODIVER" looks like a typo for "PROVIDER" — confirm
        # this fixture output is intentional before relying on it
        {"cmd": 'get_cnf PROVIDER 1', "stdout": b"PRODIVER 1, 'autotest'", "returncode": 0},
        {"cmd": 'tell-connd --online P1', "stdout": b"", "returncode": 0},
        {"cmd": 'get_cnf VIRSCAN_UPDATE_CRON | set_cnf -x', "stdout": b"", "returncode": 0},
        {"cmd": '/usr/intranator/bin/arnied_helper --is-scheduled-or-running GENERATE', "stdout": b"", "returncode": 1},
        {"cmd": '/usr/intranator/bin/arnied_helper --is-scheduled-or-running GENERATE_OFFLINE', "stdout": b"", "returncode": 1},
        {"cmd": 'echo \'VIRSCAN_UPDATE_DNS_PUSH,0:"0"\' |set_cnf', "stdout": b"", "returncode": 0},
        {"cmd": 'rm -f /var/intranator/schedule/UPDATE_VIRSCAN_NODIAL*', "stdout": b"", "returncode": 0},
        {"cmd": '/usr/intranator/bin/arnied_helper --transfer-mail', "stdout": b"", "returncode": 0},
    ]
    # subset of `cmds` that the currently running test expects to see
    asserted_cmds = []

    def __init__(self, cmd, check=False, shell=False):
        # signature mirrors subprocess.run(cmd, check=..., shell=...)
        self.returncode = 0
        self.stdout = ""
        self._get_result(cmd)
        if self.fail_switch:
            self.returncode = 1

    def __str__(self):
        return "status %i, stdout %s" % (self.returncode, self.stdout)

    def _get_result(self, cmd):
        # look up `cmd` in the asserted table, mutating returncode/stdout;
        # an unknown command is a test authoring error, hence the ValueError
        for dummy_cmd in self.asserted_cmds:
            if dummy_cmd['cmd'] == cmd:
                self.returncode = dummy_cmd['returncode']
                self.stdout = dummy_cmd['stdout']
                return
        raise ValueError("Could not locate the command '%s' among the known answers "
                         "for the universe" % cmd)
+
+
@mock.patch('src.arnied_wrapper.subprocess.run', DummyCmdOutputMapping)
class ArniedWrapperTest(unittest.TestCase):
    """Unit tests for arnied_wrapper with subprocess.run mocked out."""
    # NOTE(review): the patch target is 'src.arnied_wrapper...' while the
    # module is imported above as plain 'arnied_wrapper' — confirm both
    # names resolve to the same module object, otherwise the patch is inert.

    def setUp(self):
        # reset shared mock state so one test cannot leak canned commands
        # or the failure switch into the next
        DummyCmdOutputMapping.fail_switch = False
        DummyCmdOutputMapping.asserted_cmds = []
        self.cmd_db = DummyCmdOutputMapping.cmds

    def test_verify_running(self):
        DummyCmdOutputMapping.asserted_cmds = self.cmd_db[0:1]
        arnied_wrapper.verify_running(timeout=1)
        DummyCmdOutputMapping.fail_switch = True
        with self.assertRaises(RuntimeError):
            arnied_wrapper.verify_running(timeout=1)

    def test_accept_license(self):
        DummyCmdOutputMapping.asserted_cmds = self.cmd_db[1:3]
        arnied_wrapper.accept_licence()
        # make sure an error is ignored since license might
        # already be accepted
        DummyCmdOutputMapping.fail_switch = True
        arnied_wrapper.accept_licence()

    def test_go_online(self):
        DummyCmdOutputMapping.asserted_cmds = self.cmd_db[3:5]
        arnied_wrapper.go_online(1)
        # TODO: for some reason failing to go online doesn't raise
        # an error which could be very misleading during debugging
        # DummyCmdOutputMapping.fail_switch = True
        # with self.assertRaises(Exception):
        #     arnied_wrapper.go_online(1)

    def test_disable_virscan(self):
        DummyCmdOutputMapping.asserted_cmds = self.cmd_db[5:10]
        arnied_wrapper.disable_virscan()

    def test_email_transfer(self):
        DummyCmdOutputMapping.asserted_cmds = self.cmd_db[10:11]
        arnied_wrapper.email_transfer()
--- /dev/null
+#!/usr/bin/env python
+
+"""Unit test for build_cnfvar.py"""
+import unittest
+import os
+from src.cnfline.build_cnfvar import BuildCnfVar
+
+TEST_CONFIG_FILENAME = 'some_config.cnf'
+
+
class BuildCnfVarTest(unittest.TestCase):
    """Unit tests for BuildCnfVar construction, lookup and serialization."""

    def tearDown(self):
        # remove the config file a test may have written to the CWD
        if os.path.isfile(TEST_CONFIG_FILENAME):
            os.unlink(TEST_CONFIG_FILENAME)

    def test_simple(self):
        # constructor arguments map 1:1 onto the public attributes
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)

        self.assertEqual('FOOBAR', cnfvar.name)
        self.assertEqual(123, cnfvar.instance)
        self.assertEqual('some_data', cnfvar.data)
        self.assertEqual(10, cnfvar.line_no)

    def test_find_child_line_no(self):
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        new_child_line_no = cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')

        self.assertEqual(new_child_line_no,
                         cnfvar.find_child_line_no('FOOBAR_CHILD'))

        # Should not be found
        self.assertEqual(0, cnfvar.find_child_line_no('FOOBAR_SPACESHARK'))

    def test_find_free_line_no(self):
        # free line numbers advance by one per added child
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        self.assertEqual(11, cnfvar.find_free_line_no())

        cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')
        self.assertEqual(12, cnfvar.find_free_line_no())

    def test_find_free_child_instance(self):
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')

        self.assertEqual(0, cnfvar.find_free_child_instance('FOOBAR_OTHER'))
        self.assertEqual(1, cnfvar.find_free_child_instance('FOOBAR_CHILD'))

    def test_update_cnf(self):
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')

        # Update existing cnfvar
        cnfvar.update_cnf('FOOBAR_CHILD', 0, 'abc')

        self.assertEqual('10 FOOBAR,123: "some_data"\n'
                         '11 (10) FOOBAR_CHILD,0: "abc"\n', str(cnfvar))

    def test_string_output(self):
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')
        cnfvar.update_cnf('FOOBAR_CHILD', 0, 'abc')
        cnfvar.add_cnf('FOOBAR_CHILD', 1, 'more data')

        self.assertEqual('10 FOOBAR,123: "some_data"\n'
                         '11 (10) FOOBAR_CHILD,0: "abc"\n'
                         '12 (10) FOOBAR_CHILD,1: "more data"\n', str(cnfvar))

    def test_del_cnf(self):
        # deleting by name removes all matching children
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')
        cnfvar.add_cnf('FOOBAR_CHILD', 1, 'more data')

        cnfvar.del_cnf('FOOBAR_CHILD')

        self.assertEqual('10 FOOBAR,123: "some_data"\n', str(cnfvar))

    def test_add_different_parent_no(self):
        # a child can be attached to another child instead of the root
        cnfvar = BuildCnfVar('FOOBAR', 123, 'some_data', 10)
        sub_parent = cnfvar.add_cnf('FOOBAR_CHILD', 0, 'xxx')
        cnfvar.add_cnf('FOOBAR_OTHER', 0, 'foo')
        cnfvar.add_cnf('FOOBAR_CHILD_TYPE', 1, 'spaceshark', sub_parent)
        cnfvar.add_cnf('FOOBAR_OTHER2', 0, 'foo2')

        self.assertEqual('10 FOOBAR,123: "some_data"\n'
                         '11 (10) FOOBAR_CHILD,0: "xxx"\n'
                         '12 (10) FOOBAR_OTHER,0: "foo"\n'
                         '13 (11) FOOBAR_CHILD_TYPE,1: "spaceshark"\n'
                         '14 (10) FOOBAR_OTHER2,0: "foo2"\n', str(cnfvar))

    def test_add_defaults(self):
        cnfvar = BuildCnfVar('FOOBAR', 0, 'some_data')

        defaults = {'FOOBAR_SOMETHING': 'abc',
                    'FOOBAR_MODE': 'optimize'}
        cnfvar.add_defaults(defaults)

        self.assertEqual('1 FOOBAR,0: "some_data"\n'
                         '2 (1) FOOBAR_SOMETHING,0: "abc"\n'
                         '3 (1) FOOBAR_MODE,0: "optimize"\n', str(cnfvar))

    def test_mark_as_own_parent(self):
        cnfvar = BuildCnfVar('FOOBAR_SOMETHING', 123, 'some_data', 10)

        line_no = cnfvar.add_cnf('FOOBAR_OTHER', 0, 'xxx')
        cnfvar.mark_as_own_parent(line_no)

        # the "(parent)" column disappears for a self-parented line
        self.assertEqual('10 FOOBAR_SOMETHING,123: "some_data"\n'
                         '11 FOOBAR_OTHER,0: "xxx"\n', str(cnfvar))

    def test_save(self):
        # save() apparently writes below a "debug" subdirectory
        cnfvar = BuildCnfVar('FOOBAR', 0, 'some_data')

        defaults = {'FOOBAR_SOMETHING': 'abc',
                    'FOOBAR_MODE': 'optimize'}
        cnfvar.add_defaults(defaults)

        os.mkdir("debug")

        cnfvar.save(TEST_CONFIG_FILENAME)
        full_filename = os.path.join("debug", TEST_CONFIG_FILENAME)
        # NOTE(review): `input` shadows the builtin; cleanup below is not
        # exception-safe — a failing assert leaves the debug dir behind
        with open(full_filename, 'r') as input:
            read_back = input.read()

        self.assertEqual('1 FOOBAR,0: "some_data"\n'
                         '2 (1) FOOBAR_SOMETHING,0: "abc"\n'
                         '3 (1) FOOBAR_MODE,0: "optimize"\n', read_back)

        os.unlink(full_filename)


        os.rmdir("debug")
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null
+#!/usr/bin/env python
+
+import unittest
+from src.cnfline.cnfline import CnfLine
+
+
class CnfLineTest(unittest.TestCase):
    """Unit tests for CnfLine construction, validation and formatting."""

    def test_simple(self):
        # constructor arguments map 1:1 onto the public attributes
        line = CnfLine('MY_NAME', 123, 'my_data', 888, 456)

        self.assertEqual('MY_NAME', line.name)
        self.assertEqual(123, line.instance)
        self.assertEqual('my_data', line.data)
        self.assertEqual(888, line.line_no)
        self.assertEqual(456, line.parent_line_no)

    def test_deny_empty_name(self):
        # an empty variable name is rejected at construction time
        with self.assertRaises(ValueError):
            CnfLine('')

    def test_deny_lineno_zero(self):
        # line numbers start at 1; zero is invalid
        with self.assertRaises(ValueError):
            CnfLine('foobar', 0, 'some_data', 0)

    def test_str_output_parent(self):
        # parent_line_no 0 means top-level: no "(parent)" column printed
        line = CnfLine('MY_NAME', 123, 'my_data', 10, 0)
        self.assertEqual('10 MY_NAME,123: "my_data"', str(line))

    def test_str_output_child(self):
        # a child line carries its parent line number in parentheses
        line = CnfLine('MY_NAME', 123, 'my_data', 10, 456)
        self.assertEqual('10 (456) MY_NAME,123: "my_data"', str(line))
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null
+#!/usr/bin/env python
+
+import unittest
+import os
+
+import mail_utils
+
+
+class MailUtilsTest(unittest.TestCase):
+
    def setUp(self):
        # fresh source/target maildirs in the CWD for every test
        os.mkdir("srcdir")
        os.mkdir("trgdir")
        self.validator = mail_utils.MailValidator("./srcdir", "./trgdir")

    def tearDown(self):
        # the directories must be empty again by now; tests clean up
        # their own files in finally blocks
        os.rmdir("srcdir")
        os.rmdir("trgdir")

    def test_validator(self):
        # the validator stores its paths and defaults to the basic
        # email comparison method
        self.assertEqual(self.validator.source_path, "./srcdir")
        self.assertEqual(self.validator.target_path, "./trgdir")
        self.assertEqual(self.validator._compare_emails_method,
                         self.validator._default_compare_emails)

    def test_verify_emails_empty(self):
        # verifying empty source/target lists must succeed trivially
        self.validator.verify_emails([], [], 30)
+
    def test_verify_email_id(self):
        # two copies of the same message, identified by the
        # Autotest-Message-ID header
        email_string = ('From: "House Baratheon" <baratheon@got.you>\n'
                        'To: "House Lannister" <lannister@got.you>\n'
                        'Subject: The iron throne\n'
                        'Date: Wed, 28 Mar 1345 10:47:17 +0200\n'
                        'Autotest-Message-ID: <20120328084717.63D49232CC@gwt-intranator.m.i2n>\n'
                        'Importance: Normal\n')

        with open("srcdir/.123", 'w') as email:
            email.write(email_string)
        with open("trgdir/.5267", 'w') as email:
            email.write(email_string)

        try:
            # test defaults
            matched_target = self.validator.verify_email_id(".123", [], 30)
            self.assertEqual(matched_target, ".5267")

            # test main: matching works in both directions via in_target
            matched_target = self.validator.verify_email_id(".123", [".5267"],
                                                            30, in_target=True)
            self.assertEqual(matched_target, ".5267")
            matched_source = self.validator.verify_email_id(".5267", [".123"],
                                                            30, in_target=False)
            self.assertEqual(matched_source, ".123")

            # test mismatch: an empty target has no ID header to match
            with open("trgdir/.5267", 'w') as email:
                email.write("")
            with self.assertRaises(mail_utils.MissingEmailID):
                self.validator.verify_email_id(".123", [".5267"], 30)

        finally:
            # keep tearDown's rmdir calls working even on failure
            os.unlink("srcdir/.123")
            os.unlink("trgdir/.5267")
+
+ def test_verify_emails(self):
+ email_string = ('From: "House Baratheon" <baratheon@got.you>\n'
+ 'To: "House Lannister" <lannister@got.you>\n'
+ 'Subject: The iron throne\n'
+ 'Date: Wed, 28 Mar 1345 10:47:17 +0200\n'
+ 'Autotest-Message-ID: <20120328084717.63D49232CC@gwt-intranator.m.i2n>\n'
+ 'Importance: Normal\n')
+
+ with open("srcdir/.123", 'w') as email:
+ email.write(email_string)
+ with open("trgdir/.5267", 'w') as email:
+ email.write(email_string)
+
+ try:
+ self.validator.verify_emails([".123"], [".5267"], 30)
+
+ # different sender is already too different for the basic email
+ # comparison (tolerance 1.0)
+ with open("trgdir/.5267", 'w') as email:
+ email.write(email_string.replace("Baratheon", "Targaryen"))
+ with self.assertRaises(mail_utils.EmailMismatch):
+ self.validator.verify_emails([".123"], [".5267"], 30)
+
+ # test comparison by basic headers
+ self.validator.compare_emails_method = "headers"
+ with self.assertRaises(mail_utils.EmailMismatch):
+ self.validator.verify_emails([".123"], [".5267"], 30)
+ with open("trgdir/.5267", 'w') as email:
+ email.write(email_string.replace("Importance", "High"))
+ self.validator.verify_emails([".123"], [".5267"], 30)
+
+ finally:
+ os.unlink("srcdir/.123")
+ os.unlink("trgdir/.5267")
+
+ def test_assert_header(self):
+ email_string = ('From: "House Baratheon" <baratheon@got.you>\n'
+ 'To: "House Lannister" <lannister@got.you>\n'
+ 'Subject: The iron throne\n'
+ 'Date: Wed, 28 Mar 1345 10:47:17 +0200\n'
+ 'Autotest-Message-ID: <20120328084717.63D49232CC@gwt-intranator.m.i2n>\n'
+ 'Importance: Normal\n')
+
+ with open("trgdir/.123", 'w') as email:
+ email.write(email_string)
+ with open("trgdir/.5267", 'w') as email:
+ email.write(email_string.replace("Baratheon", "Targaryen"))
+
+ try:
+ self.validator.assert_header([".123", ".5267"], "Subject",
+ [], [], 30)
+ self.validator.assert_header([".123", ".5267"], "Subject",
+ ["The iron throne"], ["The wooden throne"], 30)
+ self.validator.assert_header([".123"], "From",
+ ["House Baratheon"], ["House Targaryen"], 30)
+ self.validator.assert_header([".5267"], "From",
+ ["House Targaryen"], ["House Baratheon"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".123", ".5267"], "Subject",
+ ["The wooden throne"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".123", ".5267"], "Subject",
+ [], ["The iron throne"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".123"], "From",
+ ["House Targaryen"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".123"], "From",
+ [], ["House Baratheon"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".5267"], "From",
+ ["House Baratheon"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailHeader):
+ self.validator.assert_header([".5267"], "From",
+ [], ["House Targaryen"], 30)
+
+ finally:
+ os.unlink("trgdir/.123")
+ os.unlink("trgdir/.5267")
+
+ def test_assert_content(self):
+ email_string = ('From: "House Baratheon" <baratheon@got.you>\n'
+ 'To: "House Lannister" <lannister@got.you>\n'
+ 'Subject: The iron throne\n'
+ 'Date: Wed, 28 Mar 1345 10:47:17 +0200\n'
+ 'Autotest-Message-ID: <20120328084717.63D49232CC@gwt-intranator.m.i2n>\n'
+ 'Content-Type: multipart/alternative;\n'
+ ' boundary="----=_NextPart_000_0027_01CD0CEA.77D94260\n'
+ '\n'
+ 'This is a multi-part message in MIME format.\n'
+ '\n'
+ '------=_NextPart_000_0027_01CD0CEA.77D94260\n'
+ 'Content-Type: text/plain;\n'
+ ' charset="utf-8"\n'
+ 'Content-Transfer-Encoding: 8bit\n'
+ '\n'
+ 'This is a life threat message.\n'
+ '\n'
+ '------=_NextPart_000_0027_01CD0CEA.77D94260\n'
+ 'Content-Type: text/html;\n'
+ ' boundary="----=_NextPart_000_0015_01CD0CDA.21206F30";\n'
+ ' charset="utf-8"\n'
+ 'Content-Transfer-Encoding: quoted-printable\n'
+ '\n'
+ 'This is a life threat message.\n'
+ '\n'
+ '------=_NextPart_000_0027_01CD0CEA.77D94260--\n')
+
+ with open("trgdir/.123", 'w') as email:
+ email.write(email_string)
+ with open("trgdir/.5267", 'w') as email:
+ email.write(email_string.replace("life threat", "friendly"))
+
+ try:
+ self.validator.assert_content([".123", ".5267"], "multipart/alternative",
+ [], [], 30)
+ self.validator.assert_content([".123", ".5267"], "multipart/alternative",
+ ["message"], ["war"], 30)
+ self.validator.assert_content([".123"], "multipart/alternative",
+ ["life threat", "message"], ["friendly"], 30)
+ self.validator.assert_content([".5267"], "multipart/alternative",
+ ["friendly", "message"], ["life threat"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".123", ".5267"], "multipart/alternative",
+ ["war"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".123", ".5267"], "multipart/alternative",
+ [], ["message"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".123"], "multipart/alternative",
+ [], ["life threat"], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".123"], "multipart/alternative",
+ ["friendly"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".5267"], "multipart/alternative",
+ ["life threat"], [], 30)
+ with self.assertRaises(mail_utils.InvalidEmailContent):
+ self.validator.assert_content([".5267"], "multipart/alternative",
+ [], ["friendly"], 30)
+
+ finally:
+ os.unlink("trgdir/.123")
+ os.unlink("trgdir/.5267")
--- /dev/null
+#!/usr/bin/env python
+# This Python file uses the following encoding: utf-8
+
+import unittest
+from traceback import print_exc
+from tempfile import mkstemp
+import os
+
+from src.simple_cnf import SimpleCnf
+
+# Sample cnf var dump used as the shared fixture: three USER parent vars
+# with their children, one BACKUP_CRON var with a child, three flat
+# BACKUP_* vars and one TESTVAR -- 8 top-level vars in total, matching
+# the counts asserted by SimpleCnfTest below.
+TEST_SET = """
+1 USER,1: "admin"
+2 (1) USER_DISABLED,0: "0"
+3 (1) USER_FULLNAME,0: "Administrator"
+4 (1) USER_GROUPWARE_FOLDER_CALENDAR,0: "INBOX/Kalender"
+5 (1) USER_GROUPWARE_FOLDER_CONTACTS,0: "INBOX/Kontakte"
+6 (1) USER_GROUPWARE_FOLDER_DRAFTS,0: "INBOX/Entwürfe"
+7 (1) USER_GROUPWARE_FOLDER_NOTES,0: "INBOX/Notizen"
+8 (1) USER_GROUPWARE_FOLDER_OUTBOX,0: "INBOX/Gesendete Elemente"
+9 (1) USER_GROUPWARE_FOLDER_TASKS,0: "INBOX/Aufgaben"
+10 (1) USER_GROUPWARE_FOLDER_TRASH,0: "INBOX/Gelöschte Elemente"
+11 (1) USER_GROUP_MEMBER_REF,0: "1"
+12 (1) USER_GROUP_MEMBER_REF,1: "2"
+13 (1) USER_LOCALE,0: ""
+14 (1) USER_PASSWORD,0: "test1234"
+15 (1) USER_TRASH_DELETEDAYS,0: "30"
+16 USER,2: "test"
+17 (16) USER_DISABLED,0: "0"
+18 (16) USER_EMAIL_VACATION,0: "ON"
+19 (16) USER_EMAIL_VACATION_AUTOMATIC_END,0: ""
+20 (16) USER_EMAIL_VACATION_AUTOMATIC_START,0: ""
+21 (16) USER_EMAIL_VACATION_AUTOMATIC_STATE,0: "UNKNOWN"
+22 (16) USER_EMAIL_VACATION_REPLYDAYS,0: "1"
+23 (16) USER_EMAIL_VACATION_TEXT,0: "Bin im Urlaub"
+24 (16) USER_FULLNAME,0: "testnutzer"
+25 (16) USER_GROUPWARE_FOLDER_CALENDAR,0: "INBOX/Kalender"
+26 (16) USER_GROUPWARE_FOLDER_CONTACTS,0: "INBOX/Kontakte"
+27 (16) USER_GROUPWARE_FOLDER_DRAFTS,0: "INBOX/Entwürfe"
+28 (16) USER_GROUPWARE_FOLDER_NOTES,0: "INBOX/Notizen"
+29 (16) USER_GROUPWARE_FOLDER_OUTBOX,0: "INBOX/Gesendete Elemente"
+30 (16) USER_GROUPWARE_FOLDER_TASKS,0: "INBOX/Aufgaben"
+31 (16) USER_GROUPWARE_FOLDER_TRASH,0: "INBOX/Gelöschte Elemente"
+32 (16) USER_GROUP_MEMBER_REF,0: "2"
+33 (16) USER_GROUP_MEMBER_REF,1: "3"
+34 (16) USER_LOCALE,0: ""
+35 (16) USER_PASSWORD,0: "test1234"
+36 (16) USER_TRASH_DELETEDAYS,0: "30"
+37 (16) USER_WEBMAIL_MESSAGES_PER_PAGE,0: "25"
+38 (16) USER_WEBMAIL_SIGNATURE,0: ""
+39 USER,3: "mueller"
+40 (39) USER_DISABLED,0: "0"
+41 (39) USER_FULLNAME,0: "Kärößü"
+42 (39) USER_GROUPWARE_FOLDER_CALENDAR,0: "INBOX/Kalender"
+43 (39) USER_GROUPWARE_FOLDER_CONTACTS,0: "INBOX/Kontakte"
+44 (39) USER_GROUPWARE_FOLDER_DRAFTS,0: "INBOX/Entwürfe"
+45 (39) USER_GROUPWARE_FOLDER_NOTES,0: "INBOX/Notizen"
+46 (39) USER_GROUPWARE_FOLDER_OUTBOX,0: "INBOX/Gesendete Elemente"
+47 (39) USER_GROUPWARE_FOLDER_TASKS,0: "INBOX/Aufgaben"
+48 (39) USER_GROUPWARE_FOLDER_TRASH,0: "INBOX/Gelöschte Elemente"
+49 (39) USER_GROUP_MEMBER_REF,0: "2"
+50 (39) USER_GROUP_MEMBER_REF,1: "3"
+51 (39) USER_LOCALE,0: ""
+52 (39) USER_PASSWORD,0: "grmpfl"
+53 (39) USER_TRASH_DELETEDAYS,0: "30"
+54 (39) USER_WEBMAIL_MESSAGES_PER_PAGE,0: "25"
+55 (39) USER_WEBMAIL_SIGNATURE,0: ""
+56 BACKUP_COMPRESS_ENABLE,0: "1"
+57 BACKUP_CRON,0: "0123456"
+58 (57) BACKUP_CRON_BEGIN,0: "7200"
+59 BACKUP_ENCRYPT_ENABLE,0: "0"
+60 BACKUP_ENCRYPT_PASSWORD,0: ""
+61 TESTVAR,0: "test"
+"""
+
+
+class SimpleCnfTest(unittest.TestCase):
+ """ The one and only test case in this module `-->` see module doc """
+
+ # setup and cleanup
+
+ import_file = None
+ cnf = None
+
+ @classmethod
+ def _import_cnf(cls):
+ """ import conf var data from temp file """
+ cls.cnf = SimpleCnf()
+ cls.cnf.append_file(cls.import_file)
+
+ @classmethod
+ def setUpClass(cls):
+ """ before running tests: write conf var string to temp file """
+ try:
+ sys_file_descriptor, cls.import_file = mkstemp(text=True)
+ os.close(sys_file_descriptor)
+ with open(cls.import_file, 'wt') as file_handle:
+ file_handle.write(TEST_SET)
+ print('created temp file {0}'.format(cls.import_file))
+ except Exception:
+ print('exception creating temp file:')
+ print_exc()
+
+ # clean up
+ cls.tearDownClass()
+
+ # re-raise
+ raise
+
+ cls._import_cnf()
+
+ @classmethod
+ def tearDownClass(cls):
+ """ after all tests have run, delete temp file """
+ if cls.import_file is not None:
+ try:
+ os.unlink(cls.import_file)
+ print('deleted temp file {0}'.format(cls.import_file))
+ except Exception:
+ print('exception deleting temp file:')
+ print_exc()
+
+ # tests
+
+ def test_eq(self):
+ """ test method :py:meth:`SimpleCnf.__eq__` """
+ self.assertEqual(self.cnf[61], self.cnf[61])
+ self.assertEqual(self.cnf['testvar'], self.cnf[61])
+ self.assertEqual(self.cnf, self.cnf)
+ self.assertNotEqual(self.cnf[56], self.cnf[57])
+
+ def test_len(self):
+ """ test method :py:meth:`SimpleCnf.__len__` """
+ self.assertEqual(len(self.cnf), 8)
+
+ def test_getitem(self):
+ """ test method :py:meth:`SimpleCnf.__item__` """
+ self.assertEqual(len(self.cnf['user']), 3)
+ self.assertEqual(self.cnf['USER'], self.cnf['user'])
+ self.assertEqual(len(self.cnf['backup_encrypt_password']), 1)
+ self.assertEqual(len(self.cnf[12232]), 0)
+ self.assertEqual(len(self.cnf[55]), 0)
+ self.assertEqual(len(self.cnf[61]), 1)
+ self.assertEqual(len(self.cnf['user_webmail_signature']), 0)
+
+ def test_get(self):
+ """ test method :py:meth:`SimpleCnf.get` """
+ self.assertEqual(len(self.cnf.get()), 8)
+ self.assertEqual(len(self.cnf.get(name='user')), 3)
+
+ def test_get_value(self):
+ """ test method :py:meth:`SimpleCnf.get_value` """
+
+ with self.assertRaises(ValueError):
+ self.cnf.get_value()
+
+ self.assertEqual(self.cnf[56].get_value(), '1')
+ self.assertEqual(self.cnf[61].get_value(), 'test')
+
+ def test_get_children(self):
+ """ test method :py:meth:`SimpleCnf.get_children` """
+ with self.assertRaises(ValueError):
+ self.cnf.get_children()
+
+ self.assertEqual(len(self.cnf.get(name='user', value='mueller')
+ .get_children()), 16)
+ self.assertEqual(len(self.cnf[57].get_children()), 1)
+ self.assertEqual(self.cnf[57].get_children().get_value(), '7200')
+
+ def test_add_alone(self):
+ """ test method :py:meth:`SimpleCnf.add` on empty conf """
+ # do not use self.cnf since that would void other test methods
+ cnf = SimpleCnf()
+ self.assertEqual(len(cnf), 0)
+ cnf.add('new_var', 'new_value')
+ self.assertEqual(len(cnf), 1)
+ cnf_var = cnf.get(name='new_var').get_single_dict()
+ self.assertEqual(cnf_var['data'], 'new_value')
+ self.assertIsInstance(cnf_var['data'], str)
+ self.assertEqual(cnf_var['number'], 1)
+
+ def test_add_on_top(self):
+ """ test method :py:meth:`SimpleCnf.add` on regular conf """
+ cnf = SimpleCnf()
+ self.assertEqual(len(cnf), 0)
+ cnf.append_file(self.import_file)
+ self.assertEqual(len(cnf), 8)
+
+ cnf.add('new_var', 'new_value')
+ self.assertEqual(len(cnf), 9)
+ cnf_var = cnf.get(name='new_var').get_single_dict()
+ self.assertEqual(cnf_var['data'], 'new_value')
+ self.assertIsInstance(cnf_var['data'], str)
+ self.assertEqual(cnf_var['number'], 62)
+
+ def test_add_with_children(self):
+ """ test method :py:meth:`SimpleCnf.add` by adding var with children"""
+ # load config
+ cnf = SimpleCnf()
+ cnf.append_file(self.import_file)
+ self.assertEqual(len(cnf['user']), 3)
+
+ # get a certain user with all its sub config
+ user_cnf = cnf.get(name='user', value='admin')
+ self.assertEqual(len(user_cnf), 1)
+
+ # copy as new user with different name but same children
+ cnf.add('user', 'admin2', children=user_cnf.get_children())
+ self.assertEqual(len(cnf['user']), 4)
+ self.assertEqual(len(cnf.get(name='user', value='admin2')), 1)
+ self.assertEqual(len(cnf.get(name='user', value='admin2').get_children()), 14)
+
+
+# allow invoking this test module directly as a script
+if __name__ == '__main__':
+    unittest.main()
--- /dev/null
+#!/usr/bin/env python
+
+from __future__ import absolute_import
+
+import unittest
+
+from src import v4_addr_range
+
+
+# fixture values shared by the range tests below
+specific_range_lo = 13
+specific_range_large = 37
+# a range of two addresses that is quickly exhausted (see test_exhaust_range)
+specific_range_short = 2
+# extreme size expected to be rejected by the constructor (see test_overflow)
+specific_range_extreme = 0xffffffff
+# negative size is not a natural number (see test_nonnatural)
+specific_range_invalid = -1
+
+
+class V4_addr_range_test(unittest.TestCase):
+
+ def test_create_default_range(self):
+ r = v4_addr_range.V4_addr_range()
+ self.assertIsNotNone(r)
+
+ def test_create_specific_range(self):
+ r = v4_addr_range.V4_addr_range(
+ specific_range_lo, specific_range_large)
+ self.assertIsNotNone(r)
+
+ def test_get_from_range(self):
+ r = v4_addr_range.V4_addr_range(specific_range_lo)
+ a = r.get()
+ self.assertEqual(a, specific_range_lo)
+ b = r.get()
+ self.assertEqual(b, specific_range_lo + 1)
+
+ def test_exhaust_range(self):
+ r = v4_addr_range.V4_addr_range(
+ specific_range_lo, specific_range_short)
+ self.assertIsNotNone(r.get())
+ self.assertIsNotNone(r.get())
+ # I have absolutely no idea why the following should work. The
+ # ``.assertRaises`` method should test exceptions for equality, yet
+ # ``error.TestError != Exception``. Whatever.
+ self.assertRaises(Exception, r.get)
+
+ def test_overflow(self):
+ self.assertRaises(Exception,
+ v4_addr_range.V4_addr_range,
+ specific_range_lo,
+ specific_range_extreme)
+
+ def test_nonnatural(self):
+ self.assertRaises(Exception,
+ v4_addr_range.V4_addr_range,
+ addr_range=specific_range_invalid)
+ self.assertRaises(Exception,
+ v4_addr_range.V4_addr_range,
+ addr_range=0)
+
+ def test_type_inconsistency(self):
+ self.assertRaises(TypeError,
+ v4_addr_range.V4_addr_range,
+ lo="<garbage>")
+ self.assertRaises(TypeError,
+ v4_addr_range.V4_addr_range,
+ addr_range="<garbage>")
+
+ def test_bound_methods(self):
+ r = v4_addr_range.V4_addr_range(
+ specific_range_lo, specific_range_large)
+ self.assertEquals(r.get(), specific_range_lo)
+ self.assertEquals(r.lo(), specific_range_lo)
+ self.assertEquals(r.range(), specific_range_large)
+ self.assertEquals(r.hi(), specific_range_lo + specific_range_large)
+
+ def test_access(self):
+ r = v4_addr_range.V4_addr_range(
+ specific_range_lo, specific_range_short)
+ self.assertEquals(r.get(), specific_range_lo)
+ self.assertTrue(r[specific_range_lo])
+ self.assertFalse(r[specific_range_lo + 42])
+
+ def test_len(self):
+ r = v4_addr_range.V4_addr_range(
+ specific_range_lo, specific_range_short)
+ _ = r.get()
+ self.assertEquals(len(r), 1)
+ _ = r.get()
+ self.assertEquals(len(r), 2)
+
+# allow invoking this test module directly as a script
+if __name__ == '__main__':
+    unittest.main()