diff --git a/.coveragerc b/.coveragerc index 5e7d51a..e137868 100644 --- a/.coveragerc +++ b/.coveragerc @@ -17,7 +17,7 @@ exclude_lines = if 0: if __name__ == .__main__.: source = - ./certcheck/ + ./check_cert/ [html] diff --git a/.gitignore b/.gitignore index 9022dbd..39160a1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,8 @@ *.pyc /.coverage /build* -certcheck.egg-info/* +check_cert.egg-info/* debian/files +*.swp +debian/check-cert* +.pybuild/ diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..d98f46e --- /dev/null +++ b/.travis.yml @@ -0,0 +1,12 @@ +language: python +python: + - "2.7" +install: "pip install -r requirements.txt --use-mirrors" +script: "./run_tests.py" +before_script: + - wget https://github.com/vespian/pymisc/archive/1.2.0.tar.gz -O /tmp/pymisc-1.2.0.tar.gz + - tar -xvf /tmp/pymisc-1.2.0.tar.gz -C /tmp/ + - cd /tmp/pymisc-1.2.0/ + - pip install -r ./requirements.txt --use-mirrors + - ./setup.py install + - cd $TRAVIS_BUILD_DIR diff --git a/README.md b/README.md index 9ebaf8c..0622787 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,32 @@ -# _Certchecker_ +# _check_cert_ -_Certchecker is a certificate expiration check capable of scanning GIT repos -and sending data on expiring/expired certificates back to the monitoring system -(currently only Riemann)._ +[![Build +Status](https://travis-ci.org/vespian/check-cert.svg?branch=master)](https://travis-ci.org/vespian/check-cert) + +_check_cert is a certificate expiration check capable of scanning GIT repos +and sending data on expiring/expired certificates back to the monitoring system._ ## Project Setup -In order to run certchecker you need to following dependencies installed: -- Bernhard - Riemann client library (https://github.com/banjiewen/bernhard) -- Google's protobuf library -- yaml bindings for python (http://pyyaml.org/) -- Dulwich - python implementation of GIT (https://www.samba.org/~jelmer/dulwich/docs/) -- ssh command in your PATH -- argparse library +In 
order to run check_cert you need to have the following dependencies installed: +* Dulwich - python implementation of GIT (https://www.samba.org/~jelmer/dulwich/docs/) +* *ssh* command in your PATH +* argparse library +* pyOpenSSL (https://launchpad.net/pyopenssl/) +* pymisc (https://github.com/vespian/pymisc) +* python 2.6 or 2.7 +* dulwich library You can also use debian packaging rules from debian/ directory to build a deb package. +Unfortunately, dulwich library is broken on wheezy: + +https://bugs.launchpad.net/dulwich/+bug/1326213 + +so the script depends on the newest version (0.9.7) even though 0.8.5 is +sufficient when it comes to functionality. + ## Usage ### Configuration @@ -25,7 +35,7 @@ Actions taken by the script are determined by its command line and the configuration file. The command line has a build-in help system: ``` -usage: certcheck [-h] [--version] -c CONFIG_FILE [-v] [-s] [-d] +usage: check_cert [-h] [--version] -c CONFIG_FILE [-v] [-s] [-d] Simple certificate expiration check @@ -45,9 +55,12 @@ The configuration file is a plain YAML document. 
It's syntax is as follows: ``` --- -lockfile: /tmp/certcheck.lock -warn_treshold: 30 -critical_treshold: 15 +#Global +lockfile: /tmp/check_cert.lock + +#Riemann related: +riemann_enabled: False +riemann_ttl: 60 riemann_hosts: static: - 192.168.122.16:5555:udp @@ -57,19 +70,28 @@ riemann_hosts: - _riemann._udp riemann_tags: - production - - class::certcheck -repo_host: git.example.net + - class::check_cert + +#Nagios related: +nrpe_enabled: True + +#Repository related: +repo_host: git.example.com repo_port: 22 -repo_url: /example-repo +repo_url: /sample-repo repo_masterbranch: refs/heads/production -repo_localdir: /tmp/certcheck-temprepo -repo_user: certcheck -repo_pubkey: ./certcheck_id_rsa - # format - dict, hash as a key, and value as a comment - # sha1sum ./certificate_to_be_ignored +repo_localdir: /tmp/check_cert-temprepo +repo_user: check_cert +repo_pubkey: /home/vespian/work/tmp_tickets/cert_check/check_cert_id_rsa + +#Check related: +warn_treshold: 30 +critical_treshold: 15 +# sha1sum ./certificate_to_be_ignored +# format - dict, hash as a key, and value as a comment ignored_certs: - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: "some VPN key" - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: "some unused certificate" + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: "cert a" + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: "cert b" ``` ### Operation @@ -84,9 +106,9 @@ The connection is established using the $repo_pubkey pubkey, and the $repo_user itself should have very limited privileges. Next, the repository is scanned in search of files ending with one of the -certcheck:CERTIFICATE_EXTENSIONS extensions. Currently all possible +check_cert:CERTIFICATE_EXTENSIONS extensions. Currently all possible certificate extensions are listed but only ['pem', 'crt', 'cer'] are currently -supported (see certcheck:get_cert_expiration method). For the remaing ones +supported (see check_cert:get_cert_expiration method). For the remaing ones only a warning is issued. 
For each certificate found a sha1sum is computed, and if the result is found in @@ -99,9 +121,9 @@ $warn_tresh but more than $critical_tresh - a "warning" partial status is gene- rated. Unsuported certificate yields an 'unknown' state and expired ones of course the 'critical'. -All the 'partial status' updates are agregated and each message can only ele- -vate up the final status of the metric send to Riemann. Currently, the hierar- -chy is as follows: +All the 'partial status' updates are aggregated by the 'pymisc' library and +each message can only elevate up the final status of the metric sent to +monitoring system. Currently, the hierarchy is as follows: (lowest)ok->warn->critical->unknown(highest) @@ -109,23 +131,20 @@ script errors, exceptions and unexcpected conditions result in imidiate elevatio to 'unknown' status and sending the metric to monitoring system ASAP if only possible. -IP addresses/ports of the Riemann instances can be defined in two ways: - * statically, by providing a list of riemann instances in $riemann_servers - var. The format of the list entry is hostname:port:proto. 'proto' can be one - of 'udp' or 'tcp'. - * by providing a SRV record, i.e. '_riemann._udp'. All the values - (host, port) will be resolved automatically. Protocol is chosen basing on - the SRV entry itself. - -The final metric is send to *all* Riemann instances with TTL equal to -certcheck:DATA_TTL == 25 hours. +Interfacing with monitoring system is done by pymisc. The following options are +passed directly to the library. Please see pymisc's documentation for +information on their meaning: +* $riemann_enabled +* $riemann_ttl +* $riemann_hosts +* $riemann_tags +* $nrpe_enabled ### Maintenance In order to not to let the "$repo_tmpdir/repository" repository grow endlessly -a 'git gc' command should be executed once a day by e.g. a cronjob. It will repack all the packs and remove dangling objects. 
-Please see the doc/USAGE.md file for details. ## Contributing @@ -144,7 +163,7 @@ test/ directory you can find: Unittests can be started either by using *nosetest* command: ``` -certcheck/ (master✗) # nosetests +check_cert/ (master✗) # nosetests [20:33:02] ...... ---------------------------------------------------------------------- @@ -156,7 +175,7 @@ OK or by issuing the *run_tests.py* command: ``` -certcheck/ (master✗) # run_tests.py +check_cert/ (master✗) # run_tests.py [20:33:04] Created test certificate expired_3_days.pem Created test certificate expire_6_days.pem diff --git a/bin/certcheck b/bin/check_cert similarity index 86% rename from bin/certcheck rename to bin/check_cert index 707f8c7..7d6af20 100755 --- a/bin/certcheck +++ b/bin/check_cert @@ -14,10 +14,10 @@ # License for the specific language governing permissions and limitations under # the License. -import certcheck +import check_cert if __name__ == '__main__': - args_dict = certcheck.parse_command_line() + args_dict = check_cert.parse_command_line() - certcheck.main(**args_dict) + check_cert.main(**args_dict) diff --git a/certcheck/__init__.py b/certcheck/__init__.py deleted file mode 100755 index 5e51f47..0000000 --- a/certcheck/__init__.py +++ /dev/null @@ -1,718 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2013 Spotify AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. 
- - -#Make it a bit more like python3: -from __future__ import division -from __future__ import nested_scopes -from __future__ import print_function -from __future__ import with_statement - -#Imports: -from OpenSSL.crypto import FILETYPE_PEM -from OpenSSL.crypto import load_certificate -from collections import namedtuple -from datetime import datetime, timedelta -from dulwich.client import SSHGitClient, SubprocessWrapper, TraditionalGitClient -from dulwich.protocol import Protocol -from dulwich.repo import Repo -import argparse -import bernhard -import dns.resolver -import fcntl -import hashlib -import logging -import logging.handlers as lh -import os -import re -import socket -import subprocess -import sys -import yaml - -#Constants: -LOCKFILE_LOCATION = './'+os.path.basename(__file__)+'.lock' -CONFIGFILE_LOCATION = './'+os.path.basename(__file__)+'.conf' -DATA_TTL = 25*60*60 # Data gathered by the script run is valid for 25 hours. -SERVICE_NAME = 'certcheck' -CERTIFICATE_EXTENSIONS = ['der', 'crt', 'pem', 'cer', 'p12', 'pfx', ] - - -class RecoverableException(Exception): - """ - Exception used to differentiate between errors which should be reported - to Riemann, and the ones that should be only logged due to their severity - """ - pass - - -class PubkeySSHGitClient(SSHGitClient): - """ - Simple class used to add pubkey authentication to the SSHGitClient class. - In the base class it is not supported, and using password authentication - for a script is insecure. - """ - def __init__(self, host, pubkey, port=None, username=None, *args, **kwargs): - self.host = host - self.port = port - self.pubkey = pubkey - self.username = username - TraditionalGitClient.__init__(self, *args, **kwargs) - self.alternative_paths = {} - - def _connect(self, cmd, path): - #FIXME: This has no way to deal with passphrases.. - #FIXME: can we rely on ssh being in PATH here ? 
- args = ['ssh', '-x', '-oStrictHostKeyChecking=no'] - args.extend(['-i', self.pubkey]) - if self.port is not None: - args.extend(['-p', str(self.port)]) - if self.username is not None: - host = '{0}@{1}'.format(self.username, self.host) - else: - host = self.host - args.append(host) - args.extend(["{0} '{1}'".format(self._get_cmd_path(cmd), path)]) - proc = subprocess.Popen(args, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE) - con = SubprocessWrapper(proc) - logging.info("Connected to repo {0}:{1} via ssh".format(self.host, - self.port if self.port else 22)) - return (Protocol(con.read, - con.write, - report_activity=self._report_activity - ), - con.can_read) - - -class LocalMirrorRepo(Repo): - def lookup_files(self, determine_wants, root_sha=None, repo_path=''): - """ - Search the repo for files described by the determine_wants - function. The function itself operates on the file paths in a repo and - must return True for objects of interest. - - The search is done recursively, with each iteration scanning just one - repo directory. In case a directory is found the root_sha and repo_path - parameters are provided for a next iteration of the function. - - The result is a list of the filenames accumulated by all iterations. 
- """ - file_list = [] - if root_sha is None: - commit = self.get_object(self.head()) - root_sha = commit.tree - root = self.get_object(root_sha) - if repo_path: - #Extreme verbosity - #logging.debug("Scanning repo directory {0}".format(repo_path)) - pass - else: - logging.info("Scanning repo root directory") - - for item in root.iteritems(): - full_path = os.path.join(repo_path, item.path) - if item.mode & 0b0100000000000000: - #A directory: - subentries = self.lookup_files(determine_wants=determine_wants, - root_sha=item.sha, - repo_path=full_path) - file_list.extend(subentries) - if item.mode & 0b1000000000000000: - #A file, lets check if user wants it: - if determine_wants(item.path): - logging.info("Matching file found: {0}".format(full_path)) - buf = namedtuple("FileTuple", ['path', 'sha']) - buf.path = full_path - buf.content = self.get_object(item.sha).data - file_list.append(buf) - return file_list - - -class CertStore(object): - """ - Provides local clone of a remote repo plus some extra functionality to - ease extracting of the certificates from the repository - """ - _remote = None - _local = None - - @classmethod - def initialize(cls, host, port, pubkey, username, repo_localdir, repo_url, - repo_masterbranch): - if cls._remote is None: - cls._remote = PubkeySSHGitClient(host=host, - pubkey=pubkey, - username=username, - port=port, - thin_packs=False, # Not supported by - # dulwich properly - ) - if not os.path.exists(os.path.join(repo_localdir, "objects")): - if not os.path.exists(repo_localdir): - os.mkdir(repo_localdir, 0700) - cls._local = LocalMirrorRepo.init_bare(repo_localdir) - else: - cls._local = LocalMirrorRepo(repo_localdir) - - #We are only interested in 'production' branch, not the topic branches - #all the commits linked to the master will be downloaded as well of - #course - def wants_master_only(refs): - return [sha for (ref, sha) in refs.iteritems() - if ref == repo_masterbranch] - refs = cls._remote.fetch(path=repo_url, 
target=cls._local, - determine_wants=wants_master_only) - cls._local["HEAD"] = refs[repo_masterbranch] - - @classmethod - def lookup_certs(cls, cert_suffixes): - """ - Find all the certificates in the repository. The classification is made - by checking whether file suffix can be found in th list of certificate - suffixes found in cert_suffixes parameter. - """ - if cls._local is None: - raise RecoverableException("Local repo mirror has not been " + - "initialized yet") - - def wants_all_certs(path): - if len(path) >= 5 and path[-4] == '.' and \ - path[-3:] in cert_suffixes: - return True - else: - return False - certs = cls._local.lookup_files(determine_wants=wants_all_certs) - logging.info("{0} certificates found".format(len(certs))) - return certs - - -class ScriptConfiguration(object): - """ - Simple file configuration class basing on the YAML format - """ - _config = dict() - - @classmethod - def load_config(cls, file_path): - """ - @param string file_path path to the configuration file - """ - try: - with open(file_path, 'r') as fh: - cls._config = yaml.load(fh) - except IOError as e: - logging.error("Failed to open config file {0}: {1}".format( - file_path, e)) - sys.exit(1) - except (yaml.parser.ParserError, ValueError) as e: - logging.error("File {0} is not a proper yaml document: {1}".format( - file_path, e)) - sys.exit(1) - - @classmethod - def get_val(cls, key): - return cls._config[key] - - -class ScriptStatus(object): - - _STATES = {'ok': 0, - 'warn': 1, - 'critical': 2, - 'unknown': 3, - } - - _exit_status = 'ok' - _exit_message = '' - _riemann_connections = [] - _riemann_tags = None - _hostname = '' - _debug = None - - @classmethod - def _send_data(cls, event): - """ - Send script status to all Riemann servers using all the protocols that - were configured. 
- """ - for riemann_connection in cls._riemann_connections: - logging.info('Sending event {0}, '.format(str(event)) + - 'using Riemann conn {0}:{1}'.format( - riemann_connection.host, riemann_connection.port) - ) - if not cls._debug: - try: - riemann_connection.send(event) - except Exception as e: - logging.exception("Failed to send event to Rieman host: " + - "{0}".format(str(e)) - ) - continue - else: - logging.info("Event sent succesfully") - else: - logging.info('Debug flag set, I am performing no-op instead of ' - 'real sent call') - - @classmethod - def _name2ip(cls, name): - """ - Resolve a dns name. In case it is already an IP - just return it. - """ - if re.match('\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', name): - #IP entry: - return name - else: - #Hostname, we need to resolve it: - try: - ipaddr = dns.resolver.query(name, 'A') - except dns.resolver.NXDOMAIN: - logging.error("A record for {0} was not found".format(name)) - return name # Let somebody else worry about it ;) - - return ipaddr[0].to_text() - - @classmethod - def _resolve_srv_hosts(cls, name): - """ - Find Riemann servers by resolving SRV record, provide some sanity - checks as well. 
- """ - result = [] - logging.debug("Resolving " + name) - if name.find('._udp') > 0: - proto = 'udp' - elif name.find('._tcp') > 0: - proto = 'tcp' - else: - raise RecoverableException("Entry {0} ".format(name) + - "is not a valid SRV name") - - try: - resolved = dns.resolver.query(name, 'SRV') - except dns.resolver.NXDOMAIN: - logging.error("Entry {0} does not exist, skipping.") - return [] - - for rdata in resolved: - entry = namedtuple("RiemannHost", ['host', 'port', 'proto']) - entry.host = cls._name2ip(rdata.target.to_text()) - if entry.host is None: - continue - entry.port = rdata.port - entry.proto = proto - result.append(entry) - logging.debug("String {0} resolved as {1}".format(name, str(entry))) - - return result - - @classmethod - def _resolve_static_entry(cls, name): - """ - Find Riemann servers by resolving plain A record, provide some sanity - checks as well. - """ - entry = namedtuple("RiemannHost", ['host', 'port', 'proto']) - try: - a, b, c = name.split(":") - entry.host = cls._name2ip(a) - if entry.host is None: - raise ValueError() - entry.port = int(b) # Raises ValueError by itself - if c in ['tcp', 'udp']: - entry.proto = c - else: - raise ValueError() - except ValueError: - logging.error("String {0} is not a valid ip:port:proto entry") - return [] - - logging.debug("String {0} resolved as {1}".format(name, str(entry))) - return [entry] - - @classmethod - def initialize(cls, riemann_hosts_config, riemann_tags, debug=False): - cls._riemann_tags = riemann_tags - cls._hostname = socket.gethostname() - cls._debug = debug - cls._exit_status = 'ok' - cls._exit_message = '' - cls._riemann_connections = [] # FIXME - we should probably do - # some disconect here if we re-initialize - # probably using conn.shutdown() call - - if not riemann_tags: - logging.error('there should be at least one Riemann tag defined.') - return # should it sys.exit or just return ?? 
- tmp = [] - if "static" in riemann_hosts_config: - for line in riemann_hosts_config["static"]: - tmp.extend(cls._resolve_static_entry(line)) - - if "by_srv" in riemann_hosts_config: - for line in riemann_hosts_config["by_srv"]: - tmp.extend(cls._resolve_srv_hosts(line)) - - for riemann_host in tmp: - try: - if riemann_host.proto == 'tcp': - riemann_connection = bernhard.Client(riemann_host.host, - riemann_host.port, - bernhard.TCPTransport) - elif riemann_host.proto == 'udp': - riemann_connection = bernhard.Client(riemann_host.host, - riemann_host.port, - bernhard.UDPTransport) - else: - logging.error("Unsupported transport {0}".format(riemann_host.proto) + - ", not connected to {1}".format(riemann_host)) - except Exception as e: - logging.exception("Failed to connect to Rieman host " + - "{0}: {1}, ".format(riemann_host, str(e)) + - "address has been exluded from the list.") - continue - - logging.debug("Connected to Riemann instance {0}".format(riemann_host)) - cls._riemann_connections.append(riemann_connection) - - if not cls._riemann_connections: - logging.error("There are no active connections to Riemann, " + - "metrics will not be send!") - - @classmethod - def notify_immediate(cls, exit_status, exit_message): - """ - Imediatelly send given data to Riemann - """ - if exit_status not in cls._STATES: - logging.error("Trying to issue an immediate notification" + - "with malformed exit_status: " + exit_status) - return - - if not exit_message: - logging.error("Trying to issue an immediate" + - "notification without any message") - return - - logging.warn("notify_immediate, " + - "exit_status=<{0}>, exit_message=<{1}>".format( - exit_status, exit_message)) - event = { - 'host': cls._hostname, - 'service': SERVICE_NAME, - 'state': exit_status, - 'description': exit_message, - 'tags': cls._riemann_tags, - 'ttl': DATA_TTL, - } - - cls._send_data(event) - - @classmethod - def notify_agregated(cls): - """ - Send all agregated data to Riemann - """ - - if 
cls._exit_status == 'ok' and cls._exit_message == '': - cls._exit_message = 'All certificates are OK' - - logging.debug("notify_agregated, " + - "exit_status=<{0}>, exit_message=<{1}>".format( - cls._exit_status, cls._exit_message)) - - event = { - 'host': cls._hostname, - 'service': SERVICE_NAME, - 'state': cls._exit_status, - 'description': cls._exit_message, - 'tags': cls._riemann_tags, - 'ttl': DATA_TTL, - } - - cls._send_data(event) - - @classmethod - def update(cls, exit_status, exit_message): - """ - Accumullate a small bit of data in class fields - """ - if exit_status not in cls._STATES: - logging.error("Trying to do the status update" + - "with malformed exit_status: " + exit_status) - return - - logging.info("updating script status, " + - "exit_status=<{0}>, exit_message=<{1}>".format( - exit_status, exit_message)) - if cls._STATES[cls._exit_status] < cls._STATES[exit_status]: - cls._exit_status = exit_status - # ^ we only escalate up... - if exit_message: - if cls._exit_message: - cls._exit_message += '\n' - cls._exit_message += exit_message - - -class ScriptLock(object): - #python lockfile isn't usefull, we have to write our own class - _fh = None - _file_path = None - - @classmethod - def init(cls, file_path): - cls._file_path = file_path - - @classmethod - def aqquire(cls): - if cls._fh: - logging.warn("File lock already aquired") - return - try: - cls._fh = open(cls._file_path, 'w') - #flock is nice because it is automatically released when the - #process dies/terminates - fcntl.flock(cls._fh, fcntl.LOCK_EX | fcntl.LOCK_NB) - except IOError: - if cls._fh: - cls._fh.close() - raise RecoverableException("{0} ".format(cls._file_path) + - "is already locked by a different " + - "process or cannot be created.") - cls._fh.write(str(os.getpid())) - cls._fh.flush() - - @classmethod - def release(cls): - if not cls._fh: - raise RecoverableException("Trying to release non-existant lock") - cls._fh.close() - cls._fh = None - os.unlink(cls._file_path) - - -def 
parse_command_line(): - parser = argparse.ArgumentParser( - description='Certificate checking tool', - epilog="Author: vespian a t wp.pl", - add_help=True,) - parser.add_argument( - '--version', - action='version', - version='0.3.0') - parser.add_argument( - "-c", "--config-file", - action='store', - required=True, - help="Location of the configuration file") - parser.add_argument( - "-v", "--verbose", - action='store_true', - required=False, - help="Provide extra logging messages.") - parser.add_argument( - "-s", "--std-err", - action='store_true', - required=False, - help="Log to stderr instead of syslog") - parser.add_argument( - "-d", "--dont-send", - action='store_true', - required=False, - help="Do not send data to Riemann [use for debugging]") - - args = parser.parse_args() - return {'std_err': args.std_err, - 'verbose': args.verbose, - 'config_file': args.config_file, - 'dont_send': args.dont_send, - } - - -def get_cert_expiration(certificate, ignored_certs): - """ - Extract the certificate expiration date for a certificate blob. Handle - ignored certificates by comparing shasum of the blob with entries in the - ignored_certs list - """ - if certificate.path[-3:] in ['pem', 'crt', 'cer']: - try: - #Many bad things can happen here, but still - we can recover! 
:) - cert_hash = hashlib.sha1(certificate.content).hexdigest() - if cert_hash in ignored_certs: - #This cert should be ignored - logging.info("certificate {0} (sha1sum: {1})".format( - certificate.path, cert_hash) + " has been ignored.") - return None - #Workaround for -----BEGIN TRUSTED CERTIFICATE----- - if certificate.content.find('TRUSTED ') > -1: - logging.info("'TRUSTED' string has been removed from " + - "certificate {0} (sha1sum: {1})".format( - certificate.path, cert_hash)) - certificate.content = certificate.content.replace('TRUSTED ', - '') - cert_data = load_certificate(FILETYPE_PEM, certificate.content) - expiry_date = cert_data.get_notAfter() - #Return datetime object: - return datetime.strptime(expiry_date, '%Y%m%d%H%M%SZ') - except Exception as e: - msg = "Script cannot parse certificate {0}: {1}".format( - certificate.path, str(e)) - logging.warn(msg) - ScriptStatus.update('unknown', msg) - return None - else: - ScriptStatus.update('unknown', - "Certificate {0} is of unsupported type, ".format( - certificate.path) + - "the script cannot check the expiry date.") - return None - - -def main(config_file, std_err=False, verbose=True, dont_send=False): - try: - #Configure logging: - fmt = logging.Formatter('%(filename)s[%(process)d] %(levelname)s: ' + - '%(message)s') - logger = logging.getLogger() - if verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - if std_err: - handler = logging.StreamHandler() - else: - handler = lh.SysLogHandler(address='/dev/log', - facility=lh.SysLogHandler.LOG_USER) - handler.setFormatter(fmt) - logger.addHandler(handler) - - logger.info("Certcheck is starting, command line arguments:" + - "config_file={0}, ".format(config_file) + - "std_err={0}, ".format(std_err) + - "verbose={0}, ".format(verbose) - ) - - #FIXME - Remember to correctly configure syslog, otherwise rsyslog will - #discard messages - ScriptConfiguration.load_config(config_file) - - logger.debug("Remote repo is is: 
{0}@{1}:{2}{3}->{4}".format( - ScriptConfiguration.get_val("repo_user"), - ScriptConfiguration.get_val("repo_host"), - ScriptConfiguration.get_val("repo_port"), - ScriptConfiguration.get_val("repo_url"), - ScriptConfiguration.get_val("repo_masterbranch")) + - ", local repository dir is {0}".format( - ScriptConfiguration.get_val('repo_localdir')) + - ", repository key is {0}".format( - ScriptConfiguration.get_val('repo_pubkey')) + - ", warn_thresh is {0}".format( - ScriptConfiguration.get_val('warn_treshold')) + - ", crit_thresh is {0}".format( - ScriptConfiguration.get_val('critical_treshold')) - ) - - #Initialize Riemann reporting: - ScriptStatus.initialize( - riemann_hosts_config=ScriptConfiguration.get_val("riemann_hosts"), - riemann_tags=ScriptConfiguration.get_val("riemann_tags"), - debug=dont_send, - ) - - # verify the configuration - msg = [] - if ScriptConfiguration.get_val('warn_treshold') <= 0: - msg.append('certificate expiration warn threshold should be > 0.') - if ScriptConfiguration.get_val('critical_treshold') <= 0: - msg.append('certificate expiration critical threshold should be > 0.') - if ScriptConfiguration.get_val('critical_treshold') >= \ - ScriptConfiguration.get_val('warn_treshold'): - msg.append('warninig threshold should be greater than critical treshold.') - - #if there are problems with thresholds then there is no point in continuing: - if msg: - ScriptStatus.notify_immediate('unknown', - "Configuration file contains errors: " + - ','.join(msg)) - sys.exit(1) - - #Make sure that we are the only ones running on the server: - ScriptLock.init(ScriptConfiguration.get_val('lockfile')) - ScriptLock.aqquire() - - #Initialize our repo mirror: - CertStore.initialize(host=ScriptConfiguration.get_val("repo_host"), - port=ScriptConfiguration.get_val("repo_port"), - pubkey=ScriptConfiguration.get_val('repo_pubkey'), - username=ScriptConfiguration.get_val("repo_user"), - repo_localdir=ScriptConfiguration.get_val( - 'repo_localdir'), - 
repo_url=ScriptConfiguration.get_val("repo_url"), - repo_masterbranch=ScriptConfiguration.get_val( - "repo_masterbranch"), - ) - - for cert in CertStore.lookup_certs(CERTIFICATE_EXTENSIONS): - cert_expiration = get_cert_expiration(cert, - ignored_certs=ScriptConfiguration.get_val( - "ignored_certs") - ) - if cert_expiration is None: - continue - # -3 days is in fact -4 days, 23:59:58.817181 - # so we compensate and round up - # additionally, openssl uses utc dates - now = datetime.utcnow() - timedelta(days=1) - time_left = cert_expiration - now # timedelta object - if time_left.days < 0: - ScriptStatus.update('critical', - "Certificate {0} expired {1} days ago.".format( - cert.path, abs(time_left.days))) - elif time_left.days == 0: - ScriptStatus.update('critical', - "Certificate {0} expires today.".format( - cert.path)) - elif time_left.days < ScriptConfiguration.get_val("critical_treshold"): - ScriptStatus.update('critical', - "Certificate {0} is about to expire in {1} days.".format( - cert.path, time_left.days)) - elif time_left.days < ScriptConfiguration.get_val("warn_treshold"): - ScriptStatus.update('warn', - "Certificate {0} is about to expire in {1} days.".format( - cert.path, time_left.days)) - else: - logger.info("{0} expires in {1} days - OK!".format( - cert.path, time_left.days)) - - ScriptStatus.notify_agregated() - ScriptLock.release() - sys.exit(0) - - except RecoverableException as e: - msg = str(e) - logging.critical(msg) - ScriptStatus.notify_immediate('unknown', msg) - sys.exit(1) - except AssertionError as e: - #Unittest require it: - raise - except Exception as e: - msg = "Exception occured: {0}".format(e.__class__.__name__) - logging.exception(msg) - sys.exit(1) diff --git a/check_cert/__init__.py b/check_cert/__init__.py new file mode 100755 index 0000000..0a3726c --- /dev/null +++ b/check_cert/__init__.py @@ -0,0 +1,556 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Pawel Rozlach +# Copyright (c) 2013 Spotify AB +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + + +# Make it a bit more like python3: +from __future__ import division +from __future__ import nested_scopes +from __future__ import print_function +from __future__ import with_statement + +# Imports: +from OpenSSL.crypto import FILETYPE_PEM +from OpenSSL.crypto import load_certificate +from collections import namedtuple +from datetime import datetime, timedelta +from dulwich.client import SSHGitClient, SubprocessWrapper, TraditionalGitClient +from dulwich.errors import GitProtocolError +from dulwich.protocol import Protocol +from dulwich.repo import Repo +from pymisc.monitoring import ScriptStatus +from pymisc.script import RecoverableException, ScriptConfiguration, ScriptLock +import argparse +import hashlib +import logging +import logging.handlers as lh +import os +import re +import subprocess +import sys + +# Constants: +LOCKFILE_LOCATION = './'+os.path.basename(__file__)+'.lock' +CONFIGFILE_LOCATION = './'+os.path.basename(__file__)+'.conf' +SERVICE_NAME = 'check_cert' +CERTIFICATE_EXTENSIONS = ['der', 'crt', 'pem', 'cer', 'p12', 'pfx', ] + + +class PubkeySSHGitClient(SSHGitClient): + """ + Connect to GIT repos using pubkey authentication. + + This simple class extends SSHGitClient class with pubkey authentication. + In the base class it is not supported, and using password authentication + for a script is insecure. 
+ """ + def __init__(self, host, pubkey, port=None, username=None, *args, **kwargs): + """ + Initialize the class with authdata and call superclass constructor. + + Please see SSHGitClient's class constructor for a documentation of + arguments not mentioned here. + + Args: + host: host to connect to + pubkey: file path of the publickey to use + port: SSH port to connect to + username: username to use while connecting + """ + self.host = host + self.port = port + self.pubkey = pubkey + self.username = username + TraditionalGitClient.__init__(self, *args, **kwargs) + self.alternative_paths = {} + + def _connect(self, cmd, path): + """ + Override connection establishment in SSHGitClient class so that pubkey + is used. + """ + # FIXME: This has no way to deal with passphrases.. + # FIXME: can we rely on ssh being in PATH here ? + args = ['ssh', '-x', '-oStrictHostKeyChecking=no'] + if not (os.path.exists(self.pubkey) and os.access(self.pubkey, os.R_OK)): + raise GitProtocolError("Public key file is missing or incaccesible") + args.extend(['-i', self.pubkey]) + if self.port is not None: + args.extend(['-p', str(self.port)]) + if self.username is not None: + host = '{0}@{1}'.format(self.username, self.host) + else: + host = self.host + args.append(host) + args.extend(["{0} '{1}'".format(self._get_cmd_path(cmd), path)]) + proc = subprocess.Popen(args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + con = SubprocessWrapper(proc) + logging.info("Connected to repo {0}:{1} via ssh, cmd: {2}".format( + self.host, self.port if self.port else 22, cmd)) + return (Protocol(con.read, + con.write, + report_activity=self._report_activity + ), + con.can_read) + + +class LocalMirrorRepo(Repo): + """ + Common GIT repo object extened with file searching capabilities. + """ + def lookup_files(self, determine_wants, root_sha=None, repo_path=''): + """ + Search the repo for files described by the determine_wants function. 
+ + The search is done recursively, with each iteration scanning just one + repo directory. In case a directory is found the root_sha and repo_path + parameters are provided for a next iteration of the function. + + Args: + determine_wants: the function used to determine whether the file is + of interest. It operates on the file paths in a repo and must + return True for objects that match, False otherwise. + root_sha: sha of the tree object that search should be started from + repo_path: repo path of the tree object pointed by root_sha + + Returns: + The result is a list of the named tuples containing file paths and + their contents, accumulated by all recursive calls: + """ + file_list = [] + if root_sha is None: + commit = self.get_object(self.head()) + root_sha = commit.tree + logging.debug("Root sha is {0}".format(root_sha)) + try: + root = self.get_object(root_sha) + except KeyError: + msg = "Skipping object from submodule: {0}, dir: {1}".format(root_sha, repo_path) + logging.warning(msg) + return file_list + if repo_path: + # Extreme verbosity + logging.debug("Scanning repo directory {0}".format(repo_path)) + else: + logging.info("Scanning repo root directory") + + for item in root.iteritems(): + full_path = os.path.join(repo_path, item.path) + if item.mode & 0b0100000000000000: + # A directory: + subentries = self.lookup_files(determine_wants=determine_wants, + root_sha=item.sha, + repo_path=full_path) + file_list.extend(subentries) + if item.mode & 0b1000000000000000: + # A file, lets check if user wants it: + if determine_wants(item.path): + logging.info("Matching file found: {0}".format(full_path)) + buf = namedtuple("FileTuple", ['path', 'sha']) + buf.path = full_path + buf.content = self.get_object(item.sha).data + file_list.append(buf) + return file_list + + +class CertStore(object): + """ + Provide local clone of a remote repo plus some extra functionality. 
+ + Class is meant to be an abstraction of the GIT repos complexity, allowing + easy extraction of certificates. + """ + _remote = None + _local = None + + @classmethod + def initialize(cls, host, port, pubkey, username, repo_localdir, repo_url, + repo_masterbranch): + """ + Initialize CertStore object. + + Args: + host: host to connect to + pubkey: file path of the publickey to use + port: SSH port to connect to + username: username to use while connecting + repo_localdir: path to use for local repo storage + repo_url: url of the repo to fetch + repo_masterbranch: git branch to fetch and scan + """ + if cls._remote is None: + cls._remote = PubkeySSHGitClient(host=host, + pubkey=pubkey, + username=username, + port=port, + thin_packs=False, # Not supported by + # dulwich properly + ) + if not os.path.exists(os.path.join(repo_localdir, "objects")): + if not os.path.exists(repo_localdir): + os.mkdir(repo_localdir, 0700) + cls._local = LocalMirrorRepo.init_bare(repo_localdir) + else: + cls._local = LocalMirrorRepo(repo_localdir) + + # We are only interested in 'production' branch, not the topic branches + # all the commits linked to the master will be downloaded as well of + # course + def wants_master_only(refs): + return [sha for (ref, sha) in refs.iteritems() + if ref == repo_masterbranch] + refs = cls._remote.fetch(path=repo_url, target=cls._local, + determine_wants=wants_master_only) + cls._local["HEAD"] = refs[repo_masterbranch] + + @classmethod + def lookup_certs(cls, cert_suffixes): + """ + Find all the certificates in the locally cached repository. + + The classification whether file is a certificate or not is made basing + on the file suffix. + + Args: + cert_suffixes: list of valid certificate suffixes + """ + if cls._local is None: + raise RecoverableException("Local repo mirror has not been " + + "initialized yet") + + def wants_all_certs(path): + if len(path) >= 5 and path[-4] == '.' 
and \ + path[-3:] in cert_suffixes: + return True + else: + return False + certs = cls._local.lookup_files(determine_wants=wants_all_certs) + logging.info("{0} certificates found".format(len(certs))) + return certs + + +def parse_command_line(): + """ + Convert command line arguments into script runtime configuration. + """ + parser = argparse.ArgumentParser( + description='Certificate checking tool', + epilog="Author: vespian a t wp.pl", + add_help=True,) + parser.add_argument( + '--version', + action='version', + version='0.3.0') + parser.add_argument( + "-c", "--config-file", + action='store', + required=True, + help="Location of the configuration file") + parser.add_argument( + "-v", "--verbose", + action='store_true', + required=False, + help="Provide extra logging messages.") + parser.add_argument( + "-s", "--std-err", + action='store_true', + required=False, + help="Log to stderr instead of syslog") + parser.add_argument( + "-d", "--dont-send", + action='store_true', + required=False, + help="Do not send data to Riemann [use for debugging]") + + args = parser.parse_args() + return {'std_err': args.std_err, + 'verbose': args.verbose, + 'config_file': args.config_file, + 'dont_send': args.dont_send, + } + + +def get_cert_expiration(certificate): + """ + Extract the certificate expiration date from a certificate blob. + + Args: + certificate: a named tuple object, containing path and content attributes + + Returns: + None if certificate was invalid or expiry date could not be extracted, + datetime object otherwise. + """ + try: + # Many bad things can happen here, but still - we can recover! 
:) + # Workaround for -----BEGIN TRUSTED CERTIFICATE----- + if certificate.content.find('TRUSTED ') > -1: + logging.info("'TRUSTED' string has been removed from " + + "certificate {0}".format(certificate.path)) + certificate.content = certificate.content.replace('TRUSTED ', + '') + cert_data = load_certificate(FILETYPE_PEM, certificate.content) + expiry_date = cert_data.get_notAfter() + # Return datetime object: + return datetime.strptime(expiry_date, '%Y%m%d%H%M%SZ') + except Exception: + raise RecoverableException() + + +def _verify_conf(conf_hash): + """ + Check if script configuration is sane. + + This function takes care of checking if the script configuration is + logically correct. + + Args: + conf_hash: A hash containing whole configuration, as defined in config + file. + + Returns: + A list of errors/issues found in the configuration, or an empty list + if the configuration is OK. + """ + + msg = [] + + try: + warn_treshold = conf_hash['warn_treshold'] + critical_treshold = conf_hash['critical_treshold'] + repo_host = conf_hash['repo_host'] + repo_url = conf_hash['repo_url'] + repo_masterbranch = conf_hash['repo_masterbranch'] + repo_localdir = conf_hash['repo_localdir'] + repo_user = conf_hash['repo_user'] + repo_pubkey = conf_hash['repo_pubkey'] + lockfile = conf_hash['lockfile'] + except KeyError as e: + msg.append('Mandatory parameter is missing: {0}'.format(str(e))) + + # Verify thresholds: + if warn_treshold <= 0: + msg.append('Certificate expiration warn threshold should be > 0.') + if critical_treshold <= 0: + msg.append('Certificate expiration critical threshold should be > 0.') + if critical_treshold >= warn_treshold: + msg.append('Warninig threshold should be greater than critical treshold.') + + # repo_host + if not re.match(r'^(([a-z0-9]\-*[a-z0-9]*){1,63}\.?){1,255}$', repo_host): + msg.append('Repo host {0} is not a valid domain name.'.format(repo_host)) + + # FIXME - add verification of other command line parameters + + return msg + + +def 
main(config_file, std_err=False, verbose=True, dont_send=False): + """ + Main function of the script + + Args: + config_file: file path of the config file to load + std_err: whether print logging output to stderr + verbose: whether to provide verbose logging messages + dont_send: whether to sent data to monitoring system or just do a dry + run + """ + try: + # Configure logging: + fmt = logging.Formatter('%(filename)s[%(process)d] %(levelname)s: ' + + '%(message)s') + logger = logging.getLogger() + if verbose: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + if std_err: + handler = logging.StreamHandler() + else: + handler = lh.SysLogHandler(address='/dev/log', + facility=lh.SysLogHandler.LOG_USER) + handler.setFormatter(fmt) + logger.addHandler(handler) + + logger.info("check_cert is starting, command line arguments:" + + "config_file={0}, ".format(config_file) + + "std_err={0}, ".format(std_err) + + "verbose={0}, ".format(verbose) + ) + + # FIXME - Remember to correctly configure syslog, otherwise rsyslog will + # discard messages + ScriptConfiguration.load_config(config_file) + + logger.debug("Loaded configuration: " + + str(ScriptConfiguration.get_config()) + ) + + # Provide some sane defaults: + try: + repo_port = ScriptConfiguration.get_val("repo_port") + except KeyError: + repo_port = 22 + + try: + ignored_certs = ScriptConfiguration.get_val("ignored_certs") + except KeyError: + ignored_certs = {} + + logger.debug("Remote repo is: {0}@{1}{3}->{4}, tcp port {2}".format( + ScriptConfiguration.get_val("repo_user"), + ScriptConfiguration.get_val("repo_host"), + repo_port, + ScriptConfiguration.get_val("repo_url"), + ScriptConfiguration.get_val("repo_masterbranch")) + + ", local repository dir is {0}".format( + ScriptConfiguration.get_val('repo_localdir')) + + ", repository key is {0}".format( + ScriptConfiguration.get_val('repo_pubkey')) + + ", warn_thresh is {0}".format( + ScriptConfiguration.get_val('warn_treshold')) + + ", 
crit_thresh is {0}".format( + ScriptConfiguration.get_val('critical_treshold')) + ) + + # Initialize Riemann/NRPE reporting: + if ScriptConfiguration.get_val("riemann_enabled") is True: + ScriptStatus.initialize( + riemann_enabled=True, + riemann_hosts_config=ScriptConfiguration.get_val("riemann_hosts"), + riemann_tags=ScriptConfiguration.get_val("riemann_tags"), + riemann_ttl=ScriptConfiguration.get_val("riemann_ttl"), + riemann_service_name=SERVICE_NAME, + nrpe_enabled=ScriptConfiguration.get_val("nrpe_enabled"), + debug=dont_send,) + else: + ScriptStatus.initialize( + nrpe_enabled=ScriptConfiguration.get_val("nrpe_enabled"), + debug=dont_send,) + + # Now, let's verify the configuration: + # FIXME - ScriptStatus might have been already initialized with + # incorrect config and in effect ScriptStatus.notify_immediate will + # not reach monitoring system + conf_issues = _verify_conf(ScriptConfiguration.get_config()) + if conf_issues: + logging.debug("Configuration problems:\n\t" + + '\n\t'.join(conf_issues)) + ScriptStatus.notify_immediate('unknown', + "Configuration file contains errors: " + + ' '.join(conf_issues)) + + # Make sure that we are the only ones running on the server: + ScriptLock.init(ScriptConfiguration.get_val('lockfile')) + ScriptLock.aqquire() + + # Initialize our repo mirror: + CertStore.initialize(host=ScriptConfiguration.get_val("repo_host"), + port=repo_port, + pubkey=ScriptConfiguration.get_val('repo_pubkey'), + username=ScriptConfiguration.get_val("repo_user"), + repo_localdir=ScriptConfiguration.get_val( + 'repo_localdir'), + repo_url=ScriptConfiguration.get_val("repo_url"), + repo_masterbranch=ScriptConfiguration.get_val( + "repo_masterbranch"), + ) + + unparsable_certs = {"number": 0, "paths": []} + + for cert in CertStore.lookup_certs(CERTIFICATE_EXTENSIONS): + # Check whether the cert needs to be included in checks at all: + cert_hash = hashlib.sha1(cert.content).hexdigest() + if cert_hash in ignored_certs: + # This cert should be 
ignored + logging.info("certificate {0} (sha1sum: {1})".format( + cert.path, cert_hash) + " has been ignored.") + continue + + # Check if certifice type is supported: + if cert.path[-3:] not in ['pem', 'crt', 'cer']: + ScriptStatus.update('unknown', + "Certificate {0} ".format(cert.path) + + "is not supported by the check script, " + + "please add it to ignore list or upgrade " + + "the script.") + continue + + # Check the expiry date: + try: + cert_expiration = get_cert_expiration(cert) + except RecoverableException: + unparsable_certs["number"] += 1 + unparsable_certs["paths"].append(cert.path) + continue + + # -3 days is in fact -4 days, 23:59:58.817181 + # so we compensate and round up + # additionally, openssl uses utc dates + now = datetime.utcnow() - timedelta(days=1) + time_left = cert_expiration - now # timedelta object + if time_left.days < 0: + ScriptStatus.update('critical', + "Certificate {0} expired {1} days ago.".format( + cert.path, abs(time_left.days))) + elif time_left.days == 0: + ScriptStatus.update('critical', + "Certificate {0} expires today.".format( + cert.path)) + elif time_left.days < ScriptConfiguration.get_val("critical_treshold"): + ScriptStatus.update('critical', + "Certificate {0} is about to expire in" + "{0} days.".format(cert.path, time_left.days)) + elif time_left.days < ScriptConfiguration.get_val("warn_treshold"): + ScriptStatus.update('warn', + "Certificate {0} is about to expire in" + "{0} days.".format(cert.path, time_left.days)) + else: + logger.info("{0} expires in {1} days - OK!".format( + cert.path, time_left.days)) + + # We do not want to pollute output in case when there are too many broken + # certs in the report. 
+ if unparsable_certs["number"] > 0: + if unparsable_certs["number"] <= 2: + ScriptStatus.update('unknown', + 'Script cannot parse certificates: ' + ','.join(unparsable_certs["paths"])) + else: + ScriptStatus.update('unknown', 'Script cannot parse {0} '.format( + unparsable_certs["number"]) + + "certificates, please check with verbose out on") + + ScriptStatus.notify_agregated() + ScriptLock.release() + sys.exit(0) + + except RecoverableException as e: + msg = str(e) + logging.error(msg) + ScriptStatus.notify_immediate('unknown', msg) + sys.exit(1) + except AssertionError as e: + # Unittest require it: + raise + except Exception as e: + msg = "Exception occured: {0}, msg: {1}".format(e.__class__.__name__, str(e)) + logging.error(msg) + logging.exception(e) + sys.exit(1) diff --git a/debian/.gitignore b/debian/.gitignore index 6f373e7..14cd0c8 100644 --- a/debian/.gitignore +++ b/debian/.gitignore @@ -1,2 +1,2 @@ -certcheck* +check_cert* diff --git a/debian/changelog b/debian/changelog index 1298e41..fc00905 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,19 +1,45 @@ -certcheck (0.3.0) stable; urgency=low +check-cert (0.3.3) stable; urgency=low + + * Submodules support + + -- vespian Mon, 19 Jan 2015 14:50:19 +0100 + +check-cert (0.3.2) stable; urgency=low + + * Fixes, config checking refactoring + + -- vespian Mon, 06 Oct 2014 21:31:07 +0100 + +check-cert (0.3.1+1) stable; urgency=low + + * Fix dependencies for dulwich. + + -- vespian Mon, 06 Oct 2014 16:31:07 +0100 + +check-cert (0.3.1) stable; urgency=low + + * Small fixes + * formatting + * bump dependency on pymisc + + -- Pawel Rozlach Mon, 06 Oct 2014 16:33:08 +0200 + +check-cert (0.3.0) stable; urgency=low * Documentation refactoring * Make unittests nosetest compatible * splitting script into modules - -- Vespian Mon, 18 Sep 2013 14:33:43 +0000 + -- Pawel Rozlach Mon, 18 Sep 2013 14:33:43 +0000 -certcheck (0.2.0) stable; urgency=low +check-cert (0.2.0) stable; urgency=low * Git integration. 
* Drop scanning directories in favour of direct git interfacing. -- Pawel Rozlach Mon, 18 Sep 2013 14:33:43 +0000 -certcheck (0.1.0) unstable; urgency=low +check-cert (0.1.0) unstable; urgency=low * Initial release. diff --git a/debian/control b/debian/control index be17909..3bdc664 100644 --- a/debian/control +++ b/debian/control @@ -1,18 +1,18 @@ -Source: certcheck +Source: check-cert Section: utils Priority: extra -Maintainer: Vespian -Build-Depends: python (>= 2.6.6-3~), debhelper (>= 8), python-dnspython, - python-coverage, openssl, python-openssl, python-bernhard, python-argparse, - python-protobuf, python-unittest2, python-yaml, python-dulwich (>= 0.8), - openssh-client -Standards-Version: 3.9.3 +Standards-Version: 3.9.4 +Maintainer: Pawel Rozlach +Homepage: https://github.com/vespian/check_cert +Build-Depends: debhelper (>= 8), dh-python, python-all (>= 2.6.6-3~), + python-setuptools, python-coverage, python-mock (>= 1.0), python-openssl, + python-protobuf, python-dulwich (>= 0.9.7), openssh-client, openssl, + python-pymisc (>= 1.2.1), python-argparse X-Python-Version: >= 2.6 -Package: certcheck -Version: 0.2.0 +Package: check-cert Architecture: any -Depends: ${python:Depends}, python-openssl, python-bernhard, python-argparse, - python-protobuf, python-yaml, python-dulwich (>= 0.8), openssh-client, - python-dnspython +Depends: ${python:Depends}, ${misc:Depends}, python-openssl, + python-dulwich (>= 0.9.7), openssh-client, python-argparse, + python-pymisc (>= 1.2.1) Description: Simple certificate check diff --git a/debian/copyright b/debian/copyright index 68e26fd..3ce2fd0 100644 --- a/debian/copyright +++ b/debian/copyright @@ -1,7 +1,7 @@ Format-Specification: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Files: * -Copyright: 2012-2013 Spotify AB +Copyright: 2013 Spotify AB, 2014 Pawel Rozlach License: Apache-2.0 On Debian systems, the full text of the Apache 2.0 license can be found in the file `/usr/share/common-licenses/Apache-2.0'. 
diff --git a/debian/rules b/debian/rules index ffa3e2c..b917a97 100755 --- a/debian/rules +++ b/debian/rules @@ -1,8 +1,13 @@ #!/usr/bin/make -f +#export DH_VERBOSE=1 +export PYBUILD_NAME=pymisc + %: - dh $@ --with python2 + dh $@ --with python2 --buildsystem=pybuild + +override_dh_auto_test: + PYBUILD_SYSTEM=custom \ + PYBUILD_TEST_ARGS="nosetests {dir}/" dh_auto_test -override_dh_fixperms: - dh_fixperms diff --git a/icingaexchange.yml b/icingaexchange.yml new file mode 100644 index 0000000..fcb0001 --- /dev/null +++ b/icingaexchange.yml @@ -0,0 +1,16 @@ +name: check-cert +description: "file:///README.md" +url: "https://github.com/vespian/check-cert" +tags: certificate,git,ssl,tls +vendor: Linux +target: Website,Service +type: Plugin +license: Apache 2.0 +releases: + - name: 0.3.2 + description: "All the features implemented and unittested." + files: + - name: 0.3.2.tar.gz + description: plugin tarbal + url: "https://github.com/vespian/check-cert/archive/0.3.2.tar.gz" + checksum: 95985b6bca06fa410af7a0e8ca842f0b diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..a09f3da --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +dulwich==0.9.6 +mock==1.0.1 +pyOpenSSL==0.14 diff --git a/run_tests.py b/run_tests.py index 8b4d90e..9fa986a 100755 --- a/run_tests.py +++ b/run_tests.py @@ -1,27 +1,27 @@ -#!/usr/bin/python -tt - -#Make it a bit more like python3: -from __future__ import absolute_import -from __future__ import print_function - -import coverage -import os -import shutil +#!/usr/bin/env python +# Copyright (c) 2013 Spotify AB +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +try: + import coverage +except ImportError: + pass import sys import unittest - +import os def main(): - major, minor, micro, releaselevel, serial = sys.version_info - - if major == 2 and minor < 7: - print("In order to run tests you need at least Python 2.7") - sys.exit(1) - - if major == 3: - print("Tests were not tested on Python 3.X, use at your own risk") - sys.exit(1) - #Cleanup old html report: for root, dirs, files in os.walk('test/output_coverage_html/'): for f in files: @@ -32,17 +32,24 @@ def main(): shutil.rmtree(os.path.join(root, d)) #Perform coverage analisys: - cov = coverage.coverage() + if "coverage" in sys.modules: + cov = coverage.coverage() + cov.start() - cov.start() - #Discover the test and execute them: + #Discover the tests and execute them: loader = unittest.TestLoader() tests = loader.discover('./test/') testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1) - testRunner.run(tests) - cov.stop() + res = testRunner.run(tests) + + if "coverage" in sys.modules: + cov.stop() + cov.html_report() - cov.html_report() + if res.wasSuccessful(): + sys.exit(0) + else: + sys.exit(1) if __name__ == '__main__': main() diff --git a/setup.py b/setup.py index 98583a8..2d21979 100755 --- a/setup.py +++ b/setup.py @@ -17,12 +17,12 @@ from setuptools import setup -setup(name='certcheck', +setup(name='check_cert', version='0.3.0', author='Vespian', author_email='vespian a t wp.pl', license='ASF2.0', - url='https://github.com/vespian/certcheck', + url='https://github.com/vespian/check_cert', description='Certificate checking 
tool', - packages=['certcheck'], - scripts=['bin/certcheck']) + packages=['check_cert'], + scripts=['bin/check_cert']) diff --git a/test/fabric/certcheck.yml b/test/fabric/certcheck.yml deleted file mode 100644 index c455908..0000000 --- a/test/fabric/certcheck.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -lockfile: ./certcheck.lock -warn_treshold: 30 -critical_treshold: 15 -riemann_hosts: - - 127.0.0.1 - - 127.0.0.2 -riemann_port: 1234 -riemann_tags: - - abc - - def -repo_host: git.foo.net -repo_port: 22 -repo_url: /foo-puppet -repo_masterbranch: refs/heads/foo -repo_localdir: /tmp/foo -repo_user: foo -repo_pubkey: ./foo -# sha1sum ./certificate_to_be_ignored -# format - dict, hash as a key, and value as a comment -ignored_certs: - 42b270cbd03eaa8c16c386e66f910195f769f8b1: "certificate used during unit-tests" diff --git a/test/fabric/certcheck_mopconfig.yml b/test/fabric/check_cert.yml similarity index 68% rename from test/fabric/certcheck_mopconfig.yml rename to test/fabric/check_cert.yml index 321cd5f..45a83ef 100644 --- a/test/fabric/certcheck_mopconfig.yml +++ b/test/fabric/check_cert.yml @@ -1,7 +1,10 @@ --- -lockfile: /tmp/certcheck.lock -warn_treshold: 30 -critical_treshold: 15 +#Global +lockfile: /tmp/check_cert.lock + +#Riemann related: +riemann_enabled: False +riemann_ttl: 60 riemann_hosts: static: - 192.168.122.16:5555:udp @@ -11,14 +14,23 @@ riemann_hosts: - _riemann._udp riemann_tags: - production - - class::certcheck + - class::check_cert + +#Nagios related: +nrpe_enabled: True + +#Repository related: repo_host: git.example.com repo_port: 22 repo_url: /sample-repo repo_masterbranch: refs/heads/production -repo_localdir: /tmp/certcheck-temprepo -repo_user: certcheck -repo_pubkey: /home/vespian/work/tmp_tickets/cert_check/certcheck_id_rsa +repo_localdir: /tmp/check_cert-temprepo +repo_user: check_cert +repo_pubkey: /home/vespian/work/tmp_tickets/cert_check/check_cert_id_rsa + +#Check related: +warn_treshold: 30 +critical_treshold: 15 # sha1sum 
./certificate_to_be_ignored # format - dict, hash as a key, and value as a comment ignored_certs: diff --git a/test/fabric/malformed.yml b/test/fabric/malformed.yml index 62dd35b..c077e0d 100644 --- a/test/fabric/malformed.yml +++ b/test/fabric/malformed.yml @@ -1,5 +1,5 @@ { - "lockfile": "./certcheck.lock", + "lockfile": "./check_cert.lock", "warn_treshold": 30, "critical_treshold": 15, "riemann_hosts": [ diff --git a/test/fabric/sample_cert_dir/broken_certificate.crt b/test/fabric/sample_cert_dir/broken_certificate.crt index c8bcd59..a7483b7 100644 --- a/test/fabric/sample_cert_dir/broken_certificate.crt +++ b/test/fabric/sample_cert_dir/broken_certificate.crt @@ -1,28 +1,3 @@ -D,Mn -M"!0 -|\=e -tLY - e~P -|daB -pyc -Wdu -,y|] --?U n -xMDU -6eu.^ -R&,k -kU[cs -/.ck -(bux -: 20n2 O~FK ed!: -V \ No newline at end of file +V diff --git a/test/modules/file_paths.py b/test/modules/file_paths.py index 719b316..e29ca25 100644 --- a/test/modules/file_paths.py +++ b/test/modules/file_paths.py @@ -1,4 +1,4 @@ -#!/usr/bin/python -tt +#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2013 Spotify AB # @@ -30,6 +30,7 @@ EXPIRE_6_DAYS = op.join(CERTIFICATES_DIR, 'expire_6_days.pem') EXPIRE_21_DAYS = op.join(CERTIFICATES_DIR, 'expire_21_days.pem') EXPIRE_41_DAYS = op.join(CERTIFICATES_DIR, 'expire_41_days.pem') +TRUSTED_EXPIRE_41_CERT = op.join(CERTIFICATES_DIR, 'trusted_expire_41_days.pem') EXPIRE_41_DAYS_DER = op.join(CERTIFICATES_DIR, 'expire_41_days.der') BROKEN_CERT = op.join(CERTIFICATES_DIR, 'broken_certificate.crt') IGNORED_CERT = op.join(CERTIFICATES_DIR, 'ignored_certificate.crt') @@ -37,10 +38,10 @@ EXPIRE_41_DAYS_DER, BROKEN_CERT, IGNORED_CERT]) #Configfile location -TEST_CONFIG_FILE = op.join(_fabric_base_dir, 'certcheck.yml') +TEST_CONFIG_FILE = op.join(_fabric_base_dir, 'check_cert.yml') TEST_MALFORMED_CONFIG_FILE = op.join(_fabric_base_dir, 'malformed.yml') TEST_NONEXISTANT_CONFIG_FILE = op.join(_fabric_base_dir, - 'certcheck.yml.nonexistant') + 
'check_cert.yml.nonexistant') #Test lockfile location: TEST_LOCKFILE = op.join(_fabric_base_dir, 'filelock.pid') diff --git a/test/moduletests/certcheck/test_certcheck.py b/test/moduletests/certcheck/test_certcheck.py deleted file mode 100644 index b6072c8..0000000 --- a/test/moduletests/certcheck/test_certcheck.py +++ /dev/null @@ -1,514 +0,0 @@ -#!/usr/bin/python -tt -# -*- coding: utf-8 -*- -# Copyright (c) 2013 Spotify AB -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - - -#Make it a bit more like python3: -from __future__ import absolute_import -from __future__ import division -from __future__ import nested_scopes -from __future__ import print_function -from __future__ import with_statement - -#Global imports: -from collections import namedtuple -from datetime import datetime, timedelta -import os -import platform -import subprocess -import sys -import time -major, minor, micro, releaselevel, serial = sys.version_info -if major == 2 and minor < 7: - import unittest2 as unittest -else: - import unittest -import mock - -#To perform local imports first we need to fix PYTHONPATH: -pwd = os.path.abspath(os.path.dirname(__file__)) -sys.path.append(os.path.abspath(pwd + '/../../modules/')) - -#Local imports: -import file_paths as paths -import certcheck - - -class TestCertCheck(unittest.TestCase): - @staticmethod - def _create_test_cert(days, path, is_der=False): - openssl_cmd = ["/usr/bin/openssl", "req", "-x509", "-nodes", - "-newkey", "rsa:1024", - "-subj", 
"/C=SE/ST=Stockholm/L=Stockholm/CN=www.example.com"] - - openssl_cmd.extend(["-days", str(days)]) - openssl_cmd.extend(["-out", path]) - - if is_der: - openssl_cmd.extend(["-outform", "DER"]) - openssl_cmd.extend(["-keyout", path + ".key"]) - else: - openssl_cmd.extend(["-keyout", path]) - - child = subprocess.Popen(openssl_cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - child_stdout, child_stderr = child.communicate() - if child.returncode != 0: - print("Failed to execute opensssl command:\n\t{0}\n".format( - ' '.join(openssl_cmd))) - print("Stdout+Stderr:\n{0}".format(child_stdout)) - sys.exit(1) - else: - print("Created test certificate {0}".format(os.path.basename(path))) - - @staticmethod - def _certpath2namedtuple(path): - with open(path, 'rb') as fh: - cert = namedtuple("FileTuple", ['path', 'content']) - cert.path = path - cert.content = fh.read() - return cert - - @classmethod - def setUpClass(cls): - #Prepare the test certificate tree: - cls._create_test_cert(-3, paths.EXPIRED_3_DAYS) - cls._create_test_cert(6, paths.EXPIRE_6_DAYS) - cls._create_test_cert(21, paths.EXPIRE_21_DAYS) - cls._create_test_cert(41, paths.EXPIRE_41_DAYS) - cls._create_test_cert(41, paths.EXPIRE_41_DAYS_DER, is_der=True,) - - @mock.patch('logging.error') - @mock.patch('sys.exit') - def test_config_file_parsing(self, SysExitMock, LoggingErrorMock): - #Test malformed file loading - certcheck.ScriptConfiguration.load_config(paths.TEST_MALFORMED_CONFIG_FILE) - self.assertTrue(LoggingErrorMock.called) - SysExitMock.assert_called_once_with(1) - SysExitMock.reset_mock() - - #Test non-existent file loading - certcheck.ScriptConfiguration.load_config(paths.TEST_NONEXISTANT_CONFIG_FILE) - self.assertTrue(LoggingErrorMock.called) - SysExitMock.assert_called_once_with(1) - - #Load the config file - certcheck.ScriptConfiguration.load_config(paths.TEST_CONFIG_FILE) - - #String: - self.assertEqual(certcheck.ScriptConfiguration.get_val("repo_host"), - "git.foo.net") - #List of strings 
- self.assertEqual(certcheck.ScriptConfiguration.get_val("riemann_tags"), - ['abc', 'def']) - #Integer: - self.assertEqual(certcheck.ScriptConfiguration.get_val("warn_treshold"), 30) - - #Key not in config file: - with self.assertRaises(KeyError): - certcheck.ScriptConfiguration.get_val("not_a_field") - - @mock.patch.object(certcheck.ScriptStatus, 'notify_immediate') # same as below - @mock.patch('logging.warn') # Unused, but masks error messages - @mock.patch.object(certcheck.ScriptStatus, 'update') - def test_cert_expiration_parsing(self, UpdateMock, *unused): - IGNORED_CERTS = ['42b270cbd03eaa8c16c386e66f910195f769f8b1'] - - # -3 days is in fact -4 days, 23:59:58.817181 - # so we compensate and round up - # additionally, openssl uses utc dates - now = datetime.utcnow() - timedelta(days=1) - - #Test an expired certificate: - cert = self._certpath2namedtuple(paths.EXPIRED_3_DAYS) - expiry_time = certcheck.get_cert_expiration( - cert, IGNORED_CERTS) - now - self.assertEqual(expiry_time.days, -3) - - #Test an ignored certificate: - cert = self._certpath2namedtuple(paths.IGNORED_CERT) - expiry_time = certcheck.get_cert_expiration(cert, - IGNORED_CERTS) - self.assertEqual(expiry_time, None) - - #Test a good certificate: - cert = self._certpath2namedtuple(paths.EXPIRE_21_DAYS) - expiry_time = certcheck.get_cert_expiration(cert, - IGNORED_CERTS) - now - self.assertEqual(expiry_time.days, 21) - - #Test a DER certificate: - cert = self._certpath2namedtuple(paths.EXPIRE_41_DAYS_DER) - certcheck.get_cert_expiration(cert, IGNORED_CERTS) - self.assertTrue(UpdateMock.called) - self.assertEqual(UpdateMock.call_args_list[0][0][0], 'unknown') - - #Test a broken certificate: - cert = self._certpath2namedtuple(paths.BROKEN_CERT) - certcheck.get_cert_expiration(cert, IGNORED_CERTS) - self.assertTrue(UpdateMock.called) - self.assertEqual(UpdateMock.call_args_list[0][0][0], 'unknown') - - @mock.patch('logging.warn') - def test_file_locking(self, LoggingWarnMock, *unused): - 
certcheck.ScriptLock.init(paths.TEST_LOCKFILE) - - with self.assertRaises(certcheck.RecoverableException): - certcheck.ScriptLock.release() - - certcheck.ScriptLock.aqquire() - - certcheck.ScriptLock.aqquire() - self.assertTrue(LoggingWarnMock.called) - - self.assertTrue(os.path.exists(paths.TEST_LOCKFILE)) - self.assertTrue(os.path.isfile(paths.TEST_LOCKFILE)) - self.assertFalse(os.path.islink(paths.TEST_LOCKFILE)) - - with open(paths.TEST_LOCKFILE, 'r') as fh: - pid_str = fh.read() - self.assertGreater(len(pid_str), 0) - pid = int(pid_str) - self.assertEqual(pid, os.getpid()) - - certcheck.ScriptLock.release() - - child = os.fork() - if not child: - #we are in the child process: - certcheck.ScriptLock.aqquire() - time.sleep(10) - #script should not do any cleanup - it is part of the tests :) - else: - #parent - timer = 0 - while timer < 3: - if os.path.isfile(paths.TEST_LOCKFILE): - break - else: - timer += 0.1 - time.sleep(0.1) - else: - # Child did not create pidfile in 3 s, - # we should clean up and bork: - os.kill(child, 9) - assert False - - with self.assertRaises(certcheck.RecoverableException): - certcheck.ScriptLock.aqquire() - - os.kill(child, 11) - - #now it should succed - certcheck.ScriptLock.aqquire() - - @mock.patch('logging.warn') # Unused, but masks error messages - @mock.patch('logging.info') - @mock.patch('logging.error') - @mock.patch('certcheck.bernhard') - def test_script_status(self, RiemannMock, LoggingErrorMock, LoggingInfoMock, - *unused): - #There should be at least one tag defined: - certcheck.ScriptStatus.initialize(riemann_hosts_config={}, riemann_tags=[]) - self.assertTrue(LoggingErrorMock.called) - LoggingErrorMock.reset_mock() - - #There should be at least one Riemann host defined: - certcheck.ScriptStatus.initialize(riemann_hosts_config={}, - riemann_tags=['tag1', 'tag2']) - self.assertTrue(LoggingErrorMock.called) - LoggingErrorMock.reset_mock() - - #Riemann exceptions should be properly handled/reported: - def side_effect(host, 
port): - raise Exception("Raising exception for {0}:{1} pair") - - RiemannMock.UDPTransport = 'UDPTransport' - RiemannMock.TCPTransport = 'TCPTransport' - RiemannMock.Client.side_effect = side_effect - - certcheck.ScriptStatus.initialize(riemann_hosts_config={ - 'static': ['192.168.122.16:5555:udp']}, - riemann_tags=['tag1', 'tag2']) - self.assertTrue(LoggingErrorMock.called) - LoggingErrorMock.reset_mock() - - RiemannMock.Client.side_effect = None - RiemannMock.Client.reset_mock() - - #Mock should only allow legitimate exit_statuses - certcheck.ScriptStatus.notify_immediate("not a real status", "message") - self.assertTrue(LoggingErrorMock.called) - LoggingErrorMock.reset_mock() - - certcheck.ScriptStatus.update("not a real status", "message") - self.assertTrue(LoggingErrorMock.called) - LoggingErrorMock.reset_mock() - - #Done with syntax checking, now initialize the class properly: - certcheck.ScriptStatus.initialize(riemann_hosts_config={ - 'static': ['1.2.3.4:1:udp', - '2.3.4.5:5555:tcp',] - }, - riemann_tags=['tag1', 'tag2']) - - proper_calls = [mock.call('1.2.3.4', 1, 'UDPTransport'), - mock.call('2.3.4.5', 5555, 'TCPTransport')] - RiemannMock.Client.assert_has_calls(proper_calls) - RiemannMock.Client.reset_mock() - - #Check if notify_immediate works - certcheck.ScriptStatus.notify_immediate("warn", "a warning message") - self.assertTrue(LoggingInfoMock.called) - LoggingErrorMock.reset_mock() - - proper_call = mock.call().send({'description': 'a warning message', - 'service': 'certcheck', - 'tags': ['tag1', 'tag2'], - 'state': 'warn', - 'host': platform.uname()[1], - 'ttl': 90000} - ) - # This call should be issued to *both* connection mocks, but we - # simplify things here a bit: - self.assertEqual(2, len([x for x in RiemannMock.Client.mock_calls - if x == proper_call])) - RiemannMock.Client.reset_mock() - - #update method shoul escalate only up: - certcheck.ScriptStatus.update('warn', "this is a warning message.") - certcheck.ScriptStatus.update('ok', '') - 
certcheck.ScriptStatus.update('unknown', "this is a not-rated message.") - certcheck.ScriptStatus.update('ok', "this is an informational message.") - - proper_call = mock.call().send({'description': - 'this is a warning message.\n' + - 'this is a not-rated message.\n' + - 'this is an informational message.', - 'service': 'certcheck', - 'tags': ['tag1', 'tag2'], - 'state': 'unknown', - 'host': platform.uname()[1], - 'ttl': 90000} - ) - # This call should be issued to *both* connection mocks, but we - # simplify things here a bit: - certcheck.ScriptStatus.notify_agregated() - self.assertEqual(2, len([x for x in RiemannMock.Client.mock_calls - if x == proper_call])) - RiemannMock.reset_mock() - - @mock.patch('sys.exit') - def test_command_line_parsing(self, SysExitMock): - old_args = sys.argv - - #General parsing: - sys.argv = ['./certcheck', '-v', '-s', '-d', '-c', './certcheck.json'] - parsed_cmdline = certcheck.parse_command_line() - self.assertEqual(parsed_cmdline, {'std_err': True, - 'config_file': './certcheck.json', - 'verbose': True, - 'dont_send': True, - }) - - #Config file should be a mandatory argument: - sys.argv = ['./certcheck', ] - # Suppres warnings from argparse - with mock.patch('sys.stderr'): - parsed_cmdline = certcheck.parse_command_line() - SysExitMock.assert_called_once_with(2) - - #Test default values: - sys.argv = ['./certcheck', '-c', './certcheck.json'] - parsed_cmdline = certcheck.parse_command_line() - self.assertEqual(parsed_cmdline, {'std_err': False, - 'config_file': './certcheck.json', - 'verbose': False, - 'dont_send': False, - }) - - sys.argv = old_args - - @mock.patch('certcheck.sys.exit') - @mock.patch('certcheck.get_cert_expiration') - @mock.patch('certcheck.CertStore') - @mock.patch('certcheck.ScriptLock', autospec=True) - @mock.patch('certcheck.ScriptStatus', autospec=True) - @mock.patch('certcheck.ScriptConfiguration', autospec=True) - @mock.patch('certcheck.logging', autospec=True) - def test_script_logic(self, LoggingMock, 
ScriptConfigurationMock, - ScriptStatusMock, ScriptLockMock, CertStoreMock, - CertExpirationMock, SysExitMock): - - #Fake configuration data: - def script_conf_factory(**kwargs): - good_configuration = {"warn_treshold": 30, - "critical_treshold": 15, - "riemann_hosts": { - 'static': ['1.2.3.4:1:udp', - '2.3.4.5:5555:tcp',] - }, - "riemann_tags": ["abc", "def"], - "repo_host": "git.foo.net", - "repo_port": 22, - "repo_url": "/foo-puppet", - "repo_masterbranch": "refs/heads/foo", - "repo_localdir": "/tmp/foo", - "repo_user": "foo", - "repo_pubkey": "./foo", - "lockfile": "./fake_lock.pid", - "ignored_certs": { - '42b270cbd03eaa8c16c386e66f910195f769f8b1': "certificate used during unit-tests" - } - } - - def func(key): - config = good_configuration.copy() - config.update(kwargs) - self.assertIn(key, config) - return config[key] - - return func - - # A bit of a workaround, but we cannot simply call sys.exit - def terminate_script(exit_status): - raise SystemExit(exit_status) - SysExitMock.side_effect = terminate_script - - #Provide fake data for the script: - fake_cert_tuple = namedtuple("FileTuple", ['path', 'content']) - fake_cert_tuple.path = 'some_cert' - fake_cert_tuple.content = 'some content' - - def fake_cert(cert_extensions): - return iter([fake_cert_tuple]) - CertStoreMock.lookup_certs.side_effect = fake_cert - - # Test if ScriptStatus gets properly initialized - # and whether warn > crit condition is - # checked as well - certcheck.ScriptConfiguration.get_val.side_effect = \ - script_conf_factory(warn_treshold=7) - - with self.assertRaises(SystemExit) as e: - certcheck.main(config_file='./certcheck.conf') - self.assertEqual(e.exception.code, 1) - - proper_init_call = dict(riemann_hosts_config= { - 'static': ['1.2.3.4:1:udp', - '2.3.4.5:5555:tcp',] - }, - riemann_tags=['abc', 'def'], - debug=False) - self.assertTrue(ScriptConfigurationMock.load_config.called) - self.assertTrue(ScriptStatusMock.notify_immediate.called) - 
certcheck.ScriptStatus.initialize.assert_called_once_with(**proper_init_call) - - #this time test only the negative warn threshold: - certcheck.ScriptConfiguration.get_val.side_effect = \ - script_conf_factory(warn_treshold=-30) - ScriptStatusMock.notify_immediate.reset_mock() - with self.assertRaises(SystemExit) as e: - certcheck.main(config_file='./certcheck.conf') - self.assertTrue(ScriptStatusMock.notify_immediate.called) - self.assertEqual(e.exception.code, 1) - - #this time test only the crit threshold == 0 condition check: - certcheck.ScriptConfiguration.get_val.side_effect = \ - script_conf_factory(critical_treshold=-1) - - ScriptStatusMock.notify_immediate.reset_mock() - with self.assertRaises(SystemExit) as e: - certcheck.main(config_file='./certcheck.conf') - self.assertTrue(ScriptStatusMock.notify_immediate.called) - self.assertEqual(e.exception.code, 1) - - #test if an expired cert is properly handled: - ScriptStatusMock.notify_immediate.reset_mock() - certcheck.ScriptConfiguration.get_val.side_effect = \ - script_conf_factory() - - def fake_cert_expiration(cert, ignored_certs): - self.assertEqual(cert, fake_cert_tuple) - return datetime.utcnow() - timedelta(days=4) - CertExpirationMock.side_effect = fake_cert_expiration - with self.assertRaises(SystemExit) as e: - certcheck.main(config_file='./certcheck.conf') - self.assertEqual(e.exception.code, 0) - self.assertTrue(ScriptStatusMock.update.called) - self.assertEqual(ScriptStatusMock.update.call_args[0][0], 'critical') - self.assertTrue(ScriptLockMock.aqquire.called) - self.assertTrue(ScriptLockMock.release.called) - self.assertTrue(ScriptStatusMock.notify_agregated.called) - self.assertFalse(ScriptStatusMock.notify_immediate.called) - - #test if soon to expire (