#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8:noet

## Copyright 2023 Bashclub https://github.com/bashclub
## BSD-2-Clause
##
## Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
## BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
## GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

## OPNsense CheckMK Agent
## to install:
## copy to /usr/local/etc/rc.syshook.d/start/99-checkmk_agent and chmod +x
##
## default config file: /usr/local/etc/checkmk.conf
##
## for server-side implementation of
## * smartdisk - install the mkp from https://github.com/bashclub/checkmk-smart plugins os-smart
## * squid - install the mkp from https://exchange.checkmk.com/p/squid and forwarder -> listen on loopback active

__VERSION__ = "1.0.6"
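
## The optional settings used by the checker below (e.g. self.encrypt, self.onlyfrom,
## self.skipcheck) are presumably populated from the config file named above; the exact
## key/value syntax is handled by the agent's config loader.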

import sys
import os
import shlex
import glob
import re
import time
import json
import socket
import signal
import struct
import subprocess
import pwd
import threading
import ipaddress
import base64
import traceback
import syslog
import requests
from urllib3.connection import HTTPConnection
from urllib3.connectionpool import HTTPConnectionPool
from requests.adapters import HTTPAdapter
from cryptography import x509
from cryptography.hazmat.backends import default_backend as crypto_default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from datetime import datetime
from xml.etree import cElementTree as ELementTree
from collections import Counter,defaultdict
from pprint import pprint
from socketserver import TCPServer,StreamRequestHandler

SCRIPTPATH = os.path.abspath(__file__)
SYSHOOK_METHOD = re.findall(r"rc\.syshook\.d\/(start|stop)/",SCRIPTPATH)
BASEDIR = "/usr/local/check_mk_agent"
CHECKMK_CONFIG = "/usr/local/etc/checkmk.conf"
MK_CONFDIR = os.path.dirname(CHECKMK_CONFIG)
LOCALDIR = os.path.join(BASEDIR,"local")
PLUGINSDIR = os.path.join(BASEDIR,"plugins")
SPOOLDIR = os.path.join(BASEDIR,"spool")

os.environ["MK_CONFDIR"] = MK_CONFDIR
os.environ["MK_LIBDIR"] = BASEDIR
os.environ["MK_VARDIR"] = BASEDIR

class object_dict(defaultdict):
    def __getattr__(self,name):
        return self[name] if name in self else ""

def etree_to_dict(t):
    d = {t.tag: {} if t.attrib else None}
    children = list(t)
    if children:
        dd = object_dict(list)
        for dc in map(etree_to_dict,children):
            for k,v in dc.items():
                dd[k].append(v)
        d = {t.tag: {k: v[0] if len(v) == 1 else v for k,v in dd.items()}}
    if t.attrib:
        d[t.tag].update(('@' + k,v) for k,v in t.attrib.items())
    if t.text:
        text = t.text.strip()
        if children or t.attrib:
            if text:
                d[t.tag]['#text'] = text
        else:
            d[t.tag] = text
    return d
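
## Example (illustrative): etree_to_dict() turns
##   <opnsense><system><hostname>fw01</hostname></system></opnsense>
## into {"opnsense": {"system": {"hostname": "fw01"}}}; XML attributes are stored under
## "@<name>" keys and mixed text content under "#text".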

def log(message,prio="notice"):
    priority = {
        "crit"    : syslog.LOG_CRIT,
        "err"     : syslog.LOG_ERR,
        "warning" : syslog.LOG_WARNING,
        "notice"  : syslog.LOG_NOTICE,
        "info"    : syslog.LOG_INFO,
    }.get(str(prio).lower(),syslog.LOG_DEBUG)
    syslog.openlog(ident="checkmk_agent",logoption=syslog.LOG_PID | syslog.LOG_NDELAY,facility=syslog.LOG_DAEMON)
    syslog.syslog(priority,message)

def pad_pkcs7(message,size=16):
    _pad = size - (len(message) % size)
    if type(message) == str:
        return message + chr(_pad) * _pad
    else:
        return message + bytes([_pad]) * _pad
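
## Example (illustrative): pad_pkcs7("abc") appends chr(13) thirteen times so the result
## is 16 characters long; input that is already a multiple of the block size gets a full
## extra block of padding, as PKCS#7 requires.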

class NginxConnection(HTTPConnection):
    def __init__(self):
        super().__init__("localhost")
    def connect(self):
        self.sock = socket.socket(socket.AF_UNIX,socket.SOCK_STREAM)
        self.sock.connect("/var/run/nginx_status.sock")

class NginxConnectionPool(HTTPConnectionPool):
    def __init__(self):
        super().__init__("localhost")
    def _new_conn(self):
        return NginxConnection()

class NginxAdapter(HTTPAdapter):
    def get_connection(self,url,proxies=None):
        return NginxConnectionPool()
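
## These adapter classes let requests speak HTTP to the local nginx status endpoint over
## the UNIX socket /var/run/nginx_status.sock instead of a TCP port; they are used by
## checklocal_nginx() below via session.mount("http://nginx/",NginxAdapter()).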

def check_pid(pid):
    try:
        os.kill(pid,0)
        return True
    except OSError: ## no permission check, agent currently runs as root
        return False

class checkmk_handler(StreamRequestHandler):
    def handle(self):
        with self.server._mutex:
            try:
                _strmsg = self.server.do_checks(remote_ip=self.client_address[0])
            except Exception as e:
                #raise
                _strmsg = str(e).encode("utf-8")
            try:
                self.wfile.write(_strmsg)
            except:
                pass

class checkmk_checker(object):
    _available_sysctl_list = []
    _available_sysctl_temperature_list = []
    _certificate_timestamp = 0
    _check_cache = {}
    _datastore_mutex = threading.RLock()
    _datastore = object_dict()

    def encrypt_msg(self,message,password='secretpassword'):
        SALT_LENGTH = 8
        KEY_LENGTH = 32
        IV_LENGTH = 16
        PBKDF2_CYCLES = 10_000
        SALT = b"Salted__"
        _backend = crypto_default_backend()
        _kdf_key = PBKDF2HMAC(
            algorithm = hashes.SHA256(),
            length = KEY_LENGTH + IV_LENGTH,
            salt = SALT,
            iterations = PBKDF2_CYCLES,
            backend = _backend
        ).derive(password.encode("utf-8"))
        _key, _iv = _kdf_key[:KEY_LENGTH], _kdf_key[KEY_LENGTH:]
        _encryptor = Cipher(
            algorithms.AES(_key),
            modes.CBC(_iv),
            backend = _backend
        ).encryptor()
        message = pad_pkcs7(message)
        message = message.encode("utf-8")
        _encrypted_message = _encryptor.update(message) + _encryptor.finalize()
        return pad_pkcs7(b"03",10) + SALT + _encrypted_message
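
    ## Output layout of encrypt_msg(): the version marker b"03" padded with PKCS#7 to 10 bytes,
    ## then the literal b"Salted__" marker, then the AES-256-CBC ciphertext; key and IV are
    ## derived from the password via PBKDF2-HMAC-SHA256 (10,000 rounds, fixed salt). This is
    ## intended to match the encrypted agent output format understood by the Checkmk server.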

    def do_checks(self,debug=False,remote_ip=None,**kwargs):
        self._getosinfo()
        _errors = []
        _failed_sections = []
        _lines = ["<<<check_mk>>>"]
        _lines.append("AgentOS: {os}".format(**self._info))
        _lines.append(f"Version: {__VERSION__}")
        _lines.append("Hostname: {hostname}".format(**self._info))
        if self.onlyfrom:
            _lines.append("OnlyFrom: {0}".format(",".join(self.onlyfrom)))
        _lines.append(f"LocalDirectory: {LOCALDIR}")
        _lines.append(f"PluginsDirectory: {PLUGINSDIR}")
        _lines.append(f"AgentDirectory: {MK_CONFDIR}")
        _lines.append(f"SpoolDirectory: {SPOOLDIR}")

        for _check in dir(self):
            if _check.startswith("check_"):
                _name = _check.split("_",1)[1]
                if _name in self.skipcheck:
                    continue
                try:
                    _lines += getattr(self,_check)()
                except:
                    _failed_sections.append(_name)
                    _errors.append(traceback.format_exc())

        if os.path.isdir(PLUGINSDIR):
            for _plugin_file in glob.glob(f"{PLUGINSDIR}/**",recursive=True):
                if os.path.isfile(_plugin_file) and os.access(_plugin_file,os.X_OK):
                    try:
                        _cachetime = int(_plugin_file.split(os.path.sep)[-2])
                    except:
                        _cachetime = 0
                    try:
                        if _cachetime > 0:
                            _lines.append(self._run_cache_prog(_plugin_file,_cachetime))
                        else:
                            _lines.append(self._run_prog(_plugin_file))
                    except:
                        _errors.append(traceback.format_exc())

        _lines.append("<<<local:sep(0)>>>")
        for _check in dir(self):
            if _check.startswith("checklocal_"):
                _name = _check.split("_",1)[1]
                if _name in self.skipcheck:
                    continue
                try:
                    _lines += getattr(self,_check)()
                except:
                    _failed_sections.append(_name)
                    _errors.append(traceback.format_exc())

        if os.path.isdir(LOCALDIR):
            for _local_file in glob.glob(f"{LOCALDIR}/**",recursive=True):
                if os.path.isfile(_local_file) and os.access(_local_file,os.X_OK):
                    try:
                        _cachetime = int(_local_file.split(os.path.sep)[-2])
                    except:
                        _cachetime = 0
                    try:
                        if _cachetime > 0:
                            _lines.append(self._run_cache_prog(_local_file,_cachetime))
                        else:
                            _lines.append(self._run_prog(_local_file))
                    except:
                        _errors.append(traceback.format_exc())

        if os.path.isdir(SPOOLDIR):
            _now = time.time()
            for _filename in glob.glob(f"{SPOOLDIR}/*"):
                ## a leading number in the file name is interpreted as its maximum age in seconds
                _maxage = re.search(r"^\d+",os.path.basename(_filename))
                if _maxage:
                    _maxage = int(_maxage.group())
                    _mtime = os.stat(_filename).st_mtime
                    if _now - _mtime > _maxage:
                        continue
                with open(_filename) as _f:
                    _lines.append(_f.read())

        _lines.append("")
        if debug:
            sys.stdout.write("\n".join(_errors))
            sys.stdout.flush()
        if _failed_sections:
            _lines.append("<<<check_mk>>>")
            _lines.append("FailedPythonPlugins: {0}".format(",".join(_failed_sections)))

        if self.encrypt and not debug:
            return self.encrypt_msg("\n".join(_lines),password=self.encrypt)
        return "\n".join(_lines).encode("utf-8")

    def do_zabbix_output(self):
        self._getosinfo()
        _regex_convert = re.compile(r"^(?P<status>[0-3P])\s(?P<servicename>\".*?\"|\w+)\s(?P<metrics>[\w=.;|]+|-)\s(?P<details>.*)")
        _json = []
        for _check in dir(self):
            if _check.startswith("checklocal_"):
                _name = _check.split("_",1)[1]
                if _name in self.skipcheck:
                    continue
                try:
                    for _line in getattr(self,_check)():
                        try:
                            _entry = _regex_convert.search(_line).groupdict()
                            _entry["servicename"] = _entry["servicename"].strip('"')
                            _json.append(_entry)
                        except:
                            raise
                except:
                    raise
        return json.dumps(_json)

    def _get_storedata(self,section,key):
        with self._datastore_mutex:
            return self._datastore.get(section,{}).get(key)

    def _set_storedata(self,section,key,value):
        with self._datastore_mutex:
            if section not in self._datastore:
                self._datastore[section] = object_dict()
            self._datastore[section][key] = value

    def _getosinfo(self):
        _info = json.load(open("/usr/local/opnsense/version/core","r"))
        _changelog = json.load(open("/usr/local/opnsense/changelog/index.json","r"))
        _config_modified = os.stat("/conf/config.xml").st_mtime
        try:
            _default_version = {'series': _info.get("product_series"), 'version': _info.get("product_version"), 'date': time.strftime('%B %d, %Y')}
            _latest_series = dict(map(lambda x: (x.get("series"),x),_changelog))
            _latest_versions = dict(map(lambda x: (x.get("version"),x),_changelog))
            _latest_firmware = _latest_series.get(_info.get("product_series"),_default_version)
            _current_firmware = _latest_versions.get(_info.get("product_version").split("_")[0],_default_version).copy()
            _current_firmware["age"] = int(time.time() - time.mktime(time.strptime(_current_firmware.get("date"),"%B %d, %Y")))
            _current_firmware["version"] = _info.get("product_version")
        except:
            #raise
            _latest_firmware = {}
            _current_firmware = {}
        try:
            _upgrade_json = json.load(open("/tmp/pkg_upgrade.json","r"))
            _upgrade_packages = dict(map(lambda x: (x.get("name"),x),_upgrade_json.get("upgrade_packages")))
            _current_firmware["version"] = _upgrade_packages.get("opnsense").get("current_version")
            _latest_firmware["version"] = _upgrade_packages.get("opnsense").get("new_version")
        except:
            _current_firmware["version"] = _current_firmware["version"].split("_")[0]
            _latest_firmware["version"] = _current_firmware["version"] ## fixme ## no upgrade package info on opnsense ... assume no new version
        self._info = {
            "os"                : _info.get("product_name"),
            "os_version"        : _current_firmware.get("version","unknown"),
            "version_age"       : _current_firmware.get("age",0),
            "config_age"        : int(time.time() - _config_modified),
            "last_configchange" : time.strftime("%H:%M %d.%m.%Y",time.localtime(_config_modified)),
            "product_series"    : _info.get("product_series"),
            "latest_version"    : _latest_firmware.get("version","unknown"),
            "latest_date"       : _latest_firmware.get("date",""),
            "hostname"          : self._run_prog("hostname").strip("\n")
        }
        if os.path.exists("/usr/local/opnsense/version/core.license"):
            self._info["business_expire"] = datetime.strptime(json.load(open("/usr/local/opnsense/version/core.license","r")).get("valid_to","2000-01-01"),"%Y-%m-%d")

    @staticmethod
    def ip2int(ipaddr):
        return struct.unpack("!I",socket.inet_aton(ipaddr))[0]

    @staticmethod
    def int2ip(intaddr):
        return socket.inet_ntoa(struct.pack("!I",intaddr))
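
    ## Example (illustrative): ip2int("192.168.1.1") == 3232235777 (0xC0A80101) and
    ## int2ip(3232235777) == "192.168.1.1".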

    def pidof(self,prog,default=None):
        _allprogs = re.findall(r"(\w+)\s+(\d+)",self._run_prog("ps ax -c -o command,pid"))
        return int(dict(_allprogs).get(prog,default))

    def _config_reader(self,config=""):
        _config = ELementTree.parse("/conf/config.xml")
        _root = _config.getroot()
        return etree_to_dict(_root).get("opnsense",{})

    @staticmethod
    def get_common_name(certrdn):
        try:
            return next(filter(lambda x: x.oid == x509.oid.NameOID.COMMON_NAME,certrdn)).value.strip()
        except:
            return str(certrdn)

    def _certificate_parser(self):
        self._certificate_timestamp = time.time()
        self._certificate_store = {}
        for _cert in self._config_reader().get("cert"):
            try:
                _certpem = base64.b64decode(_cert.get("crt"))
                _x509cert = x509.load_pem_x509_certificate(_certpem,crypto_default_backend())
                _cert["not_valid_before"] = _x509cert.not_valid_before.timestamp()
                _cert["not_valid_after"] = _x509cert.not_valid_after.timestamp()
                _cert["serial"] = _x509cert.serial_number
                _cert["common_name"] = self.get_common_name(_x509cert.subject)
                _cert["issuer"] = self.get_common_name(_x509cert.issuer)
            except:
                pass
            self._certificate_store[_cert.get("refid")] = _cert

    def _get_certificate(self,refid):
        if time.time() - self._certificate_timestamp > 3600:
            self._certificate_parser()
        return self._certificate_store.get(refid)

    def _get_certificate_by_cn(self,cn,caref=None):
        if time.time() - self._certificate_timestamp > 3600:
            self._certificate_parser()
        if caref:
            _ret = filter(lambda x: x.get("common_name") == cn and x.get("caref") == caref,self._certificate_store.values())
        else:
            _ret = filter(lambda x: x.get("common_name") == cn,self._certificate_store.values())
        try:
            return next(_ret)
        except StopIteration:
            return {}

    def get_opnsense_ipaddr(self):
        try:
            _ret = {}
            for _if,_ip,_mask in re.findall(r"^([\w_]+):\sflags=(?:8943|8051|8043|8863).*?inet\s([\d.]+)\snetmask\s0x([a-f0-9]+)",self._run_prog("ifconfig"),re.DOTALL | re.M):
                _ret[_if] = "{0}/{1}".format(_ip,str(bin(int(_mask,16))).count("1"))
            return _ret
        except:
            return {}

    def get_opnsense_interfaces(self):
        _ifs = {}
        for _name,_interface in self._config_reader().get("interfaces",{}).items():
            if _interface.get("enable") != "1":
                continue
            _desc = _interface.get("descr")
            _ifs[_interface.get("if","_")] = _desc if _desc else _name.upper()
        try:
            _wgserver = self._config_reader().get("OPNsense").get("wireguard").get("server").get("servers").get("server")
            if type(_wgserver) == dict:
                _wgserver = [_wgserver]
            _ifs.update(
                dict(
                    map(
                        lambda x: ("wg{}".format(x.get("instance")),"Wireguard_{}".format(x.get("name").strip().replace(" ","_"))),
                        _wgserver
                    )
                )
            )
        except:
            pass
        return _ifs
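
    ## The checklocal_* methods below emit Checkmk "local check" lines of the form
    ##   <status> "<service name>" <metric>=<value>|... <status text>
    ## where 0/1/2/3 means OK/WARN/CRIT/UNKNOWN and "P" lets the server derive the state
    ## from the metric thresholds.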

    def checklocal_firmware(self):
        if self._info.get("os_version") != self._info.get("latest_version"):
            return ["1 Firmware update_available=1|last_updated={version_age:.0f}|apply_finish_time={config_age:.0f} Version {os_version} ({latest_version} available {latest_date}) Config changed: {last_configchange}".format(**self._info)]
        return ["0 Firmware update_available=0|last_updated={version_age:.0f}|apply_finish_time={config_age:.0f} Version {os_version} Config changed: {last_configchange}".format(**self._info)]

    def checklocal_business(self):
        if self._info.get("business_expire"):
            _days = (self._info.get("business_expire") - datetime.now()).days
            _date = self._info.get("business_expire").strftime("%d.%m.%Y")
            return [f'P "Business Licence" expiredays={_days};;;30;60; Licence Expire: {_date}']
        return []

    def check_label(self):
        _ret = ["<<<labels:sep(0)>>>"]
        _dmsg = self._run_prog("dmesg",timeout=10)
        if _dmsg.lower().find("hypervisor:") > -1:
            _ret.append('{"cmk/device_type":"vm"}')
        return _ret

    def check_net(self):
        _now = int(time.time())
        _opnsense_ifs = self.get_opnsense_interfaces()
        _ret = ["<<<statgrab_net>>>"]
        _interface_data = []
        _interface_data = self._run_prog("/usr/bin/netstat -i -b -d -n -W -f link").split("\n")
        _header = _interface_data[0].lower()
        _header = _header.replace("pkts","packets").replace("coll","collisions").replace("errs","error").replace("ibytes","rx").replace("obytes","tx")
        _header = _header.split()
        _interface_stats = dict(
            map(
                lambda x: (x.get("name"),x),
                [
                    dict(zip(_header,_ifdata.split()))
                    for _ifdata in _interface_data[1:] if _ifdata
                ]
            )
        )
        _ifconfig_out = self._run_prog("ifconfig -m -v -f inet:cidr,inet6:cidr")
        _ifconfig_out += "END" ## fix regex
        self._all_interfaces = object_dict()
        self._carp_interfaces = object_dict()
        for _interface,_data in re.findall(r"^(?P<iface>[\w.]+):\s(?P<data>.*?(?=^\w))",_ifconfig_out,re.DOTALL | re.MULTILINE):
            _interface_dict = object_dict()
            _interface_dict.update(_interface_stats.get(_interface,{}))
            _interface_dict["interface_name"] = _opnsense_ifs.get(_interface,_interface)
            _interface_dict["up"] = "false"
            #if _interface.startswith("vmx"): ## vmware fix 10GBe (as OS Support)
            #    _interface_dict["speed"] = "10000"
            _interface_dict["systime"] = _now
            for _key,_val in re.findall(r"^\s*(\w+)[:\s=]+(.*?)$",_data,re.MULTILINE):
                if _key == "description":
                    _interface_dict["interface_name"] = re.sub(r"_\((lan|wan|opt\d+)\)$","",_val.strip().replace(" ","_"))
                if _key == "groups":
                    _interface_dict["groups"] = _val.strip().split()
                if _key == "ether":
                    _interface_dict["phys_address"] = _val.strip()
                if _key == "status" and _val.strip() == "active":
                    _interface_dict["up"] = "true"
                if _interface.startswith("wg") and _interface_dict.get("flags",0) & 0x01:
                    _interface_dict["up"] = "true"
                if _key == "flags":
                    _interface_dict["flags"] = int(re.findall(r"^[a-f\d]+",_val)[0],16)
                    ## hack: pppoe interfaces have no "status: active" line and no pppd pid
                    if _interface.lower().startswith("pppoe") and _interface_dict["flags"] & 0x10 and _interface_dict["flags"] & 0x1:
                        _interface_dict["up"] = "true"
                    ## http://web.mit.edu/freebsd/head/sys/net/if.h
                    ## 0x1    UP
                    ## 0x2    BROADCAST
                    ## 0x8    LOOPBACK
                    ## 0x10   POINTTOPOINT
                    ## 0x40   RUNNING
                    ## 0x100  PROMISC
                    ## 0x800  SIMPLEX
                    ## 0x8000 MULTICAST
                if _key == "media":
                    _match = re.search(r"\((?P<speed>\d+G?)[Bb]ase(?:.*?<(?P<duplex>.*?)>)?",_val)
                    if _match:
                        _interface_dict["speed"] = _match.group("speed").replace("G","000")
                        _interface_dict["duplex"] = _match.group("duplex")
                if _key == "inet":
                    _match = re.search(r"^(?P<ipaddr>[\d.]+)\/(?P<cidr>\d+).*?(?:vhid\s(?P<vhid>\d+)|$)",_val,re.M)
                    if _match:
                        _cidr = _match.group("cidr")
                        _ipaddr = _match.group("ipaddr")
                        _vhid = _match.group("vhid")
                        if not _vhid:
                            _interface_dict["cidr"] = _cidr ## cidr only when there is no vhid
                        ## fixme ipaddr dict / vhid dict
                if _key == "inet6":
                    _match = re.search(r"^(?P<ipaddr>[0-9a-f:]+)\/(?P<prefix>\d+).*?(?:vhid\s(?P<vhid>\d+)|$)",_val,re.M)
                    if _match:
                        _ipaddr = _match.group("ipaddr")
                        _prefix = _match.group("prefix")
                        _vhid = _match.group("vhid")
                        if not _vhid:
                            _interface_dict["prefix"] = _prefix
                        ## fixme ipaddr dict / vhid dict
                if _key == "carp":
                    _match = re.search(r"(?P<status>MASTER|BACKUP)\svhid\s(?P<vhid>\d+)\sadvbase\s(?P<base>\d+)\sadvskew\s(?P<skew>\d+)",_val,re.M)
                    if _match:
                        _carpstatus = _match.group("status")
                        _vhid = _match.group("vhid")
                        self._carp_interfaces[_vhid] = (_interface,_carpstatus)
                        _advbase = _match.group("base")
                        _advskew = _match.group("skew")
                        ## fixme vhid dict
                if _key == "id":
                    _match = re.search(r"priority\s(\d+)",_val)
                    if _match:
                        _interface_dict["bridge_prio"] = _match.group(1)
                if _key == "member":
                    _member = _interface_dict.get("member",[])
                    _member.append(_val.split()[0])
                    _interface_dict["member"] = _member
                if _key == "Opened":
                    try:
                        _pid = int(_val.split(" ")[-1])
                        if check_pid(_pid):
                            _interface_dict["up"] = "true"
                    except ValueError:
                        pass

            if _interface_dict["flags"] & 0x2 or _interface_dict["flags"] & 0x10 or _interface_dict["flags"] & 0x80: ## only broadcast or point-to-point .. and noarp
                self._all_interfaces[_interface] = _interface_dict
            else:
                continue
            #if re.search("^[*]?(pflog|pfsync|lo)\d?",_interface):
            #    continue
            if not _opnsense_ifs.get(_interface):
                continue
            for _key,_val in _interface_dict.items():
                if _key in ("mtu","ipackets","ierror","idrop","rx","opackets","oerror","tx","collisions","drop","interface_name","up","systime","phys_address","speed","duplex"):
                    if type(_val) in (str,int,float):
                        _sanitized_interface = _interface.replace(".","_")
                        _ret.append(f"{_sanitized_interface}.{_key} {_val}")
        return _ret

    def checklocal_services(self):
        _phpcode = '<?php require_once("config.inc");require_once("system.inc"); require_once("plugins.inc"); require_once("util.inc"); foreach(plugins_services() as $_service) { printf("%s;%s;%s\n",$_service["name"],$_service["description"],service_status($_service));} ?>'
        _proc = subprocess.Popen(["php"],stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.DEVNULL,encoding="utf-8")
        _data,_ = _proc.communicate(input=_phpcode,timeout=15)
        _services = []
        for _service in _data.strip().split("\n"):
            _services.append(_service.split(";"))
        _num_services = len(_services)
        _stopped_services = list(filter(lambda x: x[2] != '1',_services))
        _num_stopped = len(_stopped_services)
        _num_running = _num_services - _num_stopped
        _stopped_services = ",".join(map(lambda x: x[1],_stopped_services))
        if _num_stopped > 0:
            return [f"2 Services running_services={_num_running:.0f}|stopped_service={_num_stopped:.0f} Services: {_stopped_services} not running"]
        return [f"0 Services running_services={_num_running:.0f}|stopped_service={_num_stopped:.0f} All Services running"]

    def checklocal_carpstatus(self):
        _ret = []
        _virtual = self._config_reader().get("virtualip")
        if not _virtual:
            return []
        _virtual = _virtual.get("vip")
        if not _virtual:
            return []
        if type(_virtual) != list:
            _virtual = [_virtual]
        for _vip in _virtual:
            if _vip.get("mode") != "carp":
                continue
            _vhid = _vip.get("vhid")
            _ipaddr = _vip.get("subnet")
            _interface,_carpstatus = self._carp_interfaces.get(_vhid,(None,None))
            _carpstatus_num = 1 if _carpstatus == "MASTER" else 0
            _interface_name = self._all_interfaces.get(_interface,{}).get("interface_name",_interface)
            if int(_vip.get("advskew")) < 50:
                _status = 0 if _carpstatus == "MASTER" else 1
            else:
                _status = 0 if _carpstatus == "BACKUP" else 1
            if not _interface:
                continue
            _ret.append(f"{_status} \"CARP: {_interface_name}@{_vhid}\" master={_carpstatus_num} {_carpstatus} {_ipaddr} ({_interface})")
        return _ret

    def check_dhcp(self):
        if not os.path.exists("/var/dhcpd/var/db/dhcpd.leases"):
            return []
        _ret = ["<<<isc_dhcpd>>>"]
        _ret.append("[general]\nPID: {0}".format(self.pidof("dhcpd",-1)))
        _dhcpleases = open("/var/dhcpd/var/db/dhcpd.leases","r").read()
        ## FIXME
        #_dhcpleases_dict = dict(map(lambda x: (self.ip2int(x[0]),x[1]),re.findall(r"lease\s(?P<ipaddr>[0-9.]+)\s\{.*?.\n\s+binding state\s(?P<state>\w+).*?\}",_dhcpleases,re.DOTALL)))
        _dhcpleases_dict = dict(re.findall(r"lease\s(?P<ipaddr>[0-9.]+)\s\{.*?.\n\s+binding state\s(?P<state>active).*?\}",_dhcpleases,re.DOTALL))
        _dhcpconf = open("/var/dhcpd/etc/dhcpd.conf","r").read()
        _ret.append("[pools]")
        for _subnet in re.finditer(r"subnet\s(?P<subnet>[0-9.]+)\snetmask\s(?P<netmask>[0-9.]+)\s\{.*?(?:pool\s\{.*?\}.*?)*}",_dhcpconf,re.DOTALL):
            #_cidr = bin(self.ip2int(_subnet.group(2))).count("1")
            #_available = 0
            for _pool in re.finditer(r"pool\s\{.*?range\s(?P<start>[0-9.]+)\s(?P<end>[0-9.]+).*?\}",_subnet.group(0),re.DOTALL):
                #_start,_end = self.ip2int(_pool.group(1)), self.ip2int(_pool.group(2))
                #_ips_in_pool = filter(lambda x: _start < x[0] < _end,_dhcpleases_dict.items())
                #pprint(_dhcpleases_dict)
                #pprint(sorted(list(map(lambda x: (self._int2ip(x[0]),x[1]),_ips_in_pool))))
                #_available += (_end - _start)
                _ret.append("{0}\t{1}".format(_pool.group(1),_pool.group(2)))
            #_ret.append("DHCP_{0}/{1} {2}".format(_subnet.group(1),_cidr,_available))
        _ret.append("[leases]")
        for _ip in sorted(_dhcpleases_dict.keys()):
            _ret.append(_ip)
        return _ret

    def check_squid(self):
        _squid_config = self._config_reader().get("OPNsense",{}).get("proxy",{})
        if _squid_config.get("general",{}).get("enabled") != "1":
            return []
        _ret = ["<<<squid>>>"]
        _port = _squid_config.get("forward",{}).get("port","3128")
        try:
            _response = requests.get(f"http://127.0.0.1:{_port}/squid-internal-mgr/5min",timeout=0.2)
            if _response.status_code == 200:
                _ret += _response.text.split("\n")
        except:
            pass
        return _ret

    def checklocal_pkgaudit(self):
        try:
            _data = json.loads(self._run_cache_prog("pkg audit -F --raw=json-compact -q",cachetime=360,ignore_error=True))
            _vulns = _data.get("pkg_count",0)
            if _vulns > 0:
                _packages = ",".join(_data.get("packages",{}).keys())
                return [f"1 Audit issues={_vulns} Pkg: {_packages} vulnerable"]
            raise
        except:
            pass
        return ["0 Audit issues=0 OK"]

    @staticmethod
    def _read_from_openvpnsocket(vpnsocket,cmd):
        _sock = socket.socket(socket.AF_UNIX,socket.SOCK_STREAM)
        try:
            _sock.connect(vpnsocket)
            assert (_sock.recv(4096).decode("utf-8")).startswith(">INFO")
            cmd = cmd.strip() + "\n"
            _sock.send(cmd.encode("utf-8"))
            _data = ""
            while True:
                _socket_data = _sock.recv(4096).decode("utf-8")
                _data += _socket_data
                if _data.strip().endswith("END") or _data.strip().startswith("SUCCESS:") or _data.strip().startswith("ERROR:"):
                    break
            return _data
        finally:
            if _sock:
                _sock.send("quit\n".encode("utf-8"))
                _sock.close()
                _sock = None
        return ""
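
    ## The OpenVPN management socket greets with ">INFO", accepts plain-text commands such as
    ## "load-stats", "state 1" and "status 2" (used below) and ends its replies with "END",
    ## "SUCCESS:" or "ERROR:"; the helper above simply reads until one of these markers.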

    def _get_traffic(self,modul,interface,totalbytesin,totalbytesout):
        _hist_data = self._get_storedata(modul,interface)
        _slot = int(time.time())
        _slot -= _slot % 60
        _hist_slot = 0
        _traffic_in = _traffic_out = 0
        if _hist_data:
            _hist_slot,_hist_bytesin,_hist_bytesout = _hist_data
            _traffic_in = int(totalbytesin - _hist_bytesin) / max(1,_slot - _hist_slot)
            _traffic_out = int(totalbytesout - _hist_bytesout) / max(1,_slot - _hist_slot)
        if _hist_slot != _slot:
            self._set_storedata(modul,interface,(_slot,totalbytesin,totalbytesout))
        return _traffic_in,_traffic_out
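
    ## Rate calculation: byte counters are stored per 60-second slot and the returned values
    ## are average bytes/s since the previously stored slot (0 on the first call), so repeated
    ## calls within the same minute keep the same baseline.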

    @staticmethod
    def _get_dpinger_gateway(gateway):
        _path = "/var/run/dpinger_{0}.sock".format(gateway)
        if os.path.exists(_path):
            _sock = socket.socket(socket.AF_UNIX,socket.SOCK_STREAM)
            try:
                _sock.connect(_path)
                _data = _sock.recv(1024).decode("utf-8").strip()
                _name,_rtt,_rttsd,_loss = re.findall(r"(\w+)\s(\d+)\s(\d+)\s(\d+)$",_data)[0]
                assert _name.strip() == gateway
                return int(_rtt) / 1_000_000.0, int(_rttsd) / 1_000_000.0, int(_loss)
            except:
                raise
        return -1,-1,-1
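
    ## The dpinger status socket returns a single line "<gateway> <latency> <latency stddev> <loss>";
    ## the latency values are scaled down by 1,000,000 here and loss is treated as a percentage
    ## by checklocal_gateway() below.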

    def checklocal_gateway(self):
        _ret = []
        _gateways = self._config_reader().get("gateways")
        if not _gateways:
            return []
        _gateway_items = _gateways.get("gateway_item",[])
        if type(_gateway_items) != list:
            _gateway_items = [_gateway_items] if _gateway_items else []
        _interfaces = self._config_reader().get("interfaces",{})
        _ipaddresses = self.get_opnsense_ipaddr()
        for _gateway in _gateway_items:
            if type(_gateway.get("descr")) != str:
                _gateway["descr"] = _gateway.get("name")
            if _gateway.get("monitor_disable") == "1" or _gateway.get("disabled") == "1":
                continue
            _interface = _interfaces.get(_gateway.get("interface"),{})
            _gateway["realinterface"] = _interface.get("if")
            if _gateway.get("ipprotocol") == "inet":
                _gateway["ipaddr"] = _ipaddresses.get(_interface.get("if"))
            else:
                _gateway["ipaddr"] = ""
            _gateway["rtt"],_gateway["rttsd"],_gateway["loss"] = self._get_dpinger_gateway(_gateway.get("name"))
            _gateway["status"] = 0
            if _gateway.get("loss") > 0 or _gateway.get("rtt") > 100:
                _gateway["status"] = 1
            if _gateway.get("loss") > 90 or _gateway.get("loss") == -1:
                _gateway["status"] = 2
            _ret.append("{status} \"Gateway {descr}\" rtt={rtt}|rttsd={rttsd}|loss={loss} Gateway on Interface: {realinterface} {gateway}".format(**_gateway))
        return _ret

    def checklocal_openvpn(self):
        _ret = []
        _cfr = self._config_reader().get("openvpn")
        if type(_cfr) != dict:
            return _ret
        _cso = _cfr.get("openvpn-csc")
        _monitored_clients = {}
        if type(_cso) == dict:
            _cso = [_cso]
        if type(_cso) == list:
            _monitored_clients = dict(map(lambda x: (x.get("common_name").upper(),dict(x,current=[])),_cso))

        _now = time.time()
        _vpnclient = _cfr.get("openvpn-client",[])
        _vpnserver = _cfr.get("openvpn-server",[])
        if type(_vpnserver) != list:
            _vpnserver = [_vpnserver] if _vpnserver else []
        if type(_vpnclient) != list:
            _vpnclient = [_vpnclient] if _vpnclient else []
        for _server in _vpnserver + _vpnclient:
            ## server_tls, p2p_shared_key, p2p_tls
            _server["name"] = _server.get("description").strip() if _server.get("description") else "OpenVPN_{protocol}_{local_port}".format(**_server)

            _caref = _server.get("caref")
            _server_cert = self._get_certificate(_server.get("certref"))
            _server["status"] = 3
            _server["expiredays"] = 0
            _server["expiredate"] = "no certificate found"
            if _server_cert:
                _notvalidafter = _server_cert.get("not_valid_after",0)
                _server["expiredays"] = int((_notvalidafter - _now) / 86400)
                _server["expiredate"] = time.strftime("Cert Expire: %d.%m.%Y",time.localtime(_notvalidafter))
                if _server["expiredays"] < 61:
                    _server["status"] = 2 if _server["expiredays"] < 31 else 1
                else:
                    _server["expiredate"] = "\\n" + _server["expiredate"]

            _server["type"] = "server" if _server.get("local_port") else "client"
            if _server.get("mode") in ("p2p_shared_key","p2p_tls"):
                _unix = "/var/etc/openvpn/{type}{vpnid}.sock".format(**_server)
                try:
                    _server["bytesin"],_server["bytesout"] = self._get_traffic("openvpn",
                        "SRV_{name}".format(**_server),
                        *(map(lambda x: int(x),re.findall(r"bytes\w+=(\d+)",self._read_from_openvpnsocket(_unix,"load-stats"))))
                    )
                    _laststate = self._read_from_openvpnsocket(_unix,"state 1").strip().split("\r\n")[-2]
                    _timestamp,_server["connstate"],_data = _laststate.split(",",2)
                    if _server["connstate"] == "CONNECTED":
                        _data = _data.split(",")
                        _server["vpn_ipaddr"] = _data[1]
                        _server["remote_ipaddr"] = _data[2]
                        _server["remote_port"] = _data[3]
                        _server["source_addr"] = _data[4]
                        _server["status"] = 0 if _server["status"] == 3 else _server["status"]
                        _ret.append('{status} "OpenVPN Connection: {name}" connections_ssl_vpn=1;;|if_in_octets={bytesin}|if_out_octets={bytesout}|expiredays={expiredays} Connected {remote_ipaddr}:{remote_port} {vpn_ipaddr} {expiredate}\\nSource IP: {source_addr}'.format(**_server))
                    else:
                        if _server["type"] == "client":
                            _server["status"] = 2
                            _ret.append('{status} "OpenVPN Connection: {name}" connections_ssl_vpn=0;;|if_in_octets={bytesin}|if_out_octets={bytesout}|expiredays={expiredays} {connstate} {expiredate}'.format(**_server))
                        else:
                            _server["status"] = 1 if _server["status"] != 2 else 2
                            _ret.append('{status} "OpenVPN Connection: {name}" connections_ssl_vpn=0;;|if_in_octets={bytesin}|if_out_octets={bytesout}|expiredays={expiredays} waiting on Port {local_port}/{protocol} {expiredate}'.format(**_server))
                except:
                    _ret.append('2 "OpenVPN Connection: {name}" connections_ssl_vpn=0;;|expiredays={expiredays}|if_in_octets=0|if_out_octets=0 Server down Port:/{protocol} {expiredate}'.format(**_server))
                    continue
            else:
                if not _server.get("maxclients"):
                    _max_clients = ipaddress.IPv4Network(_server.get("tunnel_network")).num_addresses - 2
                    if _server.get("topology_subnet") != "yes":
                        _max_clients = max(1,int(_max_clients / 4)) ## p2p
                    _server["maxclients"] = _max_clients
                try:
                    _unix = "/var/etc/openvpn/{type}{vpnid}.sock".format(**_server)
                    try:
                        _server["bytesin"],_server["bytesout"] = self._get_traffic("openvpn",
                            "SRV_{name}".format(**_server),
                            *(map(lambda x: int(x),re.findall(r"bytes\w+=(\d+)",self._read_from_openvpnsocket(_unix,"load-stats"))))
                        )
                        _server["status"] = 0 if _server["status"] == 3 else _server["status"]
                    except:
                        _server["bytesin"],_server["bytesout"] = 0,0
                        raise
                    _number_of_clients = 0
                    _now = int(time.time())
                    _response = self._read_from_openvpnsocket(_unix,"status 2")
                    for _client_match in re.finditer("^CLIENT_LIST,(.*?)$",_response,re.M):
                        _number_of_clients += 1
                        _client_raw = list(map(lambda x: x.strip(),_client_match.group(1).split(",")))
                        _client = {
                            "server"         : _server.get("name"),
                            "common_name"    : _client_raw[0],
                            "remote_ip"      : _client_raw[1].rsplit(":",1)[0], ## ipv6
                            "vpn_ip"         : _client_raw[2],
                            "vpn_ipv6"       : _client_raw[3],
                            "bytes_received" : int(_client_raw[4]),
                            "bytes_sent"     : int(_client_raw[5]),
                            "uptime"         : _now - int(_client_raw[7]),
                            "username"       : _client_raw[8] if _client_raw[8] != "UNDEF" else _client_raw[0],
                            "clientid"       : int(_client_raw[9]),
                            "cipher"         : _client_raw[11].strip("\r\n")
                        }
                        if _client["username"].upper() in _monitored_clients:
                            _monitored_clients[_client["username"].upper()]["current"].append(_client)
                    _server["clientcount"] = _number_of_clients
                    _ret.append('{status} "OpenVPN Server: {name}" connections_ssl_vpn={clientcount};;{maxclients}|if_in_octets={bytesin}|if_out_octets={bytesout}|expiredays={expiredays} {clientcount}/{maxclients} Connections Port: {local_port}/{protocol} {expiredate}'.format(**_server))
                except:
                    _ret.append('2 "OpenVPN Server: {name}" connections_ssl_vpn=0;;{maxclients}|expiredays={expiredays}|if_in_octets=0|if_out_octets=0 Server down Port: {local_port}/{protocol} {expiredate}'.format(**_server))

        for _client in _monitored_clients.values():
            _current_conn = _client.get("current",[])
            if _client.get("disable") == 1:
                continue
            if not _client.get("description"):
                _client["description"] = _client.get("common_name")
            _client["description"] = _client["description"].strip("\r\n")
            _client["expiredays"] = 0
            _client["expiredate"] = "no certificate found"
            _client["status"] = 3
            _cert = self._get_certificate_by_cn(_client.get("common_name"))
            if _cert:
                _notvalidafter = _cert.get("not_valid_after")
                _client["expiredays"] = int((_notvalidafter - _now) / 86400)
                _client["expiredate"] = time.strftime("Cert Expire: %d.%m.%Y",time.localtime(_notvalidafter))
                if _client["expiredays"] < 61:
                    _client["status"] = 2 if _client["expiredays"] < 31 else 1
                else:
                    _client["expiredate"] = "\\n" + _client["expiredate"]
            if _current_conn:
                _client["uptime"] = max(map(lambda x: x.get("uptime"),_current_conn))
                _client["count"] = len(_current_conn)
                _client["bytes_received"],_client["bytes_sent"] = self._get_traffic("openvpn",
                    "CL_{description}".format(**_client),
                    sum(map(lambda x: x.get("bytes_received"),_current_conn)),
                    sum(map(lambda x: x.get("bytes_sent"),_current_conn))
                )
                _client["status"] = 0 if _client["status"] == 3 else _client["status"]
                _client["longdescr"] = ""
                for _conn in _current_conn:
                    _client["longdescr"] += "Server: {server} {remote_ip}:{vpn_ip} {cipher} ".format(**_conn)
                _ret.append('{status} "OpenVPN Client: {description}" connectiontime={uptime}|connections_ssl_vpn={count}|if_in_octets={bytes_received}|if_out_octets={bytes_sent}|expiredays={expiredays} {longdescr} {expiredate}'.format(**_client))
            else:
                _ret.append('{status} "OpenVPN Client: {description}" connectiontime=0|connections_ssl_vpn=0|if_in_octets=0|if_out_octets=0|expiredays={expiredays} not connected {expiredate}'.format(**_client))
        return _ret

    def checklocal_ipsec(self):
        _ret = []
        _ipsec_config = self._config_reader().get("ipsec")
        if type(_ipsec_config) != dict:
            return []
        if _ipsec_config.get("enable") != "1":
            return []
        _phase1config = _ipsec_config.get("phase1")
        _phase2config = _ipsec_config.get("phase2")
        if type(_phase1config) != list:
            _phase1config = [_phase1config]
        if type(_phase2config) != list:
            _phase2config = [_phase2config]
        _json_data = self._run_prog("/usr/local/opnsense/scripts/ipsec/list_status.py")
        if len(_json_data.strip()) > 20:
            _json_data = json.loads(_json_data)
        else:
            _json_data = {}
        for _phase1 in _phase1config:
            _ikeid = _phase1.get("ikeid")
            _name = _phase1.get("descr")
            if len(_name.strip()) < 1:
                _name = _phase1.get("remote-gateway")
            _condata = _json_data.get(f"con{_ikeid}",{})
            _con = {
                "status"         : 2,
                "bytes-received" : 0,
                "bytes-sent"     : 0,
                "life-time"      : 0,
                "state"          : "unknown",
                "remote-host"    : "unknown",
                "remote-name"    : _name,
                "local-id"       : _condata.get("local-id"),
                "remote-id"      : _condata.get("remote-id")
            }
            _phase2_up = 0
            for _sas in _condata.get("sas",[]):
                _con["state"] = _sas.get("state")
                _con["local-id"] = _sas.get("local-id")
                _con["remote-id"] = _sas.get("remote-id")
                if _sas.get("state") != "ESTABLISHED":
                    continue
                _con["remote-host"] = _sas.get("remote-host")
                for _child in _sas.get("child-sas",{}).values():
                    if _child.get("state") != "INSTALLED":
                        continue
                    _phase2_up += 1
                    _install_time = max(1,int(_child.get("install-time","1")))
                    _con["bytes-received"] += int(int(_child.get("bytes-in","0")) / _install_time)
                    _con["bytes-sent"] += int(int(_child.get("bytes-out","0")) / _install_time)
                    _con["life-time"] = max(_con["life-time"],_install_time)
                    _con["status"] = 0 if _con["status"] != 1 else 1

            ## QuickHack #FIXME remote-id/local-id: translate type to ip, set and check if sas and config have the same count
            #_required_phase2 = len(list(filter(lambda x: x.get("ikeid") == _ikeid,_phase2config)))

            #if _phase2_up >= _required_phase2:
            if _phase2_up > 0:
                _ret.append("{status} \"IPsec Tunnel: {remote-name}\" if_in_octets={bytes-received}|if_out_octets={bytes-sent}|lifetime={life-time} {state} {local-id} - {remote-id}({remote-host})".format(**_con))
            elif _phase2_up == 0:
                if _condata.keys():
                    _ret.append("{status} \"IPsec Tunnel: {remote-name}\" if_in_octets=0|if_out_octets=0|lifetime=0 not connected {local-id} - {remote-id}({remote-host})".format(**_con))
                else:
                    _ret.append("{status} \"IPsec Tunnel: {remote-name}\" if_in_octets=0|if_out_octets=0|lifetime=0 not running".format(**_con))
            else:
                _con["status"] = max(_con["status"],1)
                _con["phase2"] = f"{_phase2_up}/{_required_phase2}"
                _ret.append("{status} \"IPsec Tunnel: {remote-name}\" if_in_octets={bytes-received}|if_out_octets={bytes-sent}|lifetime={life-time} {phase2} {state} {local-id} - {remote-id}({remote-host})".format(**_con))
        return _ret

    def checklocal_wireguard(self):
        _ret = []
        try:
            _clients = self._config_reader().get("OPNsense").get("wireguard").get("client").get("clients").get("client")
            if type(_clients) != list:
                _clients = [_clients] if _clients else []
            _clients = dict(map(lambda x: (x.get("pubkey"),x),_clients))
        except:
            return []
        _now = time.time()
        for _client in _clients.values(): ## fill defaults
            _client["interface"] = ""
            _client["endpoint"] = ""
            _client["last_handshake"] = 0
            _client["bytes_received"] = 0
            _client["bytes_sent"] = 0
            _client["status"] = 2
        _dump = self._run_prog(["wg","show","all","dump"]).strip()
        for _line in _dump.split("\n"):
            _values = _line.split("\t")
            if len(_values) != 9:
                continue
            _client = _clients.get(_values[1].strip())
            if not _client:
                continue
            _client["interface"] = _values[0].strip()
            _client["endpoint"] = _values[3].strip().rsplit(":",1)[0]
            _client["last_handshake"] = int(_values[5].strip())
            _client["bytes_received"],_client["bytes_sent"] = self._get_traffic("wireguard","",int(_values[6].strip()),int(_values[7].strip()))
            _client["status"] = 2 if _now - _client["last_handshake"] > 300 else 0 ## 5min timeout
        for _client in _clients.values():
            if _client.get("status") == 2 and _client.get("endpoint") != "":
                _client["endpoint"] = "last IP: " + _client["endpoint"]
            _ret.append('{status} "WireGuard Client: {name}" if_in_octets={bytes_received}|if_out_octets={bytes_sent} {interface}: {endpoint} - {tunneladdress}'.format(**_client))
        return _ret
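
    ## "wg show all dump" prints one tab-separated line per peer; the fields used above are
    ## interface (0), peer public key (1), endpoint (3), latest handshake (5) and the rx/tx
    ## transfer counters (6/7).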

    def checklocal_unbound(self):
        _ret = []
        try:
            _output = self._run_prog(["/usr/local/sbin/unbound-control","-c","/var/unbound/unbound.conf","stats_noreset"])
            _unbound_stat = dict(
                map(
                    lambda x: (x[0].replace(".","_"),float(x[1])),
                    re.findall(r"total\.([\w.]+)=([\d.]+)",_output)
                )
            )
            _ret.append("0 \"Unbound DNS\" dns_successes={num_queries:.0f}|dns_recursion={num_recursivereplies:.0f}|dns_cachehits={num_cachehits:.0f}|dns_cachemiss={num_cachemiss:.0f}|avg_response_time={recursion_time_avg} Unbound running".format(**_unbound_stat))
        except:
            _ret.append("2 \"Unbound DNS\" dns_successes=0|dns_recursion=0|dns_cachehits=0|dns_cachemiss=0|avg_response_time=0 Unbound not running")
        return _ret

    def checklocal_acmeclient(self):
        _ret = []
        _now = time.time()
        try:
            _acmecerts = self._config_reader().get("OPNsense").get("AcmeClient").get("certificates").get("certificate")
            if type(_acmecerts) == dict:
                _acmecerts = [_acmecerts]
        except:
            _acmecerts = []
        for _cert_info in _acmecerts:
            if _cert_info.get("enabled") != "1":
                continue
            if not _cert_info.get("description"):
                _cert_info["description"] = _cert_info.get("name","unknown")
            _certificate = self._get_certificate(_cert_info.get("certRefId"))
            _cert_info["status"] = 1
            if _certificate:
                if type(_certificate) != dict:
                    _certificate = {}
                _expiredays = _certificate.get("not_valid_after",_now) - _now
                _not_valid_before = _certificate.get("not_valid_before",_cert_info.get("lastUpdate"))
                _certificate_age = _now - int(_not_valid_before if _not_valid_before else _now)
                _cert_info["age"] = int(_certificate_age)
                if _cert_info.get("statusCode") == "200":
                    if _certificate_age > float(_cert_info.get("renewInterval","inf")):
                        _cert_info["status"] = 0
                    if _expiredays < 10:
                        _cert_info["status"] = 2
                _cert_info["issuer"] = _certificate.get("issuer")
                _cert_info["lastupdatedate"] = time.strftime("%d.%m.%Y",time.localtime(int(_cert_info.get("lastUpdate",0))))
                _cert_info["expiredate"] = time.strftime("%d.%m.%Y",time.localtime(_certificate.get("not_valid_after",0)))
                _ret.append("{status} \"ACME Cert: {description}\" age={age} Last Update: {lastupdatedate} Status: {statusCode} Cert expire: {expiredate}".format(**_cert_info))
            else:
                if _cert_info.get("statusCode") == "100":
                    _ret.append("1 \"ACME Cert: {description}\" age=0 Status: pending".format(**_cert_info))
                else:
                    _ret.append("2 \"ACME Cert: {description}\" age=0 Error Status: {statusCode}".format(**_cert_info))
        return _ret

    def _read_nginx_socket(self):
        session = requests.Session()
        session.mount("http://nginx/",NginxAdapter())
        response = session.get("http://nginx/vts")
        return response.json()
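
    ## Reads the JSON status of the nginx VTS (virtual host traffic status) endpoint through
    ## the local status socket mounted via NginxAdapter; the host name "nginx" is only a
    ## placeholder consumed by the adapter.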
    def checklocal_nginx(self):
        _ret = []
        _config = self._config_reader().get("OPNsense").get("Nginx")
        if type(_config) != dict:
            return []
        if _config.get("general", {}).get("enabled") != "1":
            return []
        try:
            _data = self._read_nginx_socket()
        except (requests.exceptions.ConnectionError, FileNotFoundError):
            _data = {}  ## no socket
        _uptime = _data.get("loadMsec", 0) / 1000
        if _uptime > 0:
            _starttime = datetime.fromtimestamp(_uptime).strftime("%d.%m.%Y %H:%M")
            _uptime = time.time() - _uptime
            _ret.append(f"0 \"Nginx Uptime\" uptime={_uptime} Up since {_starttime}")
        else:
            _ret.append("2 \"Nginx Uptime\" uptime=0 Down")
        _upstream_config = _config.get("upstream")
        _location_config = _config.get("location")
        if type(_upstream_config) != list:
            _upstream_config = [_upstream_config] if _upstream_config else []
        _upstream_config = dict(map(lambda x: (x.get("@uuid"), x), _upstream_config))
        if type(_location_config) != list:
            _location_config = [_location_config] if _location_config else []
        _upstream_data = _data.get("upstreamZones", {})
        for _location in _location_config:
            _upstream = _upstream_config.get(_location.get("upstream", "__"))
            _location["upstream_name"] = ""
            if _upstream:
                _location["upstream_name"] = _upstream.get("description")
                _uuid = "upstream{0}".format(_upstream.get("@uuid", "").replace("-", ""))
                _upstream_info = _upstream_data.get(_uuid)
                if not _upstream_info:
                    _ret.append("1 \"Nginx Location: {description}\" connections=0|if_in_octets=0|if_out_octets=0 Upstream: {upstream_name} no Data".format(**_location))
                    continue
            else:
                _ret.append("1 \"Nginx Location: {description}\" connections=0|if_in_octets=0|if_out_octets=0 No Upstream".format(**_location))
                continue
            _location["requestCounter"] = 0
            _location["inBytes"] = 0
            _location["outBytes"] = 0
            _isup = 0
            for _server in _upstream_info:
                if _server.get("down") == False:
                    _isup += 1
                for _key in ("requestCounter", "inBytes", "outBytes"):
                    _location[_key] += _server.get(_key, 0)
            if _isup > 0:
                _available_upstreams = len(_upstream_info)
                _location["available_upstream"] = "{0}/{1}".format(_isup, _available_upstreams)
                if _available_upstreams == _isup:
                    _ret.append("0 \"Nginx Location: {description}\" connections={requestCounter}|if_in_octets={inBytes}|if_out_octets={outBytes} Upstream: {upstream_name} OK".format(**_location))
                else:
                    _ret.append("1 \"Nginx Location: {description}\" connections={requestCounter}|if_in_octets={inBytes}|if_out_octets={outBytes} Upstream: {upstream_name} {available_upstream} OK".format(**_location))
            else:
                _ret.append("2 \"Nginx Location: {description}\" connections={requestCounter}|if_in_octets={inBytes}|if_out_octets={outBytes} Upstream: {upstream_name} down".format(**_location))
        return _ret

    def check_haproxy(self):
        _ret = ["<<<haproxy:sep(44)>>>"]
        _path = "/var/run/haproxy.socket"
        try:
            _haproxy_servers = dict(map(lambda x: (x.get("@uuid"), x), self._config_reader().get("OPNsense").get("HAProxy").get("servers").get("server")))
            _healthcheck_servers = []
            for _backend in self._config_reader().get("OPNsense").get("HAProxy").get("backends").get("backend"):
                if _backend.get("healthCheckEnabled") == "1" and _backend.get("healthCheck") != None:
                    for _server_id in _backend.get("linkedServers", "").split(","):
                        _server = _haproxy_servers.get(_server_id)
                        _healthcheck_servers.append("{0},{1}".format(_backend.get("name", ""), _server.get("name", "")))
        except:
            return []
        if os.path.exists(_path):
            _sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            _sock.connect(_path)
            _sock.send("show stat\n".encode("utf-8"))
            _data = ""
            while True:
                _sockdata = _sock.recv(4096)
                if not _sockdata:
                    break
                _data += _sockdata.decode("utf-8")
            for _line in _data.split("\n"):
                _linedata = _line.split(",")
                if len(_linedata) < 33:
                    continue
                #pprint(list(enumerate(_linedata)))
                if _linedata[32] == "2":
                    if "{0},{1}".format(*_linedata) not in _healthcheck_servers:
                        continue ## ignore backends check disabled
                _ret.append(_line)
        return _ret

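    ## HAProxy "show stat" returns one CSV record per frontend, backend and server. Field 32
    ## ("type") is assumed here to be "2" for server records; those are only forwarded when
    ## their backend has health checking enabled, while frontend/backend records are passed
    ## through unchanged to the server-side haproxy check.
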
    def check_smartinfo(self):
        if not os.path.exists("/usr/local/sbin/smartctl"):
            return []
        REGEX_DISCPATH = re.compile("(sd[a-z]+|da[0-9]+|nvme[0-9]+|ada[0-9]+)$")
        _ret = ["<<<disk_smart_info:sep(124)>>>"]
        for _dev in filter(lambda x: REGEX_DISCPATH.match(x), os.listdir("/dev/")):
            try:
                _ret.append(str(smart_disc(_dev)))
            except:
                pass
        return _ret

    def check_ipmi(self):
        if not os.path.exists("/usr/local/bin/ipmitool"):
            return []
        _ret = ["<<<ipmi:sep(124)>>>"]
        _out = self._run_prog("/usr/local/bin/ipmitool sensor list")
        _ret += re.findall("^(?!.*\sna\s.*$).*", _out, re.M)
        return _ret

    def check_apcupsd(self):
        if self._config_reader().get("OPNsense", {}).get("apcupsd", {}).get("general", {}).get("Enabled") != "1":
            return []
        _ret = ["<<<apcaccess:sep(58)>>>"]
        _ret.append("[[apcupsd.conf]]")
        _ret.append(self._run_prog("apcaccess").strip())
        return _ret

    def check_df(self):
        _ret = ["<<<df>>>"]
        _ret += self._run_prog("df -kTP -t ufs").split("\n")[1:]
        return _ret

    def check_ssh(self):
        if self._config_reader().get("system", {}).get("ssh", {}).get("enabled") != "enabled":
            return []
        _ret = ["<<<sshd_config>>>"]
        with open("/usr/local/etc/ssh/sshd_config", "r") as _f:
            for _line in _f.readlines():
                if re.search("^[a-zA-Z]", _line):
                    _ret.append(_line.replace("\n", ""))
        return _ret

    def check_kernel(self):
        _ret = ["<<<kernel>>>"]
        _out = self._run_prog("sysctl vm.stats", timeout=10)
        _kernel = dict([_v.split(": ") for _v in _out.split("\n") if len(_v.split(": ")) == 2])
        _ret.append("{0:.0f}".format(time.time()))
        _ret.append("cpu {0} {1} {2} {4} {3}".format(*(self._run_prog("sysctl -n kern.cp_time", "").split(" "))))
        _ret.append("ctxt {0}".format(_kernel.get("vm.stats.sys.v_swtch")))
        _sum = sum(map(lambda x: int(x[1]), (filter(lambda x: x[0] in ("vm.stats.vm.v_forks", "vm.stats.vm.v_vforks", "vm.stats.vm.v_rforks", "vm.stats.vm.v_kthreads"), _kernel.items()))))
        _ret.append("processes {0}".format(_sum))
        return _ret

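    ## check_kernel() rebuilds the Linux style <<<kernel>>> section from FreeBSD counters:
    ## context switches come from vm.stats.sys.v_swtch, the process counter sums the
    ## fork/vfork/rfork/kthread counters, and kern.cp_time (user nice sys intr idle) is
    ## reordered as "{0} {1} {2} {4} {3}" so the idle tick count lands in the fourth column,
    ## matching the /proc/stat ordering the server-side check expects.
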
    def check_temperature(self):
        _ret = ["<<<lnx_thermal:sep(124)>>>"]
        _out = self._run_prog("sysctl dev.cpu", timeout=10)
        _cpus = dict([_v.split(": ") for _v in _out.split("\n") if len(_v.split(": ")) == 2])
        _cpu_temperatures = list(map(
            lambda x: float(x[1].replace("C", "")),
            filter(
                lambda x: x[0].endswith("temperature"),
                _cpus.items()
            )
        ))
        if _cpu_temperatures:
            _cpu_temperature = int(max(_cpu_temperatures) * 1000)
            _ret.append(f"CPU|enabled|unknown|{_cpu_temperature}")
        _count = 0
        for _tempsensor in self._available_sysctl_temperature_list:
            _out = self._run_prog(f"sysctl -n {_tempsensor}", timeout=10)
            if _out:
                try:
                    _zone_temp = int(float(_out.replace("C", "")) * 1000)
                except ValueError:
                    _zone_temp = None
                if _zone_temp:
                    if _tempsensor.find(".pchtherm.") > -1:
                        _ret.append(f"thermal_zone{_count}|enabled|unknown|{_zone_temp}|111000|critical|108000|passive")
                    else:
                        _ret.append(f"thermal_zone{_count}|enabled|unknown|{_zone_temp}")
                    _count += 1
        if len(_ret) < 2:
            return []
        return _ret

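    ## The lnx_thermal section uses "|" (sep 124) separated fields:
    ##   <zone name>|<mode>|<type>|<temperature in milli-degC>[|<trip point temp>|<trip point type> ...]
    ## so a pch sensor reading of 45.0C would be emitted as (illustrative):
    ##   thermal_zone0|enabled|unknown|45000|111000|critical|108000|passive
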
    def check_mem(self):
        _ret = ["<<<statgrab_mem>>>"]
        _pagesize = int(self._run_prog("sysctl -n hw.pagesize"))
        _out = self._run_prog("sysctl vm.stats", timeout=10)
        _mem = dict(map(lambda x: (x[0], int(x[1])), [_v.split(": ") for _v in _out.split("\n") if len(_v.split(": ")) == 2]))
        _mem_cache = _mem.get("vm.stats.vm.v_cache_count") * _pagesize
        _mem_free = _mem.get("vm.stats.vm.v_free_count") * _pagesize
        _mem_inactive = _mem.get("vm.stats.vm.v_inactive_count") * _pagesize
        _mem_total = _mem.get("vm.stats.vm.v_page_count") * _pagesize
        _mem_avail = _mem_inactive + _mem_cache + _mem_free
        _mem_used = _mem_total - _mem_avail # fixme mem.hw
        _ret.append("mem.cache {0}".format(_mem_cache))
        _ret.append("mem.free {0}".format(_mem_free))
        _ret.append("mem.total {0}".format(_mem_total))
        _ret.append("mem.used {0}".format(_mem_used))
        _ret.append("swap.free 0")
        _ret.append("swap.total 0")
        _ret.append("swap.used 0")
        return _ret

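    ## The vm.stats values are page counts, so each one is multiplied by hw.pagesize to get bytes.
    ## Example: with a 4096 byte page size and vm.stats.vm.v_free_count=262144 the reported
    ## mem.free is 262144 * 4096 = 1073741824 bytes (1 GiB). Swap is always reported as 0 because
    ## this agent does not collect swap usage.
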
    def check_zpool(self):
        _ret = ["<<<zpool_status>>>"]
        try:
            for _line in self._run_prog("zpool status -x").split("\n"):
                if _line.find("errors: No known data errors") == -1:
                    _ret.append(_line)
        except:
            return []
        return _ret

    def check_zfs(self):
        _ret = ["<<<zfsget>>>"]
        _ret.append(self._run_prog("zfs get -t filesystem,volume -Hp name,quota,used,avail,mountpoint,type"))
        _ret.append("[df]")
        _ret.append(self._run_prog("df -kP -t zfs"))
        _ret.append("<<<zfs_arc_cache>>>")
        _ret.append(self._run_prog("sysctl -q kstat.zfs.misc.arcstats").replace("kstat.zfs.misc.arcstats.", "").replace(": ", " = ").strip())
        return _ret

    def check_mounts(self):
        _ret = ["<<<mounts>>>"]
        _ret.append(self._run_prog("mount -p -t ufs").strip())
        return _ret

    def check_cpu(self):
        _ret = ["<<<cpu>>>"]
        _loadavg = self._run_prog("sysctl -n vm.loadavg").strip("{} \n")
        _proc = self._run_prog("top -b -n 1").split("\n")[1].split(" ")
        _proc = "{0}/{1}".format(_proc[3], _proc[0])
        _lastpid = self._run_prog("sysctl -n kern.lastpid").strip("\n")
        _ncpu = self._run_prog("sysctl -n hw.ncpu").strip("\n")
        _ret.append(f"{_loadavg} {_proc} {_lastpid} {_ncpu}")
        return _ret

    def check_netctr(self):
        _ret = ["<<<netctr>>>"]
        _out = self._run_prog("netstat -inb")
        for _line in re.finditer("^(?!Name|lo|plip)(?P<iface>\w+)\s+(?P<mtu>\d+).*?Link.*?\s+.*?\s+(?P<inpkts>\d+)\s+(?P<inerr>\d+)\s+(?P<indrop>\d+)\s+(?P<inbytes>\d+)\s+(?P<outpkts>\d+)\s+(?P<outerr>\d+)\s+(?P<outbytes>\d+)\s+(?P<coll>\d+)$", _out, re.M):
            _ret.append("{iface} {inbytes} {inpkts} {inerr} {indrop} 0 0 0 0 {outbytes} {outpkts} {outerr} 0 0 0 0 0".format(**_line.groupdict()))
        return _ret

    def check_ntp(self):
        _ret = ["<<<ntp>>>"]
        for _line in self._run_prog("ntpq -np", timeout=30).split("\n")[2:]:
            if _line.strip():
                _ret.append("{0} {1}".format(_line[0], _line[1:]))
        return _ret

    def check_tcp(self):
        _ret = ["<<<tcp_conn_stats>>>"]
        _out = self._run_prog("netstat -na")
        counts = Counter(re.findall("ESTABLISHED|LISTEN", _out))
        for _key, _val in counts.items():
            _ret.append(f"{_key} {_val}")
        return _ret

    def check_ps(self):
        _ret = ["<<<ps>>>"]
        _out = self._run_prog("ps ax -o state,user,vsz,rss,pcpu,command")
        for _line in re.finditer("^(?P<stat>\w+)\s+(?P<user>\w+)\s+(?P<vsz>\d+)\s+(?P<rss>\d+)\s+(?P<cpu>[\d.]+)\s+(?P<command>.*)$", _out, re.M):
            _ret.append("({user},{vsz},{rss},{cpu}) {command}".format(**_line.groupdict()))
        return _ret

    def check_uptime(self):
        _ret = ["<<<uptime>>>"]
        _uptime_sec = time.time() - int(self._run_prog("sysctl -n kern.boottime").split(" ")[3].strip(","))
        _idle_sec = re.findall("(\d+):[\d.]+\s+\[idle\]", self._run_prog("ps axw"))[0]
        _ret.append(f"{_uptime_sec} {_idle_sec}")
        return _ret

    def _run_prog(self, cmdline="", *args, shell=False, timeout=60, ignore_error=False):
        if type(cmdline) == str:
            _process = shlex.split(cmdline, posix=True)
        else:
            _process = cmdline
        try:
            return subprocess.check_output(_process, encoding="utf-8", shell=shell, stderr=subprocess.DEVNULL, timeout=timeout)
        except subprocess.CalledProcessError as e:
            if ignore_error:
                return e.stdout
            return ""
        except subprocess.TimeoutExpired:
            return ""

    def _run_cache_prog(self, cmdline="", cachetime=10, *args, shell=False, ignore_error=False):
        if type(cmdline) == str:
            _process = shlex.split(cmdline, posix=True)
        else:
            _process = cmdline
        _process_id = "".join(_process)
        _runner = self._check_cache.get(_process_id)
        if _runner == None:
            _runner = checkmk_cached_process(_process, shell=shell, ignore_error=ignore_error)
            self._check_cache[_process_id] = _runner
        return _runner.get(cachetime)

class checkmk_cached_process(object):
    def __init__(self, process, shell=False, ignore_error=False):
        self._processs = process
        self._islocal = os.path.dirname(process[0]).startswith(LOCALDIR)
        self._shell = shell
        self._ignore_error = ignore_error
        self._mutex = threading.Lock()
        with self._mutex:
            self._data = (0, "")
            self._thread = None

    def _runner(self, timeout):
        try:
            _data = subprocess.check_output(self._processs, shell=self._shell, encoding="utf-8", stderr=subprocess.DEVNULL, timeout=timeout)
        except subprocess.CalledProcessError as e:
            if self._ignore_error:
                _data = e.stdout
            else:
                _data = ""
        except subprocess.TimeoutExpired:
            _data = ""
        with self._mutex:
            self._data = (int(time.time()), _data)
            self._thread = None

    def get(self, cachetime):
        with self._mutex:
            _now = time.time()
            _mtime = self._data[0]
        if _now - _mtime > cachetime or cachetime == 0:
            if not self._thread:
                if cachetime > 0:
                    _timeout = cachetime * 2 - 1
                else:
                    _timeout = None
                with self._mutex:
                    self._thread = threading.Thread(target=self._runner, args=[_timeout])
                    self._thread.start()
            self._thread.join(30) ## waitmax
        with self._mutex:
            _mtime, _data = self._data
        if not _data.strip():
            return ""
        if self._islocal:
            _data = "".join([f"cached({_mtime},{cachetime}) {_line}" for _line in _data.splitlines(True) if len(_line.strip()) > 0])
        else:
            _data = re.sub("\B[<]{3}(.*?)[>]{3}\B", f"<<<\\1:cached({_mtime},{cachetime})>>>", _data)
        return _data

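## checkmk_cached_process keeps the last output of a command together with its timestamp and
## refreshes it in a background thread once the data is older than <cachetime> seconds.
## Local check lines are prefixed with "cached(<timestamp>,<cachetime>) ...", plugin sections are
## rewritten to "<<<name:cached(<timestamp>,<cachetime>)>>>", which is the annotation Checkmk
## uses to display the age of cached agent data.
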
class checkmk_server(TCPServer, checkmk_checker):
    def __init__(self, port, pidfile, onlyfrom=None, encrypt=None, skipcheck=None, **kwargs):
        self.pidfile = pidfile
        self.onlyfrom = onlyfrom.split(",") if onlyfrom else None
        self.skipcheck = skipcheck.split(",") if skipcheck else []
        self._available_sysctl_list = self._run_prog("sysctl -aN").split()
        self._available_sysctl_temperature_list = list(filter(lambda x: x.lower().find("temperature") > -1 and x.lower().find("cpu") == -1, self._available_sysctl_list))
        self.encrypt = encrypt
        self._mutex = threading.Lock()
        self.user = pwd.getpwnam("root")
        self.allow_reuse_address = True
        TCPServer.__init__(self, ("", port), checkmk_handler, bind_and_activate=False)

    def verify_request(self, request, client_address):
        if self.onlyfrom and client_address[0] not in self.onlyfrom:
            log("Client {0} not allowed".format(*client_address), "warn")
            return False
        return True

    def _change_user(self):
        _, _, _uid, _gid, _, _, _ = self.user
        if os.getuid() != _uid:
            os.setgid(_gid)
            os.setuid(_uid)

    def server_start(self):
        log("starting checkmk_agent")
        signal.signal(signal.SIGTERM, self._signal_handler)
        signal.signal(signal.SIGINT, self._signal_handler)
        signal.signal(signal.SIGHUP, self._signal_handler)
        self._change_user()
        try:
            self.server_bind()
            self.server_activate()
        except:
            self.server_close()
            raise
        try:
            self.serve_forever()
        except KeyboardInterrupt:
            sys.stdout.flush()
            sys.stdout.write("\n")

    def _signal_handler(self, signum, *args):
        if signum in (signal.SIGTERM, signal.SIGINT):
            log("stopping checkmk_agent")
            threading.Thread(target=self.shutdown, name='shutdown').start()
            sys.exit(0)

    def daemonize(self):
        try:
            pid = os.fork()
            if pid > 0:
                ## first parent
                sys.exit(0)
        except OSError as e:
            sys.stderr.write("Fork failed\n")
            sys.stderr.flush()
            sys.exit(1)
        os.chdir("/")
        os.setsid()
        os.umask(0)
        try:
            pid = os.fork()
            if pid > 0:
                ## second
                sys.exit(0)
        except OSError as e:
            sys.stderr.write("Fork 2 failed\n")
            sys.stderr.flush()
            sys.exit(1)
        sys.stdout.flush()
        sys.stderr.flush()
        self._redirect_stream(sys.stdin, None)
        self._redirect_stream(sys.stdout, None)
        self._redirect_stream(sys.stderr, None)
        with open(self.pidfile, "wt") as _pidfile:
            _pidfile.write(str(os.getpid()))
        os.chown(self.pidfile, self.user[2], self.user[3])
        try:
            self.server_start()
        finally:
            try:
                os.remove(self.pidfile)
            except:
                pass

    @staticmethod
    def _redirect_stream(system_stream, target_stream):
        if target_stream is None:
            target_fd = os.open(os.devnull, os.O_RDWR)
        else:
            target_fd = target_stream.fileno()
        os.dup2(target_fd, system_stream.fileno())

    def __del__(self):
        pass ## todo

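## Hypothetical standalone use of the server class (normally it is driven by the argument
## parser below):
##   _srv = checkmk_server(6556, "/var/run/checkmk_agent.pid", onlyfrom="192.168.1.10")
##   _srv.daemonize()       ## double fork, detach from the terminal, write the pidfile
##   ## or _srv.server_start() to stay in the foreground
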
REGEX_SMART_VENDOR = re.compile(r"^\s*(?P<num>\d+)\s(?P<name>[-\w]+).*\s{2,}(?P<value>[\w\/() ]+)$", re.M)
REGEX_SMART_DICT = re.compile(r"^(.*?):\s*(.*?)$", re.M)

class smart_disc(object):
    def __init__(self, device):
        self.device = device
        MAPPING = {
            "Model Family": ("model_family", lambda x: x),
            "Model Number": ("model_family", lambda x: x),
            "Product": ("model_family", lambda x: x),
            "Vendor": ("vendor", lambda x: x),
            "Revision": ("revision", lambda x: x),
            "Device Model": ("model_type", lambda x: x),
            "Serial Number": ("serial_number", lambda x: x),
            "Serial number": ("serial_number", lambda x: x),
            "Firmware Version": ("firmware_version", lambda x: x),
            "User Capacity": ("capacity", lambda x: x.split(" ")[0].replace(",", "")),
            "Total NVM Capacity": ("capacity", lambda x: x.split(" ")[0].replace(",", "")),
            "Rotation Rate": ("rpm", lambda x: x.replace(" rpm", "")),
            "Form Factor": ("formfactor", lambda x: x),
            "SATA Version is": ("transport", lambda x: x.split(",")[0]),
            "Transport protocol": ("transport", lambda x: x),
            "SMART support is": ("smart", lambda x: int(x.lower() == "enabled")),
            "Critical Warning": ("critical", lambda x: self._saveint(x, base=16)),
            "Temperature": ("temperature", lambda x: x.split(" ")[0]),
            "Data Units Read": ("data_read_bytes", lambda x: x.split(" ")[0].replace(",", "")),
            "Data Units Written": ("data_write_bytes", lambda x: x.split(" ")[0].replace(",", "")),
            "Power On Hours": ("poweronhours", lambda x: x.replace(",", "")),
            "Power Cycles": ("powercycles", lambda x: x.replace(",", "")),
            "NVMe Version": ("transport", lambda x: f"NVMe {x}"),
            "Raw_Read_Error_Rate": ("error_rate", lambda x: x.split(" ")[-1].replace(",", "")),
            "Reallocated_Sector_Ct": ("reallocate", lambda x: x.replace(",", "")),
            "Seek_Error_Rate": ("seek_error_rate", lambda x: x.split(" ")[-1].replace(",", "")),
            "Power_Cycle_Count": ("powercycles", lambda x: x.replace(",", "")),
            "Temperature_Celsius": ("temperature", lambda x: x.split(" ")[0]),
            "Temperature_Internal": ("temperature", lambda x: x.split(" ")[0]),
            "Drive_Temperature": ("temperature", lambda x: x.split(" ")[0]),
            "UDMA_CRC_Error_Count": ("udma_error", lambda x: x.replace(",", "")),
            "Offline_Uncorrectable": ("uncorrectable", lambda x: x.replace(",", "")),
            "Power_On_Hours": ("poweronhours", lambda x: x.replace(",", "")),
            "Spin_Retry_Count": ("spinretry", lambda x: x.replace(",", "")),
            "Current_Pending_Sector": ("pendingsector", lambda x: x.replace(",", "")),
            "Current Drive Temperature": ("temperature", lambda x: x.split(" ")[0]),
            "Reallocated_Event_Count": ("reallocate_ev", lambda x: x.split(" ")[0]),
            "Warning Comp. Temp. Threshold": ("temperature_warn", lambda x: x.split(" ")[0]),
            "Critical Comp. Temp. Threshold": ("temperature_crit", lambda x: x.split(" ")[0]),
            "Media and Data Integrity Errors": ("media_errors", lambda x: x),
            "Airflow_Temperature_Cel": ("temperature", lambda x: x),
            "number of hours powered up": ("poweronhours", lambda x: x.split(".")[0]),
            "Accumulated start-stop cycles": ("powercycles", lambda x: x),
            "SMART overall-health self-assessment test result": ("smart_status", lambda x: int(x.lower().strip() == "passed")),
            "SMART Health Status": ("smart_status", lambda x: int(x.lower() == "ok")),
        }
        self._get_data()
        for _key, _value in REGEX_SMART_DICT.findall(self._smartctl_output):
            if _key in MAPPING.keys():
                _map = MAPPING[_key]
                setattr(self, _map[0], _map[1](_value))
        for _vendor_num, _vendor_text, _value in REGEX_SMART_VENDOR.findall(self._smartctl_output):
            if _vendor_text in MAPPING.keys():
                _map = MAPPING[_vendor_text]
                setattr(self, _map[0], _map[1](_value))

    def _saveint(self, val, base=10):
        try:
            return int(val, base)
        except (TypeError, ValueError):
            return 0

    def _get_data(self):
        try:
            self._smartctl_output = subprocess.check_output(["smartctl", "-a", "-n", "standby", f"/dev/{self.device}"], encoding=sys.stdout.encoding, timeout=10)
        except subprocess.CalledProcessError as e:
            if e.returncode & 0x1:
                raise
            _status = ""
            self._smartctl_output = e.output
            if e.returncode & 0x2:
                _status = "SMART Health Status: CRC Error"
            if e.returncode & 0x4:
                _status = "SMART Health Status: PREFAIL"
            if e.returncode & 0x3:
                _status = "SMART Health Status: DISK FAILING"
            self._smartctl_output += f"\n{_status}\n"
        except subprocess.TimeoutExpired:
            self._smartctl_output += "\nSMART smartctl Timeout\n"

    def __str__(self):
        _ret = []
        if getattr(self, "transport", "").lower() == "iscsi": ## ignore ISCSI
            return ""
        if not getattr(self, "model_type", None):
            self.model_type = getattr(self, "model_family", "unknown")
        for _k, _v in self.__dict__.items():
            if _k.startswith("_") or _k in ("device",):
                continue
            _ret.append(f"{self.device}|{_k}|{_v}")
        return "\n".join(_ret)

if __name__ == "__main__":
    import argparse

    class SmartFormatter(argparse.HelpFormatter):
        def _split_lines(self, text, width):
            if text.startswith('R|'):
                return text[2:].splitlines()
            # this is the RawTextHelpFormatter._split_lines
            return argparse.HelpFormatter._split_lines(self, text, width)

    _checks_available = sorted(list(map(lambda x: x.split("_")[1], filter(lambda x: x.startswith("check_") or x.startswith("checklocal_"), dir(checkmk_checker)))))
    _ = lambda x: x
    _parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=SmartFormatter
    )
    _parser.add_argument("--help", action="store_true",
        help=_("show help message"))
    _parser.add_argument("--start", action="store_true",
        help=_("start the daemon"))
    _parser.add_argument("--stop", action="store_true",
        help=_("stop the daemon"))
    _parser.add_argument("--status", action="store_true",
        help=_("show daemon status"))
    _parser.add_argument("--nodaemon", action="store_true",
        help=_("run in foreground"))
    _parser.add_argument("--update", nargs="?", const="main", type=str, choices=["main", "testing"],
        help=_("check for update"))
    _parser.add_argument("--config", type=str, dest="configfile", default=CHECKMK_CONFIG,
        help=_("path to config file"))
    _parser.add_argument("--port", type=int, default=6556,
        help=_("port checkmk_agent listens on"))
    _parser.add_argument("--encrypt", type=str, dest="encrypt",
        help=_("encryption password (do not use from cmdline)"))
    _parser.add_argument("--pidfile", type=str, default="/var/run/checkmk_agent.pid",
        help=_("path to pid file"))
    _parser.add_argument("--onlyfrom", type=str,
        help=_("comma separated ip addresses to allow"))
    _parser.add_argument("--skipcheck", type=str,
        help=_("R|comma separated checks that will be skipped\n{0}".format("\n".join([",".join(_checks_available[i:i+10]) for i in range(0, len(_checks_available), 10)]))))
    _parser.add_argument("--zabbix", action="store_true",
        help=_("only output local checks as json for zabbix parsing"))
    _parser.add_argument("--debug", action="store_true",
        help=_("debug output"))

    def _args_error(message):
        print("#" * 35)
        print("checkmk_agent for opnsense")
        print(f"Version: {__VERSION__}")
        print("#" * 35)
        print(message)
        print("")
        print("use --help or -h for help")
        sys.exit(1)
    _parser.error = _args_error

    args = _parser.parse_args()

    if args.configfile and os.path.exists(args.configfile):
        for _k, _v in re.findall(f"^(\w+):\s*(.*?)(?:\s+#|$)", open(args.configfile, "rt").read(), re.M):
            if _k == "port":
                args.port = int(_v)
            if _k == "encrypt":
                args.encrypt = _v
            if _k == "onlyfrom":
                args.onlyfrom = _v
            if _k == "skipcheck":
                args.skipcheck = _v
            if _k.lower() == "localdir":
                LOCALDIR = _v
            if _k.lower() == "plugindir":
                PLUGINSDIR = _v
            if _k.lower() == "spooldir":
                SPOOLDIR = _v

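    ## The config file is parsed line by line as "<option>: <value>" (anything after " #" on a
    ## line is ignored). A minimal example config could look like:
    ##   port: 6556
    ##   onlyfrom: 192.168.10.10,192.168.10.11
    ##   skipcheck: ipmi,apcupsd
    ##   encrypt: mysecretpassword
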
    _server = checkmk_server(**args.__dict__)
    _pid = 0
    try:
        with open(args.pidfile, "rt") as _pidfile:
            _pid = int(_pidfile.read())
    except (FileNotFoundError, IOError, ValueError):
        _out = subprocess.check_output(["sockstat", "-l", "-p", str(args.port), "-P", "tcp"], encoding=sys.stdout.encoding)
        try:
            _pid = int(re.findall("\s(\d+)\s", _out.split("\n")[1])[0])
        except (IndexError, ValueError):
            pass

    _active_methods = [getattr(args, x, False) for x in ("start", "stop", "status", "zabbix", "nodaemon", "debug", "update", "help")]
    if SYSHOOK_METHOD and any(_active_methods) == False:
        log(f"using syshook {SYSHOOK_METHOD[0]}")
        setattr(args, SYSHOOK_METHOD[0], True)

    if args.start:
        if _pid > 0:
            try:
                os.kill(_pid, 0)
                sys.stderr.write(f"already running with pid {_pid}\n")
                sys.stderr.flush()
                sys.exit(1)
            except OSError:
                pass
        _server.daemonize()

    elif args.status:
        if _pid <= 0:
            print("not running")
        else:
            try:
                os.kill(_pid, 0)
                print("running")
            except OSError:
                print("not running")

    elif args.stop:
        if _pid == 0:
            sys.stderr.write("not running\n")
            sys.stderr.flush()
            sys.exit(1)
        try:
            os.kill(_pid, signal.SIGTERM)
        except ProcessLookupError:
            if os.path.exists(args.pidfile):
                os.remove(args.pidfile)

    elif args.debug:
        sys.stdout.write(_server.do_checks(debug=True).decode(sys.stdout.encoding))
        sys.stdout.flush()

    elif args.zabbix:
        sys.stdout.write(_server.do_zabbix_output())
        sys.stdout.flush()

    elif args.nodaemon:
        _server.server_start()

    elif args.update:
        import hashlib
        import difflib
        from pkg_resources import parse_version
        _github_req = requests.get(f"https://api.github.com/repos/bashclub/check-opnsense/contents/opnsense_checkmk_agent.py?ref={args.update}")
        if _github_req.status_code != 200:
            raise Exception("Github Error")
        _github_version = _github_req.json()
        _github_last_modified = datetime.strptime(_github_req.headers.get("last-modified"), "%a, %d %b %Y %X %Z")
        _new_script = base64.b64decode(_github_version.get("content")).decode("utf-8")
        _new_version = re.findall("^__VERSION__.*?\"([0-9.]*)\"", _new_script, re.M)
        _new_version = _new_version[0] if _new_version else "0.0.0"
        _script_location = os.path.realpath(__file__)
        _current_last_modified = datetime.fromtimestamp(int(os.path.getmtime(_script_location)))
        with open(_script_location, "rb") as _f:
            _content = _f.read()
        _current_sha = hashlib.sha1(f"blob {len(_content)}\0".encode("utf-8") + _content).hexdigest()
        _content = _content.decode("utf-8")
        if _current_sha == _github_version.get("sha"):
            print(f"already up to date {_current_sha}")
            sys.exit(0)
        else:
            _version = parse_version(__VERSION__)
            _nversion = parse_version(_new_version)
            if _version == _nversion:
                print("same version but checksums mismatch")
            elif _version > _nversion:
                print(f"ATTENTION: Downgrade from {__VERSION__} to {_new_version}")
        while True:
            try:
                _answer = input(f"Update {_script_location} to {_new_version} (y/n) or show difference (d)? ")
            except KeyboardInterrupt:
                print("")
                sys.exit(0)
            if _answer in ("Y", "y", "yes", "j", "J"):
                with open(_script_location, "wb") as _f:
                    _f.write(_new_script.encode("utf-8"))
                print(f"updated to version {_new_version}")
                if _pid > 0:
                    try:
                        os.kill(_pid, 0)
                        try:
                            _answer = input(f"Daemon is running (pid: {_pid}), reload and restart (Y/N)? ")
                        except KeyboardInterrupt:
                            print("")
                            sys.exit(0)
                        if _answer in ("Y", "y", "yes", "j", "J"):
                            print("stopping Daemon")
                            os.kill(_pid, signal.SIGTERM)
                            print("waiting")
                            time.sleep(5)
                            print("restart")
                            os.system(f"{_script_location} --start")
                            sys.exit(0)
                    except OSError:
                        pass
                break
            elif _answer in ("D", "d"):
                for _line in difflib.unified_diff(_content.split("\n"),
                        _new_script.split("\n"),
                        fromfile=f"Version: {__VERSION__}",
                        fromfiledate=_current_last_modified.isoformat(),
                        tofile=f"Version: {_new_version}",
                        tofiledate=_github_last_modified.isoformat(),
                        n=1,
                        lineterm=""):
                    print(_line)
            else:
                break

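    ## Note on the update check above: the local SHA is calculated the way git hashes blobs,
    ## sha1(b"blob <size>\0" + content), so it can be compared directly with the "sha" field
    ## returned by the GitHub contents API.
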
    elif args.help:
        print("#" * 35)
        print("checkmk_agent for opnsense")
        print(f"Version: {__VERSION__}")
        print("#" * 35)
        print("")
        print("Latest version at https://github.com/bashclub/check-opnsense")
        print("Questions at https://forum.opnsense.org/index.php?topic=26594.0\n")
        print("Server-side implementation for")
        print("-" * 35)
        print("\t* smartdisk - install the mkp from https://github.com/bashclub/checkmk-smart plugins os-smart")
        print("\t* squid - install the mkp from https://exchange.checkmk.com/p/squid and forwarder -> listen on loopback active\n")
        _parser.print_help()
        print("\n")
        print(f"The CHECKMK_BASEDIR is under {BASEDIR} (local,plugin,spool).")
        print(f"Default config file location is {args.configfile}, create it if it doesn't exist.")
        print("Config file options (port, encrypt, onlyfrom, skipcheck) are set one per line as 'option: value', matching the command line options\n")
        print("active config:")
        print("-" * 35)
        for _opt in ("port", "encrypt", "onlyfrom", "skipcheck"):
            _val = getattr(args, _opt, None)
            if _val:
                print(f"{_opt}: {_val}")
        print("")

    else:
        log("no arguments")
        print("#" * 35)
        print("checkmk_agent for opnsense")
        print(f"Version: {__VERSION__}")
        print("#" * 35)
        print("use --help or -h for help")