'show-model': False,
},
'sshd': {
- 'resolve-domains': ''
+ 'sshd-resolve-domains': ''
},
'smbd': {
- 'resolve-domains': ''
+ 'smbd-resolve-domains': ''
},
'httpd': {
- 'resolve-domains': ''
+ 'httpd-resolve-domains': ''
},
'du': {
'paths': ['/', '/etc', '/home'],
import logparse
import logparse.config
from logparse.config import prefs, loadconf
-from logparse import formatting, mail, config
-from .parsers import load_parsers
+from logparse import formatting, mail, config, load_parsers
global argparser
# Find parsers
- parser_providers = []
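+    # Discover every available parser, then narrow the selection below based
+    # on the requested and ignored parser names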
+ loader = load_parsers.ParserLoader("logparse.parsers")
+ parser_names = set([x.name for x in loader.parsers])
+
if argparser.parse_args().logs:
- log_src = argparser.parse_args().logs.split()
+ parser_names = parser_names.intersection(set(argparser.parse_args().logs.split()))
elif config.prefs.get("logparse", "parsers"):
- log_src = config.prefs.get("logparse", "parsers").split()
- else:
- log_src = load_parsers.default_parsers
-
- for parser_name in log_src:
- parser = load_parsers.search(parser_name)
- if parser == None:
- logger.warning("Can't find parser {0}".format(parser_name))
- continue
- else:
- parser_providers.append(load_parsers.load(parser))
+ parser_names = parser_names.intersection(set(config.prefs.get("logparse", "parsers").split()))
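+
+    # Remove from the selection any parsers ignored via the command line or config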
if argparser.parse_args().ignore_logs:
- ignore_src = argparser.parse_args().ignore_logs.split()
+ parser_names = parser_names.difference(set(argparser.parse_args().ignore_logs.split()))
elif config.prefs.get("logparse", "ignore-parsers"):
- ignore_src = config.prefs.get("logparse", "ignore-parsers").split()
- else:
- ignore_src = []
- if len(ignore_src) > 0:
- for parser_name in ignore_src:
- if parser_name in [x.__name__.rpartition('.')[2] for x in parser_providers]:
- logger.info("Ignoring default parser {0}".format(parser_name))
- parser_providers_new = []
- for p in parser_providers:
- if p.__name__.rpartition('.')[2] != parser_name:
- parser_providers_new.append(p)
- parser_providers = parser_providers_new
- continue
+ parser_names = parser_names.difference(set(config.prefs.get("logparse", "ignore-parsers").split()))
# Execute parsers
- logger.debug(str(parser_providers))
- for parser in parser_providers:
- output.append_section(parser.parse_log())
+    logger.debug("Queued the following parsers: " + str(parser_names))
+ for parser in loader.parsers:
+ if parser.name in parser_names:
+ output.append_section(parser.parse_log())
# Write HTML footer
output.append_footer()
--- /dev/null
+#
+# load_parsers.py
+#
+# Search for and load files which parse logs for particular services
+#
+
+import imp
+import importlib
+import os
+import glob
+import pkgutil
+import inspect
+from pathlib import Path
+from sys import path
+from typing import NamedTuple
+
+parser_dir = "/usr/share/logparse/"
+main_module = "__init__"
+default_parsers = ["cron_journald", "httpd", "mem", "postfix", "smbd", "sshd_journald", "sudo", "sysinfo", "temperature", "zfs"]
+deprecated_parsers = ["sshd", "cron"]
+
+import logging
+logger = logging.getLogger(__name__)
+
+class Parser():
+ """
+ Base class that every parser should inherit
+ """
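+    # Concrete parsers set self.name and self.info in __init__ and override
+    # parse_log() to return a formatting.Section (see e.g. Cron, Httpd or
+    # SshdJournald).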
+ def __init__(self, name=None, path=None, info=None):
+ self.name = str(name) if name else None
+ self.path = Path(path) if path else None
+ self.info = dict(info) if info else None
+ self.logger = logging.getLogger(__name__)
+
+ def load(self):
+        logger.debug("Loading parser {0} from {1}".format(self.name, str(self.path) if self.path is not None else "defaults"))
+ return importlib.import_module(self.name)
+
+ def parse_log(self, **args):
+ """
+ Every parser should provide the parse_log method which is executed at
+ runtime to analyse logs.
+ """
+ raise NotImplementedError("Failed to find an entry point for parser " + self.name)
+
+class ParserLoader:
+ """
+ This class searches for parsers in the main logparse package and
+ optionally in another external package (default /usr/share/logparse).
+ """
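+    #
+    # A typical use, as in the main logparse module:
+    #
+    #   loader = ParserLoader("logparse.parsers")
+    #   for parser in loader.parsers:
+    #       section = parser.parse_log()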
+
+ def __init__(self, pkg):
+ """
+ Initiate search for parsers
+ """
+ self.pkg = pkg
+        self.parsers = []
+ self.reload()
+
+
+ def reload(self):
+ """
+ Reset parsers list and iterate through package modules
+ """
+        self.parsers = []
+ self.seen_paths = []
+ logger.debug("Looking for parsers in package {0}".format(str(self.pkg)))
+ self.walk_package(self.pkg)
+
+ def walk_package(self, package):
+ """
+ Check package and subdirectories for loadable modules
+ """
+
+ imported_package = __import__(package, fromlist=["null"]) # fromlist must be non-empty to load target module rather than parent package
+
+ for _, parser_name, ispkg in pkgutil.iter_modules(imported_package.__path__, imported_package.__name__ + '.'):
+ if not ispkg:
+ parser_module = __import__(parser_name, fromlist=["null"])
+ clsmembers = inspect.getmembers(parser_module, inspect.isclass)
+ for (_, c) in clsmembers:
+ # Ignore the base Parser class
+                    if issubclass(c, Parser) and (c is not Parser):
+ logger.debug("Found parser {0}.{1}".format(c.__module__, c.__name__))
+ self.parsers.append(c())
+
+
+ # Recurse subpackages
+
+ all_current_paths = []
+ if isinstance(imported_package.__path__, str):
+ all_current_paths.append(imported_package.__path__)
+ else:
+ all_current_paths.extend([x for x in imported_package.__path__])
+
+ for pkg_path in all_current_paths:
+ if pkg_path not in self.seen_paths:
+ self.seen_paths.append(pkg_path)
+
+ # Get subdirectories of package
+ child_pkgs = [p for p in os.listdir(pkg_path) if os.path.isdir(os.path.join(pkg_path, p))]
+
+ # Walk through each subdirectory
+ for child_pkg in child_pkgs:
+ self.walk_package(package + '.' + child_pkg)
+
+def findall():
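+    """
+    Scan parser_dir for subdirectories containing a valid main module
+    (__init__.py) and return them as a list of Parser objects.
+    """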
+ logger.debug("Searching for parsers in {0}".format(parser_dir))
+ path.append(os.path.abspath(parser_dir))
+ parsers = []
+ parser_candidates = os.listdir(parser_dir)
+ for parser_name in parser_candidates:
+ location = os.path.join(parser_dir, parser_name)
+ if not os.path.isdir(location) or not main_module + '.py' in os.listdir(location):
+ logger.warning("Rejecting parser {0} due to invalid structure".format(location))
+ continue
+ info = imp.find_module(main_module, [location])
+ parser_obj = Parser(parser_name, location, info)
+ parsers.append(parser_obj)
+ logger.debug("Added parser {0}".format(parser_obj.name))
+ return parsers
+
+def search(name):
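+    """
+    Look up a parser name in the default and deprecated module lists and
+    return a Parser object for it, or None if the name is not recognised.
+    """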
+ logger.debug("Searching for parser {0}".format(name))
+ if name in default_parsers:
+ logger.debug("Found parser {0} in default modules".format(name))
+ return Parser('.'.join(__name__.split('.')[:-1] + [name]))
+ elif name in deprecated_parsers:
+ logger.debug("Found parser {0} in deprecated modules".format(name))
+ return Parser('.'.join(__name__.split('.')[:-1] + [name]))
+ else:
+ return None
+
+def load(parser):
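+    """
+    Import the module for the given Parser object and return it.
+    """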
+    logger.debug("Loading parser {0} from {1}".format(parser.name, parser.path if parser.path is not None else "defaults"))
+ return importlib.import_module(parser.name)
+++ /dev/null
-#
-# cron-journald.py
-#
-# List the logged (executed) cron jobs and their commands (uses journald module)
-#
-# TODO: also output a list of scheduled (future) jobs
-#
-
-from systemd import journal
-
-from logparse.formatting import *
-from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- logger.debug("Starting cron section")
- section = Section("cron")
-
- # Initiate journald reader
- j = journal.Reader()
- j.this_boot()
- j.this_machine()
- j.log_level(journal.LOG_INFO)
- j.add_match(_COMM="cron")
-
- logger.info("Obtaining cron logs")
-
- messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry and " CMD " in entry["MESSAGE"]]
-
- total_jobs = len(messages)
-
- if total_jobs == 0:
- logger.warning("Couldn't find any cron commands")
- return 1
-
- logger.info("Found " + str(total_jobs) + " cron jobs")
- section.append_data(Data("Total of " + plural("cron session", total_jobs) + " executed across all users"))
-
- logger.debug("Analysing cron commands for each user")
- users = {}
-
- for msg in messages:
- usr_cmd = re.search('\((\S+)\) CMD (.*)', msg) # [('user', 'cmd')]
- if usr_cmd:
- if not usr_cmd.group(1) in users:
- users[usr_cmd.group(1)] = []
- users[usr_cmd.group(1)].append(usr_cmd.group(2))
-
- for usr, cmdlist in users.items():
- user_data = Data()
- user_data.subtitle = plural("cron session", len(cmdlist)) + " for " + usr
- user_data.items = ("`{0}`".format(cmd) for cmd in cmdlist)
- user_data.orderbyfreq()
- user_data.truncl(config.prefs.getint("logparse", "maxcmd"))
- section.append_data(user_data)
-
- logger.info("Finished cron section")
-
- return section
from logparse.formatting import *
from logparse.util import readlog
from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- logger.warning("NOTE: This cron parser is now deprecated. Please use cron-journald if possible.")
-
- logger.debug("Starting cron section")
- section = Section("cron")
-
- matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs.get("logs", "cron")))
- num = len(matches)
- commands = []
- for match in matches:
- commands.append(str(match))
- logger.info("Found " + str(num) + " cron jobs")
- jobs_data = Data(str(num) + " cron jobs run")
- section.append_data(jobs_data)
-
- if (num > 0):
- logger.debug("Analysing cron commands")
- cmd_data = Data("Top cron commands")
- cmd_data.items = ("`{0}`".format(x) for x in commands)
- cmd_data.orderbyfreq()
- cmd_data.truncl(config.prefs.getint("logparse", "maxcmd"))
- section.append_data(cmd_data)
-
- logger.info("Finished cron section")
- return section
+from logparse.load_parsers import Parser
+
+class Cron(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "cron"
+ self.info = "List the logged (executed) cron jobs and their commands (uses static syslog file)"
+
+ def parse_log(self):
+
+        logger.warning("NOTE: This cron parser is now deprecated. Please use cron_journald if possible.")
+
+ logger.debug("Starting cron section")
+ section = Section("cron")
+
+ matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs.get("logs", "cron")))
+ num = len(matches)
+ commands = []
+ for match in matches:
+ commands.append(str(match))
+ logger.info("Found " + str(num) + " cron jobs")
+ jobs_data = Data(str(num) + " cron jobs run")
+ section.append_data(jobs_data)
+
+ if (num > 0):
+ logger.debug("Analysing cron commands")
+ cmd_data = Data("Top cron commands")
+ cmd_data.items = ("`{0}`".format(x) for x in commands)
+ cmd_data.orderbyfreq()
+ cmd_data.truncl(config.prefs.getint("logparse", "maxcmd"))
+ section.append_data(cmd_data)
+
+ logger.info("Finished cron section")
+ return section
--- /dev/null
+#
+# cron_journald.py
+#
+# List the logged (executed) cron jobs and their commands (uses journald module)
+#
+# TODO: also output a list of scheduled (future) jobs
+#
+
+from systemd import journal
+
+from logparse.formatting import *
+from logparse import config
+from logparse.load_parsers import Parser
+
+class CronJournald(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "cron_journald"
+ self.info = "List the logged (executed) cron jobs and their commands (uses journald module)"
+
+ def parse_log(self):
+
+ logger.debug("Starting cron section")
+ section = Section("cron")
+
+ # Initiate journald reader
+ j = journal.Reader()
+ j.this_boot()
+ j.this_machine()
+ j.log_level(journal.LOG_INFO)
+ j.add_match(_COMM="cron")
+
+ logger.info("Obtaining cron logs")
+
+ messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry and " CMD " in entry["MESSAGE"]]
+
+ total_jobs = len(messages)
+
+ if total_jobs == 0:
+ logger.warning("Couldn't find any cron commands")
+ return 1
+
+ logger.info("Found " + str(total_jobs) + " cron jobs")
+ section.append_data(Data("Total of " + plural("cron session", total_jobs) + " executed across all users"))
+
+ logger.debug("Analysing cron commands for each user")
+ users = {}
+
+ for msg in messages:
+ usr_cmd = re.search('\((\S+)\) CMD (.*)', msg) # [('user', 'cmd')]
+ if usr_cmd:
+ if not usr_cmd.group(1) in users:
+ users[usr_cmd.group(1)] = []
+ users[usr_cmd.group(1)].append(usr_cmd.group(2))
+
+ for usr, cmdlist in users.items():
+ user_data = Data()
+ user_data.subtitle = plural("cron session", len(cmdlist)) + " for " + usr
+ user_data.items = ("`{0}`".format(cmd) for cmd in cmdlist)
+ user_data.orderbyfreq()
+ user_data.truncl(config.prefs.getint("logparse", "maxcmd"))
+ section.append_data(user_data)
+
+ logger.info("Finished cron section")
+
+ return section
from logparse.formatting import *
from logparse.util import readlog, resolve
from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
+from logparse.load_parsers import Parser
ACCESS_REGEX = "^\s*(\S+).*\"GET (\S+) HTTP(?:\/\d\.\d)?\" (\d{3}) (\d*) \".+\" \"(.*)\""
self.bytes = int(fields.group(4))
self.useragent = fields.group(5)
-def parse_log():
-
- logger.debug("Starting httpd section")
- section = Section("httpd")
-
- accesslog = readlog(prefs("logs", "httpd-access"))
-
- errorlog= readlog(prefs("logs", "httpd-error"))
- total_errors = len(errorlog.splitlines())
+class Httpd(Parser):
- logger.debug("Retrieved log data")
+ def __init__(self):
+ super().__init__()
+ self.name = "httpd"
+ self.info = "Analyse Apache (httpd) server logs, including data transferred, requests, clients, and errors."
- logger.debug("Searching through access log")
+ def parse_log(self):
- accesses = []
+ logger.debug("Starting httpd section")
+ section = Section("httpd")
- for line in accesslog.splitlines():
- if "GET" in line:
- accesses.append(AccessLine(line))
+ accesslog = readlog(prefs("logs", "httpd-access"))
- total_requests = len(accesses)
-
- section.append_data(Data("Total of " + plural("request", total_requests)))
- section.append_data(Data(plural("error", total_errors)))
+        errorlog = readlog(prefs("logs", "httpd-error"))
+ total_errors = len(errorlog.splitlines())
- size = Data()
- size.subtitle = "Transferred " + parsesize(sum([ac.bytes for ac in accesses]))
- section.append_data(size)
+ logger.debug("Retrieved log data")
- clients = Data()
- clients.items = [resolve(ac.client, config.prefs.get("httpd", "resolve-domains")) for ac in accesses]
- clients.orderbyfreq()
- clients.subtitle = "Received requests from " + plural("client", len(clients.items))
- clients.truncl(config.prefs.getint("logparse", "maxlist"))
- section.append_data(clients)
+ logger.debug("Searching through access log")
- files = Data()
- files.items = [ac.file for ac in accesses]
- files.orderbyfreq()
- files.subtitle = plural("file", len(files.items)) + " requested"
- files.truncl(config.prefs.getint("logparse", "maxlist"))
- section.append_data(files)
+ accesses = []
- useragents = Data()
- useragents.items = [ac.useragent for ac in accesses]
- useragents.orderbyfreq()
- useragents.subtitle = plural("user agent", len(useragents.items))
- useragents.truncl(config.prefs.getint("logparse", "maxlist"))
- section.append_data(useragents)
+ for line in accesslog.splitlines():
+ if "GET" in line:
+ accesses.append(AccessLine(line))
- logger.info("httpd has received " + str(total_requests) + " requests with " + str(total_errors) + " errors")
-
-
- logger.info("Finished httpd section")
- return section
+ total_requests = len(accesses)
+
+ section.append_data(Data("Total of " + plural("request", total_requests)))
+ section.append_data(Data(plural("error", total_errors)))
+
+ size = Data()
+ size.subtitle = "Transferred " + parsesize(sum([ac.bytes for ac in accesses]))
+ section.append_data(size)
+
+ clients = Data()
+ clients.items = [resolve(ac.client, config.prefs.get("httpd", "httpd-resolve-domains")) for ac in accesses]
+ clients.orderbyfreq()
+ clients.subtitle = "Received requests from " + plural("client", len(clients.items))
+ clients.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(clients)
+
+ files = Data()
+ files.items = [ac.file for ac in accesses]
+ files.orderbyfreq()
+ files.subtitle = plural("file", len(files.items)) + " requested"
+ files.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(files)
+
+ useragents = Data()
+ useragents.items = [ac.useragent for ac in accesses]
+ useragents.orderbyfreq()
+ useragents.subtitle = plural("user agent", len(useragents.items))
+ useragents.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(useragents)
+
+ logger.info("httpd has received " + str(total_requests) + " requests with " + str(total_errors) + " errors")
+
+
+ logger.info("Finished httpd section")
+ return section
+++ /dev/null
-#
-# load_parsers.py
-#
-# Search for and load files which parse logs for particular services
-#
-
-import imp
-import importlib
-import os
-import glob
-from pathlib import Path
-from sys import path
-from typing import NamedTuple
-
-parser_dir = "/usr/share/logparse/"
-main_module = "__init__"
-default_parsers = ["cron-journald", "httpd", "mem", "postfix", "smbd", "sshd-journald", "sudo", "sysinfo", "temperature", "zfs"]
-deprecated_parsers = ["sshd", "cron"]
-
-import logging
-logger = logging.getLogger(__name__)
-
-class Parser():
- def __init__(self, name, path=None, info=None):
- self.name = str(name)
- self.path = Path(path) if path else None
- self.info = dict(info) if info else None
-
-def findall():
- logger.debug("Searching for parsers in {0}".format(parser_dir))
- path.append(os.path.abspath(parser_dir))
- parsers = []
- parser_candidates = os.listdir(parser_dir)
- for parser_name in parser_candidates:
- location = os.path.join(parser_dir, parser_name)
- if not os.path.isdir(location) or not main_module + '.py' in os.listdir(location):
- logger.warning("Rejecting parser {0} due to invalid structure".format(location))
- continue
- info = imp.find_module(main_module, [location])
- parser_obj = Parser(parser_name, location, info)
- parsers.append(parser_obj)
- logger.debug("Added parser {0}".format(parser_obj.name))
- return parsers
-
-def search(name):
- logger.debug("Searching for parser {0}".format(name))
- if name in default_parsers:
- logger.debug("Found parser {0} in default modules".format(name))
- return Parser('.'.join(__name__.split('.')[:-1] + [name]))
- elif name in deprecated_parsers:
- logger.debug("Found parser {0} in deprecated modules".format(name))
- return Parser('.'.join(__name__.split('.')[:-1] + [name]))
- else:
- return None
-
-def load(parser):
- logger.debug("Loading parser {0} from {1}".format(parser.name, parser.path if parser.path != None else "defaults"))
- return importlib.import_module(parser.name)
from logparse.formatting import *
from logparse import config
+from logparse.load_parsers import Parser
-import logging
-logger = logging.getLogger(__name__)
+class Mem(Parser):
-def parse_log():
+ def __init__(self):
+ super().__init__()
+ self.name = "mem"
+ self.info = "Get instantaneous memory statistics (installed, total, free, available)"
- logger.debug("Starting memory section")
- section = Section("memory")
-
- table = Table()
+ def parse_log(self):
- ram_b = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
- table.add_row(Row([Column("Installed"), Column(parsesize(ram_b))]))
+ logger.debug("Starting memory section")
+ section = Section("memory")
+
+ table = Table()
- raw_mem = util.readlog(config.prefs.get("logs", "meminfo"))
- line_regex = re.compile("^Mem(\w+):\s*(\d*)\s*kB$")
+ ram_b = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
+ table.add_row(Row([Column("Installed"), Column(parsesize(ram_b))]))
- for line in raw_mem.splitlines():
+ raw_mem = util.readlog(config.prefs.get("logs", "meminfo"))
+ line_regex = re.compile("^Mem(\w+):\s*(\d*)\s*kB$")
- matches = line_regex.findall(line)
+ for line in raw_mem.splitlines():
- if len(matches) > 0:
- logger.debug("Detected {0} memory of {1} kB".format(matches[0][0].lower(), matches[0][1]))
- table.add_row(Row([Column(matches[0][0]), Column(parsesize(float(matches[0][1])*1000))]))
+ matches = line_regex.findall(line)
- table.align_column(0, "right")
- section.append_table(table)
+ if len(matches) > 0:
+ logger.debug("Detected {0} memory of {1} kB".format(matches[0][0].lower(), matches[0][1]))
+ table.add_row(Row([Column(matches[0][0]), Column(parsesize(float(matches[0][1])*1000))]))
- logger.info("Finished memory section")
- return section
+ table.align_column(0, "right")
+ section.append_table(table)
+
+ logger.info("Finished memory section")
+ return section
from logparse.formatting import *
from logparse.util import readlog
from logparse import config
+from logparse.load_parsers import Parser
-import logging
-logger = logging.getLogger(__name__)
+class Postfix(Parser):
-def parse_log():
- section = Section("postfix")
- logger.debug("Starting postfix section")
- logger.debug("Searching through postfix logs")
- messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs.get("logs", "postfix")))
- r = []
- s = []
- size = 0
- logger.debug("Analysing message size")
- for message in messages:
- r.append(message[2])
- s.append(message[0])
- size += int(message[1])
- # size = sum([int(x) for x in messages])
- size = parsesize(size)
- n = str(len(messages))
+ def __init__(self):
+ super().__init__()
+ self.name = "postfix"
+ self.info = "Get message statistics from postfix/sendmail logs"
- logger.debug("Analysing message recipients")
- if (len(r) > 0):
- rec_data = Data()
- s = list(set(r)) # unique recipients
- if (len(s) > 1):
- rec_data.items = r
- rec_data.orderbyfreq()
- rec_data.truncl(config.prefs.getint("logparse", "maxlist"))
- rec_data.subtitle = n + " messages sent to"
+ def parse_log(self):
+ section = Section("postfix")
+ logger.debug("Starting postfix section")
+ logger.debug("Searching through postfix logs")
+ messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs.get("logs", "postfix")))
+ r = []
+ s = []
+ size = 0
+ logger.debug("Analysing message size")
+ for message in messages:
+ r.append(message[2])
+ s.append(message[0])
+ size += int(message[1])
+ # size = sum([int(x) for x in messages])
+ size = parsesize(size)
+ n = str(len(messages))
+
+ logger.debug("Analysing message recipients")
+ if (len(r) > 0):
+ rec_data = Data()
+ s = list(set(r)) # unique recipients
+ if (len(s) > 1):
+ rec_data.items = r
+ rec_data.orderbyfreq()
+ rec_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ rec_data.subtitle = n + " messages sent to"
+ else:
+ rec_data.subtitle = n + " messages sent to " + r[0]
+ section.append_data(rec_data)
else:
- rec_data.subtitle = n + " messages sent to " + r[0]
- section.append_data(rec_data)
- else:
- section.append_data(Data(subtitle=n + " messages sent"))
- logger.info("Found {0} messages sent to {1} recipients".format(n, str(len(r))))
- section.append_data(Data(subtitle="Total of " + size))
- logger.info("Finished postfix section")
- return section
+ section.append_data(Data(subtitle=n + " messages sent"))
+ logger.info("Found {0} messages sent to {1} recipients".format(n, str(len(r))))
+ section.append_data(Data(subtitle="Total of " + size))
+ logger.info("Finished postfix section")
+ return section
from logparse.formatting import *
from logparse.util import readlog, resolve
from logparse import config
+from logparse.load_parsers import Parser
-import logging
-logger = logging.getLogger(__name__)
+class Smbd(Parser):
-def parse_log():
- logger.debug("Starting smbd section")
- section = Section("smbd")
- files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]") # find list of logfiles
- # for f in files:
+ def __init__(self):
+ super().__init__()
+ self.name = "smbd"
+ self.info = "Get login statistics for a samba server."
- # file_mod_time = os.stat(f).st_mtime
+ def parse_log(self):
+ logger.debug("Starting smbd section")
+ section = Section("smbd")
+ files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]") # find list of logfiles
+ # for f in files:
- # Time in seconds since epoch for time, in which logfile can be unmodified.
- # should_time = time.time() - (30 * 60)
+ # file_mod_time = os.stat(f).st_mtime
- # Time in minutes since last modification of file
- # last_time = (time.time() - file_mod_time)
- # logger.debug(last_time)
+ # Time in seconds since epoch for time, in which logfile can be unmodified.
+ # should_time = time.time() - (30 * 60)
- # if (file_mod_time - should_time) < args.time:
- # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
- # else:
+ # Time in minutes since last modification of file
+ # last_time = (time.time() - file_mod_time)
+ # logger.debug(last_time)
- # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
- # files.remove(f)
- logger.debug("Found log files " + str(files))
- n_auths = 0 # total number of logins from all users
- sigma_auths = [] # contains users
+ # if (file_mod_time - should_time) < args.time:
+ # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
+ # else:
- for file in files: # one log file for each client
+ # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
+ # files.remove(f)
+ logger.debug("Found log files " + str(files))
+ n_auths = 0 # total number of logins from all users
+ sigma_auths = [] # contains users
- logger.debug("Looking at file " + file)
+ for file in files: # one log file for each client
- # find the machine (ip or hostname) that this file represents
- ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
- host = resolve(ip, fqdn=config.prefs.get("smbd", "resolve-domains"))
- if host == ip and (config.prefs.get("smbd", "resolve-domains") != "ip" or config.prefs.get("logparse", "resolve-domains") != "ip"): # if ip has disappeared, fall back to a hostname from logfile
- newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
- if (len(set(newhost)) == 1): # all hosts in one file should be the same
- host = newhost[0].lower()
+ logger.debug("Looking at file " + file)
- # count number of logins from each user-host pair
- matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
- for match in matches:
- userhost = match + "@" + host
- sigma_auths.append(userhost)
- # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
- # if (exists == []):
- # sigma_auths.append([userhost, 1])
- # else:
- # sigma_auths[exists[0]][1] += 1
- n_auths += 1
- auth_data = Data(subtitle=plural("login", n_auths) + " from")
- if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
- auth_data.subtitle += ' ' + sigma_auths[0][0]
+ # find the machine (ip or hostname) that this file represents
+ ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
+ host = resolve(ip, fqdn=config.prefs.get("smbd", "smbd-resolve-domains"))
+ if host == ip and (config.prefs.get("smbd", "smbd-resolve-domains") != "ip" or config.prefs.get("logparse", "resolve-domains") != "ip"): # if ip has disappeared, fall back to a hostname from logfile
+ newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
+ if (len(set(newhost)) == 1): # all hosts in one file should be the same
+ host = newhost[0].lower()
+
+ # count number of logins from each user-host pair
+ matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
+ for match in matches:
+ userhost = match + "@" + host
+ sigma_auths.append(userhost)
+ # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
+ # if (exists == []):
+ # sigma_auths.append([userhost, 1])
+ # else:
+ # sigma_auths[exists[0]][1] += 1
+ n_auths += 1
+ auth_data = Data(subtitle=plural("login", n_auths) + " from")
+ if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
+ auth_data.subtitle += ' ' + sigma_auths[0][0]
+ section.append_data(auth_data)
+ else: # multiple users
+ auth_data.items = sigma_auths
+ auth_data.orderbyfreq()
+ auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ logger.debug("Found {0} samba logins".format(str(n_auths)))
section.append_data(auth_data)
- else: # multiple users
- auth_data.items = sigma_auths
- auth_data.orderbyfreq()
- auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
- logger.debug("Found {0} samba logins".format(str(n_auths)))
- section.append_data(auth_data)
- logger.info("Finished smbd section")
- return section
+ logger.info("Finished smbd section")
+ return section
+++ /dev/null
-#
-# sshd.py
-#
-# Find number of ssh logins and authorised users
-#
-
-import re
-from systemd import journal
-
-from logparse.formatting import *
-from logparse.util import resolve
-from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- logger.debug("Starting sshd section")
- section = Section("ssh")
-
- j = journal.Reader()
- j.this_boot()
- j.log_level(journal.LOG_DEBUG)
- j.add_match(_COMM="sshd")
-
- messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
-
- login_data = Data("successful", [])
- invalid_data = Data("invalid", [])
- failed_data = Data("failed", [])
-
- for msg in messages:
-
- if "Accepted publickey" in msg:
- entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', msg) # [('user', 'ip')]
- user = entry.group(1)
- ip = entry.group(2)
-
- userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
- login_data.items.append(userhost)
-
- elif "Connection closed by authenticating user root" in msg:
- entry = re.search('^.*Connection closed by authenticating user (\S+) (\S+)', msg) # [('user', 'ip')]
- user = entry.group(1)
- ip = entry.group(2)
-
- userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
- failed_data.items.append(userhost)
-
- elif "Invalid user" in msg:
- entry = re.search('^.*Invalid user (\S+) from (\S+).*', msg) # [('user', 'ip')]
- user = entry.group(1)
- ip = entry.group(2)
-
- userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
- invalid_data.items.append(userhost)
-
- login_data.subtitle = plural("successful login", len(login_data.items)) + " from"
- login_data.orderbyfreq()
- login_data.truncl(config.prefs.getint("logparse", "maxlist"))
-
- invalid_data.subtitle = plural("attempted login", len(invalid_data.items))
- invalid_data.orderbyfreq()
- invalid_data.subtitle += plural(" from invalid user", len(invalid_data.items), False)
- invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
-
- failed_data.subtitle = plural("failed login", len(failed_data.items)) + " from"
- failed_data.orderbyfreq()
- failed_data.truncl(config.prefs.getint("logparse", "maxlist"))
-
- section.append_data(login_data)
- section.append_data(invalid_data)
- section.append_data(failed_data)
-
- logger.info("Finished sshd section")
- return section
#
-# sshd_auth.py
+# sshd.py
#
# Find number of ssh logins and authorised users (uses /var/log/auth.log)
#
from logparse.formatting import *
from logparse.util import readlog, resolve
from logparse import config
+from logparse.load_parsers import Parser
+
+class Sshd(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "sshd"
+ self.info = "Find number of ssh logins and authorised users (uses /var/log/auth.log)"
+
+ def parse_log(self):
+
+        logger.warning("NOTE: This sshd parser is now deprecated. Please use sshd_journald if possible.")
+
+ logger.debug("Starting sshd section")
+ section = Section("ssh")
+
+ logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
+ authlog = readlog(config.prefs.get("logs", "auth"))
+
+ matches = re.findall('.*sshd.*Accepted publickey for .* from .*', authlog) # get all logins
+ invalid_matches = re.findall(".*sshd.*Invalid user .* from .*", authlog)
+ root_matches = re.findall("Disconnected from authenticating user root", authlog)
+ logger.debug("Finished searching for logins")
+
+ users = [] # list of users with format [username, number of logins] for each item
+ data = []
+ num = len(matches) # total number of logins
+ for match in matches:
+ entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- logger.warning("NOTE: This sshd parser is now deprecated. Please use sshd-journald if possible.")
-
- logger.debug("Starting sshd section")
- section = Section("ssh")
- logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
- matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog(config.prefs.get("logs", "auth"))) # get all logins
- logger.debug("Finished searching for logins")
-
- logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
- authlog = readlog(config.prefs.get("logs", "auth"))
-
- matches = re.findall('.*sshd.*Accepted publickey for .* from .*', authlog) # get all logins
- invalid_matches = re.findall(".*sshd.*Invalid user .* from .*", authlog)
- root_matches = re.findall("Disconnected from authenticating user root", authlog)
- logger.debug("Finished searching for logins")
-
- users = [] # list of users with format [username, number of logins] for each item
- data = []
- num = len(matches) # total number of logins
- for match in matches:
- entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]
-
- user = entry.group(1)
- ip = entry.group(2)
-
- userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
- users.append(userhost)
- logger.debug("Parsed list of authorised users")
-
- auth_data = Data(subtitle=plural('login', num) + ' from', items=users)
-
- if (len(auth_data.items) == 1): # if only one user, do not display no of logins for this user
- logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
- auth_data.subtitle += ' ' + auth_data.items[0]
- auth_data.orderbyfreq()
- auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
- logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
- section.append_data(auth_data)
-
- invalid_users = []
- for match in invalid_matches:
- entry = re.search('^.*Invalid user (\S+) from (\S+).*', match) # [('user', 'ip')]
-
- try:
user = entry.group(1)
ip = entry.group(2)
- except: # blank user field
- continue
-
- userhost = user + '@' + ip
- invalid_users.append(userhost)
- logger.debug("Parsed list of invalid users")
- invalid_data = Data(subtitle=plural("attempted login", len(invalid_matches)) + " from " + plural("invalid user", len(invalid_users), print_quantity=False), items=invalid_users)
- if (len(invalid_data.items) == 1): # if only one user, do not display no of logins for this user
- logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid user " + invalid_users[0])
- invalid_data.subtitle += ' ' + invalid_data.items[0]
- invalid_data.orderbyfreq()
- invalid_data.truncl(config.prefs.get("logparse", "maxlist"))
- logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid users " + str(data))
- section.append_data(invalid_data)
-
- logger.debug("Found {0} attempted logins for root".format(str(len(root_matches))))
-
- section.append_data(Data(subtitle=plural("attempted login", str(len(root_matches))) + " for root"))
-
- logger.info("Finished sshd section")
- return section
+
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+ users.append(userhost)
+ logger.debug("Parsed list of authorised users")
+
+ auth_data = Data(subtitle=plural('login', num) + ' from', items=users)
+
+ if (len(auth_data.items) == 1): # if only one user, do not display no of logins for this user
+ logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
+ auth_data.subtitle += ' ' + auth_data.items[0]
+ auth_data.orderbyfreq()
+ auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
+ section.append_data(auth_data)
+
+ invalid_users = []
+ for match in invalid_matches:
+ entry = re.search('^.*Invalid user (\S+) from (\S+).*', match) # [('user', 'ip')]
+
+ try:
+ user = entry.group(1)
+ ip = entry.group(2)
+ except: # blank user field
+ continue
+
+ userhost = user + '@' + ip
+ invalid_users.append(userhost)
+ logger.debug("Parsed list of invalid users")
+ invalid_data = Data(subtitle=plural("attempted login", len(invalid_matches)) + " from " + plural("invalid user", len(invalid_users), print_quantity=False), items=invalid_users)
+ if (len(invalid_data.items) == 1): # if only one user, do not display no of logins for this user
+ logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid user " + invalid_users[0])
+ invalid_data.subtitle += ' ' + invalid_data.items[0]
+ invalid_data.orderbyfreq()
+ invalid_data.truncl(config.prefs.get("logparse", "maxlist"))
+ logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid users " + str(data))
+ section.append_data(invalid_data)
+
+ logger.debug("Found {0} attempted logins for root".format(str(len(root_matches))))
+
+ section.append_data(Data(subtitle=plural("attempted login", str(len(root_matches))) + " for root"))
+
+ logger.info("Finished sshd section")
+ return section
--- /dev/null
+#
+# sshd_journald.py
+#
+# Find number of ssh logins and authorised users (uses journald)
+#
+
+import re
+from systemd import journal
+
+from logparse.formatting import *
+from logparse.util import resolve
+from logparse import config
+from logparse.load_parsers import Parser
+
+class SshdJournald(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "sshd_journald"
+ self.info = "Find number of ssh logins and authorised users (uses journald)"
+
+ def parse_log(self):
+
+ logger.debug("Starting sshd section")
+ section = Section("ssh")
+
+ j = journal.Reader()
+ j.this_boot()
+ j.log_level(journal.LOG_DEBUG)
+ j.add_match(_COMM="sshd")
+
+ messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
+
+ login_data = Data("successful", [])
+ invalid_data = Data("invalid", [])
+ failed_data = Data("failed", [])
+
+ for msg in messages:
+
+ if "Accepted publickey" in msg:
+ entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', msg) # [('user', 'ip')]
+ user = entry.group(1)
+ ip = entry.group(2)
+
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+ login_data.items.append(userhost)
+
+ elif "Connection closed by authenticating user root" in msg:
+ entry = re.search('^.*Connection closed by authenticating user (\S+) (\S+)', msg) # [('user', 'ip')]
+ user = entry.group(1)
+ ip = entry.group(2)
+
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+ failed_data.items.append(userhost)
+
+ elif "Invalid user" in msg:
+ entry = re.search('^.*Invalid user (\S+) from (\S+).*', msg) # [('user', 'ip')]
+ user = entry.group(1)
+ ip = entry.group(2)
+
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+ invalid_data.items.append(userhost)
+
+ login_data.subtitle = plural("successful login", len(login_data.items)) + " from"
+ login_data.orderbyfreq()
+ login_data.truncl(config.prefs.getint("logparse", "maxlist"))
+
+ invalid_data.subtitle = plural("attempted login", len(invalid_data.items))
+ invalid_data.orderbyfreq()
+ invalid_data.subtitle += plural(" from invalid user", len(invalid_data.items), False)
+ invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
+
+ failed_data.subtitle = plural("failed login", len(failed_data.items)) + " from"
+ failed_data.orderbyfreq()
+ failed_data.truncl(config.prefs.getint("logparse", "maxlist"))
+
+ section.append_data(login_data)
+ section.append_data(invalid_data)
+ section.append_data(failed_data)
+
+ logger.info("Finished sshd section")
+ return section
import re
-from logparse..formatting import *
+from logparse.formatting import *
from logparse.util import readlog
from logparse.config import prefs
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
- logger.debug("Starting sudo section")
- section = Section("sudo")
- logger.debug("Searching for matches in {0}".format(prefs.get("logs", "auth")))
- umatches = re.findall('.*sudo:session\): session opened.*', readlog(prefs.get("logs", "auth")))
- num = sum(1 for line in umatches) # total number of sessions
- users = []
- data = []
- for match in umatches:
- user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
- exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
- if (exists == []):
- users.append([user, 1])
+from logparse.load_parsers import Parser
+
+class Sudo(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "sudo"
+ self.info = "Get number of sudo sessions for each user"
+
+ def parse_log(self):
+ logger.debug("Starting sudo section")
+ section = Section("sudo")
+ logger.debug("Searching for matches in {0}".format(prefs.get("logs", "auth")))
+ umatches = re.findall('.*sudo:session\): session opened.*', readlog(prefs.get("logs", "auth")))
+ num = sum(1 for line in umatches) # total number of sessions
+ users = []
+ data = []
+ for match in umatches:
+ user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
+ exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
+ if (exists == []):
+ users.append([user, 1])
+ else:
+ users[exists[0]][1] += 1
+ commands = []
+ cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(prefs.get("logs", "auth")))
+ for cmd in cmatches:
+ commands.append(cmd)
+ logger.debug("Finished parsing sudo sessions")
+
+ auth_data = Data(subtitle=plural("sudo session", num) + " for")
+
+ if (len(users) == 1):
+ logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
+ auth_data.subtitle += ' ' + users[0][0]
else:
- users[exists[0]][1] += 1
- commands = []
- cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(prefs.get("logs", "auth")))
- for cmd in cmatches:
- commands.append(cmd)
- logger.debug("Finished parsing sudo sessions")
-
- auth_data = Data(subtitle=plural("sudo session", num) + " for")
-
- if (len(users) == 1):
- logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
- auth_data.subtitle += ' ' + users[0][0]
- else:
- for user in users:
- auth_data.items.append(user[0] + ' (' + str(user[1]) + ')')
- logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
- section.append_data(auth_data)
-
- if (len(commands) > 0):
- command_data = Data(subtitle="top sudo commands")
- commands = backticks(commands)
- command_data.items = commands
- command_data.orderbyfreq()
- command_data.truncl(prefs.getint("logparse", "maxcmd"))
- section.append_data(command_data)
-
- logger.info("Finished sudo section")
-
- return section
+ for user in users:
+ auth_data.items.append(user[0] + ' (' + str(user[1]) + ')')
+ logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
+ section.append_data(auth_data)
+
+ if (len(commands) > 0):
+ command_data = Data(subtitle="top sudo commands")
+ commands = backticks(commands)
+ command_data.items = commands
+ command_data.orderbyfreq()
+ command_data.truncl(prefs.getint("logparse", "maxcmd"))
+ section.append_data(command_data)
+
+ logger.info("Finished sudo section")
+
+ return section
from logparse.formatting import *
from logparse.config import prefs
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- logger.debug("Starting sysinfo section")
- section = Section("system")
- table = Table()
-
- table.add_row(Row([Column("Hostname"), Column(util.hostname(prefs.get("logparse", "hostname-path")))]))
- table.add_row(Row([Column("OS"), Column(platform.platform())]))
- table.add_row(Row([Column("OS version"), Column(platform.version())]))
- table.add_row(Row([Column("Platform"), Column(platform.system() + " " + platform.machine())]))
-
- processors = []
- raw_proc = util.readlog(prefs.get("logs", "cpuinfo"))
- line_regex = re.compile(".*model name.*:\s*")
- proc_regex = re.compile("\s*(\(R\)|\(TM\)|CPU)")
- for line in raw_proc.splitlines():
- if "model name" in line:
- processor = line_regex.sub("", line, 1)
- processor = " ".join(proc_regex.sub("", processor).split()) # remove extraneous text and whitespace
- if not processor in processors:
- processors.append(processor)
- else:
- logger.debug("Found duplicate entry (perhaps multiple cores?) for {0}".format(processor))
- table.align_column(0, "right")
- if len(processors) == 1:
- table.add_row(Row([Column("Processor"), Column("; ".join(processors))]))
- section.append_table(table)
- elif len(processors) > 1:
- section.append_table(table)
- proc_data = Data("Processors")
- proc_data.items = processors
- section.append_data(proc_data)
- else:
- logger.warning("Failed to find processor data")
-
- logger.info("Finished sysinfo section")
- return section
+from logparse.load_parsers import Parser
+
+class Sysinfo(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "sysinfo"
+ self.info = "Get standard system information from basic Unix commands"
+
+ def parse_log(self):
+
+ logger.debug("Starting sysinfo section")
+ section = Section("system")
+ table = Table()
+
+ table.add_row(Row([Column("Hostname"), Column(util.hostname(prefs.get("logparse", "hostname-path")))]))
+ table.add_row(Row([Column("OS"), Column(platform.platform())]))
+ table.add_row(Row([Column("OS version"), Column(platform.version())]))
+ table.add_row(Row([Column("Platform"), Column(platform.system() + " " + platform.machine())]))
+
+ processors = []
+ raw_proc = util.readlog(prefs.get("logs", "cpuinfo"))
+ line_regex = re.compile(".*model name.*:\s*")
+ proc_regex = re.compile("\s*(\(R\)|\(TM\)|CPU)")
+ for line in raw_proc.splitlines():
+ if "model name" in line:
+ processor = line_regex.sub("", line, 1)
+ processor = " ".join(proc_regex.sub("", processor).split()) # remove extraneous text and whitespace
+ if not processor in processors:
+ processors.append(processor)
+ else:
+ logger.debug("Found duplicate entry (perhaps multiple cores?) for {0}".format(processor))
+ table.align_column(0, "right")
+ if len(processors) == 1:
+ table.add_row(Row([Column("Processor"), Column("; ".join(processors))]))
+ section.append_table(table)
+ elif len(processors) > 1:
+ section.append_table(table)
+ proc_data = Data("Processors")
+ proc_data.items = processors
+ section.append_data(proc_data)
+ else:
+ logger.warning("Failed to find processor data")
+
+ logger.info("Finished sysinfo section")
+ return section
import logging
logger = logging.getLogger(__name__)
+from logparse.load_parsers import Parser
class Drive(NamedTuple):
path: str
return 1
-def parse_log():
-
- logger.debug("Starting temp section")
- section = Section("temperatures")
-
- sensors.init()
-
- systemp = Data("Sys", [])
- coretemp = Data("Cores", [])
- pkgtemp = Data("Processor", [])
-
- try:
- for chip in sensors.iter_detected_chips():
- for feature in chip:
- if "Core" in feature.label:
- coretemp.items.append([feature.label, float(feature.get_value())])
- continue
- if "CPUTIN" in feature.label:
- pkgtemp.items.append([feature.label, float(feature.get_value())])
- continue
- if "SYS" in feature.label:
- systemp.items.append([feature.label, float(feature.get_value())])
- continue
-
- logger.debug("Core data is {0}".format(str(coretemp.items)))
- logger.debug("Sys data is {0}".format(str(systemp.items)))
- logger.debug("Pkg data is {0}".format(str(pkgtemp.items)))
- for temp_data in [systemp, coretemp, pkgtemp]:
- logger.debug("Looking at temp data {0}".format(str(temp_data.items)))
- if len(temp_data.items) > 1:
- avg = float(sum(feature[1] for feature in temp_data.items)) / len(temp_data.items)
- logger.debug("Avg temp for {0} is {1} {2}{3}".format(temp_data.subtitle, str(avg), DEG, CEL))
- temp_data.subtitle += " (avg {0}{1}{2})".format(str(avg), DEG, CEL)
- temp_data.items = ["{0}: {1}{2}{3}".format(feature[0], str(feature[1]), DEG, CEL) for feature in temp_data.items]
+class Temperature(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "temperature"
+ self.info = "Find current temperature of various system components (CPU, motherboard, hard drives, ambient)."
+
+ def parse_log(self):
+
+ logger.debug("Starting temp section")
+ section = Section("temperatures")
+
+ sensors.init()
+
+ systemp = Data("Sys", [])
+ coretemp = Data("Cores", [])
+ pkgtemp = Data("Processor", [])
+
+ try:
+ for chip in sensors.iter_detected_chips():
+ for feature in chip:
+ if "Core" in feature.label:
+ coretemp.items.append([feature.label, float(feature.get_value())])
+ continue
+ if "CPUTIN" in feature.label:
+ pkgtemp.items.append([feature.label, float(feature.get_value())])
+ continue
+ if "SYS" in feature.label:
+ systemp.items.append([feature.label, float(feature.get_value())])
+ continue
+
+ logger.debug("Core data is {0}".format(str(coretemp.items)))
+ logger.debug("Sys data is {0}".format(str(systemp.items)))
+ logger.debug("Pkg data is {0}".format(str(pkgtemp.items)))
+ for temp_data in [systemp, coretemp, pkgtemp]:
+ logger.debug("Looking at temp data {0}".format(str(temp_data.items)))
+ if len(temp_data.items) > 1:
+ avg = float(sum(feature[1] for feature in temp_data.items)) / len(temp_data.items)
+ logger.debug("Avg temp for {0} is {1} {2}{3}".format(temp_data.subtitle, str(avg), DEG, CEL))
+ temp_data.subtitle += " (avg {0}{1}{2})".format(str(avg), DEG, CEL)
+ temp_data.items = ["{0}: {1}{2}{3}".format(feature[0], str(feature[1]), DEG, CEL) for feature in temp_data.items]
+ else:
+ temp_data.items = [str(temp_data.items[0][1]) + DEG + CEL]
+ section.append_data(temp_data)
+
+ finally:
+ logger.debug("Finished reading onboard temperatures")
+ sensors.cleanup()
+
+
+ # drive temp
+
+ # For this to work, `hddtemp` must be running in daemon mode.
+ # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
+
+ received = ''
+ sumtemp = 0.0
+ data = ""
+ hddtemp_data = Data("Disks")
+
+ client = HddtempClient(
+ host=config.prefs.get("temperatures", "host"),
+ port=config.prefs.getint("temperatures", "port"),
+ sep=config.prefs.get("temperatures", "separator"),
+ timeout=int(config.prefs.get("temperatures", "timeout")))
+ drives = client.get_drives()
+ logger.debug("Received drive info: " + str(drives))
+
+ for drive in sorted(drives, key=lambda x: x.path):
+ if drive.path in config.prefs.get("temperatures", "drives").split():
+ sumtemp += drive.temperature
+ hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs.getboolean("temperatures", "show-model") else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
else:
- temp_data.items = [str(temp_data.items[0][1]) + DEG + CEL]
- section.append_data(temp_data)
-
- finally:
- logger.debug("Finished reading onboard temperatures")
- sensors.cleanup()
-
-
- # drive temp
-
- # For this to work, `hddtemp` must be running in daemon mode.
- # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
-
- received = ''
- sumtemp = 0.0
- data = ""
- hddtemp_data = Data("Disks")
-
- client = HddtempClient(
- host=config.prefs.get("temperatures", "host"),
- port=config.prefs.getint("temperatures", "port"),
- sep=config.prefs.get("temperatures", "separator"),
- timeout=int(config.prefs.get("temperatures", "timeout")))
- drives = client.get_drives()
- logger.debug("Received drive info: " + str(drives))
-
- for drive in sorted(drives, key=lambda x: x.path):
- if drive.path in config.prefs.get("temperatures", "drives").split():
- sumtemp += drive.temperature
- hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs.getboolean("temperatures", "show-model") else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
- else:
- drives.remove(drive)
- logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
- logger.debug("Sorted drive info: " + str(drives))
-
- hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
- logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
- hddtemp_data.subtitle += " (avg {0}{1}{2})".format(str(hddavg), DEG, CEL)
-
- logger.debug("Finished processing drive temperatures")
- logger.info("Finished temp section")
-
- return section
+ drives.remove(drive)
+ logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
+ logger.debug("Sorted drive info: " + str(drives))
+
+ hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
+ logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
+ hddtemp_data.subtitle += " (avg {0}{1}{2})".format(str(hddavg), DEG, CEL)
+
+ logger.debug("Finished processing drive temperatures")
+ logger.info("Finished temp section")
+
+ return section
import re
import sys, traceback
-from formatting import *
+from logparse.formatting import *
from logparse.util import readlog
from logparse.config import prefs
+from logparse.load_parsers import Parser
-import logging
-logger = logging.getLogger(__name__)
+class Zfs(Parser):
-def parse_log():
+ def __init__(self):
+ super().__init__()
+ self.name = "zfs"
+ self.info = "Look through ZFS logs to find latest scrub and its output."
- logger.debug("Starting zfs section")
- section = Section("zfs")
+ def parse_log(self):
- zfslog = readlog(prefs.get("logs", "zfs"))
+ logger.debug("Starting zfs section")
+ section = Section("zfs")
- logger.debug("Analysing zpool log")
- pool = re.search('.*---\n(\w*)', zfslog).group(1)
- scrub = re.search('.* scrub repaired (\d+\s*\w+) in .* with (\d+) errors on (\w+)\s+(\w+)\s+(\d+)\s+(\d{1,2}:\d{2}):\d+\s+(\d{4})', zfslog)
- logger.debug("Found groups {0}".format(scrub.groups()))
- iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
- scrubrepairs = scruberrors = scrubdate = None
- alloc = iostat.group(1)
- free = iostat.group(2)
+ zfslog = readlog(prefs.get("logs", "zfs"))
- try:
- scrubrepairs = scrub.group(1)
- scruberrors = scrub.group(2)
- scrubdate = ' '.join(scrub.groups()[2:-1])
- except Exception as e:
- logger.debug("Error getting scrub data: " + str(e))
- traceback.print_exc(limit=2, file=sys.stdout)
+ logger.debug("Analysing zpool log")
+ pool = re.search('.*---\n(\w*)', zfslog).group(1)
+ scrub = re.search('.* scrub repaired (\d+\s*\w+) in .* with (\d+) errors on (\w+)\s+(\w+)\s+(\d+)\s+(\d{1,2}:\d{2}):\d+\s+(\d{4})', zfslog)
+ logger.debug("Found groups {0}".format(scrub.groups()))
+ iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
+ scrubrepairs = scruberrors = scrubdate = None
+ alloc = iostat.group(1)
+ free = iostat.group(2)
- if (scrubdate != None):
- scrub_data = Data("Scrub of " + pool + " on " + scrubdate)
- scrub_data.items = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
- else:
- scrub_data = Data(pool)
- scrub_data.items = [alloc + " used", free + " free"]
+ try:
+ scrubrepairs = scrub.group(1)
+ scruberrors = scrub.group(2)
+ scrubdate = ' '.join(scrub.groups()[2:-1])
+ except Exception as e:
+ logger.debug("Error getting scrub data: " + str(e))
+ traceback.print_exc(limit=2, file=sys.stdout)
- section.append_data(scrub_data)
+ if (scrubdate != None):
+ scrub_data = Data("Scrub of " + pool + " on " + scrubdate)
+ scrub_data.items = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
+ else:
+ scrub_data = Data(pool)
+ scrub_data.items = [alloc + " used", free + " free"]
- logger.info("Finished zfs section")
- return section
+ section.append_data(scrub_data)
+
+ logger.info("Finished zfs section")
+ return section
from pkg_resources import Requirement, resource_filename
-from logparse.config import prefs
+from logparse import config
def hostname(path): # get the hostname of current server
hnfile = open(path, 'r')
# resolve-domains defined in individual sections of the config take priority over global config
if not fqdn:
- fqdn = prefs.getboolean("logparse", "resolve-domains")
+ fqdn = config.prefs.get("logparse", "resolve-domains")
if fqdn == 'ip':
return(ip)