new parser class structure
author Andrew Lorimer <andrew@charles.cortex>
Tue, 3 Sep 2019 12:02:58 +0000 (22:02 +1000)
committer Andrew Lorimer <andrew@charles.cortex>
Tue, 3 Sep 2019 12:02:58 +0000 (22:02 +1000)
19 files changed:
logparse/config.py
logparse/interface.py
logparse/load_parsers.py [new file with mode: 0644]
logparse/parsers/cron-journald.py [deleted file]
logparse/parsers/cron.py
logparse/parsers/cron_journald.py [new file with mode: 0644]
logparse/parsers/httpd.py
logparse/parsers/load_parsers.py [deleted file]
logparse/parsers/mem.py
logparse/parsers/postfix.py
logparse/parsers/smbd.py
logparse/parsers/sshd-journald.py [deleted file]
logparse/parsers/sshd.py
logparse/parsers/sshd_journald.py [new file with mode: 0644]
logparse/parsers/sudo.py
logparse/parsers/sysinfo.py
logparse/parsers/temperature.py
logparse/parsers/zfs.py
logparse/util.py
index 53f332d798524d9e786ad74eb4b333303cc059ab..8d1b21eb2065e1fe5bcedbb073c7c37bb3ebf9a0 100644 (file)
@@ -73,13 +73,13 @@ defaults = {
             'show-model': False, 
         },
         'sshd': {
-            'resolve-domains': ''
+            'sshd-resolve-domains': ''
         },
         'smbd': {
-            'resolve-domains': ''
+            'smbd-resolve-domains': ''
         },
         'httpd': {
-            'resolve-domains': ''
+            'httpd-resolve-domains': ''
         },
         'du': {
             'paths': ['/', '/etc', '/home'],
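
The change above prefixes each resolve-domains option with its section name, so every parser now reads its own key from its own section. A minimal sketch of how such a lookup behaves with Python's configparser (the dictionary here is illustrative, mirroring only the defaults shown above; it is not the shipped sample config):

    import configparser

    prefs = configparser.ConfigParser()
    prefs.read_dict({
        "sshd":  {"sshd-resolve-domains": ""},
        "smbd":  {"smbd-resolve-domains": ""},
        "httpd": {"httpd-resolve-domains": ""},
    })

    # Each parser asks for the key prefixed with its own name, e.g. the
    # httpd parser further down calls:
    #     config.prefs.get("httpd", "httpd-resolve-domains")
    print(repr(prefs.get("httpd", "httpd-resolve-domains")))  # '' is the default shown above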
index ef8d7fcb869e79f4121724c1abc8b601faca3fd4..8d08c48487f418bba2a9b2746e75d918c12ef19a 100644 (file)
@@ -16,8 +16,7 @@ from datetime import datetime
 import logparse
 import logparse.config
 from logparse.config import prefs, loadconf
-from logparse import formatting, mail, config
-from .parsers import load_parsers
+from logparse import formatting, mail, config, load_parsers
 
 global argparser
 
@@ -110,44 +109,25 @@ def main():
 
     # Find parsers
     
-    parser_providers = []
+    loader = load_parsers.ParserLoader("logparse.parsers") 
+    parser_names = set([x.name for x in loader.parsers])
+
     if argparser.parse_args().logs:
-        log_src = argparser.parse_args().logs.split()
+        parser_names = parser_names.intersection(set(argparser.parse_args().logs.split()))
     elif config.prefs.get("logparse", "parsers"):
-        log_src = config.prefs.get("logparse", "parsers").split()
-    else:
-        log_src = load_parsers.default_parsers
-
-    for parser_name in log_src:
-        parser = load_parsers.search(parser_name)
-        if parser == None:
-            logger.warning("Can't find parser {0}".format(parser_name))
-            continue
-        else:
-            parser_providers.append(load_parsers.load(parser))
+        parser_names = parser_names.intersection(set(config.prefs.get("logparse", "parsers").split()))
 
     if argparser.parse_args().ignore_logs:
-        ignore_src = argparser.parse_args().ignore_logs.split()
+        parser_names = parser_names.difference(set(argparser.parse_args().ignore_logs.split()))
     elif config.prefs.get("logparse", "ignore-parsers"):
-        ignore_src = config.prefs.get("logparse", "ignore-parsers").split()
-    else:
-        ignore_src = []
-    if len(ignore_src) > 0:
-        for parser_name in ignore_src:
-            if parser_name in [x.__name__.rpartition('.')[2] for x in parser_providers]:
-                logger.info("Ignoring default parser {0}".format(parser_name))
-                parser_providers_new = []
-                for p in parser_providers:
-                    if p.__name__.rpartition('.')[2] != parser_name:
-                        parser_providers_new.append(p)
-                parser_providers = parser_providers_new
-                continue
+        parser_names = parser_names.difference(set(config.prefs.get("logparse", "ignore-parsers").split()))
 
     # Execute parsers
 
-    logger.debug(str(parser_providers))
-    for parser in parser_providers:
-        output.append_section(parser.parse_log())
+    logger.debug("Queued the following parsers: " + str(loader.parsers))
+    for parser in loader.parsers:
+        if parser.name in parser_names:
+            output.append_section(parser.parse_log())
 
     # Write HTML footer
     output.append_footer()
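
The rewritten selection logic above boils down to set arithmetic on parser names: start from everything the ParserLoader discovered, intersect with any explicit --logs argument or parsers preference, then subtract --ignore-logs or ignore-parsers. A standalone sketch of that filtering, with illustrative names and values only:

    # Hypothetical illustration of the new parser selection by set arithmetic
    discovered = {"cron_journald", "sshd_journald", "httpd", "mem", "zfs"}

    requested = "httpd mem zfs"   # e.g. from --logs or the "parsers" preference
    ignored = "zfs"               # e.g. from --ignore-logs

    parser_names = set(discovered)
    if requested:
        parser_names &= set(requested.split())
    if ignored:
        parser_names -= set(ignored.split())

    print(sorted(parser_names))   # ['httpd', 'mem']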
diff --git a/logparse/load_parsers.py b/logparse/load_parsers.py
new file mode 100644 (file)
index 0000000..8ea7a03
--- /dev/null
@@ -0,0 +1,136 @@
+#
+#   load_parsers.py
+#   
+#   Search for and load files which parse logs for particular services
+#
+
+import imp
+import importlib
+import os
+import glob
+import pkgutil
+import inspect
+from pathlib import Path
+from sys import path
+from typing import NamedTuple
+
+parser_dir = "/usr/share/logparse/"
+main_module = "__init__"
+default_parsers = ["cron_journald", "httpd", "mem", "postfix", "smbd", "sshd_journald", "sudo", "sysinfo", "temperature", "zfs"]
+deprecated_parsers = ["sshd", "cron"]
+
+import logging
+logger = logging.getLogger(__name__)
+
+class Parser():
+    """
+    Base class that every parser should inherit from
+    """
+    def __init__(self, name=None, path=None, info=None):
+        self.name = str(name) if name else None
+        self.path = Path(path) if path else None
+        self.info = dict(info) if info else None
+        self.logger = logging.getLogger(__name__)
+
+    def load(self):
+        logger.debug("Loading parser {0} from {1}".format(self.name, str(self.path) if self.path != None else "defaults"))
+        return importlib.import_module(self.name)
+
+    def parse_log(self, **args):
+        """
+        Every parser should provide the parse_log method which is executed at
+        runtime to analyse logs.
+        """
+        raise NotImplementedError("Failed to find an entry point for parser " + self.name)
+
+class ParserLoader:
+    """
+    This class searches for parsers in the main logparse package and
+    optionally in another external package (default /usr/share/logparse).
+    """
+
+    def __init__(self, pkg):
+        """
+        Initiate search for parsers
+        """
+        self.pkg = pkg 
+        self.parsers = []
+        self.reload()
+
+
+    def reload(self):
+        """
+        Reset parsers list and iterate through package modules
+        """
+        self.parsers = []
+        self.seen_paths = []
+        logger.debug("Looking for parsers in package {0}".format(str(self.pkg)))
+        self.walk_package(self.pkg)
+
+    def walk_package(self, package):
+        """
+        Check package and subdirectories for loadable modules
+        """
+
+        imported_package = __import__(package, fromlist=["null"]) # fromlist must be non-empty to load target module rather than parent package
+
+        for _, parser_name, ispkg in pkgutil.iter_modules(imported_package.__path__, imported_package.__name__ + '.'):
+            if not ispkg:
+                parser_module = __import__(parser_name, fromlist=["null"])
+                clsmembers = inspect.getmembers(parser_module, inspect.isclass)
+                for (_, c) in clsmembers:
+                    # Ignore the base Parser class
+                    if issubclass(c, Parser) and (c is not Parser):
+                        logger.debug("Found parser {0}.{1}".format(c.__module__, c.__name__))
+                        self.parsers.append(c())
+
+
+        # Recurse subpackages
+
+        all_current_paths = []
+        if isinstance(imported_package.__path__, str):
+            all_current_paths.append(imported_package.__path__)
+        else:
+            all_current_paths.extend([x for x in imported_package.__path__])
+
+        for pkg_path in all_current_paths:
+            if pkg_path not in self.seen_paths:
+                self.seen_paths.append(pkg_path)
+
+                # Get subdirectories of package
+                child_pkgs = [p for p in os.listdir(pkg_path) if os.path.isdir(os.path.join(pkg_path, p))]
+
+                # Walk through each subdirectory
+                for child_pkg in child_pkgs:
+                    self.walk_package(package + '.' + child_pkg)
+
+def findall():
+    logger.debug("Searching for parsers in {0}".format(parser_dir))
+    path.append(os.path.abspath(parser_dir))
+    parsers = []
+    parser_candidates = os.listdir(parser_dir)
+    for parser_name in parser_candidates:
+        location = os.path.join(parser_dir, parser_name)
+        if not os.path.isdir(location) or not main_module + '.py' in os.listdir(location):
+            logger.warning("Rejecting parser {0} due to invalid structure".format(location))
+            continue
+        info = imp.find_module(main_module, [location])
+        parser_obj = Parser(parser_name, location, info)
+        parsers.append(parser_obj)
+        logger.debug("Added parser {0}".format(parser_obj.name))
+    return parsers
+
+def search(name):
+    logger.debug("Searching for parser {0}".format(name))
+    if name in default_parsers:
+        logger.debug("Found parser {0} in default modules".format(name))
+        return Parser('.'.join(__name__.split('.')[:-1] + [name]))
+    elif name in deprecated_parsers:
+        logger.debug("Found parser {0} in deprecated modules".format(name))
+        return Parser('.'.join(__name__.split('.')[:-1] + [name]))
+    else:
+        return None
+
+def load(parser):
+    logger.debug("Loading parser {0} from {1}".format(parser.name, parser.path if parser.path != None else "defaults"))
+    return importlib.import_module(parser.name)
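
With this module in place, a parser is any class inside the logparse.parsers package that subclasses Parser; ParserLoader imports each module via pkgutil and instantiates every such class it finds with inspect.getmembers. A minimal sketch of what a new parser module would look like under this structure (the module name and report text are hypothetical; the Section and Data usage mirrors the converted parsers further down):

    # example.py -- hypothetical parser under the new class structure
    from logparse.formatting import *
    from logparse.load_parsers import Parser

    class Example(Parser):

        def __init__(self):
            super().__init__()
            self.name = "example"
            self.info = "Describe what this parser reports"

        def parse_log(self):
            section = Section("example")
            section.append_data(Data("Nothing to report"))
            return section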
diff --git a/logparse/parsers/cron-journald.py b/logparse/parsers/cron-journald.py
deleted file mode 100644 (file)
index 63a0e95..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-#
-#   cron-journald.py
-#
-#   List the logged (executed) cron jobs and their commands (uses journald module)
-#
-#   TODO: also output a list of scheduled (future) jobs
-#
-
-from systemd import journal
-
-from logparse.formatting import *
-from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
-    logger.debug("Starting cron section")
-    section = Section("cron")
-
-    # Initiate journald reader
-    j = journal.Reader()
-    j.this_boot()
-    j.this_machine()
-    j.log_level(journal.LOG_INFO)
-    j.add_match(_COMM="cron")
-
-    logger.info("Obtaining cron logs")
-
-    messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry and " CMD " in entry["MESSAGE"]]
-
-    total_jobs = len(messages)
-
-    if total_jobs == 0:
-        logger.warning("Couldn't find any cron commands")
-        return 1
-
-    logger.info("Found " + str(total_jobs) + " cron jobs")
-    section.append_data(Data("Total of " + plural("cron session", total_jobs) + " executed across all users"))
-
-    logger.debug("Analysing cron commands for each user")
-    users = {}
-
-    for msg in messages:
-        usr_cmd = re.search('\((\S+)\) CMD (.*)', msg)  # [('user', 'cmd')]
-        if usr_cmd:
-            if not usr_cmd.group(1) in users:
-                users[usr_cmd.group(1)] = []
-            users[usr_cmd.group(1)].append(usr_cmd.group(2))
-
-    for usr, cmdlist in users.items():
-        user_data = Data()
-        user_data.subtitle = plural("cron session", len(cmdlist)) + " for " + usr
-        user_data.items = ("`{0}`".format(cmd) for cmd in cmdlist)
-        user_data.orderbyfreq()
-        user_data.truncl(config.prefs.getint("logparse", "maxcmd"))
-        section.append_data(user_data)
-
-    logger.info("Finished cron section")
-
-    return section 
index 83cb1cfd734469cdc9c6cb535ace27f44826b4bb..02ea2dda5d87c0136aaf74af8b215874ba0420e8 100644 (file)
@@ -13,33 +13,38 @@ import re
 from logparse.formatting import *
 from logparse.util import readlog
 from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
-    logger.warning("NOTE: This cron parser is now deprecated. Please use cron-journald if possible.")
-
-    logger.debug("Starting cron section")
-    section = Section("cron")
-
-    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs.get("logs", "cron")))
-    num = len(matches)
-    commands = []
-    for match in matches:
-        commands.append(str(match))
-    logger.info("Found " + str(num) + " cron jobs")
-    jobs_data = Data(str(num) + " cron jobs run")
-    section.append_data(jobs_data)
-
-    if (num > 0):
-        logger.debug("Analysing cron commands")
-        cmd_data = Data("Top cron commands")
-        cmd_data.items = ("`{0}`".format(x) for x in commands)
-        cmd_data.orderbyfreq()
-        cmd_data.truncl(config.prefs.getint("logparse", "maxcmd"))
-        section.append_data(cmd_data)
-
-    logger.info("Finished cron section")
-    return section 
+from logparse.load_parsers import Parser
+
+class Cron(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "cron"
+        self.info = "List the logged (executed) cron jobs and their commands (uses static syslog file)"
+
+    def parse_log(self):
+
+        logger.warning("NOTE: This cron parser is now deprecated. Please use cron-journald if possible.")
+
+        logger.debug("Starting cron section")
+        section = Section("cron")
+
+        matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs.get("logs", "cron")))
+        num = len(matches)
+        commands = []
+        for match in matches:
+            commands.append(str(match))
+        logger.info("Found " + str(num) + " cron jobs")
+        jobs_data = Data(str(num) + " cron jobs run")
+        section.append_data(jobs_data)
+
+        if (num > 0):
+            logger.debug("Analysing cron commands")
+            cmd_data = Data("Top cron commands")
+            cmd_data.items = ("`{0}`".format(x) for x in commands)
+            cmd_data.orderbyfreq()
+            cmd_data.truncl(config.prefs.getint("logparse", "maxcmd"))
+            section.append_data(cmd_data)
+
+        logger.info("Finished cron section")
+        return section 
diff --git a/logparse/parsers/cron_journald.py b/logparse/parsers/cron_journald.py
new file mode 100644 (file)
index 0000000..e5bc769
--- /dev/null
@@ -0,0 +1,67 @@
+#
+#   cron_journald.py
+#
+#   List the logged (executed) cron jobs and their commands (uses journald module)
+#
+#   TODO: also output a list of scheduled (future) jobs
+#
+
+from systemd import journal
+
+from logparse.formatting import *
+from logparse import config
+from logparse.load_parsers import Parser
+
+class CronJournald(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "cron_journald"
+        self.info = "List the logged (executed) cron jobs and their commands (uses journald module)"
+
+    def parse_log(self):
+
+        logger.debug("Starting cron section")
+        section = Section("cron")
+
+        # Initiate journald reader
+        j = journal.Reader()
+        j.this_boot()
+        j.this_machine()
+        j.log_level(journal.LOG_INFO)
+        j.add_match(_COMM="cron")
+
+        logger.info("Obtaining cron logs")
+
+        messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry and " CMD " in entry["MESSAGE"]]
+
+        total_jobs = len(messages)
+
+        if total_jobs == 0:
+            logger.warning("Couldn't find any cron commands")
+            return 1
+
+        logger.info("Found " + str(total_jobs) + " cron jobs")
+        section.append_data(Data("Total of " + plural("cron session", total_jobs) + " executed across all users"))
+
+        logger.debug("Analysing cron commands for each user")
+        users = {}
+
+        for msg in messages:
+            usr_cmd = re.search('\((\S+)\) CMD (.*)', msg)  # [('user', 'cmd')]
+            if usr_cmd:
+                if not usr_cmd.group(1) in users:
+                    users[usr_cmd.group(1)] = []
+                users[usr_cmd.group(1)].append(usr_cmd.group(2))
+
+        for usr, cmdlist in users.items():
+            user_data = Data()
+            user_data.subtitle = plural("cron session", len(cmdlist)) + " for " + usr
+            user_data.items = ("`{0}`".format(cmd) for cmd in cmdlist)
+            user_data.orderbyfreq()
+            user_data.truncl(config.prefs.getint("logparse", "maxcmd"))
+            section.append_data(user_data)
+
+        logger.info("Finished cron section")
+
+        return section 
index 0abf6edeb2a05432d59d05489b7fe3759e9f32de..2e6ae2ff6e8c3dec92f11b8d8fedb4271d34e7bd 100644 (file)
@@ -13,9 +13,7 @@ import re
 from logparse.formatting import *
 from logparse.util import readlog, resolve
 from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
+from logparse.load_parsers import Parser
 
 ACCESS_REGEX = "^\s*(\S+).*\"GET (\S+) HTTP(?:\/\d\.\d)?\" (\d{3}) (\d*) \".+\" \"(.*)\""
 
@@ -31,58 +29,65 @@ class AccessLine(object):
         self.bytes = int(fields.group(4))
         self.useragent = fields.group(5)
 
-def parse_log():
-
-    logger.debug("Starting httpd section")
-    section = Section("httpd")
-
-    accesslog = readlog(prefs("logs", "httpd-access"))
-
-    errorlog= readlog(prefs("logs", "httpd-error"))
-    total_errors = len(errorlog.splitlines())
+class Httpd(Parser):
 
-    logger.debug("Retrieved log data")
+    def __init__(self):
+        super().__init__()
+        self.name = "httpd"
+        self.info = "Analyse Apache (httpd) server logs, including data transferred, requests, clients, and errors."
 
-    logger.debug("Searching through access log")
+    def parse_log(self):
 
-    accesses = []
+        logger.debug("Starting httpd section")
+        section = Section("httpd")
 
-    for line in accesslog.splitlines():
-        if "GET" in line:
-            accesses.append(AccessLine(line))
+        accesslog = readlog(prefs("logs", "httpd-access"))
 
-    total_requests = len(accesses)
-    
-    section.append_data(Data("Total of " + plural("request", total_requests)))
-    section.append_data(Data(plural("error", total_errors)))
+        errorlog = readlog(prefs("logs", "httpd-error"))
+        total_errors = len(errorlog.splitlines())
 
-    size = Data()
-    size.subtitle = "Transferred " + parsesize(sum([ac.bytes for ac in accesses]))
-    section.append_data(size)
+        logger.debug("Retrieved log data")
 
-    clients = Data()
-    clients.items = [resolve(ac.client, config.prefs.get("httpd", "resolve-domains")) for ac in accesses]
-    clients.orderbyfreq()
-    clients.subtitle = "Received requests from " + plural("client", len(clients.items))
-    clients.truncl(config.prefs.getint("logparse", "maxlist"))
-    section.append_data(clients)
+        logger.debug("Searching through access log")
 
-    files = Data()
-    files.items = [ac.file for ac in accesses]
-    files.orderbyfreq()
-    files.subtitle = plural("file", len(files.items)) + " requested"
-    files.truncl(config.prefs.getint("logparse", "maxlist"))
-    section.append_data(files)
+        accesses = []
 
-    useragents = Data()
-    useragents.items = [ac.useragent for ac in accesses]
-    useragents.orderbyfreq()
-    useragents.subtitle = plural("user agent", len(useragents.items))
-    useragents.truncl(config.prefs.getint("logparse", "maxlist"))
-    section.append_data(useragents)
+        for line in accesslog.splitlines():
+            if "GET" in line:
+                accesses.append(AccessLine(line))
 
-    logger.info("httpd has received " + str(total_requests) + " requests with " + str(total_errors) + " errors")
-
-
-    logger.info("Finished httpd section")
-    return section
+        total_requests = len(accesses)
+        
+        section.append_data(Data("Total of " + plural("request", total_requests)))
+        section.append_data(Data(plural("error", total_errors)))
+
+        size = Data()
+        size.subtitle = "Transferred " + parsesize(sum([ac.bytes for ac in accesses]))
+        section.append_data(size)
+
+        clients = Data()
+        clients.items = [resolve(ac.client, config.prefs.get("httpd", "httpd-resolve-domains")) for ac in accesses]
+        clients.orderbyfreq()
+        clients.subtitle = "Received requests from " + plural("client", len(clients.items))
+        clients.truncl(config.prefs.getint("logparse", "maxlist"))
+        section.append_data(clients)
+
+        files = Data()
+        files.items = [ac.file for ac in accesses]
+        files.orderbyfreq()
+        files.subtitle = plural("file", len(files.items)) + " requested"
+        files.truncl(config.prefs.getint("logparse", "maxlist"))
+        section.append_data(files)
+
+        useragents = Data()
+        useragents.items = [ac.useragent for ac in accesses]
+        useragents.orderbyfreq()
+        useragents.subtitle = plural("user agent", len(useragents.items))
+        useragents.truncl(config.prefs.getint("logparse", "maxlist"))
+        section.append_data(useragents)
+
+        logger.info("httpd has received " + str(total_requests) + " requests with " + str(total_errors) + " errors")
+
+
+        logger.info("Finished httpd section")
+        return section
diff --git a/logparse/parsers/load_parsers.py b/logparse/parsers/load_parsers.py
deleted file mode 100644 (file)
index 85ad141..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-#   load_parsers.py
-#   
-#   Search for and load files which parse logs for particular services
-#
-
-import imp
-import importlib
-import os
-import glob
-from pathlib import Path
-from sys import path
-from typing import NamedTuple
-
-parser_dir = "/usr/share/logparse/"
-main_module = "__init__"
-default_parsers = ["cron-journald", "httpd", "mem", "postfix", "smbd", "sshd-journald", "sudo", "sysinfo", "temperature", "zfs"]
-deprecated_parsers = ["sshd", "cron"]
-
-import logging
-logger = logging.getLogger(__name__)
-
-class Parser():
-    def __init__(self, name, path=None, info=None):
-        self.name = str(name)
-        self.path = Path(path) if path else None
-        self.info = dict(info) if info else None
-
-def findall():
-    logger.debug("Searching for parsers in {0}".format(parser_dir))
-    path.append(os.path.abspath(parser_dir))
-    parsers = []
-    parser_candidates = os.listdir(parser_dir)
-    for parser_name in parser_candidates:
-        location = os.path.join(parser_dir, parser_name)
-        if not os.path.isdir(location) or not main_module + '.py' in os.listdir(location):
-            logger.warning("Rejecting parser {0} due to invalid structure".format(location))
-            continue
-        info = imp.find_module(main_module, [location])
-        parser_obj = Parser(parser_name, location, info)
-        parsers.append(parser_obj)
-        logger.debug("Added parser {0}".format(parser_obj.name))
-    return parsers
-
-def search(name):
-    logger.debug("Searching for parser {0}".format(name))
-    if name in default_parsers:
-        logger.debug("Found parser {0} in default modules".format(name))
-        return Parser('.'.join(__name__.split('.')[:-1] + [name]))
-    elif name in deprecated_parsers:
-        logger.debug("Found parser {0} in deprecated modules".format(name))
-        return Parser('.'.join(__name__.split('.')[:-1] + [name]))
-    else:
-        return None
-
-def load(parser):
-    logger.debug("Loading parser {0} from {1}".format(parser.name, parser.path if parser.path != None else "defaults"))
-    return importlib.import_module(parser.name)
index 82b2e507edcfc33585eb51995e7feaf49dda78a3..8ee3239b719212a13fd20cf95355ef1cefb76f31 100644 (file)
@@ -8,33 +8,38 @@ import re
 
 from logparse.formatting import *
 from logparse import config
+from logparse.load_parsers import Parser
 
-import logging
-logger = logging.getLogger(__name__)
+class Mem(Parser):
 
-def parse_log():
+    def __init__(self):
+        super().__init__()
+        self.name = "mem"
+        self.info = "Get instantaneous memory statistics (installed, total, free, available)"
 
-    logger.debug("Starting memory section")
-    section = Section("memory")
-    
-    table = Table()
+    def parse_log(self):
 
-    ram_b = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
-    table.add_row(Row([Column("Installed"), Column(parsesize(ram_b))]))
+        logger.debug("Starting memory section")
+        section = Section("memory")
+        
+        table = Table()
 
-    raw_mem = util.readlog(config.prefs.get("logs", "meminfo"))
-    line_regex = re.compile("^Mem(\w+):\s*(\d*)\s*kB$")
+        ram_b = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
+        table.add_row(Row([Column("Installed"), Column(parsesize(ram_b))]))
 
-    for line in raw_mem.splitlines():
+        raw_mem = util.readlog(config.prefs.get("logs", "meminfo"))
+        line_regex = re.compile("^Mem(\w+):\s*(\d*)\s*kB$")
 
-        matches = line_regex.findall(line)
+        for line in raw_mem.splitlines():
 
-        if len(matches) > 0:
-            logger.debug("Detected {0} memory of {1} kB".format(matches[0][0].lower(), matches[0][1]))
-            table.add_row(Row([Column(matches[0][0]), Column(parsesize(float(matches[0][1])*1000))]))
+            matches = line_regex.findall(line)
 
-    table.align_column(0, "right")
-    section.append_table(table)
+            if len(matches) > 0:
+                logger.debug("Detected {0} memory of {1} kB".format(matches[0][0].lower(), matches[0][1]))
+                table.add_row(Row([Column(matches[0][0]), Column(parsesize(float(matches[0][1])*1000))]))
 
-    logger.info("Finished memory section")
-    return section
+        table.align_column(0, "right")
+        section.append_table(table)
+
+        logger.info("Finished memory section")
+        return section
index 1b7ff80c0d5ccdead8a594050a1d743dc5449a53..72e287e230dcc76f1c5173ff7a131f51eb769ab9 100644 (file)
@@ -9,42 +9,47 @@ import re
 from logparse.formatting import *
 from logparse.util import readlog
 from logparse import config
+from logparse.load_parsers import Parser
 
-import logging
-logger = logging.getLogger(__name__)
+class Postfix(Parser):
 
-def parse_log():
-    section = Section("postfix")
-    logger.debug("Starting postfix section")
-    logger.debug("Searching through postfix logs")
-    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs.get("logs", "postfix")))
-    r = []
-    s = []
-    size = 0
-    logger.debug("Analysing message size")
-    for message in messages:
-        r.append(message[2])
-        s.append(message[0])
-        size += int(message[1])
-    # size = sum([int(x) for x in messages])
-    size = parsesize(size)
-    n = str(len(messages))
+    def __init__(self):
+        super().__init__()
+        self.name = "postfix"
+        self.info = "Get message statistics from postfix/sendmail logs"
 
-    logger.debug("Analysing message recipients")
-    if (len(r) > 0):
-        rec_data = Data()
-        s = list(set(r))    # unique recipients
-        if (len(s) > 1):
-            rec_data.items = r
-            rec_data.orderbyfreq()
-            rec_data.truncl(config.prefs.getint("logparse", "maxlist"))
-            rec_data.subtitle = n + " messages sent to"
+    def parse_log(self):
+        section = Section("postfix")
+        logger.debug("Starting postfix section")
+        logger.debug("Searching through postfix logs")
+        messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs.get("logs", "postfix")))
+        r = []
+        s = []
+        size = 0
+        logger.debug("Analysing message size")
+        for message in messages:
+            r.append(message[2])
+            s.append(message[0])
+            size += int(message[1])
+        # size = sum([int(x) for x in messages])
+        size = parsesize(size)
+        n = str(len(messages))
+
+        logger.debug("Analysing message recipients")
+        if (len(r) > 0):
+            rec_data = Data()
+            s = list(set(r))    # unique recipients
+            if (len(s) > 1):
+                rec_data.items = r
+                rec_data.orderbyfreq()
+                rec_data.truncl(config.prefs.getint("logparse", "maxlist"))
+                rec_data.subtitle = n + " messages sent to"
+            else:
+                rec_data.subtitle = n + " messages sent to " + r[0]
+            section.append_data(rec_data)
         else:
-            rec_data.subtitle = n + " messages sent to " + r[0]
-        section.append_data(rec_data)
-    else:
-        section.append_data(Data(subtitle=n + " messages sent"))
-    logger.info("Found {0} messages sent to {1} recipients".format(n, str(len(r))))
-    section.append_data(Data(subtitle="Total of " + size))
-    logger.info("Finished postfix section")
-    return section 
+            section.append_data(Data(subtitle=n + " messages sent"))
+        logger.info("Found {0} messages sent to {1} recipients".format(n, str(len(r))))
+        section.append_data(Data(subtitle="Total of " + size))
+        logger.info("Finished postfix section")
+        return section 
index 133f8da8147fa096e27649e4d6622cf624c7afa6..963c8e6213fddf6cc13139c3029a49547c483f13 100644 (file)
@@ -11,67 +11,72 @@ import glob
 from logparse.formatting import *
 from logparse.util import readlog, resolve
 from logparse import config
+from logparse.load_parsers import Parser
 
-import logging
-logger = logging.getLogger(__name__)
+class Smbd(Parser):
 
-def parse_log():
-    logger.debug("Starting smbd section")
-    section = Section("smbd")
-    files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]")    # find list of logfiles
-    # for f in files:
+    def __init__(self):
+        super().__init__()
+        self.name = "smbd"
+        self.info = "Get login statistics for a samba server."
 
-        # file_mod_time = os.stat(f).st_mtime
+    def parse_log(self):
+        logger.debug("Starting smbd section")
+        section = Section("smbd")
+        files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]")    # find list of logfiles
+        # for f in files:
 
-        # Time in seconds since epoch for time, in which logfile can be unmodified.
-        # should_time = time.time() - (30 * 60)
+            # file_mod_time = os.stat(f).st_mtime
 
-        # Time in minutes since last modification of file
-        # last_time = (time.time() - file_mod_time)
-        # logger.debug(last_time)
+            # Time in seconds since epoch for time, in which logfile can be unmodified.
+            # should_time = time.time() - (30 * 60)
 
-        # if (file_mod_time - should_time) < args.time:
-            # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
-        # else:
+            # Time in minutes since last modification of file
+            # last_time = (time.time() - file_mod_time)
+            # logger.debug(last_time)
 
-        # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
-            # files.remove(f)
-    logger.debug("Found log files " + str(files))
-    n_auths = 0         # total number of logins from all users
-    sigma_auths = []    # contains users
+            # if (file_mod_time - should_time) < args.time:
+                # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
+            # else:
 
-    for file in files:  # one log file for each client
+            # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
+                # files.remove(f)
+        logger.debug("Found log files " + str(files))
+        n_auths = 0         # total number of logins from all users
+        sigma_auths = []    # contains users
 
-        logger.debug("Looking at file " + file)
+        for file in files:  # one log file for each client
 
-        # find the machine (ip or hostname) that this file represents
-        ip = re.search('log\.(.*)', file).group(1)    # get ip or hostname from file path (/var/log/samba/log.host)
-        host = resolve(ip, fqdn=config.prefs.get("smbd", "resolve-domains"))
-        if host == ip and (config.prefs.get("smbd", "resolve-domains") != "ip" or config.prefs.get("logparse", "resolve-domains") != "ip"):    # if ip has disappeared, fall back to a hostname from logfile
-            newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
-            if (len(set(newhost)) == 1):    # all hosts in one file should be the same
-                host = newhost[0].lower()
+            logger.debug("Looking at file " + file)
 
-        # count number of logins from each user-host pair
-        matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
-        for match in matches:
-            userhost = match + "@" + host
-            sigma_auths.append(userhost)
-            # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
-            # if (exists == []):
-            #     sigma_auths.append([userhost, 1])
-            # else:
-            #     sigma_auths[exists[0]][1] += 1
-            n_auths += 1
-    auth_data = Data(subtitle=plural("login", n_auths) + " from")
-    if (len(sigma_auths) == 1):             # if only one user, do not display no of logins for this user
-        auth_data.subtitle += ' ' + sigma_auths[0][0]
+            # find the machine (ip or hostname) that this file represents
+            ip = re.search('log\.(.*)', file).group(1)    # get ip or hostname from file path (/var/log/samba/log.host)
+            host = resolve(ip, fqdn=config.prefs.get("smbd", "smbd-resolve-domains"))
+            if host == ip and (config.prefs.get("smbd", "smbd-resolve-domains") != "ip" or config.prefs.get("logparse", "resolve-domains") != "ip"):    # if ip has disappeared, fall back to a hostname from logfile
+                newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
+                if (len(set(newhost)) == 1):    # all hosts in one file should be the same
+                    host = newhost[0].lower()
+
+            # count number of logins from each user-host pair
+            matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
+            for match in matches:
+                userhost = match + "@" + host
+                sigma_auths.append(userhost)
+                # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
+                # if (exists == []):
+                #     sigma_auths.append([userhost, 1])
+                # else:
+                #     sigma_auths[exists[0]][1] += 1
+                n_auths += 1
+        auth_data = Data(subtitle=plural("login", n_auths) + " from")
+        if (len(sigma_auths) == 1):             # if only one user, do not display no of logins for this user
+            auth_data.subtitle += ' ' + sigma_auths[0][0]
+            section.append_data(auth_data)
+        else:       # multiple users
+            auth_data.items = sigma_auths
+            auth_data.orderbyfreq()
+            auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
+            logger.debug("Found {0} samba logins".format(str(n_auths)))
         section.append_data(auth_data)
-    else:       # multiple users
-        auth_data.items = sigma_auths
-        auth_data.orderbyfreq()
-        auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
-        logger.debug("Found {0} samba logins".format(str(n_auths)))
-    section.append_data(auth_data)
-    logger.info("Finished smbd section")
-    return section
+        logger.info("Finished smbd section")
+        return section
diff --git a/logparse/parsers/sshd-journald.py b/logparse/parsers/sshd-journald.py
deleted file mode 100644 (file)
index 179fc2b..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-#
-#   sshd.py
-#   
-#   Find number of ssh logins and authorised users
-#
-
-import re
-from systemd import journal
-
-from logparse.formatting import *
-from logparse.util import resolve
-from logparse import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
-    logger.debug("Starting sshd section")
-    section = Section("ssh")
-
-    j = journal.Reader()
-    j.this_boot()
-    j.log_level(journal.LOG_DEBUG)
-    j.add_match(_COMM="sshd")
-    
-    messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
-
-    login_data = Data("successful", [])
-    invalid_data = Data("invalid", [])
-    failed_data = Data("failed", [])
-
-    for msg in messages:
-
-        if "Accepted publickey" in msg:
-            entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', msg)  # [('user', 'ip')]
-            user = entry.group(1)
-            ip = entry.group(2)
-
-            userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
-            login_data.items.append(userhost)
-
-        elif "Connection closed by authenticating user root" in msg:
-            entry = re.search('^.*Connection closed by authenticating user (\S+) (\S+)', msg)  # [('user', 'ip')]
-            user = entry.group(1)
-            ip = entry.group(2)
-
-            userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
-            failed_data.items.append(userhost)
-
-        elif "Invalid user" in msg:
-            entry = re.search('^.*Invalid user (\S+) from (\S+).*', msg)  # [('user', 'ip')]
-            user = entry.group(1)
-            ip = entry.group(2)
-
-            userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
-            invalid_data.items.append(userhost)
-
-    login_data.subtitle = plural("successful login", len(login_data.items)) + " from"
-    login_data.orderbyfreq()
-    login_data.truncl(config.prefs.getint("logparse", "maxlist"))
-    
-    invalid_data.subtitle = plural("attempted login", len(invalid_data.items))
-    invalid_data.orderbyfreq()
-    invalid_data.subtitle +=  plural(" from invalid user", len(invalid_data.items), False)
-    invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
-
-    failed_data.subtitle = plural("failed login", len(failed_data.items)) + " from"
-    failed_data.orderbyfreq()
-    failed_data.truncl(config.prefs.getint("logparse", "maxlist"))
-
-    section.append_data(login_data)
-    section.append_data(invalid_data)
-    section.append_data(failed_data)
-
-    logger.info("Finished sshd section")
-    return section
index 8a7ed483eef1ce0412f7b8618aaefadb3eee76ef..a20ec61914ec57f8693e32621f7ee30c46ee7b0c 100644 (file)
@@ -1,5 +1,5 @@
 #
-#   sshd_auth.py
+#   sshd.py
 #   
 #   Find number of ssh logins and authorised users (uses /var/log/auth.log)
 #   
@@ -13,76 +13,81 @@ import re
 from logparse.formatting import *
 from logparse.util import readlog, resolve
 from logparse import config
+from logparse.load_parsers import Parser
+
+class Sshd(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "sshd"
+        self.info = "Find number of ssh logins and authorised users (uses /var/log/auth.log)"
+
+    def parse_log(self):
+
+        logger.warning("NOTE: This sshd parser is now deprecated. Please use sshd-journald if possible.")
+
+        logger.debug("Starting sshd section")
+        section = Section("ssh")
+        logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
+        matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog(config.prefs.get("logs", "auth")))    # get all logins
+        logger.debug("Finished searching for logins")
+
+        logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
+        authlog = readlog(config.prefs.get("logs", "auth"))
+       
+        matches = re.findall('.*sshd.*Accepted publickey for .* from .*', authlog)    # get all logins
+        invalid_matches = re.findall(".*sshd.*Invalid user .* from .*", authlog)
+        root_matches = re.findall("Disconnected from authenticating user root", authlog)
+        logger.debug("Finished searching for logins")
+        
+        users = []  # list of users with format [username, number of logins] for each item
+        data = []
+        num = len(matches)     # total number of logins
+        for match in matches:
+            entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match)  # [('user', 'ip')]
 
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
-    logger.warning("NOTE: This sshd parser is now deprecated. Please use sshd-journald if possible.")
-
-    logger.debug("Starting sshd section")
-    section = Section("ssh")
-    logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
-    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog(config.prefs.get("logs", "auth")))    # get all logins
-    logger.debug("Finished searching for logins")
-
-    logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
-    authlog = readlog(config.prefs.get("logs", "auth"))
-   
-    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', authlog)    # get all logins
-    invalid_matches = re.findall(".*sshd.*Invalid user .* from .*", authlog)
-    root_matches = re.findall("Disconnected from authenticating user root", authlog)
-    logger.debug("Finished searching for logins")
-    
-    users = []  # list of users with format [username, number of logins] for each item
-    data = []
-    num = len(matches)     # total number of logins
-    for match in matches:
-        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match)  # [('user', 'ip')]
-
-        user = entry.group(1)
-        ip = entry.group(2)
-
-        userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
-        users.append(userhost)
-    logger.debug("Parsed list of authorised users")
-
-    auth_data = Data(subtitle=plural('login', num) + ' from', items=users)
-
-    if (len(auth_data.items) == 1):             # if only one user, do not display no of logins for this user
-        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
-        auth_data.subtitle += ' ' + auth_data.items[0]
-    auth_data.orderbyfreq()
-    auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
-    logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
-    section.append_data(auth_data)
-
-    invalid_users = []
-    for match in invalid_matches:
-        entry = re.search('^.*Invalid user (\S+) from (\S+).*', match)  # [('user', 'ip')]
-
-        try:
             user = entry.group(1)
             ip = entry.group(2)
-        except:     # blank user field
-            continue
-
-        userhost = user + '@' + ip
-        invalid_users.append(userhost)
-    logger.debug("Parsed list of invalid users")
-    invalid_data = Data(subtitle=plural("attempted login", len(invalid_matches)) + " from " + plural("invalid user", len(invalid_users), print_quantity=False), items=invalid_users)
-    if (len(invalid_data.items) == 1):             # if only one user, do not display no of logins for this user
-        logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid user " + invalid_users[0])
-        invalid_data.subtitle += ' ' + invalid_data.items[0]
-    invalid_data.orderbyfreq()
-    invalid_data.truncl(config.prefs.get("logparse", "maxlist"))
-    logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid users " + str(data))
-    section.append_data(invalid_data)
-
-    logger.debug("Found {0} attempted logins for root".format(str(len(root_matches))))
-
-    section.append_data(Data(subtitle=plural("attempted login", str(len(root_matches))) + " for root"))
-
-    logger.info("Finished sshd section")
-    return section
+
+            userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+            users.append(userhost)
+        logger.debug("Parsed list of authorised users")
+
+        auth_data = Data(subtitle=plural('login', num) + ' from', items=users)
+
+        if (len(auth_data.items) == 1):             # if only one user, do not display no of logins for this user
+            logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
+            auth_data.subtitle += ' ' + auth_data.items[0]
+        auth_data.orderbyfreq()
+        auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
+        logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
+        section.append_data(auth_data)
+
+        invalid_users = []
+        for match in invalid_matches:
+            entry = re.search('^.*Invalid user (\S+) from (\S+).*', match)  # [('user', 'ip')]
+
+            try:
+                user = entry.group(1)
+                ip = entry.group(2)
+            except:     # blank user field
+                continue
+
+            userhost = user + '@' + ip
+            invalid_users.append(userhost)
+        logger.debug("Parsed list of invalid users")
+        invalid_data = Data(subtitle=plural("attempted login", len(invalid_matches)) + " from " + plural("invalid user", len(invalid_users), print_quantity=False), items=invalid_users)
+        if (len(invalid_data.items) == 1):             # if only one user, do not display no of logins for this user
+            logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid user " + invalid_users[0])
+            invalid_data.subtitle += ' ' + invalid_data.items[0]
+        invalid_data.orderbyfreq()
+        invalid_data.truncl(config.prefs.get("logparse", "maxlist"))
+        logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid users " + str(data))
+        section.append_data(invalid_data)
+
+        logger.debug("Found {0} attempted logins for root".format(str(len(root_matches))))
+
+        section.append_data(Data(subtitle=plural("attempted login", str(len(root_matches))) + " for root"))
+
+        logger.info("Finished sshd section")
+        return section
diff --git a/logparse/parsers/sshd_journald.py b/logparse/parsers/sshd_journald.py
new file mode 100644 (file)
index 0000000..841d195
--- /dev/null
@@ -0,0 +1,82 @@
+#
+#   sshd_journald.py
+#   
+#   Find number of ssh logins and authorised users (uses journald)
+#
+
+import re
+from systemd import journal
+
+from logparse.formatting import *
+from logparse.util import resolve
+from logparse import config
+from logparse.load_parsers import Parser
+
+class SshdJournald(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "sshd_journald"
+        self.info = "Find number of ssh logins and authorised users (uses journald)"
+
+    def parse_log(self):
+
+        logger.debug("Starting sshd section")
+        section = Section("ssh")
+
+        j = journal.Reader()
+        j.this_boot()
+        j.log_level(journal.LOG_DEBUG)
+        j.add_match(_COMM="sshd")
+        
+        messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
+
+        login_data = Data("successful", [])
+        invalid_data = Data("invalid", [])
+        failed_data = Data("failed", [])
+
+        for msg in messages:
+
+            if "Accepted publickey" in msg:
+                entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', msg)  # [('user', 'ip')]
+                user = entry.group(1)
+                ip = entry.group(2)
+
+                userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+                login_data.items.append(userhost)
+
+            elif "Connection closed by authenticating user root" in msg:
+                entry = re.search('^.*Connection closed by authenticating user (\S+) (\S+)', msg)  # [('user', 'ip')]
+                user = entry.group(1)
+                ip = entry.group(2)
+
+                userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+                failed_data.items.append(userhost)
+
+            elif "Invalid user" in msg:
+                entry = re.search('^.*Invalid user (\S+) from (\S+).*', msg)  # [('user', 'ip')]
+                user = entry.group(1)
+                ip = entry.group(2)
+
+                userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "sshd-resolve-domains"))
+                invalid_data.items.append(userhost)
+
+        login_data.subtitle = plural("successful login", len(login_data.items)) + " from"
+        login_data.orderbyfreq()
+        login_data.truncl(config.prefs.getint("logparse", "maxlist"))
+        
+        invalid_data.subtitle = plural("attempted login", len(invalid_data.items))
+        invalid_data.orderbyfreq()
+        invalid_data.subtitle +=  plural(" from invalid user", len(invalid_data.items), False)
+        invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
+
+        failed_data.subtitle = plural("failed login", len(failed_data.items)) + " from"
+        failed_data.orderbyfreq()
+        failed_data.truncl(config.prefs.getint("logparse", "maxlist"))
+
+        section.append_data(login_data)
+        section.append_data(invalid_data)
+        section.append_data(failed_data)
+
+        logger.info("Finished sshd section")
+        return section
index 77feaa6bd0978f7a96dd8c5f6c6ec40875ab815c..d1c3b81c0d72771963a3a1a806d8705ae5ad903c 100644 (file)
@@ -6,53 +6,58 @@
 
 import re
 
-from logparse..formatting import *
+from logparse.formatting import *
 from logparse.util import readlog
 from logparse.config import prefs
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-    logger.debug("Starting sudo section")
-    section = Section("sudo")
-    logger.debug("Searching for matches in {0}".format(prefs.get("logs", "auth")))
-    umatches = re.findall('.*sudo:session\): session opened.*', readlog(prefs.get("logs", "auth")))
-    num = sum(1 for line in umatches)    # total number of sessions
-    users = []
-    data = []
-    for match in umatches:
-        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
-        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
-        if (exists == []):
-            users.append([user, 1])
+from logparse.load_parsers import Parser
+
+class Sudo(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "sudo"
+        self.info = "Get number of sudo sessions for each user"
+
+    def parse_log(self):
+        logger.debug("Starting sudo section")
+        section = Section("sudo")
+        logger.debug("Searching for matches in {0}".format(prefs.get("logs", "auth")))
+        umatches = re.findall('.*sudo:session\): session opened.*', readlog(prefs.get("logs", "auth")))
+        num = sum(1 for line in umatches)    # total number of sessions
+        users = []
+        data = []
+        for match in umatches:
+            user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
+            exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
+            if (exists == []):
+                users.append([user, 1])
+            else:
+                users[exists[0]][1] += 1
+        commands = []
+        cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(prefs.get("logs", "auth")))
+        for cmd in cmatches:
+            commands.append(cmd)
+        logger.debug("Finished parsing sudo sessions")
+
+        auth_data = Data(subtitle=plural("sudo session", num) + " for")
+
+        if (len(users) == 1):
+            logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
+            auth_data.subtitle += ' ' + users[0][0]
         else:
-            users[exists[0]][1] += 1
-    commands = []
-    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(prefs.get("logs", "auth")))
-    for cmd in cmatches:
-        commands.append(cmd)
-    logger.debug("Finished parsing sudo sessions")
-
-    auth_data = Data(subtitle=plural("sudo session", num) + " for")
-
-    if (len(users) == 1):
-        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
-        auth_data.subtitle += ' ' + users[0][0]
-    else:
-        for user in users:
-            auth_data.items.append(user[0] + ' (' + str(user[1]) + ')')
-        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
-    section.append_data(auth_data)
-
-    if (len(commands) > 0):
-        command_data = Data(subtitle="top sudo commands")
-        commands = backticks(commands)
-        command_data.items = commands
-        command_data.orderbyfreq()
-        command_data.truncl(prefs.getint("logparse", "maxcmd"))
-        section.append_data(command_data)
-
-    logger.info("Finished sudo section")
-
-    return section
+            for user in users:
+                auth_data.items.append(user[0] + ' (' + str(user[1]) + ')')
+            logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
+        section.append_data(auth_data)
+
+        if (len(commands) > 0):
+            command_data = Data(subtitle="top sudo commands")
+            commands = backticks(commands)
+            command_data.items = commands
+            command_data.orderbyfreq()
+            command_data.truncl(prefs.getint("logparse", "maxcmd"))
+            section.append_data(command_data)
+
+        logger.info("Finished sudo section")
+
+        return section
index e0168b8b91c542371c982693cf7bba268fb6c2bd..6b89f82448bf84fe1ce5e92d7617a4263060cad7 100644 (file)
@@ -11,44 +11,49 @@ import re
 
 from logparse.formatting import *
 from logparse.config import prefs
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
-    logger.debug("Starting sysinfo section")
-    section = Section("system")
-    table = Table()
-
-    table.add_row(Row([Column("Hostname"), Column(util.hostname(prefs.get("logparse", "hostname-path")))]))
-    table.add_row(Row([Column("OS"), Column(platform.platform())]))
-    table.add_row(Row([Column("OS version"), Column(platform.version())]))
-    table.add_row(Row([Column("Platform"), Column(platform.system() + " " + platform.machine())]))
-
-    processors = []
-    raw_proc = util.readlog(prefs.get("logs", "cpuinfo"))
-    line_regex = re.compile(".*model name.*:\s*")
-    proc_regex = re.compile("\s*(\(R\)|\(TM\)|CPU)")
-    for line in raw_proc.splitlines():
-        if "model name" in line:
-            processor = line_regex.sub("", line, 1)
-            processor = " ".join(proc_regex.sub("", processor).split()) # remove extraneous text and whitespace
-            if not processor in processors:
-                processors.append(processor)
-            else:
-                logger.debug("Found duplicate entry (perhaps multiple cores?) for {0}".format(processor))
-    table.align_column(0, "right")
-    if len(processors) == 1:
-        table.add_row(Row([Column("Processor"), Column("; ".join(processors))]))
-        section.append_table(table)
-    elif len(processors) > 1:
-        section.append_table(table)
-        proc_data = Data("Processors")
-        proc_data.items = processors
-        section.append_data(proc_data)
-    else:
-        logger.warning("Failed to find processor data")
-
-    logger.info("Finished sysinfo section")
-    return section
+from logparse.load_parsers import Parser
+
+class Sysinfo(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "sysinfo"
+        self.info = "Get standard system information from basic Unix commands"
+
+    def parse_log(self):
+
+        logger.debug("Starting sysinfo section")
+        section = Section("system")
+        table = Table()
+
+        table.add_row(Row([Column("Hostname"), Column(util.hostname(prefs.get("logparse", "hostname-path")))]))
+        table.add_row(Row([Column("OS"), Column(platform.platform())]))
+        table.add_row(Row([Column("OS version"), Column(platform.version())]))
+        table.add_row(Row([Column("Platform"), Column(platform.system() + " " + platform.machine())]))
+
+        processors = []
+        raw_proc = util.readlog(prefs.get("logs", "cpuinfo"))
+        line_regex = re.compile(".*model name.*:\s*")
+        proc_regex = re.compile("\s*(\(R\)|\(TM\)|CPU)")
+        for line in raw_proc.splitlines():
+            if "model name" in line:
+                processor = line_regex.sub("", line, 1)
+                processor = " ".join(proc_regex.sub("", processor).split()) # remove extraneous text and whitespace
+                if processor not in processors:
+                    processors.append(processor)
+                else:
+                    logger.debug("Found duplicate entry (perhaps multiple cores?) for {0}".format(processor))
+        table.align_column(0, "right")
+        if len(processors) == 1:
+            table.add_row(Row([Column("Processor"), Column("; ".join(processors))]))
+            section.append_table(table)
+        elif len(processors) > 1:
+            section.append_table(table)
+            proc_data = Data("Processors")
+            proc_data.items = processors
+            section.append_data(proc_data)
+        else:
+            logger.warning("Failed to find processor data")
+            section.append_table(table)  # still report hostname/OS rows even without cpuinfo
+
+        logger.info("Finished sysinfo section")
+        return section
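
Under the new structure, each module in logparse/parsers exposes a Parser subclass that sets self.name and self.info in __init__() and implements parse_log() returning a Section, as Sysinfo does above. For orientation, a minimal sketch of a parser written against that interface follows; the Boots class, the "logs: boots" config key and the line-matching logic are illustrative assumptions, not part of this commit:

    from logparse.formatting import Section, Data
    from logparse.config import prefs
    from logparse.util import readlog
    from logparse.load_parsers import Parser

    class Boots(Parser):

        def __init__(self):
            super().__init__()
            self.name = "boots"
            self.info = "Example parser: summarise boot records in a plain-text log"

        def parse_log(self):
            section = Section("boots")
            # "boots" is a hypothetical key under [logs], used only for this sketch
            log = readlog(prefs.get("logs", "boots"))
            boot_data = Data(subtitle="boot events")
            boot_data.items = [l for l in log.splitlines() if "boot" in l.lower()]
            boot_data.orderbyfreq()
            boot_data.truncl(10)
            section.append_data(boot_data)
            return section
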
index ebca1ac1eb5ece03125f1d7b3703547f078a1671..afc652b4fe00eba694f5127de062a43845922d8e 100644 (file)
@@ -24,6 +24,7 @@ from logparse import config
 
 import logging
 logger = logging.getLogger(__name__)
+from logparse.load_parsers import Parser
 
 class Drive(NamedTuple):
     path: str
@@ -70,81 +71,90 @@ class HddtempClient:
             return 1
 
 
-def parse_log():
-
-    logger.debug("Starting temp section")
-    section = Section("temperatures")
-
-    sensors.init()
-
-    systemp = Data("Sys", [])
-    coretemp = Data("Cores", [])
-    pkgtemp = Data("Processor", [])
-
-    try:
-        for chip in sensors.iter_detected_chips():
-            for feature in chip:
-                if "Core" in feature.label:
-                    coretemp.items.append([feature.label, float(feature.get_value())])
-                    continue
-                if "CPUTIN" in feature.label:
-                    pkgtemp.items.append([feature.label, float(feature.get_value())])
-                    continue
-                if "SYS" in feature.label:
-                    systemp.items.append([feature.label, float(feature.get_value())])
-                    continue
-
-        logger.debug("Core data is {0}".format(str(coretemp.items)))
-        logger.debug("Sys data is {0}".format(str(systemp.items)))
-        logger.debug("Pkg data is {0}".format(str(pkgtemp.items)))
-        for temp_data in [systemp, coretemp, pkgtemp]:
-            logger.debug("Looking at temp data {0}".format(str(temp_data.items)))
-            if len(temp_data.items) > 1:
-                avg = float(sum(feature[1] for feature in temp_data.items)) / len(temp_data.items)
-                logger.debug("Avg temp for {0} is {1} {2}{3}".format(temp_data.subtitle, str(avg), DEG, CEL))
-                temp_data.subtitle += " (avg {0}{1}{2})".format(str(avg), DEG, CEL)
-                temp_data.items = ["{0}: {1}{2}{3}".format(feature[0], str(feature[1]), DEG, CEL) for feature in temp_data.items]
+class Temperature(Parser):
+
+    def __init__(self):
+        super().__init__()
+        self.name = "temperature"
+        self.info = "Find current temperature of various system components (CPU, motherboard, hard drives, ambient)."
+
+    def parse_log(self):
+
+        logger.debug("Starting temp section")
+        section = Section("temperatures")
+
+        sensors.init()
+
+        systemp = Data("Sys", [])
+        coretemp = Data("Cores", [])
+        pkgtemp = Data("Processor", [])
+
+        try:
+            for chip in sensors.iter_detected_chips():
+                for feature in chip:
+                    if "Core" in feature.label:
+                        coretemp.items.append([feature.label, float(feature.get_value())])
+                        continue
+                    if "CPUTIN" in feature.label:
+                        pkgtemp.items.append([feature.label, float(feature.get_value())])
+                        continue
+                    if "SYS" in feature.label:
+                        systemp.items.append([feature.label, float(feature.get_value())])
+                        continue
+
+            logger.debug("Core data is {0}".format(str(coretemp.items)))
+            logger.debug("Sys data is {0}".format(str(systemp.items)))
+            logger.debug("Pkg data is {0}".format(str(pkgtemp.items)))
+            for temp_data in [systemp, coretemp, pkgtemp]:
+                logger.debug("Looking at temp data {0}".format(str(temp_data.items)))
+                if len(temp_data.items) > 1:
+                    avg = float(sum(feature[1] for feature in temp_data.items)) / len(temp_data.items)
+                    logger.debug("Avg temp for {0} is {1} {2}{3}".format(temp_data.subtitle, str(avg), DEG, CEL))
+                    temp_data.subtitle += " (avg {0}{1}{2})".format(str(avg), DEG, CEL)
+                    temp_data.items = ["{0}: {1}{2}{3}".format(feature[0], str(feature[1]), DEG, CEL) for feature in temp_data.items]
+                elif len(temp_data.items) == 1:
+                    temp_data.items = [str(temp_data.items[0][1]) + DEG + CEL]
+                section.append_data(temp_data)
+
+        finally:
+            logger.debug("Finished reading onboard temperatures")
+            sensors.cleanup()
+
+
+        # drive temp
+
+        # For this to work, `hddtemp` must be running in daemon mode.
+        # Start it like this (bash):   sudo hddtemp -d /dev/sda /dev/sdX...
+        
+        received = ''
+        sumtemp = 0.0 
+        data = ""
+        hddtemp_data = Data("Disks")
+        
+        client = HddtempClient(
+            host=config.prefs.get("temperatures", "host"),
+            port=config.prefs.getint("temperatures", "port"),
+            sep=config.prefs.get("temperatures", "separator"),
+            timeout=int(config.prefs.get("temperatures", "timeout")))
+        drives = client.get_drives()
+        logger.debug("Received drive info: " + str(drives))
+
+        for drive in sorted(drives, key=lambda x: x.path):
+            if drive.path in config.prefs.get("temperatures", "drives").split():
+                sumtemp += drive.temperature
+                hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs.getboolean("temperatures", "show-model") else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
             else:
-                temp_data.items = [str(temp_data.items[0][1]) + DEG + CEL]
-            section.append_data(temp_data)
-
-    finally:
-        logger.debug("Finished reading onboard temperatures")
-        sensors.cleanup()
-
-
-    # drive temp
-
-    # For this to work, `hddtemp` must be running in daemon mode.
-    # Start it like this (bash):   sudo hddtemp -d /dev/sda /dev/sdX...
-    
-    received = ''
-    sumtemp = 0.0 
-    data = ""
-    hddtemp_data = Data("Disks")
-    
-    client = HddtempClient(
-        host=config.prefs.get("temperatures", "host"),
-        port=config.prefs.getint("temperatures", "port"),
-        sep=config.prefs.get("temperatures", "separator"),
-        timeout=int(config.prefs.get("temperatures", "timeout")))
-    drives = client.get_drives()
-    logger.debug("Received drive info: " + str(drives))
-
-    for drive in sorted(drives, key=lambda x: x.path):
-        if drive.path in config.prefs.get("temperatures", "drives").split():
-            sumtemp += drive.temperature
-            hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs.getboolean("temperatures", "show-model") else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
-        else:
-            drives.remove(drive)
-            logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
-    logger.debug("Sorted drive info: " + str(drives))
-
-    hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
-    logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg)) 
-    hddtemp_data.subtitle += " (avg {0}{1}{2})".format(str(hddavg), DEG, CEL)
-
-    logger.debug("Finished processing drive temperatures")
-    logger.info("Finished temp section")
-
-    return section
+                drives.remove(drive)
+                logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
+        logger.debug("Sorted drive info: " + str(drives))
+
+        if drives:
+            hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
+            logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
+            hddtemp_data.subtitle += " (avg {0})".format(hddavg)   # hddavg already contains degree sign and units
+        section.append_data(hddtemp_data)
+
+        logger.debug("Finished processing drive temperatures")
+        logger.info("Finished temp section")
+
+        return section
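
The drive-temperature block above requires hddtemp to be running in daemon mode, as its comments note, and HddtempClient reads the daemon's pipe-separated records over TCP using the host, port, separator and timeout preferences. A minimal sketch of that exchange outside logparse, assuming the daemon's conventional default port 7634 on localhost (hard-coded here purely for illustration):

    import socket

    def read_hddtemp(host="127.0.0.1", port=7634, timeout=10):
        """Fetch the raw record string from a running `hddtemp -d` daemon."""
        chunks = []
        with socket.create_connection((host, port), timeout=timeout) as sock:
            while True:
                chunk = sock.recv(4096)
                if not chunk:
                    break
                chunks.append(chunk)
        return b"".join(chunks).decode()

    # each drive appears as one |-separated record, e.g. |/dev/sda|<model>|36|C|
    print(read_hddtemp())
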
index 75d414eccbe59c031dd31bfab43084b7d26ca643..ddef5f2594462bdd4cfea878be5bacced1155460 100644 (file)
 import re
 import sys, traceback
 
-from formatting import *
+from logparse.formatting import *
 from logparse.util import readlog
 from logparse.config import prefs
+from logparse.load_parsers import Parser
 
-import logging
-logger = logging.getLogger(__name__)
+class Zfs(Parser):
 
-def parse_log():
+    def __init__(self):
+        super().__init__()
+        self.name = "zfs"
+        self.info = "Look through ZFS logs to find latest scrub and its output."
 
-    logger.debug("Starting zfs section")
-    section = Section("zfs")
+    def parse_log(self):
 
-    zfslog = readlog(prefs.get("logs", "zfs"))
+        logger.debug("Starting zfs section")
+        section = Section("zfs")
 
-    logger.debug("Analysing zpool log")
-    pool = re.search('.*---\n(\w*)', zfslog).group(1)
-    scrub = re.search('.* scrub repaired (\d+\s*\w+) in .* with (\d+) errors on (\w+)\s+(\w+)\s+(\d+)\s+(\d{1,2}:\d{2}):\d+\s+(\d{4})', zfslog)
-    logger.debug("Found groups {0}".format(scrub.groups()))
-    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
-    scrubrepairs = scruberrors = scrubdate = None
-    alloc = iostat.group(1)
-    free = iostat.group(2)
+        zfslog = readlog(prefs.get("logs", "zfs"))
 
-    try:
-        scrubrepairs = scrub.group(1)
-        scruberrors = scrub.group(2)
-        scrubdate = ' '.join(scrub.groups()[2:-1])
-    except Exception as e:
-        logger.debug("Error getting scrub data: " + str(e))
-        traceback.print_exc(limit=2, file=sys.stdout)
+        logger.debug("Analysing zpool log")
+        pool = re.search(r'.*---\n(\w*)', zfslog).group(1)
+        scrub = re.search(r'.* scrub repaired (\d+\s*\w+) in .* with (\d+) errors on (\w+)\s+(\w+)\s+(\d+)\s+(\d{1,2}:\d{2}):\d+\s+(\d{4})', zfslog)
+        logger.debug("Found groups {0}".format(scrub.groups() if scrub else None))
+        iostat = re.search(r'.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
+        scrubrepairs = scruberrors = scrubdate = None
+        alloc = iostat.group(1)
+        free = iostat.group(2)
 
-    if (scrubdate != None):
-        scrub_data = Data("Scrub of " + pool + " on " + scrubdate)
-        scrub_data.items = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
-    else:
-        scrub_data = Data(pool)
-        scrub_data.items = [alloc + " used", free + " free"]
+        try:
+            scrubrepairs = scrub.group(1)
+            scruberrors = scrub.group(2)
+            scrubdate = ' '.join(scrub.groups()[2:-1])
+        except Exception as e:
+            logger.debug("Error getting scrub data: " + str(e))
+            traceback.print_exc(limit=2, file=sys.stdout)
 
-    section.append_data(scrub_data)
+        if scrubdate is not None:
+            scrub_data = Data("Scrub of " + pool + " on " + scrubdate)
+            scrub_data.items = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
+        else:
+            scrub_data = Data(pool)
+            scrub_data.items = [alloc + " used", free + " free"]
 
-    logger.info("Finished zfs section")
-    return section
+        section.append_data(scrub_data)
+
+        logger.info("Finished zfs section")
+        return section
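
The scrub regex in parse_log() above appears to target the summary line that zpool status prints after a scrub. A quick self-contained check against a made-up line of that shape (the sample line is illustrative, not captured from a real pool):

    import re

    line = "  scan: scrub repaired 0B in 0h12m with 0 errors on Sun Sep  1 00:34:02 2019"
    scrub = re.search(r'.* scrub repaired (\d+\s*\w+) in .* with (\d+) errors on '
                      r'(\w+)\s+(\w+)\s+(\d+)\s+(\d{1,2}:\d{2}):\d+\s+(\d{4})', line)
    print(scrub.groups())
    # ('0B', '0', 'Sun', 'Sep', '1', '00:34', '2019')
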
index ae27eeefd4260d78a8028b50e4aa67fc4e6bf5b8..8f905f470b297dcf2d60d5c4722f3641267dba52 100644 (file)
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
 
 from pkg_resources import Requirement, resource_filename
 
-from logparse.config import prefs
+from logparse import config
 
 def hostname(path): # get the hostname of current server
     hnfile = open(path, 'r')
@@ -40,7 +40,7 @@ def resolve(ip, fqdn=None):        # try to resolve an ip to hostname
     # resolve-domains defined in individual sections of the config take priority over global config
     
     if not fqdn:
-        fqdn = prefs.getboolean("logparse", "resolve-domains")
+        fqdn = config.prefs.get("logparse", "resolve-domains")
 
     if fqdn == 'ip':
         return(ip)
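
With this change resolve() treats the global resolve-domains key as a string mode rather than a boolean, and, per the comment above, a value passed in through the fqdn argument (for example from a section-specific setting) takes priority; the global key is only consulted when fqdn is empty. A small sketch of a call site under that contract, where the "example" section and its key are assumptions for illustration rather than names from this commit:

    from logparse import config, util

    # inside some parser: prefer the parser's own resolve-domains setting and
    # let util.resolve() fall back to [logparse] resolve-domains when it is empty
    mode = config.prefs.get("example", "resolve-domains")    # hypothetical section/key
    host = util.resolve("203.0.113.7", fqdn=mode)             # mode == 'ip' returns the address unchanged
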