#
# config.py
#
-# Default config values and basic wrapper for PyYaml. New config options
+# Default config values and basic wrapper for ConfigParser. New config options
# should be added to the dictionary below, along with appropriate defaults.
#
+# Runtime configuration is done through /etc/logparse/logparse.conf (default)
+# or the path specified in the "--config" argument. The file uses the INI
+# syntax, with general options being declared in the [logparse] section and
+# parser-specific options declared in their own sections.
+#
-import yaml
-import types
-import os
+from configparser import ConfigParser
from pkg_resources import Requirement, resource_filename
-from types import SimpleNamespace
import logparse
import logging
logger = logging.getLogger(__name__)
+global prefs
+prefs = None
+
+defaults = {
+ 'logparse': {
+ 'output': '',
+ 'overwrite': False,
+ 'title': logparse.__name__,
+ 'maxlist': 10,
+ 'maxcmd': 6,
+ 'resolve-domains': 'fqdn',
+ 'rotate': False,
+ 'verbose': False,
+ 'quiet': False,
+ 'hostname-path': '/etc/hostname',
+ 'parsers': '',
+ 'ignore-parsers': ''
+ },
+ 'html': {
+ 'header': '/etc/logparse/header.html',
+ 'css': '/etc/logparse/main.css',
+ 'embed-styles': False,
+ 'css-relpath': True
+ },
+ 'plain': {
+ 'plain': False,
+ 'linewidth': 80
+ },
+ 'logs': {
+ 'auth': '/var/log/auth.log',
+ 'cron': '/var/log/cron.log',
+ 'cpuinfo': '/proc/cpuinfo',
+ 'meminfo': '/proc/meminfo',
+ 'sys': '/var/log/syslog',
+ 'smbd': '/var/log/samba',
+ 'zfs': '/var/log/zpool.log',
+ 'alloc': '/var/log/du.log',
+ 'postfix': '/var/log/mail.log',
+ 'httpd-access': '/var/log/apache2/access.log',
+ 'httpd-error': '/var/log/apache2/error.log'
+ },
+ 'mail': {
+ 'to': '',
+ 'from': '',
+ 'subject': 'logparse from $hostname',
+ 'mailbin': '/usr/bin/mail'
+ },
+ 'temperatures': {
+        'drives': '/dev/sda',
+ 'host': '127.0.0.1',
+ 'separator': '|',
+ 'timeout': 10,
+ 'port': 7634,
+ 'show-model': False,
+ },
+ 'sshd': {
+ 'resolve-domains': ''
+ },
+ 'smbd': {
+ 'resolve-domains': ''
+ },
+ 'httpd': {
+ 'resolve-domains': ''
+ },
+ 'du': {
+        'paths': '/ /etc /home',
+ 'force-write': False
+ }
+}
+
def locate(filename):
+ """
+ DEPRECATED: draft method for what is now parsers/load_parsers.py. Kept here
+ for historical purposes.
+ """
logger.debug("Searching for {0}".format(filename))
loc = resource_filename(Requirement.parse(__package__), filename)
logger.debug("Found {0}".format(loc))
return loc
-class Configuration(dict):
-
- def __init__(self, *arg, **kw):
- super(Configuration, self).__init__(*arg, **kw)
-
- def _str2bool(x):
- positives = ["yes", "true", "1", "y"]
- negatives = ["no", "false", "0", "n"]
- x = x.lower()
- if x in positives:
- return True
- elif x in negatives:
- return False
- else:
- raise ValueError("Unknown option %s" % x)
-
-defaults = Configuration({
- 'output': '',
- 'header': '/etc/logparse/header.html',
- 'css': '/etc/logparse/main.css',
- 'linewidth': 80,
- 'embed-styles': False,
- 'plain': False,
- 'overwrite': False,
- 'title': logparse.__name__,
- 'maxlist': 10,
- 'maxcmd': 6,
- 'resolve-domains': 'fqdn',
- 'mail': {
- 'to': '',
- 'from': '',
- 'subject': 'logparse from $hostname$',
- 'mailbin': '/usr/bin/mail',
- },
- 'rotate': False,
- 'verbose': False,
- 'quiet': False,
- 'hddtemp': {
- 'drives': ['/dev/sda'],
- 'host': '127.0.0.1',
- 'separator': '|',
- 'timeout': 10,
- 'port': 7634,
- 'show-model': False,
- },
- 'apache': {
- 'resolve-domains': '',
- },
- 'sshd': {
- 'resolve-domains': '',
- },
- 'smbd': {
- 'resolve-domains': '',
- },
- 'httpd': {
- 'resolve-domains': '',
- },
- 'du': {
- 'paths': ['/', '/etc', '/home'],
- 'force-write': False,
- },
- 'hostname-path': '/etc/hostname',
- 'parsers': {},
- 'ignore-parsers': {},
- 'logs': {
- 'auth': '/var/log/auth.log',
- 'cron': '/var/log/cron.log',
- 'cpuinfo': '/proc/cpuinfo',
- 'meminfo': '/proc/meminfo',
- 'sys': '/var/log/syslog',
- 'smb': '/var/log/samba',
- 'zfs': '/var/log/zpool.log',
- 'alloc': '/var/log/du.log',
- 'postfix': '/var/log/mail.log',
- 'httpd': '/var/log/apache2'
- }
-})
-
-def verify(raw_dict, defaults):
- for key, value in raw_dict.items():
- if key in defaults: # valid key
- logger.debug("Found valid key {0} with value {1}".format(key, value))
- if (isinstance(value, dict)):
- verify(value, defaults[key]) # recurse nested dictionaries
-
- else: # invalid key
- logger.warning("Invalid key {0} with value {1}".format(key, value))
-
-def loadconf(argparser, configfile = "/etc/logparse/logparse.conf"):
- logger.debug("Getting config from {0}".format(configfile))
+def loadconf(configpaths):
+ """
+ Initial setup for a ConfigParser object. `configpaths` should be a list of
+ configuration files to load (typically only one). To use the generated
+ ConfigParser, use `import logparse.config` and then `config.prefs.get(..)`.
+ The prefs object is returned after creation as a convenience but this method
+ should only be called once per runtime.
+ """
+    prefs = ConfigParser()
+ prefs.read_dict(defaults)
try:
- raw_dict = yaml.safe_load(open(configfile))
- # verify fields
- verify(raw_dict, defaults)
- prefs = defaults
- for value in raw_dict:
- if(isinstance(raw_dict[value], dict)):
- for key in raw_dict[value].items():
- logger.debug("Inserting key {0} with value {1}".format(key[0], key[1]))
- if not value in prefs:
- prefs[value] = {}
- prefs[value][key[0]] = key[1]
- else:
- prefs[value] = raw_dict[value]
- if argparser.parse_args().to is not None:
- prefs['mail']['to'] = argparser.parse_args().to
- if not prefs['mail']['to']:
- logger.info("No recipient address provided, outputting to stdout")
- else:
- logger.info("Email will be sent to " + prefs['mail']['to'])
- return prefs
+ success = prefs.read(configpaths)
+ logger.debug("Loaded {0} config file(s): {1}".format(str(len(success)), str(success)))
except Exception as e:
logger.warning("Error processing config: " + str(e))
+ return prefs
from tabulate import tabulate
import logparse
-from . import interface, util, config, mail
+from logparse import interface, util, mail, config
import logging
logger = logging.getLogger(__name__)
BULLET = "• "
INDENT = " "
+global VARSUBST
+
+def init_var():
+ global VARSUBST
+ css_path = config.prefs.get("html", "css")
+ if config.prefs.getboolean("html", "css-relpath"):
+ if interface.argparser.parse_args().no_write:
+ css_path = os.path.relpath(css_path, ".")
+ elif interface.argparser.parse_args().destination:
+            css_path = os.path.relpath(css_path, interface.argparser.parse_args().destination)
+ elif config.prefs.get("logparse", "output"):
+ css_path = os.path.relpath(css_path, config.prefs.get("logparse", "output"))
+ VARSUBST = {
+ "title": config.prefs.get("logparse", "title"),
+ "date": interface.start.strftime(DATEFMT),
+ "time": interface.start.strftime(TIMEFMT),
+ "hostname": util.hostname(config.prefs.get("logparse", "hostname-path")),
+ "version": logparse.__version__,
+ "css": css_path
+ }
+
class Output:
"""
f.write(self.content)
logger.info("Written output to {}".format(destination))
+ def print_stdout(self, lines=False):
+ """
+ Echo the contents to the console
+ """
+ print()
+ if lines:
+ line = PlaintextLine(linewidth=config.prefs.getint("plain", "linewidth"), double=True)
+ print(line.draw())
+ print(self.content)
+ if lines:
+ print(line.draw())
+ print()
+
+
class PlaintextOutput(Output):
"""
"""
Print details with some primitive formatting
"""
- init_varfilter()
- box = PlaintextBox(content=Template("$title $version on $hostname\n\n$time $date").safe_substitute(varsubst), vpadding=2, hpadding="\t\t", linewidth=config.prefs['linewidth'])
+ box = PlaintextBox(content=Template("$title $version on $hostname\n\n$time $date").safe_substitute(VARSUBST), vpadding=2, hpadding="\t\t", linewidth=self.linewidth)
line = PlaintextLine(self.linewidth)
self.append(box.draw() + line.draw())
"""
Append a horizontal line and some details
"""
- init_varfilter()
self.append(PlaintextLine(self.linewidth, vpadding=1).draw())
- self.append(Template("$hostname $time $date").safe_substitute(varsubst))
+ self.append(Template("$hostname $time $date").safe_substitute(VARSUBST))
def append_section(self, section):
"""
itemoutput = subtitle + '\n'
for datum in data:
datum = BULLET + datum
- if len(datum) > config.prefs['linewidth'] - 3:
+ if len(datum) > self.linewidth - 3:
words = datum.split()
- if max(map(len, words)) > config.prefs['linewidth'] - len(INDENT):
+ if max(map(len, words)) > self.linewidth - len(INDENT):
continue
res, part, others = [], words[0], words[1:]
for word in others:
- if 1 + len(word) > config.prefs['linewidth'] - len(part):
+ if 1 + len(word) > self.linewidth - len(part):
res.append(part)
part = word
else:
self.content = ""
self.destination = ""
self.css = ""
+ self._embedded = ""
def embed_css(self, css):
"""
Convert stylesheet to inline tags
"""
- self.content = mail.mailprep(self.content, css)
- return self.content
+ if not self._embedded:
+ self._embedded = mail.mailprep(self.content, css)
+ return self._embedded
def append_header(self, template):
"""
Insert variables into header template file and append HTML tags
"""
- init_varfilter()
headercontent = Template(open(template, 'r').read())
- self.append(headercontent.safe_substitute(varsubst))
+ self.append(headercontent.safe_substitute(VARSUBST))
self.append(opentag('div', id='main'))
def append_footer(self):
def __init__(self, linewidth=80, double=True, vpadding=1, hpadding=""):
self.linewidth = linewidth
- self.double = False
+ self.double = double
self.vpadding = vpadding
self.hpadding = hpadding
return ('\n').join(contentlines)
-def init_varfilter():
- global varfilter
- global varpattern
- global varsubst
- varfilter = {"$title$": config.prefs['title'], "$date$": interface.start.strftime(DATEFMT),"$time$": interface.start.strftime(TIMEFMT), "$hostname$": util.hostname(config.prefs['hostname-path']), "$version$": logparse.__version__, "$css$": os.path.relpath(config.prefs['css'], os.path.dirname(config.prefs['output']))}
- varfilter = dict((re.escape(k), v) for k, v in varfilter.items())
- varpattern = re.compile("|".join(varfilter.keys()))
- varsubst = dict(title=config.prefs['title'], date=interface.start.strftime(DATEFMT), time=interface.start.strftime(TIMEFMT), hostname=util.hostname(config.prefs['hostname-path']), version=logparse.__version__, css=os.path.relpath(config.prefs['css'], os.path.dirname(config.prefs['output'])))
-
def backticks(l):
return ["`" + x + "`" for x in l]
return "%.1f%s%s" % (num, 'Yi', suffix)
-def fsubject(template):
+def fsubject(subject):
"""
Replace variables in the title template provided in config
"""
- r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
+ r = Template(subject).safe_substitute(VARSUBST)
logger.debug("Returning subject line " + r)
return r
from datetime import datetime
import logparse
-from .config import *
+import logparse.config
+from logparse.config import prefs, loadconf
from logparse import formatting, mail, config
from .parsers import load_parsers
+global argparser
+
def rotate(): # Rotate logs using systemd logrotate
try:
if not os.geteuid() == 0:
if not os.geteuid() == 0:
logger.warning("Cannot run logrotate as root - you will see permission errors in the output below")
sim_cmd = "logrotate -d /etc/logrotate.conf"
- logger.debug("Here is the output of `{0}` (simulated):".format(sim_cmd)
+ logger.debug("Here is the output of `{0}` (simulated):".format(sim_cmd))
sim = check_output(sim_cmd, shell=True)
logger.debug(sim)
except Exception as e:
def main():
# Get arguments
+ global argparser
argparser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
argparser.add_argument('-t','--to', help='mail recipient (\"to\" address)', required=False)
- argparser.add_argument('-c', '--config', help='path to config file', required=False)
+ argparser.add_argument('-c', '--config', help='path to config file', required=False, default="/etc/logparse/logparse.conf")
argparser.add_argument('-p', '--print', help='print HTML to stdout', required=False, dest='printout', action='store_true', default=False)
argparser.add_argument('-d', '--destination', help='file to output HTML', required=False)
argparser.add_argument('-f', '--overwrite', help='force overwrite an existing output file', required=False, action='store_true', default=False)
argparser.add_argument('-es', '--embed-styles', help='make CSS rules inline rather than linking the file', required=False, default=False, action='store_true')
argparser.add_argument('-nh', '--plain', help='write/send plain text rather than HTML', required=False, default=False, action='store_true')
argparser.add_argument('-q', '--quiet', help='no output to stdout', required=False, default=False, action='store_true')
+ argparser.add_argument('-nm', '--no-mail', help="do not send email (overrides config file)", required=False, default=False, action="store_true")
+ argparser.add_argument('-nw', '--no-write', help="do not write output file (overrides config file)", required=False, default=False, action="store_true")
# Load config
- if argparser.parse_args().config:
- config.prefs = config.loadconf(argparser.parse_args().config, argparser)
- else:
- config.prefs = config.loadconf(argparser=argparser)
- prefs = config.prefs
+ config.prefs = loadconf(argparser.parse_args().config)
# Set up logging
logger = logging.getLogger(__name__)
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse[' + str(os.getpid()) + ']: %(message)s'))
loghandler.setLevel(logging.INFO) # don't spam syslog with debug messages
- if argparser.parse_args().quiet or config.prefs['quiet']:
+ if argparser.parse_args().quiet or config.prefs.getboolean("logparse", "quiet"):
logging.basicConfig(level=logging.CRITICAL)
- elif argparser.parse_args().verbose or config.prefs['verbose']:
+ elif argparser.parse_args().verbose or config.prefs.getboolean("logparse", "verbose"):
logging.basicConfig(level=logging.DEBUG)
logger.debug("Verbose mode turned on")
else:
logging.basicConfig(level=logging.INFO)
logger.addHandler(loghandler)
- logger.debug("Finished loading config")
+ logger.debug([x for x in config.prefs.sections()])
+ logger.debug(config.prefs.get("logparse", "output"))
+ logger.debug("Config test: " + config.prefs.get("logparse", "output"))
# Time analysis
global start
# Write header
- global output
+ formatting.init_var()
+
if argparser.parse_args().plain:
- output = formatting.PlaintextOutput(linewidth=prefs['linewidth'])
+ output = formatting.PlaintextOutput(linewidth=config.prefs.getint("plain", "linewidth"))
+ output.append_header()
else:
output = formatting.HtmlOutput()
+ output.append_header(config.prefs.get("html", "header"))
- output.append_header(prefs['header'])
# Find parsers
parser_providers = []
if argparser.parse_args().logs:
log_src = argparser.parse_args().logs.split()
- elif len(prefs['parsers']) > 0:
- log_src = prefs['parsers']
+ elif config.prefs.get("logparse", "parsers"):
+ log_src = config.prefs.get("logparse", "parsers").split()
else:
log_src = load_parsers.default_parsers
else:
parser_providers.append(load_parsers.load(parser))
- if argparser.parse_args().ignore_logs or len(prefs['ignore-parsers']) > 0:
- if argparser.parse_args().ignore_logs:
- ignore_src = argparser.parse_args().ignore_logs.split()
- else:
- ignore_src = prefs['ignore-parsers']
+ if argparser.parse_args().ignore_logs:
+ ignore_src = argparser.parse_args().ignore_logs.split()
+ elif config.prefs.get("logparse", "ignore-parsers"):
+ ignore_src = config.prefs.get("logparse", "ignore-parsers").split()
+ else:
+ ignore_src = []
+ if len(ignore_src) > 0:
for parser_name in ignore_src:
if parser_name in [x.__name__.rpartition('.')[2] for x in parser_providers]:
logger.info("Ignoring default parser {0}".format(parser_name))
# Write HTML footer
output.append_footer()
- if argparser.parse_args().printout:
- print(output)
- if argparser.parse_args().destination or prefs['output']:
+ if (argparser.parse_args().destination or config.prefs.get("logparse", "output")) and not argparser.parse_args().no_write:
if argparser.parse_args().destination:
dest_path = argparser.parse_args().destination
else:
- dest_path = prefs['output']
+ dest_path = config.prefs.get("logparse", "output")
logger.debug("Outputting to {0}".format(dest_path))
- if (argparser.parse_args().embed_styles or prefs['embed-styles']) and not (argparser.parse_args().plain or prefs['plain']):
- output.embed_css(prefs['css'])
- if (not os.path.isfile(dest_path)) and not (argparser.parse_args().overwrite or config['overwrite']):
+ if (argparser.parse_args().embed_styles or config.prefs.getboolean("html", "embed-styles")) and not (argparser.parse_args().plain or config.prefs.getboolean("plain", "plain")):
+ output.embed_css(config.prefs.get("html", "css"))
+ if (not os.path.isfile(dest_path)) and not (argparser.parse_args().overwrite or config.prefs.getboolean("logparse", "overwrite")):
output.write(dest_path)
elif logging.root.level == logging.CRITICAL:
pass
else:
logger.warning("No output written")
- if argparser.parse_args().to or prefs['mail']['to']:
- if argparser.parse_args().to:
+    if (argparser.parse_args().to or config.prefs.get("mail", "to")) and not argparser.parse_args().no_mail:
+        if argparser.parse_args().to:
to = argparser.parse_args().to
else:
- to = prefs['mail']['to']
- mail.sendmail(mailbin=prefs['mail']['mailbin'], body=(output.embed_css(prefs['css']) if isinstance(output, formatting.HtmlOutput) else output.content), recipient=to, subject=formatting.fsubject(config.prefs['mail']['subject']), html=isinstance(output, formatting.HtmlOutput), sender=prefs['mail']['from'])
+ to = config.prefs.get("mail", "to")
+ mail.sendmail(
+ mailbin=config.prefs.get("mail", "mailbin"),
+ body=(output.embed_css(config.prefs.get("html", "css")) if isinstance(output, formatting.HtmlOutput) else output.content),
+ recipient=to,
+ subject=formatting.fsubject(config.prefs.get("mail", "subject")),
+ html=isinstance(output, formatting.HtmlOutput),
+ sender=config.prefs.get("mail", "from"))
if not argparser.parse_args().no_rotate:
- if argparser.parse_args().simulate or prefs['rotate'] == 's':
+        if argparser.parse_args().simulate or config.prefs.get("logparse", "rotate") == "s":
rotate_sim()
- elif prefs['rotate'] or argparser.parse_args().rotate:
+ elif config.prefs.getboolean("logparse", "rotate") or argparser.parse_args().rotate:
rotate()
else:
logger.debug("User doesn't want to rotate logs")
finish = datetime.now()
logger.info("Finished parsing logs at {0} {1} (total time: {2})".format(finish.strftime(formatting.DATEFMT), finish.strftime(formatting.TIMEFMT), finish - start))
+ if argparser.parse_args().printout:
+ output.print_stdout()
+
return
user_data.subtitle = plural("cron session", len(cmdlist)) + " for " + usr
user_data.items = ("`{0}`".format(cmd) for cmd in cmdlist)
user_data.orderbyfreq()
- user_data.truncl(config.prefs['maxcmd'])
+ user_data.truncl(config.prefs.getint("logparse", "maxcmd"))
section.append_data(user_data)
logger.info("Finished cron section")
import re
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
-from .. import util
+from logparse.formatting import *
+from logparse.util import readlog
+from logparse import config
import logging
logger = logging.getLogger(__name__)
logger.debug("Starting cron section")
section = Section("cron")
- matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs['logs']['cron']))
+ matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog(config.prefs.get("logs", "cron")))
num = len(matches)
commands = []
for match in matches:
commands.append(str(match))
- # commands.append([str(match)for match in matches])
- #logger.debug("found cron command " + str(commands))
logger.info("Found " + str(num) + " cron jobs")
jobs_data = Data(str(num) + " cron jobs run")
section.append_data(jobs_data)
cmd_data = Data("Top cron commands")
cmd_data.items = ("`{0}`".format(x) for x in commands)
cmd_data.orderbyfreq()
- cmd_data.truncl(config.prefs['maxcmd'])
+ cmd_data.truncl(config.prefs.getint("logparse", "maxcmd"))
section.append_data(cmd_data)
logger.info("Finished cron section")
import re
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog, resolve
+from logparse import config
import logging
logger = logging.getLogger(__name__)
logger.debug("Starting httpd section")
section = Section("httpd")
- accesslog = readlog(config.prefs['logs']['httpd'] + '/access.log')
+    accesslog = readlog(config.prefs.get("logs", "httpd-access"))
- errorlog = readlog(config.prefs['logs']['httpd'] + '/error.log')
+    errorlog = readlog(config.prefs.get("logs", "httpd-error"))
total_errors = len(errorlog.splitlines())
logger.debug("Retrieved log data")
- errors = []
- notfound = []
- unprivileged = []
-
logger.debug("Searching through access log")
accesses = []
section.append_data(size)
clients = Data()
- clients.items = [resolve(ac.client, "fqdn") for ac in accesses]
+ clients.items = [resolve(ac.client, config.prefs.get("httpd", "resolve-domains")) for ac in accesses]
clients.orderbyfreq()
clients.subtitle = "Received requests from " + plural("client", len(clients.items))
- clients.truncl(config.prefs['maxlist'])
+ clients.truncl(config.prefs.getint("logparse", "maxlist"))
section.append_data(clients)
files = Data()
files.items = [ac.file for ac in accesses]
files.orderbyfreq()
files.subtitle = plural("file", len(files.items)) + " requested"
- files.truncl(config.prefs['maxlist'])
+ files.truncl(config.prefs.getint("logparse", "maxlist"))
section.append_data(files)
useragents = Data()
useragents.items = [ac.useragent for ac in accesses]
useragents.orderbyfreq()
useragents.subtitle = plural("user agent", len(useragents.items))
- useragents.truncl(config.prefs['maxlist'])
+ useragents.truncl(config.prefs.getint("logparse", "maxlist"))
section.append_data(useragents)
logger.info("httpd has received " + str(total_requests) + " requests with " + str(total_errors) + " errors")
+++ /dev/null
-from ..formatting import *
-from .. import config
-
-import logging
-logger = logging.getLogger(__name__)
-
-def parse_log():
-
- parser = util.JournalParser()
- parser.parse()
-
import re
-from ..formatting import *
-from .. import config
+from logparse.formatting import *
+from logparse import config
import logging
logger = logging.getLogger(__name__)
ram_b = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
table.add_row(Row([Column("Installed"), Column(parsesize(ram_b))]))
- raw_mem = util.readlog(config.prefs['logs']['meminfo'])
+ raw_mem = util.readlog(config.prefs.get("logs", "meminfo"))
line_regex = re.compile("^Mem(\w+):\s*(\d*)\s*kB$")
for line in raw_mem.splitlines():
import re
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog
+from logparse import config
import logging
logger = logging.getLogger(__name__)
section = Section("postfix")
logger.debug("Starting postfix section")
logger.debug("Searching through postfix logs")
- messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs['logs']['postfix']))
+ messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog(config.prefs.get("logs", "postfix")))
r = []
s = []
size = 0
if (len(s) > 1):
rec_data.items = r
rec_data.orderbyfreq()
- rec_data.truncl(config.prefs['maxlist'])
+ rec_data.truncl(config.prefs.getint("logparse", "maxlist"))
rec_data.subtitle = n + " messages sent to"
else:
rec_data.subtitle = n + " messages sent to " + r[0]
import re
import glob
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog, resolve
+from logparse import config
import logging
logger = logging.getLogger(__name__)
def parse_log():
logger.debug("Starting smbd section")
section = Section("smbd")
- files = glob.glob(config.prefs['logs']['smb'] + "/log.*[!\.gz][!\.old]") # find list of logfiles
+ files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]") # find list of logfiles
# for f in files:
# file_mod_time = os.stat(f).st_mtime
# find the machine (ip or hostname) that this file represents
ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
- host = resolve(ip, fqdn=config.prefs['smbd']['resolve-domains'])
- if (host == ip and (config.prefs['smbd']['resolve-domains'] or config.prefs['resolve-domains']) != 'ip'): # if ip has disappeared, fall back to a hostname from logfile
+ host = resolve(ip, fqdn=config.prefs.get("smbd", "resolve-domains"))
+        if host == ip and (config.prefs.get("smbd", "resolve-domains") or config.prefs.get("logparse", "resolve-domains")) != "ip": # if ip has disappeared, fall back to a hostname from logfile
newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
if (len(set(newhost)) == 1): # all hosts in one file should be the same
host = newhost[0].lower()
else: # multiple users
auth_data.items = sigma_auths
auth_data.orderbyfreq()
- auth_data.truncl(config.prefs['maxlist'])
+ auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
logger.debug("Found {0} samba logins".format(str(n_auths)))
section.append_data(auth_data)
logger.info("Finished smbd section")
user = entry.group(1)
ip = entry.group(2)
- userhost = user + '@' + resolve(ip, fqdn=config.prefs['sshd']['resolve-domains'])
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
login_data.items.append(userhost)
elif "Connection closed by authenticating user root" in msg:
user = entry.group(1)
ip = entry.group(2)
- userhost = user + '@' + resolve(ip, fqdn=config.prefs['sshd']['resolve-domains'])
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
failed_data.items.append(userhost)
elif "Invalid user" in msg:
user = entry.group(1)
ip = entry.group(2)
- userhost = user + '@' + resolve(ip, fqdn=config.prefs['sshd']['resolve-domains'])
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
invalid_data.items.append(userhost)
login_data.subtitle = plural("successful login", len(login_data.items)) + " from"
login_data.orderbyfreq()
- login_data.truncl(config.prefs['maxlist'])
+ login_data.truncl(config.prefs.getint("logparse", "maxlist"))
invalid_data.subtitle = plural("attempted login", len(invalid_data.items))
invalid_data.orderbyfreq()
invalid_data.subtitle += plural(" from invalid user", len(invalid_data.items), False)
- invalid_data.truncl(config.prefs['maxlist'])
+ invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
failed_data.subtitle = plural("failed login", len(failed_data.items)) + " from"
failed_data.orderbyfreq()
- failed_data.truncl(config.prefs['maxlist'])
+ failed_data.truncl(config.prefs.getint("logparse", "maxlist"))
section.append_data(login_data)
section.append_data(invalid_data)
import re
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog, resolve
+from logparse import config
import logging
logger = logging.getLogger(__name__)
logger.debug("Starting sshd section")
section = Section("ssh")
- logger.debug("Searching for matches in {0}".format(config.prefs['logs']['auth']))
- matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog(config.prefs['logs']['auth'])) # get all logins
+ logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
+ matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog(config.prefs.get("logs", "auth"))) # get all logins
logger.debug("Finished searching for logins")
- logger.debug("Searching for matches in {0}".format(config.prefs['logs']['auth']))
- authlog = readlog(config.prefs['logs']['auth'])
+ logger.debug("Searching for matches in {0}".format(config.prefs.get("logs", "auth")))
+ authlog = readlog(config.prefs.get("logs", "auth"))
matches = re.findall('.*sshd.*Accepted publickey for .* from .*', authlog) # get all logins
invalid_matches = re.findall(".*sshd.*Invalid user .* from .*", authlog)
user = entry.group(1)
ip = entry.group(2)
- userhost = user + '@' + resolve(ip, fqdn=config.prefs['sshd']['resolve-domains'])
+ userhost = user + '@' + resolve(ip, fqdn=config.prefs.get("sshd", "resolve-domains"))
users.append(userhost)
logger.debug("Parsed list of authorised users")
logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
auth_data.subtitle += ' ' + auth_data.items[0]
auth_data.orderbyfreq()
- auth_data.truncl(config.prefs['maxlist'])
+ auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
section.append_data(auth_data)
logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid user " + invalid_users[0])
invalid_data.subtitle += ' ' + invalid_data.items[0]
invalid_data.orderbyfreq()
- invalid_data.truncl(config.prefs['maxlist'])
+    invalid_data.truncl(config.prefs.getint("logparse", "maxlist"))
logger.debug("Found " + str(len(invalid_matches)) + " SSH login attempts for invalid users " + str(data))
section.append_data(invalid_data)
import re
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog
+from logparse.config import prefs
import logging
logger = logging.getLogger(__name__)
def parse_log():
logger.debug("Starting sudo section")
section = Section("sudo")
- logger.debug("Searching for matches in {0}".format(config.prefs['logs']['auth']))
- umatches = re.findall('.*sudo:session\): session opened.*', readlog(config.prefs['logs']['auth']))
+ logger.debug("Searching for matches in {0}".format(prefs.get("logs", "auth")))
+ umatches = re.findall('.*sudo:session\): session opened.*', readlog(prefs.get("logs", "auth")))
num = sum(1 for line in umatches) # total number of sessions
users = []
data = []
else:
users[exists[0]][1] += 1
commands = []
- cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(config.prefs['logs']['auth']))
+ cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog(prefs.get("logs", "auth")))
for cmd in cmatches:
commands.append(cmd)
logger.debug("Finished parsing sudo sessions")
commands = backticks(commands)
command_data.items = commands
command_data.orderbyfreq()
- command_data.truncl(config.prefs['maxcmd'])
+ command_data.truncl(prefs.getint("logparse", "maxcmd"))
section.append_data(command_data)
logger.info("Finished sudo section")
import os
import re
-from ..formatting import *
-from .. import config
+from logparse.formatting import *
+from logparse.config import prefs
import logging
logger = logging.getLogger(__name__)
section = Section("system")
table = Table()
- table.add_row(Row([Column("Hostname"), Column(util.hostname(config.prefs['hostname-path']))]))
+ table.add_row(Row([Column("Hostname"), Column(util.hostname(prefs.get("logparse", "hostname-path")))]))
table.add_row(Row([Column("OS"), Column(platform.platform())]))
table.add_row(Row([Column("OS version"), Column(platform.version())]))
table.add_row(Row([Column("Platform"), Column(platform.system() + " " + platform.machine())]))
processors = []
- raw_proc = util.readlog(config.prefs['logs']['cpuinfo'])
+ raw_proc = util.readlog(prefs.get("logs", "cpuinfo"))
line_regex = re.compile(".*model name.*:\s*")
proc_regex = re.compile("\s*(\(R\)|\(TM\)|CPU)")
for line in raw_proc.splitlines():
from telnetlib import Telnet
from typing import List, Dict, NamedTuple
-from logparse import formatting
-from ..formatting import *
-from ..util import readlog, resolve
-from ..config import *
+from logparse.formatting import *
+from logparse.util import readlog
+from logparse import config
import logging
logger = logging.getLogger(__name__)
data = ""
hddtemp_data = Data("Disks")
- client = HddtempClient(host=config.prefs['hddtemp']['host'], port=int(config.prefs['hddtemp']['port']), sep=config.prefs['hddtemp']['separator'], timeout=int(config.prefs['hddtemp']['timeout']))
+ client = HddtempClient(
+ host=config.prefs.get("temperatures", "host"),
+ port=config.prefs.getint("temperatures", "port"),
+ sep=config.prefs.get("temperatures", "separator"),
+ timeout=int(config.prefs.get("temperatures", "timeout")))
drives = client.get_drives()
logger.debug("Received drive info: " + str(drives))
for drive in sorted(drives, key=lambda x: x.path):
- if drive.path in config.prefs['hddtemp']['drives']:
+ if drive.path in config.prefs.get("temperatures", "drives").split():
sumtemp += drive.temperature
- hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs['hddtemp']['show-model'] else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
+ hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs.getboolean("temperatures", "show-model") else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
else:
drives.remove(drive)
logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
import re
import sys, traceback
-from ..formatting import *
-from ..util import readlog, resolve
-from .. import config
+from logparse.formatting import *
+from logparse.util import readlog
+from logparse.config import prefs
import logging
logger = logging.getLogger(__name__)
logger.debug("Starting zfs section")
section = Section("zfs")
- zfslog = readlog(config.prefs['logs']['zfs'])
+ zfslog = readlog(prefs.get("logs", "zfs"))
logger.debug("Analysing zpool log")
pool = re.search('.*---\n(\w*)', zfslog).group(1)
from pkg_resources import Requirement, resource_filename
-from . import config
+from logparse.config import prefs
def hostname(path): # get the hostname of current server
hnfile = open(path, 'r')
# resolve-domains defined in individual sections of the config take priority over global config
if not fqdn:
- fqdn = config.prefs['resolve-domains']
+        fqdn = prefs.get("logparse", "resolve-domains")
if fqdn == 'ip':
return(ip)
keywords='logparse log parse analysis summary monitor email server',
packages=['logparse', 'logparse.parsers'],
python_requires='>=3', # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
- install_requires=['premailer', 'requests', 'pyyaml', 'tabulate'], # https://packaging.python.org/en/latest/requirements.html
+ install_requires=['premailer', 'requests', 'tabulate'], # https://packaging.python.org/en/latest/requirements.html
data_files=[('/etc/logparse', ['logparse.conf', 'header.html', 'main.css'])], # installed to /etc/logparse
project_urls={
'Readme': 'https://git.lorimer.id.au/logparse.git/about',