'quiet': False,
'hostname-path': '/etc/hostname',
'parsers': '',
- 'ignore-parsers': ''
+ 'ignore-parsers': '',
+ 'period': '1 day'
},
'html': {
'header': '/etc/logparse/header.html',
'httpd-access': '/var/log/apache2/access.log',
'httpd-error': '/var/log/apache2/error.log'
},
+ 'cron': {
+ 'period': ''
+ },
'mail': {
'to': '',
'from': '',
'timeout': 10,
'port': 7634,
'show-model': False,
+ 'period': ''
},
'sshd': {
- 'sshd-resolve-domains': ''
+ 'sshd-resolve-domains': '',
+ 'period': ''
},
'smbd': {
'shares': '^((?!IPC\$).)*$',
'users': '.*',
- 'smbd-resolve-domains': ''
+ 'smbd-resolve-domains': '',
+ 'period': ''
},
'httpd': {
- 'httpd-resolve-domains': ''
+ 'httpd-resolve-domains': '',
+ 'period': ''
},
'du': {
'paths': ['/', '/etc', '/home'],
'force-write': False
+ },
+ 'ufw': {
+ 'ufw-resolve-domains': '',
+ 'period': ''
+ },
+ 'sudo': {
+ 'period': ''
+ },
+ 'systemctl': {
+ 'period': '',
+ 'show-all': True
}
}
"""
self.append(PlaintextBox(content=section.title, double=False, fullwidth=False, vpadding=0, hpadding=" ").draw())
+ if section.period and section.period.unique:
+ self.append("\n(since {0})".format(section.period.startdate.strftime(DATEFMT + " " + TIMEFMT)))
self.append('\n'*2)
for data in section.data:
self.append(self._fmt_data(data.subtitle, data.items))
"""
if not self._embedded:
- self._embedded = mail.mailprep(self.content, css)
+ self._embedded = mail.mailprep(re.sub(
+ ".*" + re.escape(VARSUBST['css']) + ".*\n", "", self.content), css)
return self._embedded
+ def write_embedded(self, destination = ""):
+ """
+ Write contents to file with inline CSS tags
+ """
+
+ logger.debug("Writing HTML with embedded styles to " + destination)
+ if not self._embedded:
+ logger.warning("Call to write_embedded before embed_css - \
+ embedding stylesheets immediately")
+ self.embed_css(config.prefs.get("html", "css"))
+ if destination == "":
+ destination = self.destination
+ if destination == "":
+ logger.warning("No destination path provided")
+ return 1
+ with open(destination, 'w') as f:
+ f.write(self._embedded)
+ logger.info("Written output to {}".format(destination))
+
+
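# A minimal usage sketch for the new embedded-CSS path, mirroring the main
# flow later in this patch (the destination path here is hypothetical):
#
#     output.embed_css(config.prefs.get("html", "css"))
#     output.write_embedded("/tmp/logparse.html")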
def append_header(self, template):
"""
Insert variables into header template file and append HTML tags
"""
+ self.headertemplate = template
headercontent = Template(open(template, 'r').read())
self.append(headercontent.safe_substitute(VARSUBST))
self.append(opentag('div', id='main'))
self.append(opentag('div', 1, section.title, 'section'))
self.append(self._gen_title(section.title))
+ if section.period and section.period.unique:
+ self.append(self._fmt_period(section.period))
for data in section.data:
- self.append(self._fmt_data(data.subtitle, data.items))
+ self.append(self._fmt_data(data.subtitle, data.items, data.severity))
for table in section.tables:
self.append(table.draw_html())
self.append(closetag('div', 1))
logger.debug("Writing title for " + title)
return tag('h2', False, title)
- def _fmt_data(self, subtitle, data = None):
+ def _fmt_data(self, subtitle, data=None, severity=0):
"""
Format the properties of a data object into usable HTML tags.
- Subtitle is required, data is not. If only subtitle is supplied or subtitle + one data item, a single line will be printed.
+ Subtitle is required, data is not. If only subtitle is supplied or
+ subtitle + one data item, a single line will be printed.
"""
if (subtitle == ""):
if (data == None or len(data) == 0):
logger.debug("No data provided.. just printing subtitle")
- return tag('p', False, subtitle)
+ return tag('p', False, subtitle, cl="severity-" + str(severity))
else:
logger.debug("Received data " + str(data))
subtitle += ':'
if (len(data) == 1):
- return tag('p', False, subtitle + ' ' + data[0])
+ return tag('p', False, subtitle + ' ' + data[0], cl="severity-" + str(severity))
else:
output = ""
- output += tag('p', False, subtitle)
+ output += tag('p', False, subtitle, cl="severity-" + str(severity))
output += opentag('ul', 1)
coderegex = re.compile('`(.*)`')
for datum in data:
output += closetag('ul', True)
return output
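# Roughly what the severity-aware markup looks like, assuming tag() renders
# the cl argument as an HTML class attribute (as the stylesheet changes at
# the end of this patch expect); unit name is hypothetical:
#
#     self._fmt_data("Failed units", ["foo.service"], 4)
#     # -> '<p class="severity-4">Failed units: foo.service</p>'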
+ def _fmt_period(self, period):
+ output = ''
+ output += opentag('span', cl='period')
+ output += "since " + period.startdate.strftime(DATEFMT + " " + TIMEFMT)
+ output += closetag('span')
+ return output
+
+ def print_stdout_embedded(self, lines=False):
+ """
+ Echo the version with embedded style tags to the console
+ """
+
+ if self._embedded == "":
+ self.embed_css(config.prefs.get("html", "css"))
+ print()
+ if lines:
+ line = PlaintextLine(linewidth=config.prefs.getint("plain", "linewidth"), double=True)
+ print(line.draw())
+ print(self._embedded)
+ if lines:
+ print(line.draw())
+ print()
+
class Section:
"""
- Each parser should output a Section() which contains the title and returned data.
+ Each parser should output a Section() which contains the title, returned
+ data, and applicable time period.
"""
- def __init__(self, title):
+ def __init__(self, title, period=None):
self.title = title
self.data = []
self.tables = []
+ self.period = period if period else util.LogPeriod(self.title)
def append_data(self, data):
self.data.append(data)
class Data:
"""
Each section (parser) can have one or more Data() objects which are
- essentially glorified lists.
+ essentially glorified lists with titles (`self.subtitle`).
"""
- def __init__(self, subtitle="", items=[]):
+ def __init__(self, subtitle="", items=[], severity=0):
"""
Initialise variables. No parameters are enforced upon initialisation,
- but at least the subtitle is required for valid output.
+ but at least the subtitle is required for valid output. Severity refers
+ to the importance of the data (an integer from 0 to 5), e.g. a failed
+ system should have severity 5 and will be formatted appropriately by
+ the Output object.
"""
self.subtitle = subtitle
self.items = items if items is not None else []
+ self.severity = severity
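# For example, the systemctl parser added in this patch reports failed units
# with a raised severity (unit names here are hypothetical):
#
#     Data("failed units", ["foo.service", "bar.service"], severity=4)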
def truncl(self, limit): # truncate list
"""
return self
self.items = self.items[:limit]
self.items.append("+ {0} more".format(str(more)))
+ return self
def orderbyfreq(self):
"""
unsorted = list(self.items)
self.items = ["{0} ({1})".format(y, unsorted.count(y)) for y in sorted(set(unsorted), key = lambda x: -unsorted.count(x))]
+ return self
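# A short usage sketch (hypothetical items) showing why both methods now
# return self -- the parsers in this patch chain them before appending:
#
#     d = Data("source port", ["22", "22", "443"])
#     d.orderbyfreq().truncl(config.prefs.getint("logparse", "maxlist"))
#     # d.items -> ["22 (2)", "443 (1)"], possibly truncated with "+ n more"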
class Table(object):
Return "1 noun" or "n nouns"
"""
- if print_quantity:
- if (quantity == 1):
+ if (quantity == 1):
+ if print_quantity:
return(str(quantity) + " " + noun)
else:
- return(str(quantity) + " " + noun + "s")
- else:
- if (quantity == 1):
return noun
+ else:
+ if noun.endswith("s"):
+ noun += "e"
+ if print_quantity:
+ return(str(quantity) + " " + noun + "s")
else:
return noun + "s"
o = opentag(tag, block, id, cl, style)
c = closetag(tag, block)
return o + content + c
-
# Load config
config.prefs = config.loadconf(argparser.parse_args().config)
+ if argparser.parse_args().time_period:
+ config.prefs.set("logparse", "period",
+ argparser.parse_args().time_period)
# Set up logging
logger = logging.getLogger(__name__)
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
- loghandler.setFormatter(logging.Formatter(fmt='logparse[' + str(os.getpid()) + ']: %(message)s'))
+ loghandler.setFormatter(logging.Formatter(
+ fmt='logparse[' + str(os.getpid()) + ']: %(message)s'))
loghandler.setLevel(logging.INFO) # don't spam syslog with debug messages
- if argparser.parse_args().quiet or config.prefs.getboolean("logparse", "quiet"):
+
+ if (argparser.parse_args().quiet
+ or config.prefs.getboolean("logparse", "quiet")):
logging.basicConfig(level=logging.CRITICAL)
- elif argparser.parse_args().verbose or config.prefs.getboolean("logparse", "verbose"):
+ elif (argparser.parse_args().verbose
+ or config.prefs.getboolean("logparse", "verbose")):
logging.basicConfig(level=logging.DEBUG)
logger.debug("Verbose mode turned on")
else:
logging.basicConfig(level=logging.INFO)
+
logger.addHandler(loghandler)
# Time analysis
global start
start = datetime.now()
- logger.info("Beginning log analysis at {0} {1}".format(start.strftime(formatting.DATEFMT), start.strftime(formatting.TIMEFMT)))
- logger.debug("This is {0} version {1}, running on Python {2}".format(logparse.__name__, logparse.__version__, version.replace('\n', '')))
+ logger.info("Beginning log analysis at {0} {1}".format(
+ start.strftime(formatting.DATEFMT), start.strftime(formatting.TIMEFMT)))
+ logger.debug("This is {0} version {1}, running on Python {2}".format(
+ logparse.__name__, logparse.__version__, version.replace('\n', '')))
# Write header
formatting.init_var()
if argparser.parse_args().plain:
- output = formatting.PlaintextOutput(linewidth=config.prefs.getint("plain", "linewidth"))
+ output = formatting.PlaintextOutput(
+ linewidth=config.prefs.getint("plain", "linewidth"))
output.append_header()
else:
output = formatting.HtmlOutput()
output.append_section(parser.parse_log())
# Write footer
-
output.append_footer()
# Write output
- if (argparser.parse_args().destination or config.prefs.get("logparse", "output")) and not argparser.parse_args().no_write:
+ if ((argparser.parse_args().destination
+ or config.prefs.get("logparse", "output"))
+ and not argparser.parse_args().no_write):
+ # Determine destination path
if argparser.parse_args().destination:
dest_path = argparser.parse_args().destination
else:
dest_path = config.prefs.get("logparse", "output")
+
logger.debug("Outputting to {0}".format(dest_path))
- if (argparser.parse_args().embed_styles or config.prefs.getboolean("html", "embed-styles")) and not (argparser.parse_args().plain or config.prefs.getboolean("plain", "plain")):
- output.embed_css(config.prefs.get("html", "css"))
- if (not os.path.isfile(dest_path)) and not (argparser.parse_args().overwrite or config.prefs.getboolean("logparse", "overwrite")):
- output.write(dest_path)
+
+ # Determine whether to clobber old file
+ if (not os.path.isfile(dest_path)) \
+ or argparser.parse_args().overwrite \
+ or config.prefs.getboolean("logparse", "overwrite"):
+
+ if (argparser.parse_args().embed_styles
+ or config.prefs.getboolean("html", "embed-styles")) \
+ and not (argparser.parse_args().plain
+ or config.prefs.getboolean("plain", "plain")):
+ # Embed CSS stylesheet
+ output.embed_css(config.prefs.get("html", "css"))
+ output.write_embedded(dest_path)
+ else:
+ output.write(dest_path)
+
elif logging.root.level == logging.CRITICAL:
+
+ # In quiet mode, don't prompt to overwrite - just skip writing
pass
+
else:
+
logger.warning("Destination file already exists")
- if input("Would you like to overwrite {0}? (y/n) [n] ".format(dest_path)) == 'y':
- output.write(dest_path)
+ if input("Would you like to overwrite {0}? (y/n) [n] "
+ .format(dest_path)) == 'y':
+ if (argparser.parse_args().embed_styles
+ or config.prefs.getboolean("html", "embed-styles")) \
+ and not (argparser.parse_args().plain
+ or config.prefs.getboolean("plain", "plain")):
+
+ output.embed_css(config.prefs.get("html", "css"))
+ output.write_embedded(dest_path)
+
+ else:
+ output.write(dest_path)
else:
logger.warning("No output written")
# Send email if requested
- if (str(argparser.parse_args().to) or str(config.prefs.get("mail", "to"))) and not argparser.parse_args().no_mail:
+ if (str(argparser.parse_args().to) or str(config.prefs.get("mail", "to"))) \
+ and not argparser.parse_args().no_mail:
+
if str(argparser.parse_args().to):
to = argparser.parse_args().to
else:
to = config.prefs.get("mail", "to")
+
mail.sendmail(
mailbin=config.prefs.get("mail", "mailbin"),
- body=(output.embed_css(config.prefs.get("html", "css")) if isinstance(output, formatting.HtmlOutput) else output.content),
+ body=(output.embed_css(config.prefs.get("html", "css"))
+ if isinstance(output, formatting.HtmlOutput) else output.content),
recipient=to,
subject=formatting.fsubject(config.prefs.get("mail", "subject")),
html=isinstance(output, formatting.HtmlOutput),
# Rotate logs if requested
if not argparser.parse_args().no_rotate:
- if argparser.parse_args().simulate or config.prefs.getboolean("logparse", "rotate"):
+ if argparser.parse_args().simulate:
rotate_sim()
- elif config.prefs.getboolean("logparse", "rotate") or argparser.parse_args().rotate:
+ elif (config.prefs.getboolean("logparse", "rotate")
+ or argparser.parse_args().rotate):
rotate()
else:
logger.debug("User doesn't want to rotate logs")
# Finish up
finish = datetime.now()
- logger.info("Finished parsing logs at {0} {1} (total time: {2})".format(finish.strftime(formatting.DATEFMT), finish.strftime(formatting.TIMEFMT), finish - start))
+ logger.info("Finished parsing logs at {0} {1} (total time: {2})".format(
+ finish.strftime(formatting.DATEFMT),
+ finish.strftime(formatting.TIMEFMT),
+ finish - start))
if argparser.parse_args().printout:
- output.print_stdout()
+ if isinstance(output, formatting.HtmlOutput) \
+ and (argparser.parse_args().embed_styles
+ or config.prefs.getboolean("html", "embed-styles")):
+ output.print_stdout_embedded()
+ else:
+ output.print_stdout()
return
"""
Initialise arguments (in a separate function for documentation purposes)
"""
- argparser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
- argparser.add_argument('-t','--to', help='mail recipient (\"to\" address)', required=False)
- argparser.add_argument('-c', '--config', help='path to config file', required=False, default="/etc/logparse/logparse.conf")
- argparser.add_argument('-p', '--print', help='print HTML to stdout', required=False, dest='printout', action='store_true', default=False)
- argparser.add_argument('-d', '--destination', help='file to output HTML', required=False)
- argparser.add_argument('-f', '--overwrite', help='force overwrite an existing output file', required=False, action='store_true', default=False)
- argparser.add_argument('-v', '--verbose', help='verbose console/syslog output (for debugging)', required=False, default=False, action='store_true')
- argparser.add_argument('-r', '--rotate', help='force rotate log files using systemd logrotate (overrides --rotate and "rotate" in logparse.conf)', required=False, default=False, action='store_true')
- argparser.add_argument('-nr', '--no-rotate', help='do not rotate logfiles (overrides --rotate and logparse.conf)', required=False, default=False, action='store_true')
- argparser.add_argument('-s', '--simulate', help="test run logrotate (do not actually change files)", required=False, default=False, action="store_true")
- argparser.add_argument('-l', '--logs', help='services to analyse', required=False)
- argparser.add_argument('-nl', '--ignore-logs', help='skip these services (takes precedence over -l)', required=False)
- argparser.add_argument('-es', '--embed-styles', help='make CSS rules inline rather than linking the file', required=False, default=False, action='store_true')
- argparser.add_argument('-nh', '--plain', help='write/send plain text rather than HTML', required=False, default=False, action='store_true')
- argparser.add_argument('-q', '--quiet', help='no output to stdout', required=False, default=False, action='store_true')
- argparser.add_argument('-nm', '--no-mail', help="do not send email (overrides config file)", required=False, default=False, action="store_true")
- argparser.add_argument('-nw', '--no-write', help="do not write output file (overrides config file)", required=False, default=False, action="store_true")
+
+ argparser = argparse.ArgumentParser(description=
+ 'Grab logs of some common services and send them by email')
+ argparser.add_argument('-t','--to', required=False,
+ help='mail recipient (\"to\" address)')
+ argparser.add_argument('-c', '--config', required=False,
+ default="/etc/logparse/logparse.conf",
+ help='path to config file')
+ argparser.add_argument('-p', '--print', required=False, dest='printout',
+ action='store_true', default=False,
+ help='print HTML to stdout')
+ argparser.add_argument('-d', '--destination', required=False,
+ help='file to output HTML')
+ argparser.add_argument('-f', '--overwrite', required=False,
+ action='store_true', default=False,
+ help='force overwrite an existing output file')
+ argparser.add_argument('-v', '--verbose', required=False, default=False,
+ action='store_true',
+ help='verbose console/syslog output (for debugging)')
+ argparser.add_argument('-r', '--rotate', required=False, default=False,
+ action='store_true',
+ help='force rotate log files using systemd logrotate '
+ '(overrides "rotate" in logparse.conf)')
+ argparser.add_argument('-nr', '--no-rotate', required=False, default=False,
+ action='store_true',
+ help='do not rotate log files (overrides config)')
+ argparser.add_argument('-s', '--simulate', required=False, default=False,
+ action="store_true",
+ help="test run logrotate (do not actually change files)")
+ argparser.add_argument('-l', '--logs', required=False,
+ help='services to analyse')
+ argparser.add_argument('-nl', '--ignore-logs', required=False,
+ help='skip these services (takes precedence over -l)')
+ argparser.add_argument('-es', '--embed-styles', required=False,
+ default=False, action='store_true',
+ help='make CSS rules inline rather than linking the file')
+ argparser.add_argument('-nh', '--plain', required=False, default=False,
+ action='store_true', help='write/send plain text rather than HTML')
+ argparser.add_argument('-q', '--quiet', required=False, default=False,
+ action='store_true', help='no output to stdout')
+ argparser.add_argument('-nm', '--no-mail', required=False, default=False,
+ action="store_true",
+ help="do not send email (overrides config file)")
+ argparser.add_argument('-nw', '--no-write', required=False, default=False,
+ action="store_true",
+ help="do not write output file (overrides config file)")
+ argparser.add_argument('-tp', '--time-period', required=False,
+ help="time period to analyse logs for (applies to all parsers)")
+
return argparser
try:
if not os.geteuid() == 0:
if stdin.isatty():
- logger.warning("Not running as root, using sudo (may require password to be entered)")
- rotate_shell = check_output("sudo logrotate /etc/logrotate.conf", shell=True)
+ logger.warning("Not running as root, using sudo \
+ (may require password to be entered)")
+ rotate_shell = check_output(
+ "sudo logrotate /etc/logrotate.conf", shell=True)
else:
- raise PermissionError("Root priviliges are required to run logrotate but are not provided")
+ raise PermissionError("Root priviliges are required to run \
+ logrotate but were not provided")
else:
- rotate_shell = check_output("/usr/sbin/logrotate /etc/logrotate.conf", shell=True)
+ rotate_shell = check_output(
+ "/usr/sbin/logrotate /etc/logrotate.conf", shell=True)
logger.info("Rotated logfiles")
logger.debug("logrotate output: " + rotate_shell)
except Exception as e:
logger = logging.getLogger(__name__)
try:
if not os.geteuid() == 0:
- logger.warning("Cannot run logrotate as root - you will see permission errors in the output below")
+ logger.warning("Cannot run logrotate as root - \
+ you will see permission errors in the output below")
sim_cmd = "logrotate -d /etc/logrotate.conf"
logger.debug("Here is the output of `{0}` (simulated):".format(sim_cmd))
sim = check_output(sim_cmd, shell=True)
parser_module = spec.loader.load_module(spec.name)
return self._validate_module(parser_module)
except Exception as e:
- logger.debug("Couldn't find parser {0} in {1}".format(pattern, self.path))
return None
def _search_default(self, pattern):
from systemd import journal
-from logparse.formatting import *
from logparse import config
+from logparse.formatting import *
from logparse.load_parsers import Parser
class CronJournald(Parser):
# Initiate journald reader
j = journal.Reader()
- j.this_boot()
j.this_machine()
j.log_level(journal.LOG_INFO)
j.add_match(_COMM="cron")
+ j.seek_realtime(section.period.startdate)
logger.info("Obtaining cron logs")
"""
import re
-import glob
from systemd import journal
-from logparse.formatting import *
-from logparse.util import readlog, resolve
from logparse import config
+from logparse.formatting import *
from logparse.load_parsers import Parser
+from logparse.util import LogPeriod, resolve
class SmbdJournald(Parser):
import re
from systemd import journal
-from logparse.formatting import *
-from logparse.util import resolve
from logparse import config
+from logparse.formatting import *
from logparse.load_parsers import Parser
+from logparse.util import resolve
class SshdJournald(Parser):
section = Section("ssh")
j = journal.Reader()
- j.this_boot()
- j.log_level(journal.LOG_DEBUG)
+ j.this_machine()
+ j.log_level(journal.LOG_INFO)
j.add_match(_COMM="sshd")
+ j.seek_realtime(section.period.startdate)
messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
--- /dev/null
+# -*- coding: utf-8 -*-
+#
+# systemctl.py
+#
+# Get information about running/failed units and boot process
+#
+
+import re
+import subprocess
+
+from logparse import config
+from logparse.formatting import *
+from logparse.load_parsers import Parser
+from logparse.util import resolve
+
+# The following list changes with each systemd version.
+# Run `systemctl --state=help` to view currently implemented states.
+# The numbers correspond to degrees of severity for later formatting.
+BAD_STATES = {"bad": 4, "failed": 4, "not-found": 4, "bad-setting": 2,
+ "error": 3, "masked": 2, "dead": 3, "abandoned": 3}
+SYS_STATUS = {'running': 0, 'initializing': 1, 'starting': 1, 'stopping': 1,
+ 'degraded': 3, 'unknown': 4, 'offline': 5}
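# Hedged example of how these maps feed the severity formatting added in
# formatting.py: a unit in the "failed" state yields Data(..., severity=4),
# which the HTML output marks with class="severity-4" and the stylesheet
# colours red.
#
#     BAD_STATES["failed"]     # -> 4
#     SYS_STATUS["degraded"]   # -> 3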
+
+class Unit():
+
+ def __init__(self, name, loaded, active, sub, description):
+ self.name = name
+ self.loaded = loaded
+ self.active = active
+ self.sub = sub
+ self.description = description
+
+ def status(self):
+ try:
+ p = subprocess.Popen(["systemctl", "is-active", self.name],
+ stdout=subprocess.PIPE)
+ (output, err) = p.communicate()
+ status = output.decode('utf-8')
+ return status
+ except Exception as e:
+ logger.warning("Failed to get status for unit {0}: {1}".format(
+ self.name, str(e)))
+
+
+class Systemctl(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "systemctl"
+ self.info = "Information about running/failed units and boot process"
+
+ def parse_log(self):
+
+ logger.debug("Starting systemctl section")
+ section = Section("systemctl")
+
+ try:
+ p = subprocess.Popen(["systemctl", "is-system-running"],
+ stdout = subprocess.PIPE)
+ (output, err) = p.communicate()
+ except Exception as e:
+ logger.warning("Failed to get system status: " + str(e))
+ else:
+ status_raw = str(output.decode('utf-8')).split()[0]
+ section.append_data(Data("System status", [status_raw], severity=SYS_STATUS[status_raw]))
+
+ try:
+ p = subprocess.Popen(
+ ["systemctl", "list-units"], stdout = subprocess.PIPE)
+ (output, err) = p.communicate()
+ except Exception as e:
+ logger.warning("Failed to get list of unit files: " + str(e))
+ units_raw = None
+ else:
+ units_raw = output.decode('utf-8')
+ unit_properties = [Unit(*line.split(maxsplit=4))
+ for line in units_raw.replace("●", " ").splitlines()[1:-7]]
+ unit_states = {}
+
+ for u in unit_properties:
+ if not u.sub in unit_states:
+ unit_states[u.sub] = []
+ unit_states[u.sub].append(u.name)
+
+ ok_data = Data()
+
+ for state, unit_list in unit_states.items():
+ if state in BAD_STATES:
+ logger.debug("Found critical unit {0} with status {1}".format(
+ u.name, u.sub))
+ section.append_data(Data(
+ plural(state + " unit", len(unit_list)), unit_list,
+ severity=BAD_STATES[state])
+ .truncl(config.prefs.getint("logparse", "maxlist")))
+ else:
+ ok_data.items.append(" ".join([str(len(unit_list)), state]))
+
+ if len(ok_data.items) > 0 and config.prefs.getboolean("systemctl", "show-all"):
+ ok_data.subtitle = plural("unit", len(ok_data.items)) \
+ + " in a non-critical state"
+ ok_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(ok_data)
+
+ logger.info("Finished systemctl section")
+ return section
+
--- /dev/null
+#
+# ufw_journald.py
+#
+# Get details about packets blocked by ufw (uses journald)
+#
+
+import datetime
+import re
+from systemd import journal
+
+from logparse import config
+from logparse.formatting import *
+from logparse.load_parsers import Parser
+from logparse.util import resolve
+
+PROTOCOLS = ["TCP", "UDP", "UDP-Lite", "ICMP", "ICMPv6", "AH", "SCTP", "MH"]
+
+class Packet():
+
+ def __init__(self, msg):
+ try:
+ (self.inif, self.outif, self.mac, self.src, self.dst, self.len,
+ self.proto, self.spt, self.dpt) = re.search(
+ r"IN=(?P<inif>\w*).*OUT=(?P<outif>\w*).*MAC=(?P<mac>\S*)"
+ r".*SRC=(?P<src>\S*).*DST=(?P<dst>\S*).*LEN=(?P<length>\d*)"
+ r".*PROTO=(?P<proto>\S*)(?:\sSPT=(?P<spt>\d*))?"
+ r"(?:\sDPT=(?P<dpt>\d*))?", msg).groupdict().values()
+ if self.proto and self.proto.isdigit():
+ self.proto = PROTOCOLS[int(self.proto)-1]
+ except Exception as e:
+ logger.warning("Malformed packet log: {0}. Error message: {1}".format(
+ msg, str(e)))
+ self.inif = self.outif = self.mac = self.src = self.dst = None
+ self.len = self.proto = self.spt = self.dpt = None
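# A hedged example of the kind of kernel message this expects (field layout
# as in typical ufw block logs; addresses, ports and MAC are made up):
#
#     msg = ("[UFW BLOCK] IN=eth0 OUT= MAC=00:15:5d:01:02:03:08:00 "
#            "SRC=198.51.100.7 DST=192.0.2.1 LEN=40 "
#            "PROTO=TCP SPT=55555 DPT=22")
#     pkt = Packet(msg)
#     # pkt.src == "198.51.100.7", pkt.proto == "TCP", pkt.dpt == "22"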
+
+class UfwJournald(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "ufw_journald"
+ self.info = "Get details about packets blocked by ufw"
+
+ def parse_log(self):
+
+ logger.debug("Starting ufw section")
+ section = Section("ufw")
+
+ j = journal.Reader()
+ j.this_machine()
+ j.add_match(_TRANSPORT='kernel')
+ j.add_match(PRIORITY=4)
+ j.seek_realtime(section.period.startdate)
+
+ logger.debug("Searching for messages")
+
+ blocked_packets = [Packet(entry["MESSAGE"]) for entry in j if "MESSAGE" in entry and "UFW BLOCK" in entry["MESSAGE"]]
+
+ logger.debug("Parsing messages")
+
+ inbound_interfaces = []
+ outbound_interfaces = []
+ n_inbound = n_outbound = 0
+ src_ips = []
+ dst_ips = []
+ src_ports = []
+ dst_ports = []
+ protocols = {'UDP': 0, 'TCP': 0}
+ src_macs = []
+
+ for pkt in blocked_packets:
+ if pkt.inif:
+ inbound_interfaces.append(pkt.inif)
+ elif pkt.outif:
+ outbound_interfaces.append(pkt.outif)
+ if pkt.src:
+ src_ips.append(resolve(pkt.src,
+ config.prefs.get("ufw", "ufw-resolve-domains")))
+ if pkt.dst:
+ dst_ips.append(resolve(pkt.dst,
+ config.prefs.get("ufw", "ufw-resolve-domains")))
+ if pkt.spt: src_ports.append(pkt.spt)
+ if pkt.dpt: dst_ports.append(pkt.dpt)
+ if pkt.proto:
+ protocols[pkt.proto] = protocols.get(pkt.proto, 0) + 1
+
+ section.append_data(Data(subtitle="{} blocked ({} UDP, {} TCP)".format(plural("packet", len(blocked_packets)), protocols['UDP'], protocols['TCP'])))
+
+ src_port_data = Data(items=src_ports)
+ src_port_data.orderbyfreq()
+ src_port_data.subtitle = plural("source port", len(src_port_data.items))
+ src_port_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(src_port_data)
+
+ dst_port_data = Data(items=dst_ports)
+ dst_port_data.orderbyfreq()
+ dst_port_data.subtitle = plural("destination port", len(dst_port_data.items))
+ dst_port_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(dst_port_data)
+
+ src_ips_data = Data(items=src_ips)
+ src_ips_data.orderbyfreq()
+ src_ips_data.subtitle = plural("source IP", len(src_ips_data.items))
+ src_ips_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(src_ips_data)
+
+ dst_ips_data = Data(items=dst_ips)
+ dst_ips_data.orderbyfreq()
+ dst_ips_data.subtitle = plural("destination IP", len(dst_ips_data.items))
+ dst_ips_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ section.append_data(dst_ips_data)
+
+ logger.info("Finished ufw section")
+ return section
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+'''
+timeparse.py
+(c) Will Roberts <wildwilhelm@gmail.com> 1 February, 2014
+
+Implements a single function, `timeparse`, which can parse various
+kinds of time expressions.
+'''
+
+# MIT LICENSE
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import re
+from datetime import timedelta
+
+SIGN = r'(?P<sign>[+|-])?'
+#YEARS = r'(?P<years>\d+)\s*(?:ys?|yrs?.?|years?)'
+#MONTHS = r'(?P<months>\d+)\s*(?:mos?.?|mths?.?|months?)'
+WEEKS = r'(?P<weeks>[\d.]+)\s*(?:w|wks?|weeks?)'
+DAYS = r'(?P<days>[\d.]+)\s*(?:d|dys?|days?)'
+HOURS = r'(?P<hours>[\d.]+)\s*(?:h|hrs?|hours?)'
+MINS = r'(?P<mins>[\d.]+)\s*(?:m|(mins?)|(minutes?))'
+SECS = r'(?P<secs>[\d.]+)\s*(?:s|secs?|seconds?)'
+SEPARATORS = r'[,/]'
+SECCLOCK = r':(?P<secs>\d{2}(?:\.\d+)?)'
+MINCLOCK = r'(?P<mins>\d{1,2}):(?P<secs>\d{2}(?:\.\d+)?)'
+HOURCLOCK = r'(?P<hours>\d+):(?P<mins>\d{2}):(?P<secs>\d{2}(?:\.\d+)?)'
+DAYCLOCK = (r'(?P<days>\d+):(?P<hours>\d{2}):'
+ r'(?P<mins>\d{2}):(?P<secs>\d{2}(?:\.\d+)?)')
+
+OPT = lambda x: r'(?:{x})?'.format(x=x, SEPARATORS=SEPARATORS)
+OPTSEP = lambda x: r'(?:{x}\s*(?:{SEPARATORS}\s*)?)?'.format(
+ x=x, SEPARATORS=SEPARATORS)
+
+TIMEFORMATS = [
+ r'{WEEKS}\s*{DAYS}\s*{HOURS}\s*{MINS}\s*{SECS}'.format(
+ #YEARS=OPTSEP(YEARS),
+ #MONTHS=OPTSEP(MONTHS),
+ WEEKS=OPTSEP(WEEKS),
+ DAYS=OPTSEP(DAYS),
+ HOURS=OPTSEP(HOURS),
+ MINS=OPTSEP(MINS),
+ SECS=OPT(SECS)),
+ r'{MINCLOCK}'.format(
+ MINCLOCK=MINCLOCK),
+ r'{WEEKS}\s*{DAYS}\s*{HOURCLOCK}'.format(
+ WEEKS=OPTSEP(WEEKS),
+ DAYS=OPTSEP(DAYS),
+ HOURCLOCK=HOURCLOCK),
+ r'{DAYCLOCK}'.format(
+ DAYCLOCK=DAYCLOCK),
+ r'{SECCLOCK}'.format(
+ SECCLOCK=SECCLOCK),
+ #r'{YEARS}'.format(
+ #YEARS=YEARS),
+ #r'{MONTHS}'.format(
+ #MONTHS=MONTHS),
+ ]
+
+COMPILED_SIGN = re.compile(r'\s*' + SIGN + r'\s*(?P<unsigned>.*)$')
+COMPILED_TIMEFORMATS = [re.compile(r'\s*' + timefmt + r'\s*$', re.I)
+ for timefmt in TIMEFORMATS]
+
+MULTIPLIERS = dict([
+ #('years', 60 * 60 * 24 * 365),
+ #('months', 60 * 60 * 24 * 30),
+ ('weeks', 60 * 60 * 24 * 7),
+ ('days', 60 * 60 * 24),
+ ('hours', 60 * 60),
+ ('mins', 60),
+ ('secs', 1)
+ ])
+
+def _interpret_as_minutes(sval, mdict):
+ """
+ Times like "1:22" are ambiguous; do they represent minutes and seconds
+ or hours and minutes? By default, timeparse assumes the latter. Call
+ this function after parsing out a dictionary to change that assumption.
+
+ >>> import pprint
+ >>> pprint.pprint(_interpret_as_minutes('1:24', {'secs': '24', 'mins': '1'}))
+ {'hours': '1', 'mins': '24'}
+ """
+ if ( sval.count(':') == 1
+ and '.' not in sval
+ and (('hours' not in mdict) or (mdict['hours'] is None))
+ and (('days' not in mdict) or (mdict['days'] is None))
+ and (('weeks' not in mdict) or (mdict['weeks'] is None))
+ #and (('months' not in mdict) or (mdict['months'] is None))
+ #and (('years' not in mdict) or (mdict['years'] is None))
+ ):
+ mdict['hours'] = mdict['mins']
+ mdict['mins'] = mdict['secs']
+ mdict.pop('secs')
+ pass
+ return mdict
+
+def timeparse(sval, granularity='seconds'):
+ '''
+ Wrapper around the strseconds function, converting the parsed number of
+ seconds into a datetime.timedelta object.
+ '''
+ return timedelta(seconds = strseconds(sval, granularity))
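# For example, a hedged sanity check of the wrapper (values follow from the
# MULTIPLIERS table above):
#
#     timeparse('1 day') == timedelta(days=1)               # True
#     timeparse('1w 2d 3h') == timedelta(days=9, hours=3)   # True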
+
+
+def strseconds(sval, granularity='seconds'):
+ '''
+ Parse a time expression, returning it as a timedelta. If
+ possible, the return value will be an `int`; if this is not
+ possible, the return will be a `float`. Returns `None` if a time
+ expression cannot be parsed from the given string.
+
+ Arguments:
+ - `sval`: the string value to parse
+
+ >>> strseconds('1:24')
+ 84
+ >>> strseconds(':22')
+ 22
+ >>> strseconds('1 minute, 24 secs')
+ 84
+ >>> strseconds('1m24s')
+ 84
+ >>> strseconds('1.2 minutes')
+ 72
+ >>> strseconds('1.2 seconds')
+ 1.2
+
+ Time expressions can be signed.
+
+ >>> strseconds('- 1 minute')
+ -60
+ >>> strseconds('+ 1 minute')
+ 60
+
+ If granularity is specified as ``minutes``, then ambiguous digits following
+ a colon will be interpreted as minutes; otherwise they are considered seconds.
+
+ >>> strseconds('1:30')
+ 90
+ >>> strseconds('1:30', granularity='minutes')
+ 5400
+ '''
+ match = COMPILED_SIGN.match(sval)
+ sign = -1 if match.groupdict()['sign'] == '-' else 1
+ sval = match.groupdict()['unsigned']
+ for timefmt in COMPILED_TIMEFORMATS:
+ match = timefmt.match(sval)
+ if match and match.group(0).strip():
+ mdict = match.groupdict()
+ if granularity == 'minutes':
+ mdict = _interpret_as_minutes(sval, mdict)
+ # if all of the fields are integer numbers
+ if all(v.isdigit() for v in list(mdict.values()) if v):
+ return sign * sum([MULTIPLIERS[k] * int(v, 10) for (k, v) in
+ list(mdict.items()) if v is not None])
+ # if SECS is an integer number
+ elif ('secs' not in mdict or
+ mdict['secs'] is None or
+ mdict['secs'].isdigit()):
+ # we will return an integer
+ return (
+ sign * int(sum([MULTIPLIERS[k] * float(v) for (k, v) in
+ list(mdict.items()) if k != 'secs' and v is not None])) +
+ (int(mdict['secs'], 10) if mdict['secs'] else 0))
+ else:
+ # SECS is a float, we will return a float
+ return sign * sum([MULTIPLIERS[k] * float(v) for (k, v) in
+ list(mdict.items()) if v is not None])
+
"""
from datetime import datetime, timedelta
-import inspect
+import ipaddress
import logging
import os
from pkg_resources import Requirement, resource_filename
import socket
from systemd import journal
-from logparse import config
+from logparse import config, formatting
+from logparse.timeparse import timeparse
logger = logging.getLogger(__name__)
return(ip)
try:
- socket.inet_aton(ip) # succeeds if text contains ip
+ ip_obj = ipaddress.ip_address(ip)
+ except ValueError as err:
+ logger.debug("Invalid format: " + str(err))
+ return ip
+
+ try:
hn = socket.gethostbyaddr(ip)[0] # resolve ip to hostname
- if fqdn == 'fqdn-implicit' and hn.split('.', 1)[1] == getlocaldomain():
- return(hn.split('.')[0])
- elif fqdn == 'fqdn' or fqdn == 'fqdn-implicit':
- return(hn)
- elif fqdn == 'host-only':
- return(hn.split('.')[0])
- else:
- logger.warning("Invalid value for FQDN config")
- return(hn)
except socket.herror:
# cannot resolve ip
logger.debug(ip + " cannot be found, might not exist anymore")
return(ip)
- except (OSError, socket.error): # socket.error for Python 2 compatibility
- # already a hostname
- logger.debug(ip + " is already a hostname")
- return(ip)
except Exception as err:
- logger.warning("failed to resolve hostname for " + ip + ": " + str(err))
+ logger.warning("Failed to resolve hostname for " + ip + ": " + str(err))
return(ip) # return ip if no hostname exists
+ if (fqdn == "host-only") or (fqdn == "fqdn-implicit" and ip_obj.is_private):
+ return hn.split('.')[0]
+ if fqdn == 'fqdn' or fqdn == 'fqdn-implicit':
+ return hn
+ return hn
+
+
def readlog(path = None, mode = 'r'):
"""
except (IOError, OSError) as e:
logger.warning("Error reading log at {0}: {1}".format(path, e.strerror))
return 1
+
+class LogPeriod:
+
+ def __init__(self, section):
+ if config.prefs.get(section.split("_")[0], "period"):
+ self.startdate = datetime.now() - timeparse(
+ config.prefs.get(section.split("_")[0], "period"))
+ logger.debug("Parsing logs for {0} since {1}".format(section,
+ self.startdate.strftime(formatting.DATEFMT + " " + formatting.TIMEFMT)))
+ self.unique = True
+ else:
+ self.startdate = datetime.now() - timeparse(config.prefs.get("logparse", "period"))
+ self.unique = False
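# A hedged sketch of how the journald-based parsers in this patch consume
# LogPeriod (compare the cron/sshd/ufw changes above):
#
#     section = Section("sshd")   # Section builds a LogPeriod("sshd")
#     j = journal.Reader()
#     j.this_machine()
#     j.seek_realtime(section.period.startdate)
#     # only journal entries newer than the configured period are read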
p {
margin-bottom: 0;
}
+
div.section {
display: inline-block;
width: 100%
}
+
+span.period {
+ color: #666;
+ font-size: 0.85em;
+}
+
+.severity-3, .severity-4, .severity-5 {
+ color: red;
+}