--- /dev/null
+clean:
+ find . -name "*.pyc" -exec rm -f {} +
+ find . -name "*.pyo" -exec rm -f {} +
+ find . -name "*~" -exec rm -f {} +
+
+clean-build:
+ rm -rf build/
+ rm -rf dist/
+ rm -rf *.egg-info
+
+build:
+ python setup.py sdist
+
+install:
+ python setup.py install
+
+docs:
+ make -C doc man
+++ /dev/null
-output: /mnt/andrew/temp/logparse/summary.html
-hddtemp:
- drives:
- - /dev/sda
- - /dev/sdc
- - /dev/sdd
- - /dev/sde
- port: 7634
- show-model: yes
-du:
- force-write: y
- paths:
- - /home/andrew
- - /mnt/andrew
-rotate: y
-httpd:
- resolve-domains: ip
-resolve-domains: fqdn-implicit
'cron': '/var/log/cron.log',
'cpuinfo': '/proc/cpuinfo',
'meminfo': '/proc/meminfo',
+ 'uptime': '/proc/uptime',
'sys': '/var/log/syslog',
'smbd': '/var/log/samba',
'zfs': '/var/log/zpool.log',
'sshd-resolve-domains': ''
},
'smbd': {
+        'shares': r'^((?!IPC\$).)*$',
+ 'users': '.*',
'smbd-resolve-domains': ''
},
'httpd': {
for if permissions are not automatically granted.
"""
+ logger = logging.getLogger(__name__)
try:
if not os.geteuid() == 0:
if stdin.isatty():
privileges, but permission errors will be shown in the output without it.
"""
+ logger = logging.getLogger(__name__)
try:
if not os.geteuid() == 0:
logger.warning("Cannot run logrotate as root - you will see permission errors in the output below")
logparse.formatting.Section object.
"""
- def __init__(self, name=None, path=None, info=None, deprecated=False):
+ def __init__(self, name=None, path=None, info=None, deprecated=False, successor=""):
"""
The following variables can be set to display information about the
parser. The object `self.logger` can be used as for outputting messages
self.info = dict(info) if info else None
self.logger = logging.getLogger(__name__)
self.deprecated = deprecated
+ self.successor = successor
def load(self):
"""
parser_module = spec.loader.load_module(spec.name)
return self._validate_module(parser_module)
except Exception as e:
- logger.debug("Couldn't find parser {0} in {1}: {2}".format(pattern, self.path, str(e)))
+ logger.debug("Couldn't find parser {0} in {1}".format(pattern, self.path))
return None
def _search_default(self, pattern):
if None in get_type_hints(c):
logger.warning("Parser class {0} in {1} contains a null-returning parse_log() method".format(c.__class__.__name__, c.__file__))
continue
+ parser_obj = c()
+ if parser_obj.deprecated:
+ logger.warning("Parser {0} is deprecated - use {1} instead".format(parser_obj.name, parser_obj.successor))
logger.debug("Found parser {0}.{1}".format(c.__module__, c.__class__.__name__))
available_parsers.append(c())
self.name = "cron"
self.info = "List the logged (executed) cron jobs and their commands (uses static syslog file)"
self.deprecated = True
+ self.successor = "cron_journald"
def parse_log(self):
# smbd.py
#
# Get login statistics for a samba server.
-# TODO: add feature to specify shares to check in config file
+#
+# NOTE: This file is now deprecated in favour of the newer journald mechanism
+# used in smbd-journald.py. This parser is still functional but is slower and
+# has less features. Please switch over if possible.
#
import re
super().__init__()
self.name = "smbd"
self.info = "Get login statistics for a samba server."
+ self.deprecated = True
+ self.successor = "smbd_journald"
def parse_log(self):
logger.debug("Starting smbd section")
--- /dev/null
+"""
+Get login statistics for a samba server daemon (uses journald). Recommended
+setup in /etc/smbd.conf is to set `logging = syslog@3 ...` (ensure smbd was
+built with `configure --with-syslog`).
+"""
+
+import re
+import glob
+import logging
+from systemd import journal
+
+from logparse.formatting import *
+from logparse.util import readlog, resolve
+from logparse import config
+from logparse.load_parsers import Parser
+
+logger = logging.getLogger(__name__)
+
+class SmbdJournald(Parser):
+
+ def __init__(self):
+ super().__init__()
+ self.name = "smbd_journald"
+ self.info = "Get login statistics for a samba server."
+
+ def parse_log(self):
+ logger.debug("Starting smbd section")
+ section = Section("smbd")
+
+ j = journal.Reader()
+ j.this_boot()
+ j.log_level(journal.LOG_DEBUG)
+ j.add_match(_COMM="smbd")
+
+ messages = [entry["MESSAGE"] for entry in j if "MESSAGE" in entry]
+
+ total_auths = 0 # total number of logins for all users and all shares
+ shares = {} # file shares (each share is mapped to a list of user-hostname pairs)
+
+        logger.debug("Found {0} samba log messages".format(str(len(messages))))
+
+ for msg in messages: # one log file for each client
+
+ if "connect to service" in msg:
+                entry = re.search(r'(\w*)\s*\(ipv.:(.+):.+\) connect to service (\S+) initially as user (\S+)', msg) # [('client', 'ip', 'share', 'user')]
+ try:
+ client, ip, share, user = entry.group(1,2,3,4)
+                except AttributeError:
+ logger.warning("Malformed log message: " + msg)
+ continue
+
+ if not share in shares:
+ share_match = False
+ for pattern in config.prefs.get("smbd", "shares").split():
+ share_match = re.fullmatch(pattern, share) or share_match
+ if not share_match:
+ logger.debug("Ignoring share {0} due to config".format(share))
+ continue
+
+ if (not client.strip()):
+ client = ip
+ userhost = user + '@' + resolve(client, fqdn=config.prefs.get("smbd", "smbd-resolve-domains"))
+
+ user_match = False
+ for pattern in config.prefs.get("smbd", "users").split():
+ user_match = re.fullmatch(pattern, userhost) or user_match
+ if not user_match:
+ logger.debug("Ignoring login to {0} by user {1} due to config".format(share, userhost))
+ continue
+
+ total_auths += 1
+ if share in shares:
+ shares[share].append(userhost)
+ else:
+ shares[share] = [userhost]
+
+ section.append_data(Data(subtitle="Total of {0} authentications".format(str(total_auths))))
+
+ for share, logins in shares.items():
+ share_data = Data()
+ share_data.items = logins
+ share_data.orderbyfreq()
+ share_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ share_data.subtitle = share + " ({0}, {1})".format(plural("user", len(share_data.items)), plural("login", len(logins)))
+ section.append_data(share_data)
+ logger.debug("Found {0} logins for share {1}".format(str(len(logins)), share))
+
+ logger.info("Finished smbd section")
+ return section
self.name = "sshd"
self.info = "Find number of ssh logins and authorised users (uses /var/log/auth.log)"
self.deprecated = True
+ self.successor = "sshd_journald"
def parse_log(self):
import subprocess
import os
import re
+from datetime import timedelta
+from multiprocessing import cpu_count
from logparse.formatting import *
from logparse.config import prefs
else:
logger.warning("Failed to find processor data")
+ raw_uptime = util.readlog(prefs.get("logs", "uptime")).split("\n")[0]
+ logger.debug("Found uptime data " + str(raw_uptime))
+
+ uptime_total = float(raw_uptime.split()[0])
+ table.add_row(Row([Column("Uptime"), Column("%d d %d h %d m" % (uptime_total // 86400, uptime_total % 86400 // 3600, uptime_total % 3600 // 60))]))
+
+    idle_time = float(raw_uptime.split()[1]) / cpu_count()
+    table.add_row(Row([Column("Idle time"), Column("%d d %d h %d m per core (avg)" % (idle_time // 86400, idle_time % 86400 // 3600, idle_time % 3600 // 60))]))
+
logger.info("Finished sysinfo section")
return section
logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
logger.debug("Sorted drive info: " + str(drives))
- hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
- logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
- hddtemp_data.subtitle += " (avg {0}{1}{2})".format(str(hddavg), DEG, CEL)
+ if not len(drives) == 0:
+ hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
+ logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
+        hddtemp_data.subtitle += " (avg {0})".format(str(hddavg))
+ section.append_data(hddtemp_data)
logger.debug("Finished processing drive temperatures")
logger.info("Finished temp section")
-from setuptools import setup
+import setuptools
from os import path
# Import main module so we can set the version
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
-setup(
+setuptools.setup(
name='logparse', # https://packaging.python.org/specifications/core-metadata/#name
version=logparse.__version__, # https://www.python.org/dev/peps/pep-0440/ https://packaging.python.org/en/latest/single_source_version.html
description='Summarise server logs',
keywords='logparse log parse analysis summary monitor email server',
packages=['logparse', 'logparse.parsers'],
python_requires='>=3', # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
- install_requires=['premailer', 'requests', 'tabulate'], # https://packaging.python.org/en/latest/requirements.html
+ install_requires=['premailer', 'requests', 'tabulate', 'sensors.py', 'systemd-python'], # https://packaging.python.org/en/latest/requirements.html
data_files=[('/etc/logparse', ['logparse.conf', 'header.html', 'main.css']), ('man/man8', ['doc/build/man/logparse.8'])],
project_urls={
'Readme': 'https://git.lorimer.id.au/logparse.git/about',