# Executed when the logparse directory is executed as a script
from .interface import main
-__version__ = '1.0'
main()
import types
import os
from pkg_resources import Requirement, resource_filename
+from types import SimpleNamespace
import logparse
import logging
logger.debug("Found {0}".format(loc))
return loc
-prefs = {
class Configuration(dict):
    """Dictionary subclass holding logparse settings.

    Behaves exactly like a dict; exists so config-specific helpers can
    live alongside the preference data.
    """

    def __init__(self, *arg, **kw):
        super(Configuration, self).__init__(*arg, **kw)

    @staticmethod
    def _str2bool(x):
        """Convert a yes/no-style config string to a bool.

        Accepts (case-insensitively) "yes"/"true"/"1"/"y" for True and
        "no"/"false"/"0"/"n" for False.

        Raises:
            ValueError: if *x* matches neither set.

        Declared @staticmethod: the original definition had no ``self``
        parameter, so calling it on an instance passed the instance as
        *x* and crashed.
        """
        positives = ["yes", "true", "1", "y"]
        negatives = ["no", "false", "0", "n"]
        x = x.lower()
        if x in positives:
            return True
        elif x in negatives:
            return False
        else:
            raise ValueError("Unknown option %s" % x)
+
# Built-in default preferences; loadconf() overlays values from the YAML
# config file on top of these.
# NOTE(review): in the diffed source the log-path keys ('postfix', 'httpd')
# appeared nested inside 'hddtemp', but consumers read
# config.prefs['logs']['httpd'] — they are grouped under a 'logs' key here.
# Confirm against the full file (the diff hunk may have elided lines).
defaults = Configuration({
    'output': '/var/www/logparse/summary.html',
    'header': '/etc/logparse/header.html',
    'css': '/etc/logparse/main.css',
    'verbose': 'n',
    'hddtemp': {
        'drives': ['/dev/sda'],
        'host': '127.0.0.1',
        'separator': '|',
        'timeout': 10,
        'port': 7634
    },
    'logs': {
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
})
+
def verify(raw_dict, defaults):
    """Validate user config keys against the defaults.

    Walks *raw_dict*, warning about any key that does not exist in
    *defaults* and recursing into nested dictionaries. Purely advisory:
    nothing is removed or modified.
    """
    for key, value in raw_dict.items():
        if key not in defaults:
            # unknown key: report it and move on
            logger.warning("Invalid key {0} with value {1}".format(key, value))
            continue
        logger.debug("Found valid key {0} with value {1}".format(key, value))
        if isinstance(value, dict):
            # validate nested sections against the matching default section
            verify(value, defaults[key])
def loadconf(argparser, configfile = "/etc/logparse/logparse.conf"):
    """Load preferences from a YAML config file merged over the defaults.

    Args:
        argparser: the argparse.ArgumentParser for the CLI (used to pick
            up the --to recipient override).
        configfile: path to the YAML configuration file.

    Returns:
        The merged preferences dict, or None if anything went wrong
        (errors are logged, not raised — best-effort by design).
    """
    logger.debug("Getting config from {0}".format(configfile))
    try:
        # context manager so the file handle is closed
        # (was leaked via yaml.safe_load(open(configfile)))
        with open(configfile) as f:
            raw_dict = yaml.safe_load(f)
        # verify fields
        verify(raw_dict, defaults)
        # NOTE: this aliases (and therefore mutates) the module-level
        # defaults rather than copying them
        prefs = defaults
        for key, value in raw_dict.items():
            if isinstance(value, dict):
                if key not in prefs:
                    prefs[key] = {}
                for subkey, subvalue in value.items():
                    logger.debug("Inserting key {0} with value {1}".format(subkey, subvalue))
                    prefs[key][subkey] = subvalue
            else:
                prefs[key] = value
        # parse CLI args once instead of re-parsing at every use
        args = argparser.parse_args()
        if args.to is not None:
            prefs['mail']['to'] = args.to
        if not prefs['mail']['to']:
            logger.info("No recipient address provided, outputting to stdout")
        else:
            logger.info("Email will be sent to " + prefs['mail']['to'])
        return prefs
    except Exception as e:
        # deliberate catch-all: config problems must not abort the run
        logger.warning("Error processing config: " + str(e))
import re
import locale
-from .config import prefs
#import util
#import interface
import logparse
-from . import interface, util
+from . import interface, util, config
import logging
logger = logging.getLogger(__name__)
def init_varfilter():
    """Build the global template-variable substitution table and pattern.

    Populates ``varfilter`` (escaped token -> replacement text) and
    ``varpattern`` (one compiled alternation matching every token).
    """
    global varfilter
    global varpattern
    substitutions = {
        "$title$": config.prefs['title'],
        "$date$": interface.start.strftime(DATEFMT),
        "$time$": interface.start.strftime(TIMEFMT),
        "$hostname$": util.hostname(config.prefs['hostname-path']),
        "$version$": logparse.__version__,
        "$css$": os.path.relpath(config.prefs['css'], os.path.dirname(config.prefs['output'])),
    }
    # escape the tokens so '$' is matched literally by the regex below
    varfilter = {re.escape(token): text for token, text in substitutions.items()}
    varpattern = re.compile("|".join(varfilter.keys()))
from datetime import datetime
import logparse
-from . import config
-from logparse import formatting, mail
+from .config import *
+from logparse import formatting, mail, config
from .parsers import load_parsers, sudo, sshd, cron, httpd, smbd, postfix, zfs, temperature
def rotate():
    """Rotate system logfiles via logrotate, honouring the 'rotate' pref.

    Only runs when no specific function was requested on the CLI.
    'y' forces a real rotation; 's' performs a simulated (dry-run)
    rotation and logs its output; anything else skips rotation.
    """
    if parser.parse_args().function is not None:
        return
    mode = prefs['rotate']
    if mode == 'y':
        # force an immediate rotation of everything in logrotate.conf
        subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
        logger.info("rotated logfiles")
    else:
        logger.debug("user doesn't want to rotate logs")
        if mode == 's':
            # dry-run so the user can see what would have been rotated
            logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
            sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
            logger.debug(sim)
parser.add_argument('-nr', '--no-rotate', help='do not rotate logfiles (overrides logparse.conf)', required=False, default=False, action='store_true')
parser.add_argument('-l', '--logs', help='services to analyse', required=False)
+ # Load config
+ if parser.parse_args().config:
+ config.prefs = config.loadconf(parser.parse_args().config, parser)
+ else:
+ config.prefs = config.loadconf(argparser=parser)
+ prefs = config.prefs
+
# Set up logging
logger = logging.getLogger(__name__)
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
loghandler.setLevel(logging.WARNING) # don't spam syslog with debug messages
- if parser.parse_args().verbose:
+ if parser.parse_args().verbose or (config.prefs['verbose'] == 'y' or config.prefs['verbose'] == 'yes'):
print("Verbose mode is on")
logging.basicConfig(level=logging.DEBUG)
logger.debug("Verbose mode turned on")
logging.basicConfig(level=logging.INFO)
logger.addHandler(loghandler)
- # Load config
- if parser.parse_args().config or config.prefs['verbose']:
- config.prefs = config.loadconf(parser.parse_args().config, parser)
- else:
- config.prefs = config.loadconf(argparser=parser)
logger.debug("Finished loading config")
# Time analysis
global start
start = datetime.now()
logger.info("Beginning log analysis at {0} {1}".format(start.strftime(formatting.DATEFMT), start.strftime(formatting.TIMEFMT)))
- logger.debug("This is {0} version {1}, running on Python {2}".format(logparse.__name__, logparse.__version__, sys.version))
+ logger.debug("This is {0} version {1}, running on Python {2}".format(logparse.__name__, logparse.__version__, sys.version.replace('\n', '')))
# for l in parser.parse_args().logs.split(' '):
# eval(l)
# sys.exit()
-# print(load_parsers.search());
+ print(load_parsers.search());
# Write HTML document
global output_html
- output_html = formatting.header(config.prefs['header'])
+ output_html = formatting.header(prefs['header'])
output_html += sudo.parse_log()
output_html += sshd.parse_log()
output_html += cron.parse_log()
logger.warning("No output written")
if parser.parse_args().to:
- mail.sendmail(mailbin=config.prefs['mail']['mailbin'], body=output_html, recipient=parser.parse_args().to, subject="logparse test")
+ mail.sendmail(mailbin=prefs['mail']['mailbin'], body=output_html, recipient=parser.parse_args().to, subject="logparse test")
# Print end message
finish = datetime.now()
output = ''
logger.debug("Starting httpd section")
output += opentag('div', 1, 'httpd', 'section')
- accesslog = readlog(config['logs']['httpd'] + '/access.log')
+ accesslog = readlog(config.prefs['logs']['httpd'] + '/access.log')
a = len(accesslog.split('\n'))
- errorlog = readlog(config['logs']['httpd'] + '/error.log')
+ errorlog = readlog(config.prefs['logs']['httpd'] + '/error.log')
e = len(errorlog.split('\n'))
data_b = 0
ips = []
def search():
logger.debug("Searching for parsers in {0}".format(parser_dir))
parsers = []
- parser_candidates = glob.glob(os.path.join(os.path.dirname(parser_dir), "*.py"))
+ parser_candidates = glob.glob(os.path.join(os.path.dirname(parser_dir), "*"))
+ logger.debug("Found parser candidates {0}".format(str(parser_candidates)))
for p in parser_candidates:
location = os.path.join(parser_dir, p)
if not os.path.isdir(parser_dir) or not main_module + '.py' in os.listdir(location):
from ..formatting import *
from ..util import readlog, resolve
-from .. import config
+from ..config import *
import logging
logger = logging.getLogger(__name__)
class HddtempClient:
    """Minimal telnet client for the hddtemp daemon.

    Connects to an hddtemp server and parses its separator-delimited
    drive report into Drive objects.
    """

    def __init__(self, host: str='127.0.0.1', port: int=7634, timeout: int=10, sep: str='|') -> None:
        self.host = host
        self.port = port
        self.timeout = timeout
        self.sep = sep

    def _parse_drive(self, drive: str) -> Drive:
        """Parse one sep-delimited drive record (path, model, temp, units).

        Returns None (and logs a warning) if the record is malformed.
        """
        try:
            drive_data = drive.split(self.sep)
            return Drive(drive_data[0], drive_data[1], int(drive_data[2]), drive_data[3])
        except Exception:
            # log the raw record: the original logged drive_data, which is
            # unbound (NameError) if split() itself raised
            logger.warning("Error processing drive: {0}".format(str(drive)))
            return None

    def _parse(self, data: str) -> List[Drive]:
        """Split the server payload into records and parse each one.

        Malformed records are dropped rather than aborting the whole parse.
        """
        line = data.lstrip(self.sep).rstrip(self.sep)  # Remove first/last
        # records are delimited by a doubled separator
        drives = line.split(self.sep * 2)
        parsed_drives = []
        for drive in drives:
            parsed_drive = self._parse_drive(drive)
            if parsed_drive is not None:
                parsed_drives.append(parsed_drive)
        return parsed_drives

    def get_drives(self) -> List[Drive]:
        """Obtain and parse drive data from the hddtemp telnet server."""
        try:
            with Telnet(self.host, self.port, timeout=self.timeout) as tn:
                data = tn.read_all()
            return self._parse(data.decode('ascii'))  # Return parsed data
        except Exception as e:
            logger.warning("Couldn't read data from {0}:{1} - {2}".format(self.host, self.port, str(e)))
            # NOTE(review): returns the int 1 on failure while success
            # returns a list — callers that iterate the result will crash.
            # Kept for compatibility; confirm before changing to [].
            return 1
client = HddtempClient(host=config.prefs['hddtemp']['host'], port=int(config.prefs['hddtemp']['port']), sep=config.prefs['hddtemp']['separator'], timeout=int(config.prefs['hddtemp']['timeout']))
drives = client.get_drives()
+ logger.debug("Received drive info: " + str(drives))
for drive in sorted(drives, key=lambda x: x.path):
if drive.path in config.prefs['hddtemp']['drives']:
sumtemp += drive.temperature
else:
drives.remove(drive)
logger.debug("Ignoring drive {0} ({1})due to config".format(drive.path, drive.model))
- logger.debug("Received drive info: " + str(drives))
+ logger.debug("Sorted drive info: " + str(drives))
hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units) # use units of first drive
logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
fields.append("avg: " + str(hddavg))
- if (prefs['hddtemp']['drives'] != ''):
+ if (config.prefs['hddtemp']['drives'] != ''):
output += writedata("disks", fields)
logger.info("Finished processing drive temperatures")
import logging
logger = logging.getLogger(__name__)
-from .config import prefs
from pkg_resources import Requirement, resource_filename
+from . import config
+
def hostname(path): # get the hostname of current server
hnfile = open(path, 'r')
hn = re.search('^(\w*)\n*', hnfile.read()).group(1)
# resolve-domains defined in individual sections of the config take priority over global config
if not fqdn:
- fqdn = prefs['resolve-domains']
+ fqdn = config.prefs['resolve-domains']
if fqdn == 'ip':
return(ip)
import logparse
here = path.abspath(path.dirname(__file__))
-__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/ https://packaging.python.org/en/latest/single_source_version.html
+__version__ = logparse.__version__ # https://www.python.org/dev/peps/pep-0440/ https://packaging.python.org/en/latest/single_source_version.html
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f: