From: Andrew Lorimer
Date: Mon, 21 Jan 2019 03:19:23 +0000 (+1100)
Subject: bugfixing in du and apache sections
X-Git-Url: https://git.lorimer.id.au/logparse.git/diff_plain/c24ab8a1e0437e1afcc990786c39186320475d87?hp=6b6fec6ca92d82c05b56d7224541e56eed5f2d37

bugfixing in du and apache sections
---

diff --git a/logparse.py b/logparse.py
index 59370bc..8906802 100755
--- a/logparse.py
+++ b/logparse.py
@@ -1,6 +1,6 @@
 #! /usr/bin/python
 
-import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
+import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer, locale
 from sys import stdin
 from collections import namedtuple, defaultdict
 from shutil import copyfile
@@ -10,13 +10,15 @@ import logging.handlers
 import types
 
 reload(sys)
-sys.setdefaultencoding('utf-8')
+sys.setdefaultencoding('utf-8')     # force utf-8 because anything else should die
+
+locale.setlocale(locale.LC_ALL, '') # inherit system locale
 
 scriptdir = os.path.dirname(os.path.realpath(__file__))
 
 
 diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
-drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
+drivetemp = namedtuple('drivetemp', ['path', 'name', 'temp', 'units'])
 config = {
     'output': '~/var/www/logparse/summary.html',
     'header': scriptdir + '/header.html',
@@ -32,7 +34,8 @@ config = {
     'rotate': 'y',
     'hddtemp': {
         'drives': ['/dev/sda'],
-        'port': 7634
+        'port': 7634,
+        'show-model': False,
     },
     'du-paths': ['/', '/etc', '/home'],
     'hostname-path': '/etc/hostname',
@@ -50,25 +53,14 @@ config = {
 
 
 HTTPDSTATUS = "http://localhost/server-status"
-# config['du-paths'] = ["/home/andrew", "/mnt/andrew"]
-# config['hddtemp']['drives'] = ["/dev/sda", "/dev/sdc", "/dev/sdd", "/dev/sde"]
-# config['hddtemp']['port'] = 7634
-# config['output'] = "/mnt/andrew/temp/logparse/summary.html"
-# config['output'] = "/mnt/andrew/temp/logparse/out.html"
 MAILPATH = "/mnt/andrew/temp/logparse/mail.html"
-# config['dest'] = "/mnt/andrew/temp/logparse"
-# config['header'] = os.path.dirname(os.path.realpath(__file__)) + "/header.html"
-# config['css'] = os.path.dirname(os.path.realpath(__file__)) + "/main.css"
 MAILOUT = ""
 HTMLOUT = ""
 TXTOUT = ""
-# config['title'] = "logparse"
-# config['maxlist'] = 10
-# config['maxcmd'] = 3
-# config['mail']['subject'] = "logparse from $hostname$"
 VERSION = "v0.1"
-DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
-DEG = " °C".encode('unicode_escape')
+#DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
+DEG = "°".encode('unicode_escape')
+CEL = "C"
 
 # Set up logging
 logging.basicConfig(level=logging.DEBUG)
@@ -80,13 +72,26 @@ logger.addHandler(loghandler)
 
 # Get arguments
 parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
+parser.add_argument('-f', '--function', help='run a specified function with parameters (for debugging purposes)',required=False)
 parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
 
 
 def __main__():
     logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))
+    loadconf(scriptdir + "/logparse.yaml")
-    if (config['mail']['to'] == None):
+
+    # check if user wants to test an isolated function
+    debugfunc = parser.parse_args().function
+    if debugfunc is not None:
+        logger.debug("executing a single function: " + debugfunc)
+        try:
+            logger.debug((debugfunc + ': ' + eval(debugfunc)))
+            sys.exit()
+        except Exception as e:
+            sys.exit("debug function failed with error " + str(e))
+        logger.debug("finished executing debug function")
+
+    if not config['mail']['to']:
         logger.info("no recipient address provided, outputting to stdout")
     else:
         logger.info("email will be sent to " + config['mail']['to'])
@@ -154,7 +159,6 @@ def writedata(subtitle, data = None):   # write title and data to tempfile
         tag('p', 0, subtitle)
         opentag('ul', 1)
         for datum in data:
-            logger.debug("printing datum " + datum)
             tag('li', 0, datum)
         closetag('ul', 1)
 
@@ -196,18 +200,25 @@ def subject(template):
     logger.debug("returning subject line " + r)
     return r
 
-def hostname(): # get the hostname
+def hostname(): # get the hostname of current server
     hnfile = open(config['hostname-path'], 'r')
     hn = re.search('^(.*)\n*', hnfile.read()).group(1)
     return hn
 
-def resolve(ip):    # try to resolve an ip to hostname
-    logger.debug("trying to resolve ip " + ip)
+
+def resolve(ip, fqdn = False):  # try to resolve an ip to hostname
     try:
         socket.inet_aton(ip)    # succeeds if text contains ip
-        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to hostname
-        logger.debug("found hostname " + hn)
-        return(hn)
+        hn = socket.gethostbyaddr(ip)[0] # resolve ip to hostname
+        return(hn if fqdn else hn.split('.')[0])
+    except OSError:
+        # already a hostname
+        logger.debug(ip + " is already a hostname")
+        return(ip)
+    except socket.herror:
+        # cannot resolve ip
+        logger.debug(ip + " cannot be found, might not exist anymore")
+        return(ip)
     except:
         logger.debug("failed to resolve hostname for " + ip)
         return(ip)  # return ip if no hostname exists
@@ -354,7 +365,7 @@ def sudo():
     cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
     for cmd in cmatches:
         commands.append(cmd)
-    logger.debug("found the following commands: " + str(commands))
+#    logger.debug("found the following commands: " + str(commands))
 
     writetitle("sudo")
     subtitle = plural("sudo session", num) + " for"
@@ -388,7 +399,7 @@ def cron():
     for match in matches:
         commands.append(str(match))
 #    commands.append([str(match)for match in matches])
-    logger.debug("found cron command " + str(commands))
+    #logger.debug("found cron command " + str(commands))
     logger.info("found " + str(num) + " cron jobs")
     subtitle = str(num) + " cron jobs run"
     writetitle("cron")
@@ -408,12 +419,13 @@ def cron():
 def nameget():
     logger.debug("starting nameget section")
     opentag('div', 1, 'nameget', 'section')
+    logger.debug("reading syslog.. this may take a while")
     syslog = readlog('sys')
     failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
     n_f = sum(1 for i in failed)
     l_f = []
     for i in failed:
-        l_f.append(i)
+        l_f.append(i if i else '[no destination]')
     logger.debug("the following downloads failed: " + str(l_f))
     succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
     n_s = sum(1 for i in succ)
@@ -423,8 +435,8 @@ def nameget():
     logger.debug("the following downloads succeeded: " + str(l_f))
     logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
     writetitle("nameget")
-    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
-    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
+    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxlist']))
+    writedata(str(n_f) + " failed", truncl(l_f, config['maxlist']))
     closetag('div', 1)
     logger.info("finished nameget section")
 
@@ -450,15 +462,13 @@ def httpd():
     for line in accesslog.split('\n'):
         fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
         try:
-            ips.append(fields.group(1))
+            ips.append(resolve(fields.group(1), fqdn=True))
            files.append(fields.group(2))
             useragents.append(fields.group(5))
-            logger.debug("transferred " + fields.group(3) + " bytes in this request")
             data_b += int(fields.group(3))
-            logger.debug("data_b is now " + str(data_b))
         except Exception as error:
-            if type(error) is AttributeError:
-                logger.debug("attributeerrror: " + str(error))
+            if type(error) is AttributeError: # this line is not an access log
+                pass
             else:
                 logger.warning("error processing httpd access log: " + str(error))
     logger.debug(str(data_b) + " bytes transferred")
@@ -467,28 +477,25 @@ def httpd():
     logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
 
     if (a > 0):
-        logger.debug("found the following requests: " + str(files))
         files = addtag(files, 'code')
         files = orderbyfreq(files)
-        files = truncl(files, config['maxcmd'])
-        writedata(str(a) + " requests", files)
+        files = truncl(files, config['maxlist'])
+        writedata(plural(" request", a), files)
     if (ips != None):
-        logger.debug("found the following ips: " + str(ips))
         ips = addtag(ips, 'code')
         ips = orderbyfreq(ips)
         n_ip = str(len(ips))
-        ips = truncl(ips, config['maxcmd'])
-        writedata(n_ip + " clients", ips)
+        ips = truncl(ips, config['maxlist'])
+        writedata(plural(" client", n_ip), ips)
    if (useragents != None):
-        logger.debug("found the following useragents: " + str(useragents))
         useragents = addtag(useragents, 'code')
         useragents = orderbyfreq(useragents)
         n_ua = str(len(useragents))
-        useragents = truncl(useragents, config['maxcmd'])
-        writedata(n_ua + " devices", useragents)
+        useragents = truncl(useragents, config['maxlist'])
+        writedata(plural(" device", n_ua), useragents)
 
     writedata(data_h + " transferred")
-    writedata(str(e) + " errors")
+    writedata(plural(" error", e))
     closetag('div', 1)
     logger.info("finished httpd section")
 
@@ -549,9 +556,13 @@ def smbd():
         # find the machine (ip or hostname) that this file represents
         ip = re.search('log\.(.*)', file).group(1)  # get ip or hostname from file path (/var/log/samba/log.host)
         host = resolve(ip)
+        if (host == ip):    # if ip has disappeared, fall back to a hostname from logfile
+            newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
+            if (len(set(newhost)) == 1):    # all hosts in one file should be the same
+                host = newhost[0].lower()
 
-        # count number of logins from each user
-        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
+        # count number of logins from each user-host pair
+        matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
         for match in matches:
             userhost = match + "@" + host
             sigma_auths.append(userhost)
@@ -568,7 +579,7 @@ def smbd():
         writedata(subtitle)
     else:   # multiple users
         sigma_auths = orderbyfreq(sigma_auths)
-        sigma_auths = truncl(sigma_auths, config['maxcmd'])
+        sigma_auths = truncl(sigma_auths, config['maxlist'])
         logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
         writedata(subtitle, sigma_auths)
     closetag('div', 1)
@@ -598,7 +609,7 @@ def postfix():
         s = list(set(r))    # unique recipients
         if (len(s) > 1):
             r = orderbyfreq(r)
-            r = truncl(r, config['maxcmd'])
+            r = truncl(r, config['maxlist'])
             writedata(n + " messages sent to", r)
         else:
             writedata(n + " messages sent to " + r[0])
@@ -616,8 +627,7 @@ def zfs():
     logger.debug("starting zfs section")
     opentag('div', 1, 'zfs', 'section')
     zfslog = readlog('zfs')
-    logger.debug("zfs log is " + zfslog)
-    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
+    logger.debug("got zfs logfile")
     pool = re.search('.*---\n(\w*)', zfslog).group(1)
     scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
     iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
@@ -648,12 +658,14 @@ def zfs():
 def temp():
     logger.debug("starting temp section")
     opentag('div', 1, 'temp', 'section')
+
+    # cpu temp
+
     sensors.init()
     coretemps = []
     pkgtemp = 0
     systemp = 0
     try:
-        print(sensors.iter_detected_chips())
         for chip in sensors.iter_detected_chips():
             for feature in chip:
                 if "Core" in feature.label:
@@ -669,45 +681,63 @@ def temp():
         logger.debug("average cpu temp is " + str(core_avg))
         coretemps.append(["avg", str(core_avg)])
         coretemps.append(["pkg", pkgtemp])
-        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
+        coretemps = [x[0] + ": " + str(x[1]) + DEG + CEL for x in coretemps]
     finally:
         sensors.cleanup()
 
+    # drive temp
+
     # For this to work, `hddtemp` must be running in daemon mode.
     # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect(('localhost',config['hddtemp']['port']))
-    output = s.recv(4096)
-    output += s.recv(4096)
-    s.close()
-    config['hddtemp']['drives'] = []
-    for drive in re.split('\|1}', output):
-        try:
-            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
-            name = fields.group(1)
-            temp = float(fields.group(2))
-            units = fields.group(3)
-            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
-        except:
-            pass
-    hddtotal = 0
-    data = []
-    for drive in config['hddtemp']['drives']:
-        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
-        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
-        hddtotal += drive.temp
-    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
-    logger.debug("sum of disk temps is " + str(hddtotal))
-    hddavg = "{0:.2f}".format(hddtotal/float(len(config['hddtemp']['drives']))) + DEG
-    logger.debug("avg disk temp is " + str(hddavg))
-    data.append("avg: " + str(hddavg))
+
+    received = ''
+    sumtemp = 0
+    data = ""
+    output = []
+
+    try:
+        hsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        hsock.connect(("localhost", int(config['hddtemp']['port'])))
+        logger.debug("tcp socket on port " + str(int(config['hddtemp']['port'])) + " opened for `hddtemp` (ensure daemon is running)")
+        hsock.sendall('')   # send dummy packet and shut write conn
+        hsock.shutdown(socket.SHUT_WR)
+
+        while 1:
+            line = hsock.recv(1024)
+            if line == "":  # exit on blank line
+                break
+            logger.debug("received line " + str(line))
+            data += line
+        hsock.close()
+        logger.debug("closed connection, having received " + str(sys.getsizeof(data)) + " bytes")
+
+        data = data.lstrip('|').rstrip('|')     # remove leading & trailing `|`
+        drives = data.split('|' * 2)            # split into drives
+
+        for drive in drives:
+            fields = drive.split('|')
+            if fields[0] in config['hddtemp']['drives']:
+                output.append(fields[0] + (' (' + fields[1] + ')' if config['hddtemp']['show-model'] else '') + ': ' + fields[2] + DEG + fields[3])
+                sumtemp += int(fields[2])
+                logger.debug("added drive " + fields[0])
+            else:
+                logger.debug("ignoring drive " + fields[0])
+
+        hddavg = "{0:.2f}".format(sumtemp/float(len(drives))) + DEG + output[0][-1:]    # use units of first drive (last character of output)
+        logger.debug("avg disk temp is " + str(hddavg))
+        output.append("avg: " + str(hddavg))
+    except Exception as ex:
+        logger.debug("failed getting hddtemps with error " + str(ex))
+    finally:
+        hsock.close()
+
     writetitle("temperatures")
     if (systemp != 0):
         writedata("sys: " + str(systemp) + DEG)
     if (coretemps != ''):
         writedata("cores", coretemps)
     if (config['hddtemp']['drives'] != ''):
-        writedata("disks", data)
+        writedata("disks", output)
     closetag('div', 1)
     logger.info("finished temp section")
 
@@ -730,7 +760,6 @@ def du():
             delta = alloc_f - float(alloc_i)
         except:
             pass
-        logger.debug("delta is " + str(delta))
         if (delta == None):
             out.append([p, "used " + parsesize(alloc_f)])
         else:
@@ -757,17 +786,13 @@ def loadconf(configfile):
     try:
         data = yaml.safe_load(open(configfile))
         for value in data:
-            logger.debug(data[value])
             if(type(data[value]) == types.DictType):
                 for key in data[value].iteritems():
                     config[value][key[0]] = key[1]
             else:
                 config[value] = data[value]
         config['dest'] = os.path.dirname(config['output'])
-        logger.debug(str(type(parser.parse_args().to)))
-        logger.debug(config['mail']['to'])
         if parser.parse_args().to is not None:
             config['mail']['to'] = parser.parse_args().to
-        logger.debug(str(config))
     except Exception as e:
         logger.warning("error processing config: " + str(e))
@@ -776,16 +801,17 @@ try:
     __main__()
 finally:
     # rotate logs using systemd logrotate
-    if (config['rotate'] == 'y'):
-        subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
-        logger.info("rotated logfiles")
-    else:
-        logger.debug("user doesn't want to rotate logs")
-        if (config['rotate'] == 's'):
-            logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
-            sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
-            logger.debug(sim)
-
+    if parser.parse_args().function is None:
+        if (config['rotate'] == 'y'):
+            subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
+            logger.info("rotated logfiles")
+        else:
+            logger.debug("user doesn't want to rotate logs")
+            if (config['rotate'] == 's'):
+                logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
+                sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
+                logger.debug(sim)
+
     timenow = time.strftime("%H:%M:%S")
     datenow = time.strftime("%x")
     logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")