-#!/usr/bin/python2
-
-import argparse, logging, os, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, premailer, locale
-from collections import namedtuple
-from shutil import copyfile
-import yaml
-import logging.handlers
-import traceback # debugging only
-
-reload(sys)
-sys.setdefaultencoding('utf-8') # force utf-8 because anything else should die (Python 2 only)
-
-locale.setlocale(locale.LC_ALL, '') # inherit system locale
-
-scriptdir = os.path.dirname(os.path.realpath(__file__))
-
-
-diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
-drivetemp = namedtuple('drivetemp', ['path', 'name', 'temp', 'units'])
-config = {
- 'output': '~/var/www/logparse/summary.html',
- 'header': scriptdir + '/header.html',
- 'css': scriptdir + '/main.css',
- 'title': 'logparse',
- 'maxlist': 10,
- 'maxcmd': 3,
- 'resolve-domains': 'fqdn',
- 'mail': {
- 'to': '',
- 'from': '',
- 'subject': 'logparse from $hostname$'
- },
- 'rotate': 'y',
- 'hddtemp': {
- 'drives': ['/dev/sda'],
- 'port': 7634,
- 'show-model': False,
- },
- 'apache': {
- 'resolve-domains': '',
- },
- 'sshd': {
- 'resolve-domains': '',
- },
- 'smbd': {
- 'resolve-domains': '',
- },
- 'httpd': {
- 'resolve-domains': '',
- },
- 'du': {
- 'paths': ['/', '/etc', '/home'],
- 'force-write': 'n',
- },
- 'hostname-path': '/etc/hostname',
- 'logs': {
- 'auth': '/var/log/auth.log',
- 'cron': '/var/log/cron.log',
- 'sys': '/var/log/syslog',
- 'smb': '/var/log/samba',
- 'zfs': '/var/log/zpool.log',
- 'alloc': '/var/log/du.log',
- 'postfix': '/var/log/mail.log',
- 'httpd': '/var/log/apache2'
- }
-}
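-# The defaults above can be overridden in logparse.yaml, which loadconf()
-# (below) merges over this dict: top-level keys replace the defaults, and
-# nested dicts are merged one level deep. A minimal sketch of such a file --
-# every key is optional and the values are purely illustrative:
-#
-#   title: logparse
-#   maxlist: 10
-#   mail:
-#     to: admin@example.com
-#   hddtemp:
-#     drives:
-#       - /dev/sda
-#       - /dev/sdb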
-
-
-HTTPDSTATUS = "http://localhost/server-status"
-MAILPATH = "/mnt/andrew/temp/logparse/mail.html"
-MAILOUT = ""
-HTMLOUT = ""
-TXTOUT = ""
-VERSION = "v0.1"
-DEG = u'\N{DEGREE SIGN}'.encode('utf-8') # degree sign as utf-8 bytes (Python 2 str)
-CEL = "C"
-
-# Set up logging
-logging.basicConfig(level=logging.DEBUG)
-logger = logging.getLogger('logparse')
-loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
-loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
-logger.addHandler(loghandler)
-
-
-# Get arguments
-parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
-parser.add_argument('-f', '--function', help='run a specified function with parameters (for debugging purposes)', required=False)
-parser.add_argument('-t', '--to', help='mail recipient ("to" address)', required=False)
-
-def __main__():
- logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))
-
- loadconf(scriptdir + "/logparse.yaml")
-
- # check if user wants to test an isolated function
- debugfunc = parser.parse_args().function
- if debugfunc is not None:
- logger.debug("executing a single function: " + debugfunc)
-        eval(debugfunc) # evaluates arbitrary user-supplied code; only reachable via the local -f debug flag
- sys.exit()
-
- if not config['mail']['to']:
- logger.info("no recipient address provided, outputting to stdout")
- else:
- logger.info("email will be sent to " + config['mail']['to'])
-
- global LOCALDOMAIN
- LOCALDOMAIN = getlocaldomain()
-
- global pathfilter
- global pathpattern
- pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
- pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
- pathpattern = re.compile("|".join(pathfilter.keys()))
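-    # illustration: with the default config, readlog('auth') ends up reading
-    # /var/log/auth.log, and readlog('httpd/access.log') reads
-    # /var/log/apache2/access.log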
-
- global varfilter
- global varpattern
-    varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow,
-        "$hostname$": hostname(), "$version$": VERSION,
-        "$css$": os.path.relpath(config['css'], os.path.dirname(config['output']))}
- varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
- varpattern = re.compile("|".join(varfilter.keys()))
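-    # illustration: subject("logparse from $hostname$") expands to, e.g.,
-    # "logparse from myhost" (hostname value illustrative; see subject() below)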
-
- global tempfile
- tempfile = open(config['output'], 'w+')
- tempfile.write(header(config['header']))
- opentag('div', 1, 'main')
- sshd()
- sudo()
- cron()
- nameget()
- httpd()
- smbd()
- postfix()
- zfs()
- temp()
- du()
- for tag in ['div', 'body', 'html']:
- closetag(tag, 1)
- tempfile.close()
- mailprep(config['output'], MAILPATH)
- if (config['mail']['to']):
- logger.debug("sending email")
- ms = subject(config['mail']['subject'])
- cmd = "/bin/cat " + MAILPATH + " | /usr/bin/mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + config['mail']['to']
- logger.debug(cmd)
- subprocess.call(cmd, shell=True)
- logger.info("sent email")
-
-
-def writetitle(title):
- if (title == '' or '\n' in title):
- logger.error("invalid title")
- return
- logger.debug("writing title for " + title)
- tag('h2', 0, title)
-
-def writedata(subtitle, data = None): # write title and data to tempfile
- if (subtitle == ""):
-        logger.warning("no subtitle provided.. skipping section")
- return
-
- if (data == None or len(data) == 0):
- logger.debug("no data provided.. just printing subtitle")
- tag('p', 0, subtitle)
- else:
- logger.debug("received data " + str(data))
- subtitle += ':'
- if (len(data) == 1):
- tag('p', 0, subtitle + ' ' + data[0])
- else:
- tag('p', 0, subtitle)
- opentag('ul', 1)
- for datum in data:
- tag('li', 0, datum)
- closetag('ul', 1)
-
-def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
- if (block == 1):
- tempfile.write('\n')
- tempfile.write('<' + tag)
- if (id != None):
- tempfile.write(" id='" + id + "'")
- if (cl != None):
- tempfile.write(" class='" + cl + "'")
- tempfile.write('>')
- if (block == 1):
- tempfile.write('\n')
-
-def closetag(tag, block = 0): # write html closing tag
- if (block == 0):
- tempfile.write("</" + tag + ">")
- else:
- tempfile.write("\n</" + tag + ">\n")
-
-def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
- opentag(tag, block)
- tempfile.write(content)
- closetag(tag, block)
-
-def header(template): # return a parsed html header from file
- try:
- copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
- logger.debug("copied main.css")
- except Exception as e:
- logger.warning("could not copy main.css - " + str(e))
- headercontent = open(template, 'r').read()
- headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
- return headercontent
-
-def subject(template):
- r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
- logger.debug("returning subject line " + r)
- return r
-
-def hostname(): # get the hostname of current server
- hnfile = open(config['hostname-path'], 'r')
- hn = re.search('^(.*)\n*', hnfile.read()).group(1)
- return hn
-
-def getlocaldomain(): # get the parent fqdn of current server
-    domain = socket.getfqdn().split('.', 1) # note: if socket.getfqdn() returns localhost, make sure the first entry in /etc/hosts contains the fqdn
- if len(domain) != 2:
- logger.warning('Could not get domain of this server, only hostname. Please consider updating /etc/hosts')
- return ''
- else:
- return domain[-1]
-
-def resolve(ip, fqdn = 'host-only'): # try to resolve an ip to hostname
- # Possible values for fqdn:
- # fqdn show full hostname and domain
- # fqdn-implicit show hostname and domain unless local
- # host-only only show hostname
- # ip never resolve anything
- # resolve-domains defined in individual sections of the config take priority over global config
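-    # Illustrative behaviour, assuming LOCALDOMAIN == 'lan' and that the
-    # (hypothetical) address 192.168.1.2 reverse-resolves to 'host.lan':
-    #   resolve('192.168.1.2', 'fqdn')          -> 'host.lan'
-    #   resolve('192.168.1.2', 'fqdn-implicit') -> 'host'
-    #   resolve('192.168.1.2', 'host-only')     -> 'host'
-    #   resolve('192.168.1.2', 'ip')            -> '192.168.1.2'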
-
- if not fqdn:
- fqdn = config['resolve-domains']
-
- if fqdn == 'ip':
- return(ip)
-
- try:
- socket.inet_aton(ip) # succeeds if text contains ip
- hn = socket.gethostbyaddr(ip)[0] # resolve ip to hostname
-        if fqdn == 'fqdn-implicit' and '.' in hn and hn.split('.', 1)[1] == LOCALDOMAIN:
- return(hn.split('.')[0])
- elif fqdn == 'fqdn' or fqdn == 'fqdn-implicit':
- return(hn)
- elif fqdn == 'host-only':
- return(hn.split('.')[0])
- else:
- logger.warning("invalid value for fqdn config")
- return(hn)
- except socket.herror:
- # cannot resolve ip
- logger.debug(ip + " cannot be found, might not exist anymore")
- return(ip)
- except (OSError, socket.error): # socket.error for Python 2 compatibility
- # already a hostname
- logger.debug(ip + " is already a hostname")
- return(ip)
- except Exception as err:
- logger.warning("failed to resolve hostname for " + ip + ": " + str(err))
- return(ip) # return ip if no hostname exists
-
-def plural(noun, quantity): # return "1 noun" or "n nouns"
- if (quantity == 1):
- return(str(quantity) + " " + noun)
- else:
- return(str(quantity) + " " + noun + "s")
-
-def parsesize(num, suffix='B'): # return human-readable size from number of bytes
- for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
- if abs(num) < 1024.0:
- return "%3.1f %s%s" % (num, unit, suffix)
- num /= 1024.0
- return "%.1f%s%s" % (num, 'Yi', suffix)
-
-def readlog(path = None, mode = 'r'): # read file, substituting known paths
- if (path == None):
- logger.error("no path provided")
- return
- else:
- path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
- if (os.path.isfile(path) is False):
- logger.error(path + " does not exist")
- return ''
- else:
- return open(path, mode).read()
-
-def writelog(path = None, content = "", mode = 'w'): # write to file, substituting known paths
- if (path == None or content == None):
- logger.error("invalid usage of writelog")
- return
- else:
- path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
- file = open(path, mode)
- file.write(content)
- file.close()
- logger.debug("written to file " + path)
-
-def getusage(path): # Get disk usage statistics
- disk = os.statvfs(path)
- cap = float(disk.f_bsize*disk.f_blocks) # disk capacity
- alloc = float(disk.f_bsize*(disk.f_blocks-disk.f_bfree)) # size of path
- free = float(disk.f_bsize*disk.f_bfree) # free space on disk (blocks, not usable space)
- ratio = alloc / cap * 100 # percentage used
- return diskstat(cap, alloc, free, ratio)
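-# e.g. getusage('/').ratio is the percentage of the root filesystem in use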
-
-def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
- temp_l = l[:]
- l = list(set(l))
- l = [[i, temp_l.count(i)] for i in l] # add count of each element
- l.sort(key=lambda x:temp_l.count(x[0])) # sort by count
- l = [i[0] + ' (' + str(i[1]) + ')' for i in l] # put element and count into string
- l = l[::-1] # reverse
- return l
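-# e.g. orderbyfreq(['a', 'b', 'a']) -> ['a (2)', 'b (1)']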
-
-def addtag(l, tag): # add prefix and suffix tags to each item in a list
- l2 = ['<' + tag + '>' + i + '</' + tag + '>' for i in l]
- return l2
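-# e.g. addtag(['ls', 'pwd'], 'code') -> ['<code>ls</code>', '<code>pwd</code>']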
-
-def truncl(input, limit): # truncate list
- if (len(input) > limit):
- more = str(len(input) - limit)
- output = input[:limit]
- output.append("+ " + more + " more")
- return(output)
- else:
- return(input)
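-# e.g. truncl(['a', 'b', 'c'], 2) -> ['a', 'b', '+ 1 more']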
-
-def mailprep(inputpath, output):
- logger.debug("converting stylesheet to inline tags")
- old = readlog(inputpath)
- logger.debug(config['css'])
- pm = premailer.Premailer(old, external_styles=config['css'])
-    mailhtml = pm.transform()
-    logger.info("converted stylesheet to inline tags")
-    file = open(output, 'w')
-    file.write(mailhtml)
- file.close()
- logger.info("written to temporary mail file")
-
-
-
-#
-#
-#
-
-def sshd():
- logger.debug("starting sshd section")
- opentag('div', 1, 'sshd', 'section')
- matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
- users = [] # list of users with format [username, number of logins] for each item
- data = []
-    num = len(matches) # total number of logins
- for match in matches:
- entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]
-
- user = entry.group(1)
- ip = entry.group(2)
-
- userhost = user + '@' + resolve(ip, fqdn=config['sshd']['resolve-domains'])
-        exists = [i for i, item in enumerate(users) if item[0] == userhost]
- if (exists == []):
- users.append([userhost, 1])
- else:
- users[exists[0]][1] += 1
-
- writetitle('sshd')
- subtitle = plural('login', num) + ' from'
-    if (len(users) == 1): # if only one user, do not display number of logins for this user
- logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
- subtitle += ' ' + users[0][0]
- writedata(subtitle)
- else:
- for user in users:
- data.append(user[0] + ' (' + str(user[1]) + ')')
- if len(data) > config['maxlist']: # if there are lots of users, truncate them
- data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
- break
- logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
- writedata(subtitle, data)
- closetag('div', 1)
- logger.info("finished sshd section")
-
-#
-#
-#
-
-def sudo():
- logger.debug("starting sudo section")
- opentag('div', 1, 'sudo', 'section')
- umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
-    num = len(umatches) # total number of sessions
- users = []
- data = []
- for match in umatches:
- user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
-        exists = [i for i, item in enumerate(users) if item[0] == user]
- if (exists == []):
- users.append([user, 1])
- else:
- users[exists[0]][1] += 1
-    commands = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
-# logger.debug("found the following commands: " + str(commands))
-
- writetitle("sudo")
- subtitle = plural("sudo session", num) + " for"
- if (len(users) == 1):
- logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
- subtitle += ' ' + users[0][0]
- writedata(subtitle)
- else:
- for user in users:
- data.append(user[0] + ' (' + str(user[1]) + ')')
- logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
- writedata(subtitle, data)
- if (len(commands) > 0):
- commands = addtag(commands, 'code')
- commands = orderbyfreq(commands)
- commands = truncl(commands, config['maxcmd'])
- writedata("top sudo commands", [c for c in commands])
- closetag('div', 1)
- logger.info("finished sudo section")
-
-#
-#
-#
-
-def cron():
- logger.debug("starting cron section")
- opentag('div', 1, 'cron', 'section')
- matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
-    num = len(matches)
-    commands = [str(match) for match in matches]
-    #logger.debug("found cron command " + str(commands))
- logger.info("found " + str(num) + " cron jobs")
- subtitle = str(num) + " cron jobs run"
- writetitle("cron")
- writedata(subtitle)
-    if (num > 0):
- commands = addtag(commands, 'code')
- commands = orderbyfreq(commands)
- commands = truncl(commands, config['maxcmd'])
- writedata("top cron commands", [c for c in commands])
- closetag('div', 1)
- logger.info("finished cron section")
-
-#
-#
-#
-
-def nameget():
- logger.debug("starting nameget section")
- opentag('div', 1, 'nameget', 'section')
- logger.debug("reading syslog.. this may take a while")
- syslog = readlog('sys')
-    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
-    n_f = len(failed)
-    l_f = [i if i else '[no destination]' for i in failed]
-    logger.debug("the following downloads failed: " + str(l_f))
-    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
-    n_s = len(succ)
-    l_s = list(succ)
-    logger.debug("the following downloads succeeded: " + str(l_s))
- logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
- writetitle("nameget")
- writedata(str(n_s) + " succeeded", truncl(l_s, config['maxlist']))
- writedata(str(n_f) + " failed", truncl(l_f, config['maxlist']))
- closetag('div', 1)
- logger.info("finished nameget section")
-
-#
-#
-#
-
-def httpd():
- logger.info("starting httpd section")
- opentag('div', 1, 'httpd', 'section')
-    accesslog = readlog("httpd/access.log")
-    a = len(accesslog.splitlines())
-    errorlog = readlog("httpd/error.log")
-    e = len(errorlog.splitlines())
- data_b = 0
- ips = []
- files = []
- useragents = []
- errors = []
- notfound = []
- unprivileged = []
-
- for line in accesslog.split('\n'):
- fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
- try:
- ips.append(resolve(fields.group(1), fqdn=config['httpd']['resolve-domains']))
- files.append(fields.group(2))
- useragents.append(fields.group(5))
- data_b += int(fields.group(3))
-        except AttributeError: # this line is not an access log entry
-            pass
-        except Exception as error:
-            logger.warning("error processing httpd access log: " + str(error))
-            traceback.print_exc()
- logger.debug(str(data_b) + " bytes transferred")
- data_h = parsesize(data_b)
- writetitle("apache")
-
- logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
- if (a > 0):
- files = addtag(files, 'code')
- files = orderbyfreq(files)
- files = truncl(files, config['maxlist'])
- writedata(plural(" request", a), files)
- if (ips != None):
- ips = addtag(ips, 'code')
- ips = orderbyfreq(ips)
- n_ip = str(len(ips))
- ips = truncl(ips, config['maxlist'])
- writedata(plural(" client", n_ip), ips)
- if (useragents != None):
- useragents = addtag(useragents, 'code')
- useragents = orderbyfreq(useragents)
- n_ua = str(len(useragents))
- useragents = truncl(useragents, config['maxlist'])
- writedata(plural(" device", n_ua), useragents)
-
- writedata(data_h + " transferred")
- writedata(plural(" error", e))
-
- closetag('div', 1)
- logger.info("finished httpd section")
-
-#
-#
-#
-
-def httpdsession():
-    # logger.debug("starting httpd section")
-    opentag('div', 1, 'httpd', 'section')
-    httpdlog = requests.get(HTTPDSTATUS).content
-    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
-    uptime = re.sub(' minute[s]', 'm', uptime)
-    uptime = re.sub(' second[s]', 's', uptime)
-    uptime = re.sub(' day[s]', 'd', uptime)
-    uptime = re.sub(' month[s]', 'mo', uptime)
-    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
-    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
-    closetag('div', 1) # close the tag before returning, else this line is unreachable
-    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
-    # logger.info("finished httpd section")
-
-#
-#
-#
-
-def smbd():
- logger.debug("starting smbd section")
- opentag('div', 1, 'smbd', 'section')
- files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]") # find list of logfiles
- # for f in files:
-
- # file_mod_time = os.stat(f).st_mtime
-
- # Time in seconds since epoch for time, in which logfile can be unmodified.
- # should_time = time.time() - (30 * 60)
-
- # Time in minutes since last modification of file
- # last_time = (time.time() - file_mod_time)
- # logger.debug(last_time)
-
- # if (file_mod_time - should_time) < args.time:
- # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
- # else:
-
- # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
- # files.remove(f)
- logger.debug("found log files " + str(files))
- n_auths = 0 # total number of logins from all users
- sigma_auths = [] # contains users
- output = ""
-
- for file in files: # one log file for each client
-
- logger.debug("looking at file " + file)
-
- # find the machine (ip or hostname) that this file represents
- ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
- host = resolve(ip, fqdn=config['smbd']['resolve-domains'])
- if (host == ip and (config['smbd']['resolve-domains'] or config['resolve-domains']) != 'ip'): # if ip has disappeared, fall back to a hostname from logfile
- newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
- if (len(set(newhost)) == 1): # all hosts in one file should be the same
- host = newhost[0].lower()
-
- # count number of logins from each user-host pair
- matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
- for match in matches:
- userhost = match + "@" + host
- sigma_auths.append(userhost)
- # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
- # if (exists == []):
- # sigma_auths.append([userhost, 1])
- # else:
- # sigma_auths[exists[0]][1] += 1
- n_auths += 1
- writetitle("samba")
- subtitle = plural("login", n_auths) + " from"
-    if (len(sigma_auths) == 1): # if only one user, do not display number of logins for this user
-        subtitle += ' ' + sigma_auths[0]
- writedata(subtitle)
- else: # multiple users
- sigma_auths = orderbyfreq(sigma_auths)
- sigma_auths = truncl(sigma_auths, config['maxlist'])
- logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
- writedata(subtitle, sigma_auths)
- closetag('div', 1)
- logger.info("finished smbd section")
-
-#
-#
-#
-
-def postfix():
- logger.debug("starting postfix section")
- opentag('div', 1, 'postfix', 'section')
-    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
-    r = [] # recipients
-    size = 0 # total bytes sent
-    for message in messages:
-        r.append(message[2])
-        size += int(message[1])
-    size = parsesize(size)
-    n = str(len(messages))
-    writetitle("postfix")
-
-    if (len(r) > 0):
-        if (len(set(r)) > 1): # multiple unique recipients
-            r = orderbyfreq(r)
-            r = truncl(r, config['maxlist'])
-            writedata(n + " messages sent to", r)
-        else:
-            writedata(n + " messages sent to " + r[0])
- else:
- writedata(n + " messages sent")
- writedata("total of " + size)
- closetag('div', 1)
- logger.info("finished postfix section")
-
-#
-#
-#
-
-def zfs():
- logger.debug("starting zfs section")
- opentag('div', 1, 'zfs', 'section')
- zfslog = readlog('zfs')
- pool = re.search('.*---\n(\w*)', zfslog).group(1)
- scrub = re.search('.*scrub repaired (\d*).* in .*\d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
- iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
- scrubrepairs = scruberrors = scrubdate = None
- try:
- scrubrepairs = scrub.group(1)
- scruberrors = scrub.group(2)
- scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
- except Exception as e:
- logger.debug("error getting scrub data: " + str(e))
- alloc = iostat.group(1)
- free = iostat.group(2)
- writetitle("zfs")
- if (scrubdate != None):
- subtitle = "Scrub of " + pool + " on " + scrubdate
- data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
- else:
- subtitle = pool
- data = [alloc + " used", free + " free"]
- writedata(subtitle, data)
- closetag('div', 1)
- logger.info("finished zfs section")
-
-#
-#
-#
-
-def temp():
- logger.debug("starting temp section")
- opentag('div', 1, 'temp', 'section')
-
- # cpu temp
-
- sensors.init()
- coretemps = []
- pkgtemp = 0
- systemp = 0
- try:
- for chip in sensors.iter_detected_chips():
- for feature in chip:
- if "Core" in feature.label:
- coretemps.append([feature.label, feature.get_value()])
- logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
- if "CPUTIN" in feature.label:
- pkgtemp = str(feature.get_value())
- logger.debug("found cpu package at temperature " + pkgtemp)
- if "SYS" in feature.label:
- systemp = feature.get_value()
- logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
-        core_avg = sum(t[1] for t in coretemps) / len(coretemps) # arithmetic mean of core temps
- logger.debug("average cpu temp is " + str(core_avg))
- coretemps.append(["avg", str(core_avg)])
- coretemps.append(["pkg", pkgtemp])
- coretemps = [x[0] + ": " + str(x[1]) + DEG + CEL for x in coretemps]
- finally:
- sensors.cleanup()
-
- # drive temp
-
- # For this to work, `hddtemp` must be running in daemon mode.
- # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
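-    # The daemon replies with one record per drive, fields separated by '|'
-    # and records separated by '||', e.g. (drive names/models hypothetical):
-    #   |/dev/sda|ST2000DM001|34|C||/dev/sdb|WDC WD10EZEX|35|C|
-    # which is what the stripping and splitting below unpacks.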
-
- received = ''
- sumtemp = 0.0
- data = ""
- output = []
-
-    hsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # created outside `try` so `finally` can always close it
-    try:
-        hsock.connect(("localhost", int(config['hddtemp']['port'])))
- logger.debug("tcp socket on port " + str(int(config['hddtemp']['port'])) + " opened for `hddtemp` (ensure daemon is running)")
- hsock.sendall('') # send dummy packet and shut write conn
- hsock.shutdown(socket.SHUT_WR)
-
- while 1:
- line = hsock.recv(1024)
- if line == "": # exit on blank line
- break
- logger.debug("received line " + str(line))
- data += line
- hsock.close()
- logger.debug("closed connection, having received " + str(sys.getsizeof(data)) + " bytes")
-
- data = data.lstrip('|').rstrip('|') # remove leading & trailing `|`
- drives = data.split('|' * 2) # split into drives
-
- for drive in drives:
- fields = drive.split('|')
- if fields[0] in config['hddtemp']['drives']:
- output.append(fields[0] + (' (' + fields[1] + ')' if config['hddtemp']['show-model'] else '')+ ': ' + fields[2] + DEG + fields[3])
- sumtemp += int(fields[2])
- logger.debug("added drive " + fields[0])
- else:
- logger.debug("ignoring drive " + fields[0])
-
-        hddavg = "%.1f" % (sumtemp / float(len(output))) + DEG + output[0][-1:] # average over reported drives, using units of the first (last character of its output)
-        logger.debug("avg disk temp is " + str(hddavg))
-        output.append("avg: " + str(hddavg))
- except Exception as ex:
- logger.debug("failed getting hddtemps with error " + str(ex))
- finally:
- hsock.close()
-
- writetitle("temperatures")
-    if (systemp != 0):
-        writedata("sys: " + str(systemp) + DEG)
-    if coretemps:
-        writedata("cores", coretemps)
-    if config['hddtemp']['drives']:
-        writedata("disks", output)
-
- closetag('div', 1)
- logger.info("finished temp section")
-
-#
-#
-#
-
-def du():
- logger.debug("starting du section")
- opentag('div', 1, 'du', 'section')
- out = []
- content = readlog('alloc')
- contentnew = ""
- for path in config['du']['paths']:
- alloc_f = getusage(path).alloc
- delta = None
- try:
-            alloc_i = re.search(re.escape(path) + '\t(.*)\n', content).group(1)
-            delta = alloc_f - float(alloc_i)
-        except Exception:
-            pass # no previous entry for this path in the log
- if (delta == None):
- out.append([path, "used " + parsesize(alloc_f)])
- else:
- out.append([path, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
- contentnew += (path + '\t' + str(alloc_f) + '\n')
- if config['rotate'] == 'y' or config['du']['force-write'] == 'y':
- writelog('alloc', contentnew)
-
- writetitle("du")
- logger.debug("disk usage data is " + str(out))
- for path in out:
-        writedata(path[0], path[1:])
-
- closetag('div', 1)
- logger.info("finished du section")
-
-#
-#
-#
-starttime = datetime.datetime.now()
-timenow = time.strftime("%H:%M:%S")
-datenow = time.strftime("%x")
-
-def loadconf(configfile):
- try:
- data = yaml.safe_load(open(configfile))
- for value in data:
-            if isinstance(data[value], dict):
-                for key, val in data[value].iteritems():
-                    config[value][key] = val
- else:
- config[value] = data[value]
-        config['output'] = os.path.expanduser(config['output'])
-        config['dest'] = os.path.dirname(config['output'])
- if parser.parse_args().to is not None: config['mail']['to'] = parser.parse_args().to
- except Exception as e:
- logger.warning("error processing config: " + str(e))
-
-
-try:
- __main__()
-finally:
- # rotate logs using systemd logrotate
- if parser.parse_args().function is None:
-        if (config['rotate'] == 'y'):
-            subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
-            logger.info("rotated logfiles")
-        elif (config['rotate'] == 's'):
-            logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
-            sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
-            logger.debug(sim)
-        else:
-            logger.debug("user doesn't want to rotate logs")
-
- timenow = time.strftime("%H:%M:%S")
- datenow = time.strftime("%x")
- logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")