logparse.py — on commit: writing disk usage now depends on logrotate config (7e5b6ea)
   1#! /usr/bin/python
   2
   3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer, locale
   4from sys import stdin
   5from collections import namedtuple, defaultdict
   6from shutil import copyfile
   7import yaml
   8import ast
   9import logging.handlers
  10import types
  11
# Python 2 only: re-expose setdefaultencoding (hidden by site.py at startup)
# and force utf-8 so implicit str/unicode conversions don't crash on e.g. DEG
reload(sys)
sys.setdefaultencoding('utf-8')     # force utf-8 because anything else should die

locale.setlocale(locale.LC_ALL, '') # inherit system locale

# directory containing this script; header/css paths are resolved relative to it
scriptdir = os.path.dirname(os.path.realpath(__file__))
  18
  19
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])    # filesystem figures from getusage()
drivetemp = namedtuple('drivetemp', ['path', 'name', 'temp', 'units'])  # a single hddtemp reading
# Default configuration; loadconf() overlays values from logparse.yaml on top
config = {
    'output': '~/var/www/logparse/summary.html',    # where the html report is written
    'header': scriptdir + '/header.html',           # html template for the page head
    'css': scriptdir + '/main.css',
    'title': 'logparse',        # value substituted for $title$ in the template
    'maxlist': 10,              # max items shown per list before truncl() cuts it
    'maxcmd': 3,                # max commands shown in "top ... commands" lists
    'mail': {
        'to': '',               # empty string means write to stdout, don't mail
        'from': '',
        'subject': 'logparse from $hostname$'   # $vars$ expanded by subject()
    },
    'rotate': 'y',              # 'y': run logrotate afterwards; 's': simulate only
    'hddtemp': {
        'drives': ['/dev/sda'], # drives to report temperatures for
        'port': 7634,           # tcp port of the hddtemp daemon
        'show-model': False,    # include the drive model string in output
    },
    'du-paths': ['/', '/etc', '/home'],     # paths reported in the du section
    'hostname-path': '/etc/hostname',       # file read by hostname()
    'logs': {
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',        # directory of per-client samba logs
        'zfs': '/var/log/zpool.log',
        'alloc': '/var/log/du.log',     # persisted du figures for deltas
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'     # directory holding access.log/error.log
    }
}
  53
  54
HTTPDSTATUS = "http://localhost/server-status"      # apache mod_status page (httpdsession)
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"    # temp copy of report with inlined css
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
VERSION = "v0.1"    # substituted for $version$ in the template
#DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
# degree sign; relies on the utf-8 default encoding forced above
DEG = "°".encode('unicode_escape')
CEL = "C"
  64
# Set up logging: DEBUG and above to stderr via basicConfig, plus a copy of
# every record to the local syslog socket, tagged with this process's pid
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)
  71
  72
  73# Get arguments
  74parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
  75parser.add_argument('-f', '--function', help='run a specified function with parameters (for debugging purposes',required=False)
  76parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
  77
  78def __main__():
  79    logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))
  80
  81    loadconf(scriptdir + "/logparse.yaml")
  82
  83    # check if user wants to test an isolated function
  84    debugfunc = parser.parse_args().function
  85    if debugfunc is not None:
  86        logger.debug("executing a single function: " + debugfunc)
  87        try:
  88            logger.debug((debugfunc + ': ' + eval(debugfunc)))
  89            sys.exit()
  90        except Exception as e:
  91            sys.exit("debug function failed with error " + e)
  92        logger.debug("finished executing debug function")
  93        
  94    if not config['mail']['to']:
  95        logger.info("no recipient address provided, outputting to stdout")
  96    else:
  97        logger.info("email will be sent to " + config['mail']['to'])
  98
  99    global pathfilter
 100    global pathpattern
 101    pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
 102    pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
 103    pathpattern = re.compile("|".join(pathfilter.keys()))
 104
 105    global varfilter
 106    global varpattern
 107    varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.relpath(config['css'], os.path.dirname(config['output']))}
 108    varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
 109    varpattern = re.compile("|".join(varfilter.keys()))
 110
 111    global tempfile
 112    tempfile = open(config['output'], 'w+')
 113    tempfile.write(header(config['header']))
 114    opentag('div', 1, 'main')
 115    sshd()
 116    sudo()
 117    cron()
 118    nameget()
 119    httpd()
 120    smbd()
 121    postfix()
 122    zfs()
 123    temp()
 124    du()
 125    for tag in ['div', 'body', 'html']:
 126        closetag(tag, 1)
 127    tempfile.close()
 128    mailprep(config['output'], MAILPATH)
 129    if (config['mail']['to']):
 130        logger.debug("sending email")
 131        ms = subject(config['mail']['subject'])
 132        cmd = "/bin/cat " + MAILPATH + " | /usr/bin/mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + config['mail']['to']
 133        logger.debug(cmd)
 134        subprocess.call(cmd, shell=True)
 135        logger.info("sent email")
 136
 137
 138def writetitle(title):
 139    if (title == '' or '\n' in title):
 140        logger.error("invalid title")
 141        return
 142        logger.debug("writing title for " + title)
 143    tag('h2', 0, title)
 144
 145def writedata(subtitle, data = None):   # write title and data to tempfile
 146    if (subtitle == ""):
 147        loggger.warning("no subtitle provided.. skipping section")
 148        return
 149
 150    if (data == None or len(data) == 0):
 151        logger.debug("no data provided.. just printing subtitle")
 152        tag('p', 0, subtitle)
 153    else:
 154        logger.debug("received data " + str(data))
 155        subtitle += ':'
 156        if (len(data) == 1):
 157            tag('p', 0, subtitle + ' ' + data[0])
 158        else:
 159            tag('p', 0, subtitle)
 160            opentag('ul', 1)
 161            for datum in data:
 162                tag('li', 0, datum)
 163            closetag('ul', 1)
 164
 165def opentag(tag, block = 0, id = None, cl = None):   # write html opening tag
 166    if (block == 1):
 167        tempfile.write('\n')
 168    tempfile.write('<' + tag)
 169    if (id != None):
 170        tempfile.write(" id='" + id + "'")
 171    if (cl != None):
 172        tempfile.write(" class='" + cl + "'")
 173    tempfile.write('>')
 174    if (block == 1):
 175        tempfile.write('\n')
 176
 177def closetag(tag, block = 0):  # write html closing tag
 178    if (block == 0):
 179        tempfile.write("</" + tag + ">")
 180    else:
 181        tempfile.write("\n</" + tag + ">\n")
 182
 183def tag(tag, block = 0, content = ""):  # write html opening tag, content, and html closing tag
 184    opentag(tag, block)
 185    tempfile.write(content)
 186    closetag(tag, block)
 187
 188def header(template):   # return a parsed html header from file
 189    try:
 190        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
 191        logger.debug("copied main.css")
 192    except Exception as e:
 193        logger.warning("could not copy main.css - " + str(e))
 194    headercontent = open(template, 'r').read()
 195    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
 196    return headercontent
 197
 198def subject(template):
 199    r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
 200    logger.debug("returning subject line " + r)
 201    return r
 202
 203def hostname(): # get the hostname of current server
 204    hnfile = open(config['hostname-path'], 'r')
 205    hn = re.search('^(.*)\n*', hnfile.read()).group(1)
 206    return hn
 207
 208
 209def resolve(ip, fqdn = False):        # try to resolve an ip to hostname
 210    try:
 211        socket.inet_aton(ip)  # succeeds if text contains ip
 212        hn = socket.gethostbyaddr(ip)[0] # resolve ip to hostname
 213        return(hn if fqdn else hn.split('.')[0])
 214    except OSError:
 215        # already a hostname
 216        logger.debug(ip + " is already a hostname")
 217        return(ip)
 218    except socket.herror:
 219        # cannot resolve ip
 220        logger.debug(ip + " cannot be found, might not exist anymore")
 221        return(ip)
 222    except:
 223        logger.debug("failed to resolve hostname for " + ip)
 224        return(ip)  # return ip if no hostname exists
 225
 226def plural(noun, quantity): # return "1 noun" or "n nouns"
 227    if (quantity == 1):
 228        return(str(quantity) + " " + noun)
 229    else:
 230        return(str(quantity) + " " + noun + "s")
 231
 232def parsesize(num, suffix='B'):     # return human-readable size from number of bytes
 233    for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
 234        if abs(num) < 1024.0:
 235            return "%3.1f %s%s" % (num, unit, suffix)
 236        num /= 1024.0
 237    return "%.1f%s%s" % (num, 'Yi', suffix)
 238
 239def readlog(path = None, mode = 'r'):   # read file, substituting known paths
 240    if (path == None):
 241        logger.error("no path provided")
 242        return
 243    else:
 244        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
 245        if (os.path.isfile(path) is False):
 246            logger.error(path + " does not exist")
 247            return ''
 248        else:
 249            return open(path, mode).read()
 250
 251def writelog(path = None, content = "", mode = 'w'):   # read file, substituting known paths
 252    if (path == None or content == None):
 253        logger.error("invalid usage of writelog")
 254        return
 255    else:
 256        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
 257        file = open(path, mode)
 258        file.write(content)
 259        file.close()
 260
 261def getusage(path):     # Get disk usage statistics
 262    disk = os.statvfs(path)
 263    cap = float(disk.f_bsize*disk.f_blocks)                     # disk capacity
 264    alloc = float(disk.f_bsize*(disk.f_blocks-disk.f_bfree))    # size of path
 265    free = float(disk.f_bsize*disk.f_bfree)                     # free space on disk (blocks, not usable space)
 266    ratio = alloc / cap * 100                                   # percentage used
 267    return diskstat(cap, alloc, free, ratio)
 268
 269def orderbyfreq(l):     # order a list by the frequency of its elements and remove duplicates
 270    temp_l = l[:]
 271    l = list(set(l))
 272    l = [[i, temp_l.count(i)] for i in l]   # add count of each element
 273    l.sort(key=lambda x:temp_l.count(x[0])) # sort by count
 274    l = [i[0] + ' (' + str(i[1]) + ')' for i in l]  # put element and count into string
 275    l = l[::-1]     # reverse
 276    return l
 277
 278def addtag(l, tag):  # add prefix and suffix tags to each item in a list
 279    l2 = ['<' + tag + '>' + i + '</' + tag + '>' for i in l]
 280    return l2
 281
 282def truncl(input, limit):      # truncate list
 283    if (len(input) > limit):
 284        more = str(len(input) - limit)
 285        output = input[:limit]
 286        output.append("+ " + more + " more")
 287        return(output)
 288    else:
 289        return(input)
 290
 291def mailprep(inputpath, output, *stylesheet):
 292    logger.debug("converting stylesheet to inline tags")
 293    old = readlog(inputpath)
 294    logger.debug(config['css'])
 295    pm = premailer.Premailer(old, external_styles=config['css'])
 296    MAILOUT = pm.transform()
 297    logger.info("converted stylesheet to inline tags")
 298    file = open(output, 'w')
 299    file.write(MAILOUT)
 300    file.close()
 301    logger.info("written to temporary mail file")
 302
 303
 304
 305#
 306#
 307#
 308
 309def sshd():
 310    logger.debug("starting sshd section")
 311    opentag('div', 1, 'sshd', 'section')
 312    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth'))    # get all logins
 313    users = []  # list of users with format [username, number of logins] for each item
 314    data = []
 315    num = sum(1 for x in matches)     # total number of logins
 316    for match in matches:
 317        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match)  # [('user', 'ip')]
 318
 319        user = entry.group(1)
 320        ip = entry.group(2)
 321
 322        userhost = user + '@' + resolve(ip)
 323        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
 324        if (exists == []):
 325            users.append([userhost, 1])
 326        else:
 327            users[exists[0]][1] += 1
 328
 329    writetitle('sshd')
 330    subtitle = plural('login', num) + ' from'
 331    if (len(users) == 1):             # if only one user, do not display no of logins for this user
 332        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
 333        subtitle += ' ' + users[0][0]
 334        writedata(subtitle)
 335    else:
 336        for user in users:
 337            data.append(user[0] + ' (' + str(user[1]) + ')')
 338            if len(data) > config['maxlist']:     # if there are lots of users, truncate them
 339                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
 340                break
 341        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
 342        writedata(subtitle, data)
 343    closetag('div', 1)
 344    logger.info("finished sshd section")
 345
 346#
 347#
 348#
 349
 350def sudo():
 351    logger.debug("starting sudo section")
 352    opentag('div', 1, 'sudo', 'section')
 353    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
 354    num = sum(1 for line in umatches)    # total number of sessions
 355    users = []
 356    data = []
 357    for match in umatches:
 358        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
 359        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
 360        if (exists == []):
 361            users.append([user, 1])
 362        else:
 363            users[exists[0]][1] += 1
 364    commands = []
 365    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
 366    for cmd in cmatches:
 367        commands.append(cmd)
 368#    logger.debug("found the following commands: " + str(commands))
 369
 370    writetitle("sudo")
 371    subtitle = plural("sudo session", num) + " for"
 372    if (len(users) == 1):
 373        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
 374        subtitle += ' ' + users[0][0]
 375        writedata(subtitle)
 376    else:
 377        for user in users:
 378            data.append(user[0] + ' (' + str(user[1]) + ')')
 379        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
 380        writedata(subtitle, data)
 381    if (len(commands) > 0):
 382        commands = addtag(commands, 'code')
 383        commands = orderbyfreq(commands)
 384        commands = truncl(commands, config['maxcmd'])
 385        writedata("top sudo commands", [c for c in commands])
 386    closetag('div', 1)
 387    logger.info("finished sudo section")
 388
 389#
 390#
 391#
 392
 393def cron():
 394    logger.debug("starting cron section")
 395    opentag('div', 1, 'cron', 'section')
 396    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
 397    num = sum(1 for line in matches)
 398    commands = []
 399    for match in matches:
 400        commands.append(str(match))
 401    # commands.append([str(match)for match in matches])
 402    #logger.debug("found cron command " + str(commands))
 403    logger.info("found " + str(num) + " cron jobs")
 404    subtitle = str(num) + " cron jobs run"
 405    writetitle("cron")
 406    writedata(subtitle)
 407    if (matches > 0):
 408        commands = addtag(commands, 'code')
 409        commands = orderbyfreq(commands)
 410        commands = truncl(commands, config['maxcmd'])
 411        writedata("top cron commands", [c for c in commands])
 412    closetag('div', 1)
 413    logger.info("finished cron section")
 414
 415#
 416#
 417#
 418
 419def nameget():
 420    logger.debug("starting nameget section")
 421    opentag('div', 1, 'nameget', 'section')
 422    logger.debug("reading syslog.. this may take a while")
 423    syslog = readlog('sys')
 424    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
 425    n_f = sum(1 for i in failed)
 426    l_f = []
 427    for i in failed:
 428        l_f.append(i if i else '[no destination]')
 429    logger.debug("the following downloads failed: " + str(l_f))
 430    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
 431    n_s = sum(1 for i in succ)
 432    l_s = []
 433    for i in succ:
 434        l_s.append(i)
 435    logger.debug("the following downloads succeeded: " + str(l_f))
 436    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
 437    writetitle("nameget")
 438    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxlist']))
 439    writedata(str(n_f) + " failed", truncl(l_f, config['maxlist']))
 440    closetag('div', 1)
 441    logger.info("finished nameget section")
 442
 443#
 444#
 445#
 446
 447def httpd():
 448    logger.info("starting httpd section")
 449    opentag('div', 1, 'httpd', 'section')
 450    accesslog = readlog("httpd/access.log")
 451    a = len(accesslog.split('\n'))
 452    errorlog = readlog("httpd/error.log")
 453    e = len(errorlog.split('\n'))
 454    data_b = 0
 455    ips = []
 456    files = []
 457    useragents = []
 458    errors = []
 459    notfound = []
 460    unprivileged = []
 461
 462    for line in accesslog.split('\n'):
 463        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
 464        try:
 465            ips.append(resolve(fields.group(1), fqdn=True))
 466            files.append(fields.group(2))
 467            useragents.append(fields.group(5))
 468            data_b += int(fields.group(3))
 469        except Exception as error:
 470            if type(error) is AttributeError: # this line is not an access log
 471                pass
 472            else:
 473                logger.warning("error processing httpd access log: " + str(error))
 474    logger.debug(str(data_b) + " bytes transferred")
 475    data_h = parsesize(data_b)
 476    writetitle("apache")
 477
 478    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
 479    if (a > 0):
 480        files = addtag(files, 'code')
 481        files = orderbyfreq(files)
 482        files = truncl(files, config['maxlist'])
 483        writedata(plural(" request", a), files)
 484    if (ips != None):
 485        ips = addtag(ips, 'code')
 486        ips = orderbyfreq(ips)
 487        n_ip = str(len(ips))
 488        ips = truncl(ips, config['maxlist'])
 489        writedata(plural(" client", n_ip), ips)
 490    if (useragents != None):
 491        useragents = addtag(useragents, 'code')
 492        useragents = orderbyfreq(useragents)
 493        n_ua = str(len(useragents))
 494        useragents = truncl(useragents, config['maxlist'])
 495        writedata(plural(" device", n_ua), useragents)
 496
 497    writedata(data_h + " transferred")
 498    writedata(plural(" error", e))
 499
 500    closetag('div', 1)
 501    logger.info("finished httpd section")
 502
 503#
 504#
 505#
 506
 507def httpdsession():
 508    # logger.debug("starting httpd section")
 509    opentag('div', 1, 'httpd', 'section')
 510    httpdlog = requests.get(HTTPDSTATUS).content
 511    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
 512    uptime = re.sub(' minute[s]', 'm', uptime)
 513    uptime = re.sub(' second[s]', 's', uptime)
 514    uptime = re.sub(' day[s]', 's', uptime)
 515    uptime = re.sub(' month[s]', 'mo', uptime)
 516    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
 517    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
 518    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
 519    closetag('div', 1)
 520    # logger.info("finished httpd section")
 521
 522#
 523#
 524#
 525
 526def smbd():
 527    logger.debug("starting smbd section")
 528    opentag('div', 1, 'smbd', 'section')
 529    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")    # find list of logfiles
 530    # for f in files:
 531
 532        # file_mod_time = os.stat(f).st_mtime
 533
 534        # Time in seconds since epoch for time, in which logfile can be unmodified.
 535        # should_time = time.time() - (30 * 60)
 536
 537        # Time in minutes since last modification of file
 538        # last_time = (time.time() - file_mod_time)
 539        # logger.debug(last_time)
 540
 541        # if (file_mod_time - should_time) < args.time:
 542            # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
 543        # else:
 544
 545        # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
 546            # files.remove(f)
 547    logger.debug("found log files " + str(files))
 548    n_auths = 0         # total number of logins from all users
 549    sigma_auths = []    # contains users
 550    output = ""
 551
 552    for file in files:  # one log file for each client
 553
 554        logger.debug("looking at file " + file)
 555
 556        # find the machine (ip or hostname) that this file represents
 557        ip = re.search('log\.(.*)', file).group(1)    # get ip or hostname from file path (/var/log/samba/log.host)
 558        host = resolve(ip)
 559        if (host == ip):    # if ip has disappeared, fall back to a hostname from logfile
 560            newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
 561            if (len(set(newhost)) == 1):    # all hosts in one file should be the same
 562                host = newhost[0].lower()
 563
 564        # count number of logins from each user-host pair
 565        matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
 566        for match in matches:
 567            userhost = match + "@" + host
 568            sigma_auths.append(userhost)
 569            # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
 570            # if (exists == []):
 571            #     sigma_auths.append([userhost, 1])
 572            # else:
 573            #     sigma_auths[exists[0]][1] += 1
 574            n_auths += 1
 575    writetitle("samba")
 576    subtitle = plural("login", n_auths) + " from"
 577    if (len(sigma_auths) == 1):             # if only one user, do not display no of logins for this user
 578        subtitle += ' ' + sigma_auths[0][0]
 579        writedata(subtitle)
 580    else:       # multiple users
 581        sigma_auths = orderbyfreq(sigma_auths)
 582        sigma_auths = truncl(sigma_auths, config['maxlist'])
 583        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
 584        writedata(subtitle, sigma_auths)
 585    closetag('div', 1)
 586    logger.info("finished smbd section")
 587
 588#
 589#
 590#
 591
 592def postfix():
 593    logger.debug("starting postfix section")
 594    opentag('div', 1, 'postfix', 'section')
 595    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
 596    r = []
 597    s = []
 598    size = 0
 599    for message in messages:
 600        r.append(message[2])
 601        s.append(message[0])
 602        size += int(message[1])
 603    # size = sum([int(x) for x in messages])
 604    size = parsesize(size)
 605    n = str(len(messages))
 606    writetitle("postfix")
 607
 608    if (len(r) > 0):
 609        s = list(set(r))    # unique recipients
 610        if (len(s) > 1):
 611            r = orderbyfreq(r)
 612            r = truncl(r, config['maxlist'])
 613            writedata(n + " messages sent to", r)
 614        else:
 615            writedata(n + " messages sent to " + r[0])
 616    else:
 617        writedata(n + " messages sent")
 618    writedata("total of " + size)
 619    closetag('div', 1)
 620    logger.info("finished postfix section")
 621
 622#
 623#
 624#
 625
def zfs():
    """Summarise zpool status from the zpool logfile: pool name, last
    scrub results and iostat allocation figures."""
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("got zfs logfile")
    # pool name: first word following the iostat separator line of dashes
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    # scrub line: "scrub repaired <n> in XhYm with <n> errors on <weekday> <month> <date>"
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    # iostat columns after the pool name: alloc and free
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble as "<weekday> <date> <month>" for display
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # scrub is None when no scrub has been logged yet; fall through
        # with scrubdate == None so only iostat figures are shown
        logger.debug("error getting scrub data")
    # NOTE(review): iostat may be None if the logfile is empty/malformed,
    # which would raise AttributeError here — confirm the log always matches
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
 653
 654#
 655#
 656#
 657
 658def temp():
 659    logger.debug("starting temp section")
 660    opentag('div', 1, 'temp', 'section')
 661
 662    # cpu temp
 663
 664    sensors.init()
 665    coretemps = []
 666    pkgtemp = 0
 667    systemp = 0
 668    try:
 669        for chip in sensors.iter_detected_chips():
 670            for feature in chip:
 671                if "Core" in feature.label:
 672                    coretemps.append([feature.label, feature.get_value()])
 673                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
 674                if "CPUTIN" in feature.label:
 675                    pkgtemp = str(feature.get_value())
 676                    logger.debug("found cpu package at temperature " + pkgtemp)
 677                if "SYS" in feature.label:
 678                    systemp = feature.get_value()
 679                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
 680        core_avg = reduce(lambda x, y: x[1] + y[1], coretemps) / len(coretemps)
 681        logger.debug("average cpu temp is " + str(core_avg))
 682        coretemps.append(["avg", str(core_avg)])
 683        coretemps.append(["pkg", pkgtemp])
 684        coretemps = [x[0] + ": " + str(x[1]) + DEG + CEL for x in coretemps]
 685    finally:
 686        sensors.cleanup()
 687
 688    # drive temp
 689
 690    # For this to work, `hddtemp` must be running in daemon mode.
 691    # Start it like this (bash):   sudo hddtemp -d /dev/sda /dev/sdX...
 692    
 693    received = ''
 694    sumtemp = 0 
 695    data = ""
 696    output = []
 697    
 698    try:
 699        hsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
 700        hsock.connect(("localhost", int(config['hddtemp']['port'])))
 701        logger.debug("tcp socket on port " + str(int(config['hddtemp']['port'])) + " opened for `hddtemp` (ensure daemon is running)")
 702        hsock.sendall('')   # send dummy packet and shut write conn
 703        hsock.shutdown(socket.SHUT_WR)
 704
 705        while 1:
 706            line = hsock.recv(1024)
 707            if line == "":      # exit on blank line
 708                break
 709            logger.debug("received line " + str(line))
 710            data += line
 711        hsock.close()
 712        logger.debug("closed connection, having received " + str(sys.getsizeof(data)) + " bytes")
 713
 714        data = data.lstrip('|').rstrip('|') # remove leading & trailing `|`
 715        drives = data.split('|' * 2) # split into drives
 716
 717        for drive in drives:
 718            fields = drive.split('|')
 719            if fields[0] in config['hddtemp']['drives']:
 720                output.append(fields[0] + (' (' + fields[1] + ')' if config['hddtemp']['show-model'] else '')+ ': ' + fields[2] + DEG + fields[3])
 721                sumtemp += int(fields[2])
 722                logger.debug("added drive " + fields[0])
 723            else:
 724                logger.debug("ignoring drive " + fields[0])
 725
 726        hddavg = int(format(sumtemp/float(len(drives)))) + e + DEG + output[0][-1:] # use units of first drive (last character of output) 
 727        logger.debug("avg disk temp is " + str(hddavg))
 728        output.append("avg: " + str(hddavg))
 729    except Exception as ex:
 730        logger.debug("failed getting hddtemps with error " + str(ex))
 731    finally:
 732        hsock.close()
 733
 734    writetitle("temperatures")
 735    if (systemp != 0):
 736        writedata("sys: " + str(systemp) + DEG)
 737    if (coretemps != ''):
 738        writedata("cores", coretemps)
 739    if (config['hddtemp']['drives'] != ''):
 740        writedata("disks", output)
 741
 742    closetag('div', 1)
 743    logger.info("finished temp section")
 744
 745#
 746#
 747#
 748
 749def du():
 750    logger.debug("starting du section")
 751    opentag('div', 1, 'du', 'section')
 752    out = []
 753    content = readlog('alloc')
 754    contentnew = ""
 755    for p in config['du-paths']:
 756        alloc_f = getusage(p).alloc
 757        delta = None
 758        try:
 759            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
 760            delta = alloc_f - float(alloc_i)
 761        except:
 762            pass
 763        if (delta == None):
 764            out.append([p, "used " + parsesize(alloc_f)])
 765        else:
 766            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
 767        contentnew += (p + '\t' + str(alloc_f) + '\n')
 768    if config['rotate'] == 'y':
 769        writelog('alloc', contentnew) 
 770
 771    writetitle("du")
 772    logger.debug("disk usage data is " + str(out))
 773    for path in out:
 774        writedata(path[0], [p for p in path[1:]])
 775
 776    closetag('div', 1)
 777    logger.info("finished du section")
 778
 779#
 780#
 781#
# Timestamps captured at import time, used in the report header and footer
starttime = datetime.datetime.now()     # for measuring total run time
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")           # locale's date representation
 785
 786def loadconf(configfile):
 787    try:
 788        data = yaml.safe_load(open(configfile))
 789        for value in data:
 790            if(type(data[value]) == types.DictType):
 791                for key in data[value].iteritems():
 792                    config[value][key[0]] = key[1]
 793            else:
 794                config[value] = data[value]
 795        config['dest'] = os.path.dirname(config['output'])
 796        if parser.parse_args().to is not None: config['mail']['to'] = parser.parse_args().to
 797    except Exception as e:
 798        logger.warning("error processing config: " + str(e))
 799
 800
# Script entry: run the report, then (even if it failed) rotate the logs
# according to config['rotate'], unless only a single debug function ran
try:
    __main__()
finally:
    # rotate logs using systemd logrotate
    if parser.parse_args().function is None:
        if (config['rotate'] == 'y'):
            subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
            logger.info("rotated logfiles")
        else:
            logger.debug("user doesn't want to rotate logs")
            if (config['rotate'] == 's'):
                # 's' = simulate: show what logrotate would do, change nothing
                logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
                sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
                logger.debug(sim)

    timenow = time.strftime("%H:%M:%S")
    datenow = time.strftime("%x")
    logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")