#! /usr/bin/python

import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
from sys import stdin
from collections import namedtuple, defaultdict
from shutil import copyfile
import yaml

reload(sys)
sys.setdefaultencoding('utf-8')

scriptdir = os.path.dirname(os.path.realpath(__file__))


diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
config = {
    'output': '~/var/www/logparse/summary.html',
    'header': scriptdir + '/header.html',
    'css': scriptdir + '/main.css',
    'title': 'logparse',
    'maxlist': 10,
    'maxcmd': 3,
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'
    },
    'hddtemp': {
        'drives': ['/dev/sda'],
        'port': 7634
    },
    'du-paths': ['/', '/etc', '/home'],
    'hostname-path': '/etc/hostname',
    'logs': {
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}
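
# These defaults can be overridden by a logparse.yaml file placed alongside
# this script (see loadconf() below). A minimal, illustrative example only:
#
#   output: /var/www/logparse/summary.html
#   maxcmd: 5
#   mail:
#     to: admin@example.com
#   hddtemp:
#     port: 7634
#     drives:
#       - /dev/sda
#       - /dev/sdb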


HTTPDSTATUS = "http://localhost/server-status"
# config['du-paths'] = ["/home/andrew", "/mnt/andrew"]
# config['hddtemp']['drives'] = ["/dev/sda", "/dev/sdc", "/dev/sdd", "/dev/sde"]
# config['hddtemp']['port'] = 7634
# config['output'] = "/mnt/andrew/temp/logparse/summary.html"
# config['output'] = "/mnt/andrew/temp/logparse/out.html"
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"
# config['dest'] = "/mnt/andrew/temp/logparse"
# config['header'] = os.path.dirname(os.path.realpath(__file__)) + "/header.html"
# config['css'] = os.path.dirname(os.path.realpath(__file__)) + "/main.css"
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
# config['title'] = "logparse"
# config['maxlist'] = 10
# config['maxcmd'] = 3
# config['mail']['subject'] = "logparse from $hostname$"
VERSION = "v0.1"
DEG = u' \N{DEGREE SIGN}C'.encode('utf-8')      # " °C" suffix for temperature readings

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')

# Get arguments
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to

def __main__():
    logger.info("Beginning log analysis at " + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    loadconf(scriptdir + "/logparse.yaml")

    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if (to != None):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        cmd = "cat " + MAILPATH + " | mail --debug-level=10 -a 'Content-type: text/html'  -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")


def writetitle(title):  # write a section title (h2) to tempfile
    if (title == '' or '\n' in title):
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)

def writedata(subtitle, data = None):   # write title and data to tempfile
    if (subtitle == ""):
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)

def opentag(tag, block = 0, id = None, cl = None):   # write html opening tag
    if (block == 1):
        tempfile.write('\n')
    tempfile.write('<' + tag)
    if (id != None):
        tempfile.write(" id='" + id + "'")
    if (cl != None):
        tempfile.write(" class='" + cl + "'")
    tempfile.write('>')
    if (block == 1):
        tempfile.write('\n')

def closetag(tag, block = 0):  # write html closing tag
    if (block == 0):
        tempfile.write("</" + tag + ">")
    else:
        tempfile.write("\n</" + tag + ">\n")

def tag(tag, block = 0, content = ""):  # write html opening tag, content, and html closing tag
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)

def header(template):   # return a parsed html header from file
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        logger.warning("could not copy main.css - " + str(e))
    headercontent = open(template, 'r').read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent

def subject(template):
    r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + r)
    return r

def hostname(): # get the hostname
    hnfile = open(config['hostname-path'], 'r')
    hn = re.search('^(.*)\n*', hnfile.read()).group(1)
    return hn

def resolve(ip):        # try to resolve an ip to hostname
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip)  # succeeds if text contains ip
        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to hostname
        logger.debug("found hostname " + hn)
        return(hn)
    except:
        logger.debug("failed to resolve hostname for " + ip)
        return(ip)  # return ip if no hostname exists

def plural(noun, quantity): # return "1 noun" or "n nouns"
    if (quantity == 1):
        return(str(quantity) + " " + noun)
    else:
        return(str(quantity) + " " + noun + "s")

def parsesize(num, suffix='B'):     # return human-readable size from number of bytes
    for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
        if abs(num) < 1024.0:
            return "%3.1f %s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)
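    # examples: parsesize(5) -> "5.0 B", parsesize(123456) -> "120.6 KiB"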

def readlog(path = None, mode = 'r'):   # read file, substituting known paths
    if (path == None):
        logger.error("no path provided")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        if (os.path.isfile(path) is False):
            logger.error(path + " does not exist")
            return ''
        else:
            return open(path, mode).read()

def writelog(path = None, content = "", mode = 'w'):   # write content to file, substituting known paths
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        file = open(path, mode)
        file.write(content)
        file.close()

def getusage(path):     # Get disk usage statistics
    disk = os.statvfs(path)
    cap = float(disk.f_bsize*disk.f_blocks)                     # disk capacity
    alloc = float(disk.f_bsize*(disk.f_blocks-disk.f_bfree))    # space allocated on the filesystem containing path
    free = float(disk.f_bsize*disk.f_bfree)                     # free space on disk (blocks, not usable space)
    ratio = alloc / cap * 100                                   # percentage used
    return diskstat(cap, alloc, free, ratio)

def orderbyfreq(l):     # order a list by the frequency of its elements and remove duplicates
    temp_l = l[:]
    l = list(set(l))
    l = [[i, temp_l.count(i)] for i in l]   # add count of each element
    l.sort(key=lambda x:temp_l.count(x[0])) # sort by count
    l = [i[0] + ' (' + str(i[1]) + ')' for i in l]  # put element and count into string
    l = l[::-1]     # reverse
    return l
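    # e.g. orderbyfreq(['a', 'b', 'a', 'c', 'a', 'b']) -> ['a (3)', 'b (2)', 'c (1)']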

def addtag(l, tag):  # add prefix and suffix tags to each item in a list
    l2 = ['<' + tag + '>' + i + '</' + tag + '>' for i in l]
    return l2

def truncl(input, limit):      # truncate list
    if (len(input) > limit):
        more = str(len(input) - limit)
        output = input[:limit]
        output.append("+ " + more + " more")
        return(output)
    else:
        return(input)
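    # e.g. truncl(['a', 'b', 'c', 'd'], 2) -> ['a', 'b', '+ 2 more']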

def mailprep(inputpath, output, *stylesheet):   # inline the stylesheet for html mail
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    pm = premailer.Premailer(old, external_styles=config['css'])
    MAILOUT = pm.transform()
    logger.info("converted stylesheet to inline tags")
    file = open(output, 'w')
    file.write(MAILOUT)
    file.close()
    logger.info("written to temporary mail file")



#
#
#

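# sshd section: count successful publickey logins per user@host in the auth
# log. An illustrative (assumed) line that the regexes below would match:
#   Jan  1 00:00:01 host sshd[1234]: Accepted publickey for user1 from 10.0.0.2 port 51000 ssh2: RSA SHA256:...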
def sshd():
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth'))    # get all logins
    users = []  # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches)     # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match)  # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1):             # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']:     # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")

#
#
#

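# sudo section: count sudo sessions opened for root per invoking user, and
# collect the commands run from "COMMAND=..." entries in the auth log.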
def sudo():
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches)    # total number of sessions
    users = []
    data = []
    for match in umatches:
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")

#
#
#

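# cron section: count jobs from "CMD (...)" lines in the cron log and list the
# most frequent commands (the negative lookahead skips commands containing "cd").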
def cron():
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = sum(1 for line in matches)
    commands = []
    for match in matches:
        commands.append(str(match))
    # commands.append([str(match)for match in matches])
    logger.debug("found cron commands " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    if (num > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished cron section")

#
#
#

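# nameget section: summarise successful and failed downloads reported by the
# (site-specific) nameget tool, based on its "downloaded ..." and
# "downloading of ... failed" syslog messages.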
def nameget():
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = sum(1 for i in failed)
    l_f = []
    for i in failed:
        l_f.append(i)
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = sum(1 for i in succ)
    l_s = []
    for i in succ:
        l_s.append(i)
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")

#
#
#

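# httpd section: parse the apache access and error logs. The access-log regex
# below only matches successful (200) GET requests whose user-agent contains a
# parenthesised platform field, e.g. (illustrative line):
#   10.0.0.2 - - [01/Jan/2018:00:00:00 +0000] "GET /index.html HTTP/1.1" 200 1234 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:58.0) Gecko/20100101 Firefox/58.0"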
def httpd():
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))
    data_b = 0
    ips = []
    files = []
    useragents = []
    errors = []
    notfound = []
    unprivileged = []

    for line in accesslog.split('\n'):
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            if type(error) is AttributeError:
                logger.debug("attributeerror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    if (len(ips) > 0):
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if (len(useragents) > 0):
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")

#
#
#

def httpdsession():     # summarise the apache server-status page (currently unused)
    # logger.debug("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 'd', uptime)
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
    closetag('div', 1)
    # logger.info("finished httpd section")

#
#
#

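# smbd section: read the per-client samba logs (config['logs']['smb']/log.<ip
# or hostname>) and count successful "sam authentication" logins per user@host.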
def smbd():
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")    # find list of logfiles
    logger.debug("found log files " + str(files))
    n_auths = 0         # total number of logins from all users
    sigma_auths = []    # contains users
    output = ""

    for file in files:  # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1)    # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip)

        # count number of logins from each user
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            userhost = match + "@" + host
            sigma_auths.append(userhost)
            # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
            # if (exists == []):
            #     sigma_auths.append([userhost, 1])
            # else:
            #     sigma_auths[exists[0]][1] += 1
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1):             # if only one user, do not display no of logins for this user
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else:       # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")

#
#
#

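# postfix section: pair each "from=<sender>, size=<bytes>" line with the
# "to=<recipient>" line that follows it in the mail log, then report recipient
# counts and the total size transferred.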
def postfix():
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []
    s = []
    size = 0
    for message in messages:
        r.append(message[2])
        s.append(message[0])
        size += int(message[1])
    # size = sum([int(x) for x in messages])
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        s = list(set(r))    # unique recipients
        if (len(s) > 1):
            r = orderbyfreq(r)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")

#
#
#

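# zfs section: parse a zpool summary written to config['logs']['zfs'] (e.g. by
# a scheduled zpool status/iostat job). The exact layout of that file is
# site-specific, so the regexes below are best-effort: they look for the pool
# name and alloc/free columns after a "---" separator line, and for the
# "scrub repaired ... with ... errors on ..." status message.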
def zfs():
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        logger.debug("error getting scrub data")
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")

#
#
#

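# temp section: read CPU core/package and system temperatures via pysensors
# (lm-sensors) and drive temperatures from a running hddtemp daemon on
# config['hddtemp']['port'], which returns records of the form
# |<device>|<model>|<temperature>|<unit>| concatenated into one string.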
def temp():
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        core_avg = sum(x[1] for x in coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash):   sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost',config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    for drive in re.split('\|1}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            units = fields.group(3)
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        except:
            pass
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    if (len(config['hddtemp']['drives']) > 0):
        hddavg = "{0:.2f}".format(hddtotal/float(len(config['hddtemp']['drives']))) + DEG
        logger.debug("avg disk temp is " + str(hddavg))
        data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (len(coretemps) > 0):
        writedata("cores", coretemps)
    if (len(config['hddtemp']['drives']) > 0):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")

#
#
#

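# du section: compare the current usage of each path in config['du-paths']
# against the figures cached in config['logs']['alloc'] (one "path<TAB>bytes"
# line per path), report the delta, then rewrite the cache. Note that
# getusage() measures the filesystem containing each path, not the directory
# itself.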
def du():
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")

#
#
#

timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
pathpattern = re.compile("|".join(pathfilter.keys()))

varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.basename(config['css'])}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
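
# readlog()/writelog() substitute these short names (e.g. "auth") for full log
# paths, and header()/subject() substitute $var$ placeholders with the values
# above; e.g. readlog('auth') opens config['logs']['auth'].
# Note: both tables are built from the default config here, before loadconf()
# runs in __main__(), so paths overridden in logparse.yaml are not picked up.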

def loadconf(configfile):
    try:
        data = yaml.safe_load(open(configfile))
        for value in data:
            if (type(data[value]) == dict):     # merge nested settings (e.g. mail, hddtemp, logs)
                for key in data[value]:
                    config[value][key] = data[value][key]
            else:
                config[value] = data[value]
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(config))
    except Exception as e:
        logger.warning("error processing config: " + str(e))


try:
    __main__()
finally:
    subprocess.call("logrotate -f /etc/logrotate.conf", shell=True)
    logger.info("rotated logfiles")