c40b29f024391f76c80bdb738701857122dfd031
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11
# Python 2 hack: re-expose sys.setdefaultencoding (hidden after interpreter
# start-up) and force UTF-8 so the implicit str/unicode conversions used
# throughout this script do not raise UnicodeDecodeError. Not Python 3 portable.
reload(sys)
sys.setdefaultencoding('utf-8')
14
# Directory containing this script; used to locate the bundled header/css files.
scriptdir = os.path.dirname(os.path.realpath(__file__))


# Record types: disk usage stats (see getusage) and one hddtemp reading (see temp).
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
# Default configuration; values are selectively overridden by logparse.yaml
# via loadconf() at the start of __main__().
config = {
    'output': '~/var/www/logparse/summary.html',  # where the HTML report is written
    'header': scriptdir + '/header.html',         # HTML header template ($...$ tokens expanded)
    'css': scriptdir + '/main.css',               # stylesheet copied next to the output file
    'title': 'logparse',                          # substituted for $title$ in the header
    'maxlist': 10,                                # max entries shown per list
    'maxcmd': 3,                                  # max commands shown in "top" lists
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'     # $...$ tokens expanded by subject()
    },
    'hddtemp': {
        'drives': ['/dev/sda'],                   # drives polled via the hddtemp daemon
        'port': 7634                              # hddtemp daemon TCP port
    },
    'du-paths': ['/', '/etc', '/home'],           # paths reported in the du section
    'hostname-path': '/etc/hostname',             # file read by hostname()
    'logs': {                                     # short names used by readlog()/writelog()
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}
49
50
# Module-level constants. Site-specific values in `config` above are defaults;
# anything in logparse.yaml (see loadconf) overrides them at run time.
HTTPDSTATUS = "http://localhost/server-status"    # mod_status page scraped by httpdsession()
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"  # temp file for the CSS-inlined mail body
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
VERSION = "v0.1"
# Degree-sign suffix appended to temperature readings (Python 2 byte string).
# The previous duplicate assignment via u'\N{DEGREE SIGN}' was dead code --
# it was immediately overwritten -- and has been removed.
DEG = " °C".encode('unicode_escape')
71
72# Set up logging
# Set up logging: DEBUG and above to the default stderr handler, plus a copy
# of every record to the local syslog socket, tagged with this process's PID.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')  # Linux-specific syslog socket
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)
78
79
# Get arguments
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to  # None when -t/--to is omitted: report is written to file only
84
def __main__():
    """Entry point: build the HTML report section by section, then mail it.

    Note: despite the name this is an ordinary function, called from the
    try/finally at the bottom of the file -- not the __main__ module idiom.
    """
    logger.info("Beginning log analysis at " + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    # Merge logparse.yaml over the defaults; must happen before config['output'] is used.
    loadconf(scriptdir + "/logparse.yaml")

    global tempfile
    # `tempfile` is the report file handle written to by opentag()/tag()/closetag().
    # NOTE(review): this global shadows the stdlib `tempfile` module name.
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    # One section per service; this is the order they appear in the report.
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:  # close the containers opened here and in the header
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)  # inline the CSS for mail clients
    if (to != None):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # NOTE(review): subject and recipient are interpolated into a shell
        # command unquoted -- acceptable for trusted config, but a shell
        # injection risk if either value ever comes from untrusted input.
        cmd = "cat " + MAILPATH + " | mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
119
120
def writetitle(title):
    """Write a section heading (<h2>) to the report.

    Empty or multi-line titles are rejected with an error log entry.
    """
    valid = title != '' and '\n' not in title
    if not valid:
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
127
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write a subsection to the report: a <p> subtitle, optionally with data.

    No data: the subtitle alone. Exactly one datum: 'subtitle: datum' on one
    line. Several data: the subtitle followed by a <ul> of the items.
    """
    if (subtitle == ""):
        # bug fix: this line previously called the misspelled name 'loggger',
        # raising NameError whenever an empty subtitle reached this guard
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
148
def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    """Emit an opening tag, with optional id= and class= attributes.

    With block=1 the tag is surrounded by newlines (block-level layout).
    """
    pieces = ['<' + tag]
    if (id != None):
        pieces.append(" id='" + id + "'")
    if (cl != None):
        pieces.append(" class='" + cl + "'")
    pieces.append('>')
    markup = ''.join(pieces)
    if (block == 1):
        markup = '\n' + markup + '\n'
    tempfile.write(markup)
160
def closetag(tag, block = 0): # write html closing tag
    """Emit a closing tag; block=1 surrounds it with newlines."""
    markup = "</" + tag + ">"
    if (block != 0):
        markup = "\n" + markup + "\n"
    tempfile.write(markup)
166
def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
    """Write a complete element to the report: opening tag, content, closing tag."""
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
171
def header(template): # return a parsed html header from file
    """Read the header template and expand its $...$ placeholders via varfilter.

    Also copies the stylesheet next to the output file so the relative $css$
    link in the header resolves; a failed copy is logged but non-fatal.
    """
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        logger.warning("could not copy main.css - " + str(e))
    headercontent = open(template, 'r').read()  # NOTE(review): handle is never closed
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent
181
def subject(template):
    """Expand $...$ placeholders in *template* and return the mail subject line."""
    r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + r)
    return r
186
def hostname(): # get the hostname
    """Return this machine's hostname (first line of config['hostname-path']).

    The file handle is now closed promptly via a context manager; the
    original opened the file and never closed it.
    """
    with open(config['hostname-path'], 'r') as hnfile:
        return re.search('^(.*)\n*', hnfile.read()).group(1)
191
def resolve(ip): # try to resolve an ip to hostname
    """Best-effort reverse lookup: return the short hostname for *ip*.

    If *ip* is not a dotted-quad address or the lookup fails, *ip* itself is
    returned unchanged. The original used a bare `except`, which also
    swallowed KeyboardInterrupt/SystemExit; only socket errors are caught now.
    """
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip)  # raises socket.error unless text is a valid IPv4 address
        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to short hostname
        logger.debug("found hostname " + hn)
        return(hn)
    except (socket.error, socket.herror, socket.gaierror):
        logger.debug("failed to resolve hostname for " + ip)
        return(ip) # return ip if no hostname exists
202
def plural(noun, quantity):
    """Return '1 noun' for quantity 1, otherwise 'N nouns'."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
208
def parsesize(num, suffix='B'):
    """Return a human-readable string for *num* bytes using binary prefixes.

    Note: the 'Yi' fallback deliberately omits the space before the unit,
    matching the original output format.
    """
    prefixes = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']
    idx = 0
    size = num
    while idx < len(prefixes) and abs(size) >= 1024.0:
        size = size / 1024.0
        idx += 1
    if idx < len(prefixes):
        return "%3.1f %s%s" % (size, prefixes[idx], suffix)
    return "%.1f%s%s" % (size, 'Yi', suffix)
215
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Return the contents of *path*, expanding log-name aliases via pathfilter.

    Returns None when no path is given, '' when the resolved file does not
    exist. The file handle is now closed via a context manager; the original
    left it open (a descriptor leak over many sections).
    """
    if (path == None):
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if (os.path.isfile(path) is False):
        logger.error(path + " does not exist")
        return ''
    with open(path, mode) as f:
        return f.read()
227
def writelog(path = None, content = "", mode = 'w'): # write file, substituting known paths
    """Write *content* to *path*, expanding log-name aliases via pathfilter."""
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    target = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    with open(target, mode) as handle:
        handle.write(content)
237
def getusage(path): # Get disk usage statistics
    """Return a diskstat tuple for *path*: capacity, used, free (bytes) and percent used.

    'free' counts all free blocks, not just those available to unprivileged users.
    """
    stats = os.statvfs(path)
    cap = float(stats.f_bsize * stats.f_blocks)
    alloc = float(stats.f_bsize * (stats.f_blocks - stats.f_bfree))
    free = float(stats.f_bsize * stats.f_bfree)
    return diskstat(cap, alloc, free, alloc / cap * 100)
245
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Return unique elements of *l* as 'elem (count)' strings, most frequent first.

    Fixes two issues with the original: counting was O(n^2) (list.count per
    element), and tie order depended on set iteration order, which is not
    deterministic across runs on Python 3 (hash randomization). Counter keeps
    counting O(n); ties now come out in first-appearance order.
    """
    from collections import Counter
    counts = Counter(l)
    return [item + ' (' + str(n) + ')' for item, n in counts.most_common()]
254
def addtag(l, tag): # add prefix and suffix tags to each item in a list
    """Wrap every item of *l* in <tag>...</tag> and return the new list."""
    opening = '<' + tag + '>'
    closing = '</' + tag + '>'
    return [opening + item + closing for item in l]
258
def truncl(input, limit): # truncate list
    """Return *input* cut to *limit* items, appending a '+ N more' marker when cut.

    Lists at or under the limit are returned unchanged (same object).
    """
    if len(input) <= limit:
        return input
    clipped = input[:limit]
    clipped.append("+ " + str(len(input) - limit) + " more")
    return clipped
267
def mailprep(inputpath, output, *stylesheet):
    """Rewrite the report with CSS inlined as style= attributes (for mail clients).

    Reads *inputpath* (via readlog, so path aliases apply), runs it through
    premailer with the configured external stylesheet, and writes the result
    to *output*. The *stylesheet* varargs are currently unused. Note: MAILOUT
    here is a local variable, not the module-level constant of the same name.
    """
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    logger.debug(config['css'])
    pm = premailer.Premailer(old, external_styles=config['css'])
    MAILOUT = pm.transform()
    logger.info("converted stylesheet to inline tags")
    file = open(output, 'w')  # NOTE(review): shadows the Py2 builtin 'file'
    file.write(MAILOUT)
    file.close()
    logger.info("written to temporary mail file")
278 logger.info("written to temporary mail file")
279
280
281
282#
283#
284#
285
def sshd():
    """Report successful SSH public-key logins found in the auth log."""
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = []  # list of [user@host, login count] pairs
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        # count logins per user@host (NOTE(review): re.search also matches
        # substrings, so e.g. 'bob@x' would also count toward 'bob@xy')
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']: # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
322
323#
324#
325#
326
def sudo():
    """Report sudo sessions per user and the most frequent sudo commands."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []  # [user, session count] pairs
    data = []
    for match in umatches:
        # NOTE(review): only sessions opened *for root* match this pattern;
        # a session opened for another user makes re.search return None and
        # .group(1) raise AttributeError -- confirm against the auth log format.
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
365
366#
367#
368#
369
def cron():
    """Report the number of cron jobs run and the most frequent commands."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # capture the command of each CMD line, skipping 'cd'-prefixed wrappers
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = len(matches)
    commands = [str(match) for match in matches]
    logger.debug("found cron command " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    # bug fix: was 'if (matches > 0)' -- a list-to-int comparison that is
    # always true on Python 2 and a TypeError on Python 3; test the count.
    if (num > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", commands)
    closetag('div', 1)
    logger.info("finished cron section")
391
392#
393#
394#
395
def nameget():
    """Report successful and failed 'nameget' downloads from the syslog."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    l_f = [i for i in failed]
    n_f = len(l_f)
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    l_s = [i for i in succ]
    n_s = len(l_s)
    # bug fix: this line previously logged l_f (the failures) instead of l_s
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")
418
419#
420#
421#
422
def httpd():
    """Summarize the Apache access/error logs: requests, clients, agents, bytes."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))  # request count (includes the trailing empty line)
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))   # error count (same trailing-line caveat)
    data_b = 0  # total bytes transferred
    ips = []
    files = []
    useragents = []
    errors = []        # NOTE(review): never filled
    notfound = []      # NOTE(review): never filled
    unprivileged = []  # NOTE(review): never filled

    # only status-200 GETs with a parenthesized user-agent field match this pattern
    for line in accesslog.split('\n'):
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            # non-matching lines leave fields == None -> AttributeError on .group()
            if type(error) is AttributeError:
                logger.debug("attributeerrror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    if (ips != None):  # NOTE(review): always true for a list; probably meant len(ips) > 0
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if (useragents != None):  # NOTE(review): always true for a list; probably meant len(useragents) > 0
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
483
484#
485#
486#
487
def httpdsession():
    """Scrape the Apache mod_status page and return a one-line HTML summary.

    Two fixes: ' day[s]' was previously abbreviated to 's' (clashing with
    seconds; the other units map minutes->m, seconds->s, months->mo, so days
    is now 'd'), and closetag() sat after the return statement, so the
    section's <div> was never closed.
    """
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 'd', uptime)
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    closetag('div', 1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
501 # logger.info("finished httpd section")
502
503#
504#
505#
506
def smbd():
    """Report Samba logins, reading one log file per client machine.

    Scans the samba log directory for log.* files (skipping rotated .gz/.old
    files), resolves each client, and records every successful
    'sam authentication' line as a user@host entry.
    """
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")    # find list of logfiles
    logger.debug("found log files " + str(files))
    n_auths = 0         # total number of logins from all users
    sigma_auths = []    # one "user@host" string per successful login

    for file in files:  # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip)

        # record each successful login from this machine
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            sigma_auths.append(match + "@" + host)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
        # bug fix: was sigma_auths[0][0], which indexes the first *character*
        # of the user@host string (the entries are plain strings, not pairs)
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
547
548#
549#
550#
551
def postfix():
    """Report mail sent through postfix: recipient frequencies and total size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is (sender, size-in-bytes, recipient) from paired from=/to= lines
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []      # recipients, one entry per message
    s = []      # senders (collected but never reported; see note below)
    size = 0
    for message in messages:
        r.append(message[2])
        s.append(message[0])
        size += int(message[1])
    size = parsesize(size)  # total bytes, human readable
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        s = list(set(r)) # NOTE(review): reuses 's' as the unique-recipient list; the senders are discarded
        if (len(s) > 1):
            r = orderbyfreq(r)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
581
582#
583#
584#
585
def zfs():
    """Report zpool status from the zpool log: last scrub result and space usage.

    Relies on a site-specific zpool.log layout: a '---' separator line
    followed by the pool name and iostat-style alloc/free columns.
    """
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    pool = re.search('.*---\n(\w*)', zfslog).group(1)  # pool name, first word after the separator
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassembles the scrub date tokens -- presumably weekday, day-of-month,
        # month; TODO confirm against the actual zpool.log date format
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        logger.debug("error getting scrub data")  # no scrub line matched (scrub is None)
    alloc = iostat.group(1)  # NOTE(review): raises AttributeError if iostat didn't match -- uncaught
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
614
615#
616#
617#
618
def temp():
    """Report CPU core/package, motherboard and hard-drive temperatures.

    CPU/system readings come from lm-sensors; drive temperatures are read
    from a local hddtemp daemon, which must already be running
    (e.g. `sudo hddtemp -d /dev/sda /dev/sdX...`).

    Fixes over the original: the core average used
    `reduce(lambda x, y: x[1] + y[1], coretemps)`, which only works when
    there are exactly two readings (after the first fold step the
    accumulator is a float and `x[1]` raises TypeError); a stray debug
    `print()` of the chip iterator was removed; and empty core/drive lists
    no longer crash the section (TypeError / ZeroDivisionError).
    """
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        if coretemps:  # guard: avoid dividing by zero when no cores were found
            core_avg = sum(t[1] for t in coretemps) / len(coretemps)
            logger.debug("average cpu temp is " + str(core_avg))
            coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    for drive in re.split('\|1}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            units = fields.group(3)  # unit char from hddtemp; DEG is used for display instead
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        except (AttributeError, ValueError):  # chunk didn't parse as a drive record
            pass
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    if config['hddtemp']['drives']:  # guard: avoid ZeroDivisionError with no drives
        hddavg = "{0:.2f}".format(hddtotal / float(len(config['hddtemp']['drives']))) + DEG
        logger.debug("avg disk temp is " + str(hddavg))
        data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):
        writedata("cores", coretemps)
    if (config['hddtemp']['drives'] != ''):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
685
686#
687#
688#
689
def du():
    """Report disk usage per configured path, with the change since the last run.

    Each run persists per-path allocations into the 'alloc' log; a delta is
    only shown for paths that have a baseline from a previous run.
    """
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # look up this path's allocation from the previous run
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            pass  # no baseline for this path yet (or unparsable line)
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)  # persist baselines for the next run

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")
719
720#
721#
722#
723
# Snapshot of the run's time/date, used in log lines and template substitution.
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

# Map short log names (as used by readlog/writelog) to real filesystem paths.
pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())  # Py2-only iteritems
pathpattern = re.compile("|".join(pathfilter.keys()))

# $...$ placeholder values for the header template and mail subject line.
# NOTE(review): hostname() runs here at import time, before loadconf(), so it
# always uses the default 'hostname-path'.
varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.basename(config['css'])}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
734
def loadconf(configfile):
    """Merge settings from *configfile* (YAML) into the global `config` dict.

    Top-level mappings are merged key-by-key into the matching config
    sub-dict; scalar values replace config entries wholesale. Also derives
    config['dest'] from the output path. Errors are logged, not raised.

    Fixes over the original: `type(...) == types.DictType` (Python 2 only,
    and a type-equality anti-pattern) is replaced with isinstance, the Py2-only
    iteritems() with items(), and the config file handle is closed promptly.
    """
    try:
        with open(configfile) as f:
            data = yaml.safe_load(f)
        for value in data:
            logger.debug(data[value])
            if isinstance(data[value], dict):
                for key, val in data[value].items():
                    config[value][key] = val
            else:
                config[value] = data[value]
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(config))
    except Exception as e:
        logger.warning("error processing config: " + str(e))
749
750
# Run the report; the finally clause forces a logrotate even if a section
# crashed, so the next run always starts from freshly rotated logs.
try:
    __main__()
finally:
    subprocess.call("logrotate -f /etc/logrotate.conf", shell=True)
    logger.info("rotated logfiles")