4035e0c6b7c1eb743198719ed9b48a65f7ff78b3
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11
# Python 2 hack: reload the interpreter module so setdefaultencoding is
# visible, then force UTF-8 as the default string encoding (needed for the
# degree-sign handling below).
reload(sys)
sys.setdefaultencoding('utf-8')

# Directory containing this script; used to locate bundled resources
# (header template, stylesheet, logparse.yaml config).
scriptdir = os.path.dirname(os.path.realpath(__file__))
16
17
# Record types: disk usage figures (see getusage) and one drive's temperature
# reading (see temp).
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
# Default configuration; loadconf() merges overrides from logparse.yaml on top
# of these values at runtime.
config = {
    'output': '~/var/www/logparse/summary.html',
    'header': scriptdir + '/header.html',
    'css': scriptdir + '/main.css',
    'title': 'logparse',
    'maxlist': 10,
    'maxcmd': 3,
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'
    },
    'hddtemp': {
        'drives': ['/dev/sda'],
        'port': 7634
    },
    'du-paths': ['/', '/etc', '/home'],
    'hostname-path': '/etc/hostname',
    'logs': {
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}


# URL of apache's mod_status page, used by httpdsession()
HTTPDSTATUS = "http://localhost/server-status"
# config['du-paths'] = ["/home/andrew", "/mnt/andrew"]
# config['hddtemp']['drives'] = ["/dev/sda", "/dev/sdc", "/dev/sdd", "/dev/sde"]
# config['hddtemp']['port'] = 7634
# config['output'] = "/mnt/andrew/temp/logparse/summary.html"
# config['output'] = "/mnt/andrew/temp/logparse/out.html"
# Where mailprep() writes the inline-styled copy of the report for mailing
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"
# config['dest'] = "/mnt/andrew/temp/logparse"
# config['header'] = os.path.dirname(os.path.realpath(__file__)) + "/header.html"
# config['css'] = os.path.dirname(os.path.realpath(__file__)) + "/main.css"
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
# config['title'] = "logparse"
# config['maxlist'] = 10
# config['maxcmd'] = 3
# config['mail']['subject'] = "logparse from $hostname$"
VERSION = "v0.1"
# Degree-sign suffix appended to temperature readings.
# NOTE(review): the first assignment is immediately overwritten by the second.
DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
DEG = " °C".encode('unicode_escape')
71
# Set up logging: DEBUG to stderr via basicConfig, plus a syslog handler on
# /dev/log tagged with this process's pid.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)


# Get arguments: an optional mail recipient. When absent, the report is only
# written to config['output'] and no mail is sent (see __main__).
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to
84
def __main__():
    """Top-level driver: load config, write every report section into the
    output HTML file, then email the inline-styled copy if a recipient
    was given on the command line."""
    logger.info("Beginning log analysis at " + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    loadconf(scriptdir + "/logparse.yaml")

    # tempfile is the report file handle shared by all tag/section helpers
    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    # each section function appends its own <div> to the report
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if (to != None):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # NOTE(review): subject and recipient are interpolated into a shell
        # command without quoting/escaping -- shell injection risk if either
        # value is untrusted
        cmd = "cat " + MAILPATH + " | mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
120
def writetitle(title):
    """Emit an <h2> section heading, rejecting empty or multi-line titles."""
    if (title == '') or ('\n' in title):
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
127
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write a subtitle paragraph, optionally followed by its data.

    A single datum is rendered inline after the subtitle; multiple data
    are rendered as an unordered list. An empty subtitle skips the section.
    """
    if (subtitle == ""):
        # original called "loggger.warning" (typo) -> NameError on this path
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
148
def opentag(tag, block = 0, id = None, cl = None):
    """Write an HTML opening tag to tempfile, with optional id and class
    attributes; block-level tags get surrounding newlines."""
    attrs = ''
    if id is not None:
        attrs += " id='" + id + "'"
    if cl is not None:
        attrs += " class='" + cl + "'"
    newline = '\n' if block == 1 else ''
    tempfile.write(newline + '<' + tag + attrs + '>' + newline)
160
def closetag(tag, block = 0):
    """Write an HTML closing tag; block-level tags get surrounding newlines."""
    closing = "</" + tag + ">"
    tempfile.write(closing if block == 0 else "\n" + closing + "\n")
166
def tag(tag, block = 0, content = ""):
    """Convenience wrapper: emit <tag>content</tag> via opentag/closetag."""
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
171
def header(template): # return a parsed html header from file
    """Copy the stylesheet next to the output file, then return the header
    template with its $var$ placeholders substituted from varfilter."""
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        # best-effort: a missing stylesheet degrades the report but is not fatal
        logger.warning("could not copy main.css - " + str(e))
    # context manager closes the handle (original leaked it)
    with open(template, 'r') as templatefile:
        headercontent = templatefile.read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent
181
def subject(template):
    """Expand $var$ placeholders in template and return the mail subject."""
    line = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + line)
    return line
186
def hostname(): # get the hostname
    """Return this machine's hostname (first line of config['hostname-path'])."""
    # context manager closes the handle (original left it open)
    with open(config['hostname-path'], 'r') as hnfile:
        return re.search('^(.*)\n*', hnfile.read()).group(1)
191
def resolve(ip): # try to resolve an ip to hostname
    """Resolve an IPv4 address to its short hostname; fall back to the
    input string when it is not a valid address or has no reverse DNS."""
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip) # raises socket.error unless text is a valid ip
        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to hostname
        logger.debug("found hostname " + hn)
        return(hn)
    # catch only resolution failures; the original bare except also swallowed
    # KeyboardInterrupt/SystemExit
    except (socket.error, socket.herror, socket.gaierror):
        logger.debug("failed to resolve hostname for " + ip)
        return(ip) # return ip if no hostname exists
202
def plural(noun, quantity):
    """Return '1 noun' for quantity 1, otherwise 'n nouns'."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
208
def parsesize(num, suffix='B'):
    """Return a human-readable size string for a byte count (binary units)."""
    value = float(num)
    for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, unit, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, 'Yi', suffix)
215
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Read a logfile, expanding path aliases (see pathfilter) first.

    Returns None for a missing path argument, '' when the file does not
    exist, otherwise the file's contents.
    """
    if (path == None):
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if (os.path.isfile(path) is False):
        logger.error(path + " does not exist")
        return ''
    # context manager closes the handle (original leaked it)
    with open(path, mode) as logfile:
        return logfile.read()
227
def writelog(path = None, content = "", mode = 'w'): # write file, substituting known paths
    """Write content to a logfile, expanding path aliases (see pathfilter)."""
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    # context manager guarantees the close even on write errors; also avoids
    # shadowing the `file` builtin as the original did
    with open(path, mode) as outfile:
        outfile.write(content)
237
def getusage(path):
    """Return a diskstat tuple (capacity, allocated, free, percent-used)
    for the filesystem containing path, all in bytes."""
    vfs = os.statvfs(path)
    block = float(vfs.f_bsize)
    cap = block * vfs.f_blocks                    # total filesystem size
    alloc = block * (vfs.f_blocks - vfs.f_bfree)  # bytes in use
    free = block * vfs.f_bfree                    # free blocks (not user-usable space)
    return diskstat(cap, alloc, free, alloc / cap * 100)
245
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Return unique elements of l as 'element (count)' strings, most
    frequent first.

    Uses collections.Counter, which counts in one pass; the original
    recounted every element inside the sort key (quadratic time).
    """
    from collections import Counter
    counts = Counter(l)
    return [item + ' (' + str(n) + ')' for item, n in counts.most_common()]
254
def addtag(l, tag):
    """Wrap every item of l in <tag>...</tag> and return the new list."""
    open_t = '<' + tag + '>'
    close_t = '</' + tag + '>'
    return [open_t + item + close_t for item in l]
258
def truncl(input, limit):
    """Truncate a list to at most limit items, appending '+ n more'."""
    if len(input) <= limit:
        return input
    clipped = input[:limit]
    clipped.append("+ " + str(len(input) - limit) + " more")
    return clipped
267
def mailprep(inputpath, output, *stylesheet):
    """Inline the external CSS into the report HTML and write the result.

    inputpath:  path of the finished HTML summary
    output:     path to write the mail-ready HTML to
    *stylesheet: unused; kept for backward compatibility with callers
    """
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    pm = premailer.Premailer(old, external_styles=config['css'])
    # original assigned to a local named MAILOUT, shadowing the module-level
    # constant without updating it; a plain local makes that explicit
    inlined = pm.transform()
    logger.info("converted stylesheet to inline tags")
    # context manager guarantees the close; also avoids shadowing `file`
    with open(output, 'w') as outfile:
        outfile.write(inlined)
    logger.info("written to temporary mail file")
278
279
280
281#
282#
283#
284
def sshd():
    """Summarise successful ssh public-key logins, aggregated per user@host."""
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = [] # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        # aggregate counts per user@host pair (host resolved from ip)
        userhost = user + '@' + resolve(ip)
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']: # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
321
322#
323#
324#
325
def sudo():
    """Summarise sudo sessions per user and the commands run via sudo."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []  # [username, session count] pairs
    data = []
    for match in umatches:
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        # show the most frequent sudo commands, truncated to maxcmd entries
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
364
365#
366#
367#
368
def cron():
    """Summarise commands run by cron (from the cron log)."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # capture the command from each CMD line, skipping leading "cd ..." wrappers
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = len(matches)
    commands = [str(match) for match in matches]
    logger.debug("found cron command " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    # original tested "matches > 0", comparing a list to an int: always true
    # on Python 2 and a TypeError on Python 3
    if (num > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished cron section")
390
391#
392#
393#
394
def nameget():
    """Summarise successful and failed downloads recorded by nameget."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    # findall already returns the list; no need to re-copy element by element
    l_f = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = len(l_f)
    logger.debug("the following downloads failed: " + str(l_f))
    l_s = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = len(l_s)
    # original logged l_f here, mislabelling the failure list as successes
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")
417
418#
419#
420#
421
def httpd():
    """Summarise apache logs: requests, clients, user agents, bytes
    transferred, and error count."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))      # request count (includes trailing blank line)
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))       # error count (includes trailing blank line)
    data_b = 0                          # total bytes transferred
    ips = []
    files = []
    useragents = []

    for line in accesslog.split('\n'):
        # only successful (status 200) GET requests are parsed
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        # fields is None when the regex did not match this line
        except AttributeError as error:
            logger.debug("attributeerrror: " + str(error))
        except Exception as error:
            logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    # original tested "ips != None" / "useragents != None", which is always
    # true for lists initialised above; test for non-empty instead
    if (len(ips) > 0):
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if (len(useragents) > 0):
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
482
483#
484#
485#
486
def httpdsession():
    """Return a one-line summary of apache's live mod_status page.

    NOTE(review): appears unused. The closetag() call after the return
    statement is unreachable, and opentag() writes a <div> to tempfile even
    though the result is returned rather than written.
    """
    # logger.debug("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    # abbreviate the uptime units
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 's', uptime)   # NOTE(review): 's' here looks like a typo for 'd'
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
    closetag('div', 1)  # unreachable: follows the return above
    # logger.info("finished httpd section")
501
502#
503#
504#
505
def smbd():
    """Summarise samba logins per user@host by scanning per-client logs."""
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]") # find list of logfiles
    logger.debug("found log files " + str(files))
    n_auths = 0         # total number of logins from all users
    sigma_auths = []    # one "user@host" string per successful login

    for file in files:  # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip)

        # count number of logins from each user
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            userhost = match + "@" + host
            sigma_auths.append(userhost)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
        # sigma_auths holds plain strings; the original indexed [0][0] and
        # so printed only the first character of the user@host entry
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
546
547#
548#
549#
550
def postfix():
    """Summarise mail sent through postfix: recipients and total size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is (sender, size, recipient) spanning two log lines
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []      # recipients, one entry per message
    size = 0    # cumulative bytes
    for message in messages:
        r.append(message[2])
        size += int(message[1])
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        # the original also accumulated senders into a list that it then
        # silently overwrote with the unique-recipient set; that dead code
        # is removed here
        if (len(set(r)) > 1):   # more than one distinct recipient
            r = orderbyfreq(r)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
580
581#
582#
583#
584
def zfs():
    """Summarise zpool health: last scrub result plus space usage.

    Parses a zpool status/iostat dump written to config['logs']['zfs']
    by an external job.
    """
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    # pool name: first word after a "---" separator line
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble the scrub date from the captured fields
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # scrub is None when no scrub line was found in the log
        logger.debug("error getting scrub data")
    # NOTE(review): iostat may also be None for an unexpected log format,
    # which would raise AttributeError here uncaught
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
613
614#
615#
616#
617
def temp():
    """Report CPU core/package, system, and hard drive temperatures.

    CPU and system readings come from lm-sensors; drive readings from a
    hddtemp daemon listening on localhost (config['hddtemp']['port']).
    """
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        print(sensors.iter_detected_chips())
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        # mean of the core temperatures; the original used
        # reduce(lambda x, y: x[1] + y[1], coretemps), which crashes with
        # more than two cores (x becomes a float after the first step)
        core_avg = sum(t[1] for t in coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost',config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    for drive in re.split('\|1}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            units = fields.group(3)  # NOTE: unused; DEG is stored instead
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        # skip chunks of the hddtemp stream that do not match the pattern
        except (AttributeError, ValueError):
            pass
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    # assumes at least one drive responded -- division by zero otherwise
    hddavg = "{0:.2f}".format(hddtotal/float(len(config['hddtemp']['drives']))) + DEG
    logger.debug("avg disk temp is " + str(hddavg))
    data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):
        writedata("cores", coretemps)
    if (config['hddtemp']['drives'] != ''):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
684
685#
686#
687#
688
def du():
    """Report disk usage for each configured path, with the change since
    the previous run as recorded in the 'alloc' log."""
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')  # previous run's usage figures
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # look up this path's allocation from the previous run
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            # no previous record for this path (or the line was unparsable)
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)  # save current figures for the next run

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")
718
719#
720#
721#
722
# Timestamps captured at import time; used in the report header and subject
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

# Map of log aliases -> real paths, used by readlog()/writelog().
# NOTE(review): built from the default config at import time; overrides that
# loadconf() applies later (from __main__) are not reflected here.
pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
pathpattern = re.compile("|".join(pathfilter.keys()))

# Map of $var$ placeholders -> values, used by header() and subject()
varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.basename(config['css'])}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
733
def loadconf(configfile):
    """Merge settings from a YAML config file into the module config dict.

    Top-level dict values are merged key-by-key, so a partial section in
    the file overrides only the keys it specifies; scalar values replace
    the default outright. Also derives config['dest'] from the output path.
    """
    try:
        data = yaml.safe_load(open(configfile))
        for value in data:
            logger.debug(data[value])
            # isinstance is the idiomatic check; the original compared
            # type(...) == types.DictType
            if isinstance(data[value], dict):
                for key in data[value].iteritems():
                    config[value][key[0]] = key[1]
            else:
                config[value] = data[value]
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(config))
    except Exception as e:
        # a broken config falls back to the built-in defaults
        logger.warning("error processing config: " + str(e))
748
749
# Run the report; whatever happens, force a logrotate afterwards so the next
# run starts from fresh logfiles.
try:
    __main__()
finally:
    subprocess.call("logrotate -f /etc/logrotate.conf", shell=True)
    logger.info("rotated logfiles")