# 3a82e96781a51ba1b19f826cd8ddbd9aebe798f3
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11
# Python 2 only: reload() re-exposes sys.setdefaultencoding(), which site.py
# removes at startup. Forcing UTF-8 avoids UnicodeDecodeError when str and
# unicode objects are mixed (e.g. DEG below, log-line concatenation).
reload(sys)
sys.setdefaultencoding('utf-8')

# Directory containing this script; used to locate bundled assets
# (header.html, main.css, logparse.yaml).
scriptdir = os.path.dirname(os.path.realpath(__file__))
16
17
# Record types used by the disk-usage and temperature sections.
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])   # bytes + percent used
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])         # one hddtemp reading

# Default configuration; loadconf() overlays values from logparse.yaml on top.
config = {
    'output': '~/var/www/logparse/summary.html',   # where the HTML summary is written
    'header': scriptdir + '/header.html',          # HTML header template ($var$ placeholders)
    'css': scriptdir + '/main.css',                # stylesheet, copied next to the output
    'title': 'logparse',                           # $title$ substitution
    'maxlist': 10,                                 # max items per list (e.g. ssh users)
    'maxcmd': 3,                                   # max commands shown per section (see truncl)
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'      # expanded by subject()
    },
    'rotate': 1,                                   # 1 = run logrotate after parsing
    'hddtemp': {
        'drives': ['/dev/sda'],                    # repopulated from the hddtemp daemon in temp()
        'port': 7634                               # hddtemp daemon TCP port
    },
    'du-paths': ['/', '/etc', '/home'],            # paths reported by du()
    'hostname-path': '/etc/hostname',              # read by hostname()
    'logs': {                                      # aliases used by readlog()/writelog()
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',                   # directory of per-client samba logs
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',                     # du() usage cache from the previous run
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'                # directory containing access.log / error.log
    }
}
50
51
# Apache mod_status page scraped by httpdsession().
HTTPDSTATUS = "http://localhost/server-status"
# Temporary file holding the inline-CSS version of the report for mailing.
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"

# Output accumulators (only MAILOUT is used, as a local in mailprep()).
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""

VERSION = "v0.1"

# Unit suffix appended to temperature readings.
# BUG FIX: DEG was assigned twice; the first value
# (u'\N{DEGREE SIGN}'.encode('utf-8')) was immediately overwritten, so the
# dead assignment has been removed.
DEG = " °C".encode('unicode_escape')
72
# Set up logging: DEBUG to stderr via basicConfig, plus a syslog handler so
# each run is traceable in the system journal (tagged with our pid).
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')   # Linux-only syslog socket
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)
79
80
# Get arguments: only the mail recipient. If omitted, the report is still
# written to config['output'] but no email is sent (see __main__).
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to
85
def __main__():
    """Entry point: parse all configured logs, write the HTML summary, and
    optionally email it to the -t/--to recipient.

    NOTE(review): despite the name, this is an ordinary function called at
    the bottom of the file, not the ``if __name__ == '__main__'`` idiom.
    """
    logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    loadconf(scriptdir + "/logparse.yaml")

    # tempfile is the shared report handle written by every section function.
    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    # One section per service, in report order.
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if (to != None):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # NOTE(review): shell command built by string concatenation; 'to' and
        # the subject are interpolated unescaped — shell-injection risk if
        # either ever carries untrusted text.
        cmd = "/bin/cat " + MAILPATH + " | /usr/bin/mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
120
121
def writetitle(title):
    """Write a section heading (<h2>) to the report.

    Empty titles and titles containing a newline are rejected with an
    error log entry and no output.
    """
    invalid = (title == '') or ('\n' in title)
    if invalid:
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
128
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write one report entry: a subtitle <p>, optionally followed by data.

    A single data item is appended to the subtitle line; multiple items are
    rendered as a <ul> list.
    """
    if (subtitle == ""):
        # BUG FIX: this line called the misspelled name 'loggger', so an
        # empty subtitle raised NameError instead of being skipped.
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
149
def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    """Write an opening HTML tag to the report, with optional id and class
    attributes; block=1 surrounds the tag with newlines.
    """
    markup = '<' + tag
    if id is not None:
        markup += " id='" + id + "'"
    if cl is not None:
        markup += " class='" + cl + "'"
    markup += '>'
    if block == 1:
        markup = '\n' + markup + '\n'
    tempfile.write(markup)
161
def closetag(tag, block = 0): # write html closing tag
    """Write a closing HTML tag; block=1 surrounds it with newlines."""
    markup = "</" + tag + ">"
    tempfile.write(markup if block == 0 else "\n" + markup + "\n")
167
def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
    """Write a complete element: <tag>content</tag> (block=1 adds newlines)."""
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
172
def header(template): # return a parsed html header from file
    """Read the HTML header template and expand its $var$ placeholders.

    Also copies the stylesheet next to the output file so the generated
    page can reference it; a failed copy is logged but not fatal.
    """
    try:
        # config['dest'] is derived from the output path by loadconf().
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        logger.warning("could not copy main.css - " + str(e))
    headercontent = open(template, 'r').read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent
182
def subject(template):
    """Expand $var$ placeholders (via varfilter) in the mail subject line."""
    line = varpattern.sub(lambda hit: varfilter[re.escape(hit.group(0))], template)
    logger.debug("returning subject line " + line)
    return line
187
def hostname(): # get the hostname
    """Return the machine's hostname: the first line of config['hostname-path'].

    BUG FIX: the original left the file handle open; it is now closed via a
    context manager.
    """
    with open(config['hostname-path'], 'r') as hnfile:
        return re.search('^(.*)\n*', hnfile.read()).group(1)
192
def resolve(ip): # try to resolve an ip to hostname
    """Resolve an IP address to its short hostname; return the input
    unchanged if it is not a dotted-quad IP or reverse lookup fails.
    """
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip) # raises socket.error unless text is an ipv4 address
        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to hostname
    except (socket.error, socket.herror, socket.gaierror):
        # BUG FIX: narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit.
        logger.debug("failed to resolve hostname for " + ip)
        return(ip) # return ip if no hostname exists
    logger.debug("found hostname " + hn)
    return(hn)
203
def plural(noun, quantity): # return "1 noun" or "n nouns"
    """Return the quantity and noun with a plural 's' for quantity != 1."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
209
def parsesize(num, suffix='B'): # return human-readable size from number of bytes
    """Format a byte count with binary prefixes (KiB, MiB, ...)."""
    prefixes = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']
    for prefix in prefixes:
        if abs(num) < 1024.0:
            return "%3.1f %s%s" % (num, prefix, suffix)
        num /= 1024.0
    # beyond Zi: yotta, with no space (matches the historical format)
    return "%.1f%s%s" % (num, 'Yi', suffix)
216
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Read a logfile, first expanding known aliases ('auth', 'cron', ...)
    via pathfilter. Returns None for a missing path argument, '' for a
    nonexistent file.
    """
    if (path == None):
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if (os.path.isfile(path) is False):
        logger.error(path + " does not exist")
        return ''
    # BUG FIX: use a context manager; the original leaked the file handle.
    with open(path, mode) as logfile:
        return logfile.read()
228
def writelog(path = None, content = "", mode = 'w'): # write file, substituting known paths
    """Write content to a logfile, expanding known path aliases via
    pathfilter. No-op (with an error log) on invalid arguments.
    """
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    # BUG FIX: use a context manager (guarantees close on error) and stop
    # shadowing the builtin 'file'.
    with open(path, mode) as outfile:
        outfile.write(content)
238
def getusage(path): # Get disk usage statistics
    """Return a diskstat(cap, alloc, free, ratio) tuple for the filesystem
    containing *path*. Sizes are in bytes; ratio is percent allocated.
    """
    vfs = os.statvfs(path)
    bsize = float(vfs.f_bsize)
    cap = bsize * vfs.f_blocks                      # total capacity
    free = bsize * vfs.f_bfree                      # free blocks (incl. reserved)
    alloc = bsize * (vfs.f_blocks - vfs.f_bfree)    # allocated space
    return diskstat(cap, alloc, free, alloc / cap * 100)
246
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Collapse duplicates into "item (count)" strings, most frequent first.

    PERF FIX: the original called temp_l.count() once per unique element
    (O(n^2)); Counter does one pass. Tie order among equal counts was
    already arbitrary (the original round-tripped through set()).
    """
    from collections import Counter  # local import: file header doesn't pull in Counter
    counts = Counter(l)
    return [item + ' (' + str(count) + ')' for item, count in counts.most_common()]
255
def addtag(l, tag): # add prefix and suffix tags to each item in a list
    """Wrap every item of the list in <tag>...</tag>."""
    open_t = '<' + tag + '>'
    close_t = '</' + tag + '>'
    return [open_t + item + close_t for item in l]
259
def truncl(input, limit): # truncate list
    """Truncate a list to *limit* items, appending a "+ N more" marker when
    anything was cut. The original list is never mutated.
    """
    if len(input) <= limit:
        return(input)
    clipped = input[:limit]
    clipped.append("+ " + str(len(input) - limit) + " more")
    return(clipped)
268
def mailprep(inputpath, output, *stylesheet):
    """Inline the external stylesheet into the report using premailer and
    write the result to *output*, ready for HTML mail.

    NOTE(review): *stylesheet is accepted but never used; MAILOUT here is a
    local that shadows the module-level constant of the same name.
    """
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    logger.debug(config['css'])
    pm = premailer.Premailer(old, external_styles=config['css'])
    MAILOUT = pm.transform()
    logger.info("converted stylesheet to inline tags")
    file = open(output, 'w')
    file.write(MAILOUT)
    file.close()
    logger.info("written to temporary mail file")
280
281
282
283#
284#
285#
286
def sshd():
    """Summarise successful ssh public-key logins from the auth log.

    Counts logins per user@host (IPs resolved to hostnames where possible)
    and writes an "sshd" section to the report.
    """
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = [] # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        # NOTE(review): re.search treats userhost as a regex pattern here;
        # metacharacters in a hostname could mis-match — confirm intended.
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']: # if there are lots of users, truncate them
                # data already holds maxlist+1 user entries at this point,
                # so len(users) - maxlist - 1 is the number left unshown
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")

323
324#
325#
326#
327
def sudo():
    """Summarise sudo sessions and the most frequent sudo commands from
    the auth log.
    """
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []   # [username, session count] pairs
    data = []
    for match in umatches:
        # NOTE(review): only "opened for user root" lines match; a session
        # opened for any other target user makes re.search return None and
        # the .group(1) call raise AttributeError — confirm acceptable.
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        # tag, order by frequency, and truncate the command list
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
366
367#
368#
369#
370
def cron():
    """Summarise cron jobs from the cron log: total run count and the most
    frequent commands.
    """
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # Commands run via CMD(...); the (?!.*cd) lookahead skips 'cd' wrappers.
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = len(matches)
    commands = [str(match) for match in matches]
    logger.debug("found cron commands " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    # BUG FIX: the original tested `matches > 0`, comparing the list itself
    # to an int (always True on Python 2, TypeError on Python 3).
    if num > 0:
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", commands)
    closetag('div', 1)
    logger.info("finished cron section")
392
393#
394#
395#
396
def nameget():
    """Summarise successful and failed downloads logged by nameget in the
    system log.
    """
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = len(failed)
    l_f = list(failed)
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = len(succ)
    l_s = list(succ)
    # BUG FIX: the original logged l_f (the failures) on this line.
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")
419
420#
421#
422#
423
def httpd():
    """Summarise the apache access and error logs: request count, bytes
    transferred, top requested files, client IPs, and user agents.
    """
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))   # NOTE(review): counts the trailing empty line too
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))
    data_b = 0           # total bytes transferred
    ips = []
    files = []
    useragents = []
    errors = []          # NOTE(review): never populated
    notfound = []        # NOTE(review): never populated
    unprivileged = []    # NOTE(review): never populated

    for line in accesslog.split('\n'):
        # Only matches 200-status GET requests whose user-agent string has a
        # parenthesised platform segment; everything else is skipped below.
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            # fields is None for non-matching lines -> AttributeError here
            if type(error) is AttributeError:
                logger.debug("attributeerrror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    if (ips != None):   # NOTE(review): always true — ips is a list, never None
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if (useragents != None):   # NOTE(review): always true, as above
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
484
485#
486#
487#
488
def httpdsession():
    """Return a one-line HTML summary scraped from Apache's mod_status page.

    NOTE(review): not called from __main__; it opens a <div> that is never
    closed — the original closetag() call sat *after* the return statement
    (unreachable) and has been removed as dead code.
    """
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    # Abbreviate the uptime units reported by mod_status.
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 's', uptime)    # NOTE(review): 'd' was probably intended
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
503
504#
505#
506#
507
def smbd():
    """Summarise samba logins from the per-client logfiles in
    config['logs']['smb'] (one file per machine, named log.<host>).
    """
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    # Per-client logfiles; the glob excludes rotated .gz/.old files.
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")
    logger.debug("found log files " + str(files))
    n_auths = 0        # total number of logins across all users/clients
    sigma_auths = []   # one "user@host" string per login

    for file in files: # one log file for each client

        logger.debug("looking at file " + file)

        # machine (ip or hostname) this file represents (/var/log/samba/log.host)
        ip = re.search('log\.(.*)', file).group(1)
        host = resolve(ip)

        # count successful authentications for each user on this client
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            sigma_auths.append(match + "@" + host)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
        # BUG FIX: the original used sigma_auths[0][0], which — since
        # sigma_auths holds plain strings — yielded only the first
        # *character* of the "user@host" string.
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
565
566#
567#
568#
569
def postfix():
    """Summarise mail sent through postfix: message count, recipients, and
    total size transferred.
    """
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # Each match is a (sender, size, recipient) tuple for one delivery.
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    recipients = []
    size = 0
    for message in messages:
        recipients.append(message[2])
        size += int(message[1])
    # NOTE: the original also accumulated senders into a list that was then
    # immediately clobbered by the unique-recipient computation; that dead
    # accumulation has been removed.
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(recipients) > 0):
        if (len(set(recipients)) > 1):   # multiple unique recipients
            r = orderbyfreq(recipients)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + recipients[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
599
600#
601#
602#
603
def zfs():
    """Summarise the zpool snapshot written to the zfs logfile: pool name,
    last scrub result, and allocated/free space.
    """
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    # pool name: first word on the line following a '---' separator
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    # scrub summary line, e.g. "scrub repaired 0 in 3h21m with 0 errors on ..."
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    # allocated / free columns from the iostat block
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble as "<weekday> <day> <month>" from the captured pieces
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # NOTE(review): bare except; scrub is None when no scrub line matched
        logger.debug("error getting scrub data")
    # NOTE(review): if the iostat regex fails, iostat is None and the next
    # line raises AttributeError — unguarded, unlike the scrub block above.
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
632
633#
634#
635#
636
def temp():
    """Collect CPU/system temperatures via lm-sensors and disk temperatures
    from a running hddtemp daemon, then write a "temperatures" section.

    hddtemp must be running in daemon mode, e.g.:
        sudo hddtemp -d /dev/sda /dev/sdX...
    """
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []   # [label, value] per CPU core
    pkgtemp = 0      # CPU package temperature (CPUTIN)
    systemp = 0      # system/motherboard temperature (SYS)
    try:
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        # BUG FIX: the original used reduce(lambda x, y: x[1] + y[1], ...),
        # which only works for exactly two cores — after the first step the
        # accumulator is a plain number and x[1] raises TypeError.
        # (A stray debug print() of the chip iterator was also removed.)
        core_avg = sum(t[1] for t in coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # Query the hddtemp daemon; two recv() calls in case the report exceeds
    # one buffer.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    # hddtemp wire format: |/dev/sda|model|35|C| ... records split on '|1}'
    for drive in re.split('\|1}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        except AttributeError:
            # fields is None for fragments with no drive record — narrowed
            # from the original bare except
            pass
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    hddavg = "{0:.2f}".format(hddtotal/float(len(config['hddtemp']['drives']))) + DEG
    logger.debug("avg disk temp is " + str(hddavg))
    data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):
        writedata("cores", coretemps)
    if (config['hddtemp']['drives'] != ''):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
703
704#
705#
706#
707
def du():
    """Report disk usage — and the delta since the previous run — for each
    path in config['du-paths'].

    The previous run's figures are cached in the 'alloc' logfile as
    "<path>\\t<bytes>" lines; this run rewrites that cache.
    """
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')   # cache written by the previous run
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # NOTE(review): p is interpolated into the regex unescaped; fine
            # for plain paths, but regex metacharacters would mis-match.
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            # bare except: typically no cached entry for this path (first run)
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")
737
738#
739#
740#
741
# Timestamps for the report header and log messages.
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

# Map the short log aliases accepted by readlog()/writelog() to real paths.
# NOTE(review): both filters are built at import time from the *default*
# config — loadconf() runs later (inside __main__), so YAML overrides of log
# paths do not affect these substitutions.
pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())   # iteritems(): Python 2 only
pathpattern = re.compile("|".join(pathfilter.keys()))

# $placeholder$ substitutions for header.html and the mail subject line.
varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": config['css']}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
752
def loadconf(configfile):
    """Overlay settings from a YAML file onto the module-level config dict.

    Dict-valued entries are merged one level deep so unspecified sub-keys
    keep their defaults; scalars/lists replace the default wholesale. Also
    derives config['dest'] from the output path. Errors are logged and
    swallowed so a broken config falls back to the defaults.
    """
    try:
        # BUG FIX: close the file handle (the original passed a bare open()
        # to yaml and leaked it).
        with open(configfile) as f:
            data = yaml.safe_load(f)
        for value in data:
            logger.debug(data[value])
            # BUG FIX: was `type(...) == types.DictType`, which is Python 2
            # only and non-idiomatic; isinstance works on both 2 and 3, as
            # does items() (iteritems() was 2-only).
            if isinstance(data[value], dict):
                for key, entry in data[value].items():
                    config[value][key] = entry
            else:
                config[value] = data[value]
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(config))
    except Exception as e:
        logger.warning("error processing config: " + str(e))
767
768
# Run the report; whatever happens, finish by (optionally) rotating logs so
# the next run starts from fresh files.
try:
    __main__()
finally:
    # rotate logs using systemd logrotate
    if (config['rotate'] == 1):
        subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
        logger.info("rotated logfiles")
    else:
        logger.info("user doesn't want to rotate logs")
        logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
        sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
        logger.debug(sim)
    # refresh the timestamps for the final log line (run may take a while)
    timenow = time.strftime("%H:%M:%S")
    datenow = time.strftime("%x")
    logger.info("finished parsing logs at " + str(datenow) + ' ' + str(timenow))