59370bcc6981f644ed00fc83eea34691acbdacd6
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11
# Python 2 only: re-expose sys.setdefaultencoding (hidden after interpreter
# startup) and force UTF-8 as the default str<->unicode encoding.
reload(sys)
sys.setdefaultencoding('utf-8')
14
scriptdir = os.path.dirname(os.path.realpath(__file__))  # directory containing this script


# cap/alloc/free are byte counts; ratio is percent used (see getusage())
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
# one hddtemp reading per drive (units is the degree-sign suffix)
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
# Built-in defaults; loadconf() overlays values from logparse.yaml on top.
config = {
    'output': '~/var/www/logparse/summary.html',  # where the HTML report is written
    'header': scriptdir + '/header.html',         # header template with $var$ placeholders
    'css': scriptdir + '/main.css',
    'title': 'logparse',
    'maxlist': 10,   # max items shown per list (e.g. ssh users)
    'maxcmd': 3,     # max items shown per "top ..." list
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'  # $var$ placeholders expanded by subject()
    },
    'rotate': 'y',   # 'y' = run logrotate at the end, 's' = simulate only
    'hddtemp': {
        'drives': ['/dev/sda'],
        'port': 7634   # hddtemp daemon TCP port
    },
    'du-paths': ['/', '/etc', '/home'],   # paths reported by du()
    'hostname-path': '/etc/hostname',
    # short names used throughout via readlog()/writelog() path substitution
    'logs': {
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}
50
51
HTTPDSTATUS = "http://localhost/server-status"   # apache mod_status page used by httpdsession()
MAILPATH = "/mnt/andrew/temp/logparse/mail.html" # temp file holding the inlined-CSS mail body
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
VERSION = "v0.1"
# Degree-sign suffix appended to every temperature reading.
# fix: removed the dead assignment DEG = u'\N{DEGREE SIGN}'.encode('utf-8'),
# which was immediately overwritten by the line below; also removed the
# stale commented-out config overrides (configuration lives in logparse.yaml).
DEG = " °C".encode('unicode_escape')
72
# Set up logging: DEBUG to stderr via basicConfig, plus a syslog handler
# (/dev/log) whose messages are prefixed "logparse.py[<pid>]: ".
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)
79
80
# Get arguments
# -t/--to overrides config['mail']['to'] (applied in loadconf()).
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
84
def __main__():
    """Entry point: load config, write each report section to the output
    HTML file, then optionally send it by email (when config['mail']['to']
    is set)."""
    logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))
    loadconf(scriptdir + "/logparse.yaml")

    if (config['mail']['to'] == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + config['mail']['to'])

    # Map short log names (e.g. "auth") to configured paths; readlog() and
    # writelog() substitute them via pathpattern.
    global pathfilter
    global pathpattern
    pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
    pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())  # Python 2 (iteritems)
    pathpattern = re.compile("|".join(pathfilter.keys()))

    # Map $var$ placeholders (header template, mail subject) to their values.
    global varfilter
    global varpattern
    varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.relpath(config['css'], os.path.dirname(config['output']))}
    varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
    varpattern = re.compile("|".join(varfilter.keys()))

    # All section writers append to this shared global file handle.
    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    # one self-contained <div class="section"> per service
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if (config['mail']['to'] != None):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # NOTE(review): subject/recipient are interpolated into a shell
        # command -- fine for trusted config, unsafe for untrusted input.
        cmd = "/bin/cat " + MAILPATH + " | /usr/bin/mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + config['mail']['to']
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
131
132
def writetitle(title):
    """Write a section heading (<h2>) to the output file.

    Empty or multi-line titles are rejected with a logged error.
    """
    valid = title != '' and '\n' not in title
    if not valid:
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
139
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write a report subsection: a <p> subtitle, optionally with data.

    A single datum is appended to the subtitle line; multiple data are
    rendered as an unordered list below it.
    """
    if (subtitle == ""):
        logger.warning("no subtitle provided.. skipping section")  # fix: was misspelled 'loggger' -> NameError
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
160
def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    """Write an HTML opening tag with optional id/class attributes.

    In block mode (block=1) the tag is surrounded by newlines.
    """
    markup = '<' + tag
    if id is not None:
        markup += " id='" + id + "'"
    if cl is not None:
        markup += " class='" + cl + "'"
    markup += '>'
    if block == 1:
        markup = '\n' + markup + '\n'
    tempfile.write(markup)
172
def closetag(tag, block = 0): # write html closing tag
    """Write an HTML closing tag; block mode surrounds it with newlines."""
    if block:
        tempfile.write("\n</" + tag + ">\n")
    else:
        tempfile.write("</" + tag + ">")
178
def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
    # Convenience wrapper: emits <tag>content</tag> via opentag()/closetag().
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
183
def header(template): # return a parsed html header from file
    """Read the HTML header template and expand its $var$ placeholders.

    Also copies the stylesheet next to the output file so the relative
    <link> in the header resolves; config['dest'] is set by loadconf().
    """
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        logger.warning("could not copy main.css - " + str(e))
    with open(template, 'r') as f:  # fix: the template file handle was never closed
        headercontent = f.read()
    return varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
193
def subject(template):
    """Expand $var$ placeholders in *template* and return the mail subject."""
    line = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + line)
    return line
198
def hostname(): # get the hostname
    """Return the machine's hostname read from config['hostname-path']."""
    with open(config['hostname-path'], 'r') as hnfile:  # fix: file was never closed
        return re.search('^(.*)\n*', hnfile.read()).group(1)
203
def resolve(ip): # try to resolve an ip to hostname
    """Return the short hostname for *ip*, or *ip* itself on failure."""
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip)  # raises socket.error if not a valid IPv4 address
        hn = socket.gethostbyaddr(ip)[0].split(".")[0]  # short hostname
    except socket.error:  # fix: was a bare except; herror/gaierror subclass socket.error
        logger.debug("failed to resolve hostname for " + ip)
        return(ip)  # return ip if no hostname exists
    logger.debug("found hostname " + hn)
    return(hn)
214
def plural(noun, quantity): # return "1 noun" or "n nouns"
    """Format *quantity* with *noun*, adding a plural 's' when needed."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
220
def parsesize(num, suffix='B'): # return human-readable size from number of bytes
    """Convert a byte count to a human-readable string with binary prefixes."""
    value = num
    for prefix in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, prefix, suffix)
        value /= 1024.0
    # fell through every prefix: yotta range
    return "%.1f%s%s" % (value, 'Yi', suffix)
227
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Return the contents of *path*; short names like 'auth' are expanded
    via pathfilter.  Returns '' for a missing file, None on bad usage."""
    if (path == None):
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if (os.path.isfile(path) is False):
        logger.error(path + " does not exist")
        return ''
    with open(path, mode) as f:  # fix: file handle was never closed
        return f.read()
239
def writelog(path = None, content = "", mode = 'w'): # write file, substituting known paths
    """Write *content* to *path*; short log names are expanded via pathfilter.

    (The old header comment said "read file" -- this function writes.)
    """
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    # fix: 'file' shadowed the builtin and the handle relied on GC to close
    with open(path, mode) as f:
        f.write(content)
249
def getusage(path): # Get disk usage statistics
    """Return a diskstat namedtuple (cap, alloc, free, ratio%) for *path*."""
    vfs = os.statvfs(path)
    bsize = vfs.f_bsize
    cap = float(bsize * vfs.f_blocks)                   # total capacity in bytes
    alloc = float(bsize * (vfs.f_blocks - vfs.f_bfree)) # bytes in use
    free = float(bsize * vfs.f_bfree)                   # free blocks (not usable space)
    return diskstat(cap, alloc, free, alloc / cap * 100)
257
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Return the unique elements of *l* as "item (count)" strings,
    ordered most-frequent first."""
    # fix: the old sort key recounted occurrences on every comparison
    # (O(n^2) per compare); count once and sort on the stored value --
    # stable sort keeps the resulting order identical
    pairs = [[item, l.count(item)] for item in set(l)]
    pairs.sort(key=lambda p: p[1])  # ascending by count
    strings = [p[0] + ' (' + str(p[1]) + ')' for p in pairs]
    return strings[::-1]  # most frequent first
266
def addtag(l, tag): # add prefix and suffix tags to each item in a list
    """Wrap every string in *l* in <tag>...</tag> and return the new list."""
    opening = '<' + tag + '>'
    closing = '</' + tag + '>'
    return [opening + item + closing for item in l]
270
def truncl(input, limit): # truncate list
    """Return *input* capped at *limit* items, with a "+ n more" marker
    appended when anything was dropped."""
    if len(input) <= limit:
        return(input)
    shortened = input[:limit]
    shortened.append("+ " + str(len(input) - limit) + " more")
    return(shortened)
279
def mailprep(inputpath, output, *stylesheet):
    """Inline the external stylesheet into the HTML at *inputpath* and
    write the result to *output* (mail clients ignore <link> stylesheets)."""
    logger.debug("converting stylesheet to inline tags")
    html = readlog(inputpath)
    logger.debug(config['css'])
    pm = premailer.Premailer(html, external_styles=config['css'])
    # note: the old code assigned to a *local* MAILOUT, silently shadowing
    # the module-level constant of the same name
    inlined = pm.transform()
    logger.info("converted stylesheet to inline tags")
    with open(output, 'w') as f:  # fix: 'file' shadowed the builtin, no context manager
        f.write(inlined)
    logger.info("written to temporary mail file")
291
292
293
294#
295#
296#
297
def sshd():
    """Summarise ssh publickey logins per user@host from the auth log."""
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = [] # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        # aggregate logins under "user@host" (ip resolved where possible)
        userhost = user + '@' + resolve(ip)
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']: # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
334
335#
336#
337#
338
def sudo():
    """Summarise sudo sessions per user and the most frequent commands."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []   # [username, session count] pairs
    data = []
    for match in umatches:
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1): # single user: append name to the subtitle line
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
377
378#
379#
380#
381
def cron():
    """Summarise cron activity: job count and the most frequent commands."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # capture each CMD's command line, skipping entries wrapped in 'cd'
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = len(matches)
    commands = [str(match) for match in matches]
    logger.debug("found cron command " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    if (num > 0):  # fix: was 'matches > 0', comparing a list with an int
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", commands)
    closetag('div', 1)
    logger.info("finished cron section")
403
404#
405#
406#
407
def nameget():
    """Summarise nameget downloads (successes and failures) from syslog."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    # findall already returns lists; the old element-by-element copies were redundant
    l_f = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = len(l_f)
    logger.debug("the following downloads failed: " + str(l_f))
    l_s = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = len(l_s)
    logger.debug("the following downloads succeeded: " + str(l_s))  # fix: logged l_f (failures) before
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")
430
431#
432#
433#
434
def httpd():
    """Summarise apache traffic: requests, bytes transferred, client IPs
    and user agents from access.log, plus the error.log line count."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))   # request count (includes any trailing blank line)
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))    # error count (same caveat)
    data_b = 0                       # total bytes transferred
    ips = []
    files = []
    useragents = []
    errors = []
    notfound = []
    unprivileged = []

    for line in accesslog.split('\n'):
        # successful (status 200) GET requests; group(3) is the response size
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            # fields is None for non-matching lines, giving AttributeError
            if type(error) is AttributeError:
                logger.debug("attributeerrror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    if (ips != None):  # NOTE(review): always true -- a list is never None; probably meant len(ips) > 0
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if (useragents != None):  # NOTE(review): always true, as above
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
495
496#
497#
498#
499
def httpdsession():
    """Scrape apache mod_status (HTTPDSTATUS) and return a one-line HTML
    summary of uptime, request count and traffic.  Currently unused."""
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    # abbreviate the unit words in the uptime string
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 's', uptime)   # NOTE(review): 'd' was probably intended; kept as-is
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    # fix: removed an unreachable closetag('div', 1) that followed the return
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
514
515#
516#
517#
518
def smbd():
    """Summarise samba logins per user@host from the per-client log files."""
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    # one "log.<host>" file per client; skip rotated .gz/.old files
    # (fix: removed a large block of commented-out mtime-filtering code)
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")
    logger.debug("found log files " + str(files))
    n_auths = 0       # total number of logins from all users
    sigma_auths = []  # one "user@host" string per login

    for logfile in files: # one log file for each client
        logger.debug("looking at file " + logfile)
        # the machine (ip or hostname) this file represents (/var/log/samba/log.host)
        ip = re.search('log\.(.*)', logfile).group(1)
        host = resolve(ip)
        # record each successful authentication from this host
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(logfile))
        for match in matches:
            userhost = match + "@" + host
            sigma_auths.append(userhost)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
        # fix: was sigma_auths[0][0], which is the first *character* of the
        # user@host string (entries are plain strings, not [name, count] pairs)
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
576
577#
578#
579#
580
def postfix():
    """Summarise postfix mail activity: recipients and total message size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is (sender, size, recipient) across a from=/to= line pair
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []    # recipient of each message
    size = 0  # total bytes across all messages
    for message in messages:
        r.append(message[2])
        size += int(message[1])
    # fix: removed the sender list 's' -- it was collected per message and
    # then unconditionally overwritten, so it was dead code
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        if (len(set(r)) > 1):  # multiple distinct recipients
            r = orderbyfreq(r)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
610
611#
612#
613#
614
def zfs():
    """Summarise the zpool log: pool name, last scrub result and iostat
    allocation figures (log path from config['logs']['zfs'])."""
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    # pool name: first word on the line after the "---" separator
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble the date components captured from the scrub line
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # scrub is None when the log has no scrub line
        logger.debug("error getting scrub data")
    # NOTE(review): if the iostat regex finds no match, iostat is None and
    # the next line raises AttributeError -- confirm the log always has
    # iostat output, or guard like the scrub block above.
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
643
644#
645#
646#
647
def temp():
    """Report CPU core/package, system and hard-drive temperatures.

    CPU and system readings come from libsensors; drive temperatures are
    fetched from a hddtemp daemon on localhost (config['hddtemp']['port']).
    """
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        print(sensors.iter_detected_chips())
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        if coretemps:  # guard: no cores would divide by zero below
            # fix: the old reduce(lambda x, y: x[1] + y[1], coretemps) only
            # worked for exactly two cores -- after the first step the
            # accumulator is a number and x[1] raises TypeError
            core_avg = sum(t[1] for t in coretemps) / len(coretemps)
            logger.debug("average cpu temp is " + str(core_avg))
            coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    for drive in re.split('\|1}', output):  # hddtemp records are separated by "|1}"
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        except (AttributeError, ValueError):  # fix: was a bare except; no match / bad number
            pass
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    if config['hddtemp']['drives']:  # guard: the old code divided by zero with no drives
        hddavg = "{0:.2f}".format(hddtotal / float(len(config['hddtemp']['drives']))) + DEG
        logger.debug("avg disk temp is " + str(hddavg))
        data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):
        writedata("cores", coretemps)
    if (config['hddtemp']['drives'] != ''):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
714
715#
716#
717#
718
def du():
    """Report disk usage for each path in config['du-paths'] plus the
    change (delta) since the previous run, persisted in the 'alloc' log."""
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')  # previous run's "path\tbytes" lines
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # look up last run's allocation for this path
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            # no previous record (or unparsable) -> no delta shown
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)  # persist the current snapshot for next run

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")
748
749#
750#
751#
# Script start time and human-readable date/time stamps used in the report
# and log messages (timenow/datenow are refreshed again when the run ends).
starttime = datetime.datetime.now()
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")
755
def loadconf(configfile):
    """Merge settings from the YAML *configfile* into the module-level
    config dict (one level deep for nested dicts), then apply the -t/--to
    command-line override.  Errors are logged and swallowed so the
    built-in defaults still apply."""
    try:
        data = yaml.safe_load(open(configfile))
        for value in data:
            logger.debug(data[value])
            if(type(data[value]) == types.DictType):  # Python 2 only (types.DictType)
                # merge nested dicts key-by-key instead of replacing wholesale
                for key in data[value].iteritems():
                    config[value][key[0]] = key[1]
            else:
                config[value] = data[value]
        # output directory; used by header() when copying the stylesheet
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(type(parser.parse_args().to)))
        logger.debug(config['mail']['to'])
        if parser.parse_args().to is not None: config['mail']['to'] = parser.parse_args().to
        logger.debug(str(config))
    except Exception as e:
        logger.warning("error processing config: " + str(e))
773
774
try:
    __main__()
finally:
    # runs even if a section above crashed
    # rotate logs using systemd logrotate
    if (config['rotate'] == 'y'):
        subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
        logger.info("rotated logfiles")
    else:
        logger.debug("user doesn't want to rotate logs")
        if (config['rotate'] == 's'):  # 's' = simulate rotation only
            logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
            sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
            logger.debug(sim)

    # refresh timestamps so the final log line reports the finish time
    timenow = time.strftime("%H:%M:%S")
    datenow = time.strftime("%x")
    logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")