19dba198cf926ace2dbdcc9048e33c4ffc531bf7
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer, locale
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11import traceback # debugging only
12
# Python 2 hack: re-expose sys.setdefaultencoding (hidden after interpreter
# start-up) so all implicit str<->unicode conversions use UTF-8.
reload(sys)
sys.setdefaultencoding('utf-8') # force utf-8 because anything else should die

locale.setlocale(locale.LC_ALL, '') # inherit system locale

# absolute directory containing this script (used to locate bundled assets)
scriptdir = os.path.dirname(os.path.realpath(__file__))
19
20
# record types used by getusage() and the temperature section
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
drivetemp = namedtuple('drivetemp', ['path', 'name', 'temp', 'units'])

# Default configuration; loadconf() merges logparse.yaml over these values.
config = {
    'output': '~/var/www/logparse/summary.html',    # html report destination
    'header': scriptdir + '/header.html',           # html header template
    'css': scriptdir + '/main.css',                 # stylesheet copied next to output
    'title': 'logparse',                            # value for $title$ substitution
    'maxlist': 10,                                  # max items shown per list
    'maxcmd': 3,                                    # max commands shown per section
    'resolve-domains': 'fqdn',                      # global hostname display mode (see resolve())
    'mail': {
        'to': '',                                   # empty -> print to stdout instead of mailing
        'from': '',
        'subject': 'logparse from $hostname$'       # supports $variable$ substitution
    },
    'rotate': 'y',                                  # 'y' rotate logs after run, 's' simulate only
    'hddtemp': {
        'drives': ['/dev/sda'],                     # drives to report from hddtemp daemon
        'port': 7634,                               # hddtemp daemon tcp port
        'show-model': False,                        # include drive model in output
    },
    # per-section hostname display overrides (empty -> use global resolve-domains)
    'apache': {
        'resolve-domains': '',
    },
    'sshd': {
        'resolve-domains': '',
    },
    'smbd': {
        'resolve-domains': '',
    },
    'httpd': {
        'resolve-domains': '',
    },
    'du': {
        'paths': ['/', '/etc', '/home'],            # paths to track disk usage for
        'force-write': 'n',                         # 'y' -> write usage snapshot even without rotation
    },
    'hostname-path': '/etc/hostname',               # file read by hostname()
    # short aliases for log locations, substituted by readlog()/writelog()
    'logs': {
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/var/log/du.log',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}
70
71
# Miscellaneous module-level constants
HTTPDSTATUS = "http://localhost/server-status"      # apache mod_status page (httpdsession)
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"    # temp file holding the inlined-css mail body
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
VERSION = "v0.1"
#DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
DEG = "°".encode('unicode_escape')                  # degree sign appended to temperatures
CEL = "C"                                           # temperature unit suffix

# Set up logging: DEBUG to stderr via basicConfig, plus a syslog handler
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)


# Command-line arguments (parsed lazily wherever needed)
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-f', '--function', help='run a specified function with parameters (for debugging purposes',required=False)
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
94
def __main__():
    """Entry point: load config, run every report section, then mail/output.

    Builds an html report at config['output'] by calling each section
    function in turn, then inlines the stylesheet and optionally pipes the
    result to /usr/bin/mail. Sets the module-level globals (LOCALDOMAIN,
    pathfilter/pathpattern, varfilter/varpattern, tempfile) that the
    section functions rely on.
    """
    logger.info("Beginning log analysis at " + str(datenow) + ' ' + str(timenow))

    loadconf(scriptdir + "/logparse.yaml")

    # check if user wants to test an isolated function
    debugfunc = parser.parse_args().function
    if debugfunc is not None:
        logger.debug("executing a single function: " + debugfunc)
        # SECURITY: eval of a command-line string — debugging convenience
        # only; never expose this to untrusted input
        eval(debugfunc)
        sys.exit()

    if not config['mail']['to']:
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + config['mail']['to'])

    global LOCALDOMAIN
    LOCALDOMAIN = getlocaldomain()

    # pathfilter/pathpattern map short log aliases (e.g. "auth") to real
    # paths; used by readlog()/writelog()
    global pathfilter
    global pathpattern
    pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
    pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
    pathpattern = re.compile("|".join(pathfilter.keys()))

    # varfilter/varpattern provide $variable$ substitution for the header
    # template and the mail subject
    global varfilter
    global varpattern
    varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": os.path.relpath(config['css'], os.path.dirname(config['output']))}
    varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
    varpattern = re.compile("|".join(varfilter.keys()))

    # open the report file once; every section writes through this handle
    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if (config['mail']['to']):
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # NOTE(review): subject and recipient are interpolated into a shell
        # string — safe only because both come from trusted local config
        cmd = "/bin/cat " + MAILPATH + " | /usr/bin/mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + config['mail']['to']
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
151 logger.info("sent email")
152
153
def writetitle(title): # write a section heading to the report
    """Emit *title* as an <h2> heading; empty or multi-line titles are rejected."""
    if (title == '' or '\n' in title):
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
160
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write a subtitle paragraph and an optional bullet list to the report.

    subtitle -- short label for the entry (must be non-empty)
    data     -- optional list of strings; a single item is shown inline,
                multiple items become a <ul>
    """
    if (subtitle == ""):
        # bugfix: was `loggger.warning(...)` — a NameError whenever this
        # guard path was taken
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None or len(data) == 0):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            # single datum: append it to the subtitle line
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                tag('li', 0, datum)
            closetag('ul', 1)
180
def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    """Write an opening html tag to the report, with optional id and class.

    block=1 surrounds the tag with newlines (for block-level elements).
    """
    attrs = ''
    if id != None:
        attrs += " id='" + id + "'"
    if cl != None:
        attrs += " class='" + cl + "'"
    markup = '<' + tag + attrs + '>'
    if block == 1:
        tempfile.write('\n' + markup + '\n')
    else:
        tempfile.write(markup)
192
def closetag(tag, block = 0): # write html closing tag
    """Write a closing html tag; block mode surrounds it with newlines."""
    markup = "</" + tag + ">"
    tempfile.write(markup if block == 0 else "\n" + markup + "\n")
198
def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
    """Write a complete element <tag>content</tag> to the report file."""
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
203
def header(template): # return a parsed html header from file
    """Read the header template, substitute $variables$, and return the html.

    Also copies the stylesheet next to the output file so its relative link
    works; a failed copy is logged but not fatal.
    """
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        logger.warning("could not copy main.css - " + str(e))
    # `with` closes the template handle (it was previously leaked)
    with open(template, 'r') as templatefile:
        headercontent = templatefile.read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent
213
def subject(template):
    """Return the mail subject line with $variables$ substituted."""
    line = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + line)
    return line
218
def hostname(): # get the hostname of current server
    """Return this machine's hostname read from config['hostname-path']."""
    # `with` closes the handle (it was previously leaked)
    with open(config['hostname-path'], 'r') as hnfile:
        hn = re.search('^(.*)\n*', hnfile.read()).group(1)
    return hn
223
def getlocaldomain(): # get the parent fqdn of current server
    """Return the domain part of this host's fqdn, or '' if unavailable.

    Note: if socket.getfqdn() returns localhost, make sure the first entry
    in /etc/hosts contains the fqdn.
    """
    parts = socket.getfqdn().split('.', 1)
    if len(parts) == 2:
        return parts[-1]
    logger.warning('Could not get domain of this server, only hostname. Please consider updating /etc/hosts')
    return ''
231
def resolve(ip, fqdn = 'host-only'): # try to resolve an ip to hostname
    """Resolve *ip* to a hostname formatted according to *fqdn*.

    Possible values for fqdn:
        fqdn            show full hostname and domain
        fqdn-implicit   show hostname and domain unless local
        host-only       only show hostname
        ip              never resolve anything
    A falsy fqdn falls back to the global 'resolve-domains' config, so
    per-section resolve-domains values take priority over the global one.
    Returns the input unchanged whenever resolution fails.
    """

    if not fqdn:
        fqdn = config['resolve-domains']

    if fqdn == 'ip':
        return(ip)

    try:
        socket.inet_aton(ip) # raises socket.error unless `ip` is a dotted-quad address
        hn = socket.gethostbyaddr(ip)[0] # resolve ip to hostname
        # NOTE(review): hn.split('.', 1)[1] raises IndexError for a dotless
        # hostname; that lands in the generic handler below and returns `ip`
        if fqdn == 'fqdn-implicit' and hn.split('.', 1)[1] == LOCALDOMAIN:
            return(hn.split('.')[0])
        elif fqdn == 'fqdn' or fqdn == 'fqdn-implicit':
            return(hn)
        elif fqdn == 'host-only':
            return(hn.split('.')[0])
        else:
            logger.warning("invalid value for fqdn config")
            return(hn)
    except socket.herror:
        # cannot resolve ip
        logger.debug(ip + " cannot be found, might not exist anymore")
        return(ip)
    except (OSError, socket.error): # socket.error for Python 2 compatibility
        # inet_aton rejected the input: it is already a hostname
        logger.debug(ip + " is already a hostname")
        return(ip)
    except Exception as err:
        logger.warning("failed to resolve hostname for " + ip + ": " + str(err))
        return(ip) # return ip if no hostname exists
269
def plural(noun, quantity): # return "1 noun" or "n nouns"
    """Format a count with its noun, appending 's' unless quantity is exactly 1."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
275
def parsesize(num, suffix='B'): # return human-readable size from number of bytes
    """Convert a byte count to a human-readable string using binary prefixes."""
    value = num
    for prefix in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, prefix, suffix)
        value /= 1024.0
    # anything past zebibytes falls through to yobibytes
    return "%.1f%s%s" % (value, 'Yi', suffix)
282
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Return the contents of *path* after expanding known log aliases.

    Returns None when no path is given and '' when the file does not exist.
    """
    if (path == None):
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if (os.path.isfile(path) is False):
        logger.error(path + " does not exist")
        return ''
    # `with` closes the handle (it was previously leaked)
    with open(path, mode) as logfile:
        return logfile.read()
294
def writelog(path = None, content = "", mode = 'w'): # write content to file, substituting known paths
    """Write *content* to *path* (log aliases expanded via pathpattern)."""
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    # `with` guarantees the handle is closed and avoids shadowing builtin `file`
    with open(path, mode) as outfile:
        outfile.write(content)
    logger.debug("written to file " + path)
304 logger.debug("written to file " + path)
305
def getusage(path): # Get disk usage statistics
    """Return a diskstat namedtuple (cap, alloc, free, ratio %) for *path*."""
    st = os.statvfs(path)
    capacity = float(st.f_bsize * st.f_blocks)              # total filesystem size
    used = float(st.f_bsize * (st.f_blocks - st.f_bfree))   # allocated bytes
    unused = float(st.f_bsize * st.f_bfree)                 # free blocks (not usable space)
    return diskstat(capacity, used, unused, used / capacity * 100)
313
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Return unique elements as "elem (count)" strings, most frequent first."""
    occurrences = l[:]
    uniques = list(set(l))
    uniques.sort(key=lambda e: occurrences.count(e))  # ascending by frequency
    uniques.reverse()                                 # most frequent first
    return [e + ' (' + str(occurrences.count(e)) + ')' for e in uniques]
322
def addtag(l, tag): # add prefix and suffix tags to each item in a list
    """Wrap every string in *l* in <tag>...</tag> and return the new list."""
    prefix, suffix = '<' + tag + '>', '</' + tag + '>'
    return [prefix + item + suffix for item in l]
326
def truncl(input, limit): # truncate list
    """Cap *input* at *limit* items, appending '+ N more' when truncated."""
    if len(input) <= limit:
        return(input)
    shortened = input[:limit]
    shortened.append("+ " + str(len(input) - limit) + " more")
    return(shortened)
335
def mailprep(inputpath, output, *stylesheet):
    """Inline the external stylesheet into the report html and write the
    result to *output*, ready to be piped to the mail client."""
    logger.debug("converting stylesheet to inline tags")
    html = readlog(inputpath)
    logger.debug(config['css'])
    pm = premailer.Premailer(html, external_styles=config['css'])
    inlined = pm.transform()
    logger.info("converted stylesheet to inline tags")
    # `with` closes the handle and avoids shadowing the builtin `file`
    with open(output, 'w') as outfile:
        outfile.write(inlined)
    logger.info("written to temporary mail file")
347
348
349
350#
351#
352#
353
def sshd():
    """Report ssh public-key logins grouped by user@host."""
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = [] # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip, fqdn=config['sshd']['resolve-domains'])
        # NOTE(review): userhost is used as a regex pattern here; special
        # characters in a hostname could mis-match
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']: # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
390
391#
392#
393#
394
def sudo():
    """Report sudo sessions per user plus the most frequent sudo commands."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []
    data = []
    for match in umatches:
        # NOTE(review): .group(1) raises AttributeError if a "session opened"
        # line is not for user root / lacks the expected shape — TODO confirm
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        # NOTE(review): the username is used as a regex pattern here
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
433
434#
435#
436#
437
def cron():
    """Report the number of cron jobs run and the most frequent commands."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # capture the command of each CMD line, ignoring bare `cd` invocations
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = sum(1 for line in matches)
    commands = []
    for match in matches:
        commands.append(str(match))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    # bugfix: was `if (matches > 0)` — comparing a list with an int is
    # always True on Python 2 and a TypeError on Python 3
    if (num > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", commands)
    closetag('div', 1)
    logger.info("finished cron section")
459
460#
461#
462#
463
def nameget():
    """Report successful and failed `nameget` downloads found in syslog."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    logger.debug("reading syslog.. this may take a while")
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = sum(1 for i in failed)
    l_f = []
    for i in failed:
        l_f.append(i if i else '[no destination]')
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = sum(1 for i in succ)
    l_s = []
    for i in succ:
        l_s.append(i)
    # bugfix: this log line previously printed the *failed* list (l_f)
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxlist']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxlist']))
    closetag('div', 1)
    logger.info("finished nameget section")
487
488#
489#
490#
491
def httpd():
    """Summarise apache access/error logs: requests, clients, devices, bytes."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))
    data_b = 0          # total bytes transferred (status-200 GETs only)
    ips = []
    files = []
    useragents = []
    errors = []
    notfound = []
    unprivileged = []

    for line in accesslog.split('\n'):
        # only matches successful (200) GET requests in combined log format
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(resolve(fields.group(1), fqdn=config['httpd']['resolve-domains']))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            data_b += int(fields.group(3))
        except Exception as error:
            if type(error) is AttributeError: # this line is not an access log
                pass
            else:
                logger.warning("error processing httpd access log: " + str(error))
                traceback.print_exc()
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxlist'])
        # NOTE(review): plural() is called with a leading space and, below,
        # with str counts (so the 's' suffix is always added) — TODO confirm
        writedata(plural(" request", a), files)
    if (ips != None):   # NOTE(review): always true for a list; likely meant len(ips) > 0
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxlist'])
        writedata(plural(" client", n_ip), ips)
    if (useragents != None):    # NOTE(review): always true for a list
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxlist'])
        writedata(plural(" device", n_ua), useragents)

    writedata(data_h + " transferred")
    writedata(plural(" error", e))

    closetag('div', 1)
    logger.info("finished httpd section")
548
549#
550#
551#
552
def httpdsession():
    """Return a one-line summary scraped from apache's mod_status page.

    NOTE(review): not called from __main__; the closetag() after the return
    statement is unreachable, and the opentag() above writes to the report
    without a matching close.
    """
    # logger.debug("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    # abbreviate the human-readable uptime units
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 's', uptime)
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
    closetag('div', 1)  # unreachable (after return)
    # logger.info("finished httpd section")
567
568#
569#
570#
571
def smbd():
    """Report samba logins per user@host from the per-client samba logs."""
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    # one logfile per client (/var/log/samba/log.<host>); skip rotated copies
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]") # find list of logfiles
    logger.debug("found log files " + str(files))
    n_auths = 0         # total number of logins from all users
    sigma_auths = []    # one "user@host" entry per successful login

    for file in files: # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip, fqdn=config['smbd']['resolve-domains'])
        if (host == ip and (config['smbd']['resolve-domains'] or config['resolve-domains']) != 'ip'): # if ip has disappeared, fall back to a hostname from logfile
            newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
            if (len(set(newhost)) == 1): # all hosts in one file should be the same
                host = newhost[0].lower()

        # count each successful authentication as one user@host login
        matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
        for match in matches:
            sigma_auths.append(match + "@" + host)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one login, show it inline without a count
        # bugfix: was sigma_auths[0][0], which printed only the first
        # character of the "user@host" string
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxlist'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
633
634#
635#
636#
637
def postfix():
    """Report mail sent by postfix: recipients and total size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is (sender, size, recipient) from consecutive log lines
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []      # recipients
    s = []      # senders (collected but overwritten below — see NOTE)
    size = 0
    for message in messages:
        r.append(message[2])
        s.append(message[0])
        size += int(message[1])
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        # NOTE(review): this clobbers the senders list with the set of
        # unique recipients; it is only used to choose the display branch
        s = list(set(r)) # unique recipients
        if (len(s) > 1):
            r = orderbyfreq(r)
            r = truncl(r, config['maxlist'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
667
668#
669#
670#
671
def zfs():
    """Report zfs pool usage and the latest scrub results from zpool.log."""
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    # NOTE(review): .group() raises AttributeError if the log lacks the
    # expected `zpool status`/`zpool iostat` layout — TODO confirm format
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    scrub = re.search('.*scrub repaired (\d*).* in .*\d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble "<weekday> <day> <month>" from the matched date parts
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except Exception as e:
        logger.debug("error getting scrub data: " + str(e))
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
698
699#
700#
701#
702
def temp():
    """Report cpu core/package/system temperatures (via lm-sensors) and
    drive temperatures (via a running `hddtemp` daemon)."""
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')

    # cpu temp

    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        # bugfix: the old reduce(lambda x, y: x[1] + y[1], coretemps) crashed
        # with more than two cores (the accumulator becomes a float after the
        # first step and is then subscripted)
        core_avg = sum(t[1] for t in coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG + CEL for x in coretemps]
    finally:
        sensors.cleanup()

    # drive temp

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...

    sumtemp = 0.0       # sum of temps of the configured drives
    data = ""           # raw daemon response
    output = []         # per-drive display strings
    hsock = None

    try:
        hsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        hsock.connect(("localhost", int(config['hddtemp']['port'])))
        logger.debug("tcp socket on port " + str(int(config['hddtemp']['port'])) + " opened for `hddtemp` (ensure daemon is running)")
        hsock.sendall('') # send dummy packet and shut write conn
        hsock.shutdown(socket.SHUT_WR)

        while 1:
            line = hsock.recv(1024)
            if line == "": # exit on blank line
                break
            logger.debug("received line " + str(line))
            data += line
        hsock.close()
        logger.debug("closed connection, having received " + str(sys.getsizeof(data)) + " bytes")

        data = data.lstrip('|').rstrip('|') # remove leading & trailing `|`
        drives = data.split('|' * 2) # split into drives

        for drive in drives:
            fields = drive.split('|')   # path|model|temp|units
            if fields[0] in config['hddtemp']['drives']:
                output.append(fields[0] + (' (' + fields[1] + ')' if config['hddtemp']['show-model'] else '')+ ': ' + fields[2] + DEG + fields[3])
                sumtemp += int(fields[2])
                logger.debug("added drive " + fields[0])
            else:
                logger.debug("ignoring drive " + fields[0])

        # bugfix: the old expression referenced an undefined name `e` and
        # called int() on a formatted float, so a ValueError/NameError meant
        # the average was never reported; also divide by the number of
        # drives actually summed, not all drives seen
        hddavg = "%.1f" % (sumtemp / float(len(output))) + DEG + output[0][-1:] # use units of first drive (last character of output)
        logger.debug("avg disk temp is " + str(hddavg))
        output.append("avg: " + str(hddavg))
    except Exception as ex:
        logger.debug("failed getting hddtemps with error " + str(ex))
    finally:
        if hsock is not None:   # socket creation itself may have failed
            hsock.close()

    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):
        writedata("cores", coretemps)
    if (config['hddtemp']['drives'] != ''):
        writedata("disks", output)

    closetag('div', 1)
    logger.info("finished temp section")
789
790#
791#
792#
793
def du():
    """Report disk usage for configured paths, with the delta since last run."""
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')  # previous run's usage snapshot
    contentnew = ""
    for path in config['du']['paths']:
        alloc_f = getusage(path).alloc
        delta = None
        try:
            # look up the previous allocation for this path, if recorded
            alloc_i = re.search(path + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except (AttributeError, ValueError, re.error):
            # narrowed from a bare except: no previous entry (search returned
            # None), unparsable number, or a path with regex metacharacters
            pass
        if (delta == None):
            out.append([path, "used " + parsesize(alloc_f)])
        else:
            out.append([path, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (path + '\t' + str(alloc_f) + '\n')
    if config['rotate'] == 'y' or config['du']['force-write'] == 'y':
        # logs are about to be rotated (or write is forced): save the snapshot
        writelog('alloc', contentnew)

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], path[1:])

    closetag('div', 1)
    logger.info("finished du section")
823
824#
825#
826#
# timestamps captured at start-up: wall-clock start (for the run duration)
# plus human-readable time/date strings used in substitutions and logging
starttime = datetime.datetime.now()
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")
830
def loadconf(configfile):
    """Merge settings from the yaml *configfile* over the `config` defaults.

    Top-level dict values are merged key-by-key so partial sections work;
    scalar values replace the default outright. Also derives config['dest']
    from the output path and applies the --to command-line override.
    Any error is logged and the defaults are kept.
    """
    try:
        # `with` closes the handle (it was previously leaked)
        with open(configfile) as f:
            data = yaml.safe_load(f)
        for value in data:
            # isinstance(..., dict) works on Python 2 and 3; the old
            # types.DictType / iteritems() were Python-2-only
            if isinstance(data[value], dict):
                for key, val in data[value].items():
                    config[value][key] = val
            else:
                config[value] = data[value]
        config['dest'] = os.path.dirname(config['output'])
        if parser.parse_args().to is not None: config['mail']['to'] = parser.parse_args().to
    except Exception as e:
        logger.warning("error processing config: " + str(e))
844
845
# Run the report; whatever happens, handle log rotation afterwards
try:
    __main__()
finally:
    # rotate logs using systemd logrotate (skipped in single-function debug mode)
    if parser.parse_args().function is None:
        if (config['rotate'] == 'y'):
            subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
            logger.info("rotated logfiles")
        else:
            logger.debug("user doesn't want to rotate logs")
            if (config['rotate'] == 's'):
                # simulate-only mode: show what logrotate would do
                logger.debug("Here is the output of `logrotate -d /etc/logrotate.conf` (simulated):")
                sim = subprocess.check_output("/usr/sbin/logrotate -d /etc/logrotate.conf", shell=True)
                logger.debug(sim)

        timenow = time.strftime("%H:%M:%S")
        datenow = time.strftime("%x")
        logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")
863 logger.info("finished parsing logs at " + datetime.datetime.now().strftime("%x %H:%M:%S") + " (" + str(datetime.datetime.now() - starttime) + ")")