# 23a8ddae888d23186027c47b24d0b77693a802f1
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6from shutil import copyfile
7import yaml
8import ast
9import logging.handlers
10import types
11
12reload(sys)
13sys.setdefaultencoding('utf-8')
14
15scriptdir = os.path.dirname(os.path.realpath(__file__))
16
17
# field names for os.statvfs-derived disk usage figures (see getusage())
diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
# one hddtemp reading per drive (see temp())
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
# default configuration; values are overridden by logparse.yaml via loadconf()
config = {
    'output': '~/var/www/logparse/summary.html',  # where the html report is written
    'header': scriptdir + '/header.html',         # html header template
    'css': scriptdir + '/main.css',               # stylesheet copied next to the report
    'title': 'logparse',                          # value for $title$ in the header
    'maxlist': 10,                                # max items shown per list
    'maxcmd': 3,                                  # max commands shown in "top ..." lists
    'mail': {
        'to': '',
        'from': '',
        'subject': 'logparse from $hostname$'     # $placeholders$ expanded by subject()
    },
    'hddtemp': {
        'drives': ['/dev/sda'],                   # drives polled via the hddtemp daemon
        'port': 7634                              # hddtemp daemon tcp port
    },
    'du-paths': ['/', '/etc', '/home'],           # paths reported by du()
    'hostname-path': '/etc/hostname',             # file read by hostname()
    'logs': {                                     # short name -> logfile path (see readlog)
        'auth': '/var/log/auth.log',
        'cron': '/var/log/cron.log',
        'sys': '/var/log/syslog',
        'smb': '/var/log/samba',
        'zfs': '/var/log/zpool.log',
        'alloc': '/tmp/alloc',
        'postfix': '/var/log/mail.log',
        'httpd': '/var/log/apache2'
    }
}
49
50
# URL of the apache mod_status page scraped by httpdsession()
HTTPDSTATUS = "http://localhost/server-status"
# temporary file holding the css-inlined html produced by mailprep()
MAILPATH = "/mnt/andrew/temp/logparse/mail.html"
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
VERSION = "v0.1"
# suffix appended to every temperature reading in the report.
# (a previous assignment of DEG from u'\N{DEGREE SIGN}' was dead code,
# immediately overwritten by this one, and commented-out config overrides
# duplicating the defaults in `config` have been dropped.)
DEG = " °C".encode('unicode_escape')
71
# Set up logging: DEBUG to the console via basicConfig, plus a mirror of
# every message to the local syslog daemon through /dev/log.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')
loghandler = logging.handlers.SysLogHandler(address = '/dev/log')
# prefix syslog entries with the script name and pid, e.g. "logparse.py[1234]: ..."
loghandler.setFormatter(logging.Formatter(fmt='logparse.py[' + str(os.getpid()) + ']: %(message)s'))
logger.addHandler(loghandler)


# Get arguments
# -t/--to: optional mail recipient; when omitted the report is only written
# to config['output'] and no email is sent (see __main__).
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to
84
def __main__():
    """Entry point: run every log section, write the html report to
    config['output'], and email it when a recipient was given on the
    command line."""
    logger.info("Beginning log analysis at " + str(timenow))
    if to is None:
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    loadconf(scriptdir + "/logparse.yaml")

    global tempfile
    tempfile = open(config['output'], 'w+')
    tempfile.write(header(config['header']))
    opentag('div', 1, 'main')
    # each section appends its own <div class='section'> to the report
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(config['output'], MAILPATH)
    if to is not None:
        logger.debug("sending email")
        ms = subject(config['mail']['subject'])
        # pass arguments as a list and feed the body on stdin rather than
        # interpolating the subject/recipient into a shell command line
        # (the old "cat | mail -s '...'" string was injectable via either)
        cmd = ['/usr/bin/mail', '--debug-level=10',
               '-a', 'Content-type: text/html', '-s', ms, to]
        logger.debug(str(cmd))
        with open(MAILPATH, 'r') as mailfile:
            subprocess.call(cmd, stdin=mailfile)
        logger.info("sent email")
119
120
def writetitle(title): # write a section heading to the report
    """Emit *title* as an <h2> heading; reject empty or multi-line titles."""
    if not title or '\n' in title:
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
127
def writedata(subtitle, data = None): # write title and data to tempfile
    """Write a subtitle plus optional data to the report.

    With no data the subtitle becomes a bare <p>; with a single datum
    both go on one line; with several, the data are listed in a <ul>.
    """
    if subtitle == "":
        # fixed: was 'loggger.warning', a NameError whenever this path ran
        logger.warning("no subtitle provided.. skipping section")
        return

    if data is None or len(data) == 0:
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if len(data) == 1:
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
148
def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    """Write an opening html tag, optionally with id/class attributes.

    block=1 surrounds the tag with newlines for block-level elements.
    """
    pieces = ['<', tag]
    if id is not None:
        pieces.append(" id='" + id + "'")
    if cl is not None:
        pieces.append(" class='" + cl + "'")
    pieces.append('>')
    markup = ''.join(pieces)
    if block == 1:
        tempfile.write('\n' + markup + '\n')
    else:
        tempfile.write(markup)
160
def closetag(tag, block = 0): # write html closing tag
    """Write a closing html tag; block=1 adds surrounding newlines."""
    markup = "</" + tag + ">"
    tempfile.write(markup if block == 0 else "\n" + markup + "\n")
166
def tag(tag, block = 0, content = ""): # write a complete html element
    """Write <tag>content</tag> by delegating to opentag/closetag."""
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
171
def header(template): # return a parsed html header from file
    """Copy the stylesheet next to the output file and return the header
    template with its $placeholders$ substituted (see varfilter)."""
    try:
        copyfile(config['css'], config['dest'] + '/' + os.path.basename(config['css']))
        logger.debug("copied main.css")
    except Exception as e:
        # best-effort: report still renders without the stylesheet
        logger.warning("could not copy main.css - " + str(e))
    with open(template, 'r') as f:  # close the handle (was leaked before)
        headercontent = f.read()
    return varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
181
def subject(template):
    """Return the mail subject line with $placeholders$ expanded."""
    line = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + line)
    return line
186
def hostname(): # get the hostname
    """Return this machine's hostname, read from config['hostname-path']."""
    with open(config['hostname-path'], 'r') as hnfile:  # was leaked before
        # first line of the file, without any trailing newline
        return re.search('^(.*)\n*', hnfile.read()).group(1)
191
def resolve(ip): # try to resolve an ip to hostname
    """Return the short hostname for *ip*, or *ip* unchanged when it is
    not a valid IPv4 address or reverse lookup fails."""
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip)  # raises if text is not a dotted-quad ip
        hn = socket.gethostbyaddr(ip)[0].split(".")[0]  # resolve ip to hostname
    except (socket.error, socket.herror):
        # narrowed from a bare except: only lookup/parse failures are expected
        logger.debug("failed to resolve hostname for " + ip)
        return(ip)  # return ip if no hostname exists
    logger.debug("found hostname " + hn)
    return(hn)
202
def plural(noun, quantity): # return "1 noun" or "n nouns"
    """Format *quantity* and *noun*, appending 's' unless quantity is 1."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
208
def parsesize(num, suffix='B'): # return human-readable size from number of bytes
    """Convert a byte count into a human-readable string using binary
    (1024-based) prefixes, e.g. 1536 -> '1.5 KiB'."""
    value = num
    for prefix in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, prefix, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, 'Yi', suffix)
215
def readlog(path = None, mode = 'r'): # read file, substituting known paths
    """Read and return the contents of *path*.

    Short names like 'auth' or 'cron' are expanded via pathfilter.
    Returns '' for missing files and None when no path was given.
    """
    if path is None:
        logger.error("no path provided")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    if not os.path.isfile(path):
        logger.error(path + " does not exist")
        return ''
    with open(path, mode) as f:  # close the handle (was leaked before)
        return f.read()
227
def writelog(path = None, content = "", mode = 'w'): # write file, substituting known paths
    """Write *content* to *path* (short names expanded via pathfilter).

    The header comment previously said "read file"; this writes.
    """
    if path is None or content is None:
        logger.error("invalid usage of writelog")
        return
    path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
    # use a context manager and stop shadowing the 'file' builtin
    with open(path, mode) as f:
        f.write(content)
237
def getusage(path): # Get disk usage statistics
    """Return a diskstat tuple (capacity, used, free, percent used) for
    the filesystem containing *path*; sizes are in bytes."""
    st = os.statvfs(path)
    bsize = float(st.f_bsize)
    cap = bsize * st.f_blocks            # total capacity
    free = bsize * st.f_bfree            # free blocks (not usable space)
    alloc = bsize * (st.f_blocks - st.f_bfree)  # allocated space
    return diskstat(cap, alloc, free, alloc / cap * 100)
245
def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    """Return unique elements of *l* as "element (count)" strings, most
    frequent first.

    Replaces an O(n^2) per-element .count() loop with one Counter pass;
    tie order among equal counts was already unspecified (the old code
    iterated a set).
    """
    from collections import Counter
    return [item + ' (' + str(count) + ')'
            for item, count in Counter(l).most_common()]
254
def addtag(l, tag): # add prefix and suffix tags to each item in a list
    """Wrap every item of *l* in <tag>...</tag> and return the new list."""
    open_t = '<' + tag + '>'
    close_t = '</' + tag + '>'
    return [open_t + item + close_t for item in l]
258
def truncl(input, limit): # truncate list
    """Return *input* capped at *limit* items, appending a "+ N more"
    marker when elements were dropped; the original list is untouched."""
    if len(input) <= limit:
        return(input)
    dropped = len(input) - limit
    return(input[:limit] + ["+ " + str(dropped) + " more"])
267
def mailprep(inputpath, output, *stylesheet):
    """Inline the external stylesheet into the html at *inputpath* (via
    premailer) and write the result to *output*, ready for mailing."""
    logger.debug("converting stylesheet to inline tags")
    html = readlog(inputpath)
    logger.debug(config['css'])
    pm = premailer.Premailer(html, external_styles=config['css'])
    # note: the old code assigned this to a *local* named MAILOUT, which
    # never updated the module-level MAILOUT; keep it an honest local
    inlined = pm.transform()
    logger.info("converted stylesheet to inline tags")
    with open(output, 'w') as outfile:  # no longer shadows the 'file' builtin
        outfile.write(inlined)
    logger.info("written to temporary mail file")
279
280
281
282#
283#
284#
285
def sshd():
    """Summarise successful ssh public-key logins from the auth log."""
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = []  # [user@host, login count] pairs in order of first login
    data = []
    num = len(matches)  # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        # compare literally instead of using userhost as a regex pattern:
        # dots in an ip address previously matched any character
        exists = [i for i, item in enumerate(users) if item[0] == userhost]
        if exists == []:
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if len(users) == 1:  # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > config['maxlist']:  # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - config['maxlist'] - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
322
323#
324#
325#
326
def sudo():
    """Summarise sudo sessions and the most frequent sudo commands."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = len(umatches)  # total number of sessions
    users = []  # [username, session count] pairs
    data = []
    for match in umatches:
        user_m = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match)
        if user_m is None:
            # session opened for a non-root user: the old code crashed here
            # with AttributeError on .group(1); skip those lines instead
            continue
        user = user_m.group(1)
        # literal comparison instead of treating the username as a regex
        exists = [i for i, item in enumerate(users) if item[0] == user]
        if exists == []:
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if len(users) == 1:
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if len(commands) > 0:
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
365
366#
367#
368#
369
def cron():
    """Summarise cron jobs run and the most frequent cron commands."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = len(matches)
    commands = [str(match) for match in matches]
    logger.debug("found cron command " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    if num > 0:  # was 'matches > 0', which compared a list to an int
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, config['maxcmd'])
        writedata("top cron commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished cron section")
391
392#
393#
394#
395
def nameget():
    """Summarise successful and failed nameget downloads from syslog."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = len(failed)
    l_f = list(failed)
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = len(succ)
    l_s = list(succ)
    # fixed: this line previously logged the *failed* list (l_f) again
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(l_s, config['maxcmd']))
    writedata(str(n_f) + " failed", truncl(l_f, config['maxcmd']))
    closetag('div', 1)
    logger.info("finished nameget section")
418
419#
420#
421#
422
def httpd():
    """Summarise apache traffic: requests, clients, user agents and bytes
    transferred, parsed from the access and error logs."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))
    data_b = 0  # total bytes transferred
    ips = []
    files = []
    useragents = []

    for line in accesslog.split('\n'):
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except AttributeError as error:
            # fields is None for lines that do not match (non-GET, non-200, blank)
            logger.debug("attributeerrror: " + str(error))
        except Exception as error:
            logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if a > 0:
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, config['maxcmd'])
        writedata(str(a) + " requests", files)
    if ips:  # was 'ips != None', which is always true for a list
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, config['maxcmd'])
        writedata(n_ip + " clients", ips)
    if useragents:  # was 'useragents != None', likewise always true
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, config['maxcmd'])
        writedata(n_ua + " devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
483
484#
485#
486#
487
def httpdsession():  # currently unused; see httpd()
    """Return a one-line html summary scraped from the apache mod_status
    page at HTTPDSTATUS."""
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    # fixed: days were abbreviated to 's', colliding with seconds
    uptime = re.sub(' day[s]', 'd', uptime)
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    # the old opentag() call was never balanced: its closetag() sat after
    # the return and was unreachable, so both have been removed
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
501 # logger.info("finished httpd section")
502
503#
504#
505#
506
def smbd():
    """Summarise samba logins collected from the per-client log files."""
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    # one log file per client machine, skipping rotated .gz/.old files
    files = glob.glob(config['logs']['smb'] + "/log.*[!\.gz][!\.old]")
    logger.debug("found log files " + str(files))
    n_auths = 0       # total number of logins from all users
    sigma_auths = []  # one "user@host" string per successful login

    for file in files:  # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1)  # /var/log/samba/log.host -> host
        host = resolve(ip)

        # record each successful authentication as user@host
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            sigma_auths.append(match + "@" + host)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if len(sigma_auths) == 1:  # if only one user, do not display no of logins for this user
        # fixed: sigma_auths holds plain strings, so [0][0] previously
        # appended only the first *character* of the user@host entry
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else:  # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, config['maxcmd'])
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
564
565#
566#
567#
568
def postfix():
    """Summarise mail sent by postfix: recipients and total size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is a (sender, size, recipient) tuple
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    recipients = []
    size = 0
    for message in messages:
        recipients.append(message[2])
        size += int(message[1])
    # note: the old code also accumulated senders into 's', then silently
    # overwrote that list with the unique recipients; the unused work is gone
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if len(recipients) > 0:
        if len(set(recipients)) > 1:  # several distinct recipients: list with counts
            r = orderbyfreq(recipients)
            r = truncl(r, config['maxcmd'])
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + recipients[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
598
599#
600#
601#
602
def zfs():
    """Summarise zpool status: scrub results and space usage.

    Parses config['logs']['zfs'], which appears to hold combined
    `zpool status`/`zpool iostat`-style output — presumably written by an
    external job (TODO confirm where this file comes from).
    """
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    # pool name: first word on the line after the "---" separator
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    # scrub summary, e.g. "scrub repaired 0 in 3h12m with 0 errors on ..."
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    # alloc/free: first two columns after the pool name on the iostat line
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble the date from the captured fields (order rearranged)
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # scrub is None (AttributeError) when no scrub line was found
        logger.debug("error getting scrub data")
    # NOTE(review): if the iostat regex also fails this raises and aborts
    # the whole report — there is no guard here
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
631
632#
633#
634#
635
def temp():
    """Report cpu core/package, system and hard-drive temperatures.

    CPU and system readings come from lm-sensors; drive temperatures are
    read from a running hddtemp daemon on localhost (config['hddtemp']).
    """
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        print(sensors.iter_detected_chips())
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        # average of the core readings. The old reduce() version broke with
        # more than two cores: after the first step the accumulator was a
        # float, and float[1] raises TypeError.
        if coretemps:
            core_avg = sum(t[1] for t in coretemps) / len(coretemps)
            logger.debug("average cpu temp is " + str(core_avg))
            coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', config['hddtemp']['port']))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    config['hddtemp']['drives'] = []
    for drive in re.split('\|1}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            config['hddtemp']['drives'].append(drivetemp(name, temp, DEG))
        except AttributeError:
            pass  # this chunk held no drive record (fields is None)
    hddtotal = 0
    data = []
    for drive in config['hddtemp']['drives']:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(config['hddtemp']['drives'])) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    if config['hddtemp']['drives']:  # guard the division when no drives responded
        hddavg = "{0:.2f}".format(hddtotal / float(len(config['hddtemp']['drives']))) + DEG
        logger.debug("avg disk temp is " + str(hddavg))
        data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if systemp != 0:
        writedata("sys: " + str(systemp) + DEG)
    if coretemps != '':
        writedata("cores", coretemps)
    if config['hddtemp']['drives'] != '':
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
702
703#
704#
705#
706
def du():
    """Report disk usage for each path in config['du-paths'], including
    the change since the previous run (stored in the 'alloc' log)."""
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')
    contentnew = ""
    for p in config['du-paths']:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # previous usage was recorded as "<path>\t<bytes>" last run
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except AttributeError:
            pass  # no previous record for this path (search returned None)
        except ValueError:
            pass  # previous record was not a number
        logger.debug("delta is " + str(delta))
        if delta is None:
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], path[1:])

    closetag('div', 1)
    logger.info("finished du section")
736
737#
738#
739#
740
# timestamps for this run, substituted into the report via varfilter
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

# short log name -> full path map used by readlog()/writelog();
# keys are regex-escaped and combined into one alternation pattern
pathfilter = {"auth": config['logs']['auth'], "cron": config['logs']['cron'], "sys": config['logs']['sys'], "postfix": config['logs']['postfix'], "smb": config['logs']['smb'], "zfs": config['logs']['zfs'], "alloc": config['logs']['alloc'], "httpd": config['logs']['httpd'], "header": config['header']}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
pathpattern = re.compile("|".join(pathfilter.keys()))

# $placeholder$ -> value map for the header template and mail subject
varfilter = {"$title$": config['title'], "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION, "$css$": config['css']}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
751
def loadconf(configfile):
    """Merge settings from the yaml file at *configfile* into `config`.

    Nested dicts (e.g. 'mail', 'hddtemp') are merged key-by-key; scalars
    and lists replace the default outright.  Any error is logged and the
    defaults are kept.
    """
    try:
        data = yaml.safe_load(open(configfile))
        for value in data:
            logger.debug(data[value])
            # isinstance instead of 'type(...) == types.DictType'
            if isinstance(data[value], dict):
                for key, val in data[value].iteritems():
                    config[value][key] = val
            else:
                config[value] = data[value]
        # derived setting: directory the report (and css copy) go into
        config['dest'] = os.path.dirname(config['output'])
        logger.debug(str(config))
    except Exception as e:
        logger.warning("error processing config: " + str(e))
766
767
# Run the report; always force a logrotate afterwards so the next run
# starts from fresh log files, even if a section raised.
try:
    __main__()
finally:
    subprocess.call("/usr/sbin/logrotate -f /etc/logrotate.conf", shell=True)
    logger.info("rotated logfiles")