#! /usr/bin/python

import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
from sys import stdin
from collections import namedtuple, defaultdict

diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])


AUTHPATH = "/var/log/auth.log"
CRONPATH = "/var/log/cron.log"
SYSPATH = "/var/log/syslog"
SMBDDIR = "/var/log/samba"
ZFSPATH = "/var/log/zpool.log"
ALLOCPATH = "/tmp/alloc"
POSTFIXPATH = "/var/log/mail.log"
HTTPDSTATUS = "http://localhost/server-status"
HTTPDDIR = "/var/log/apache2"
HOSTNAMEPATH = "/etc/hostname"
DUPATHS = ["/home/andrew", "/mnt/andrew"]
HDDTEMPS = ["/dev/sda", "/dev/sdc", "/dev/sdd", "/dev/sde"]
HDDTEMPPORT = 7634
SUMMARYPATH = "/mnt/andrew/temp/logparse-test.html"
OUTPUTPATH = "/mnt/andrew/temp/logparse-test2.html"
MAILPATH = "/mnt/andrew/temp/log-parse-test-3.html"
HEADERPATH = os.path.dirname(os.path.realpath(__file__)) + "/header.html"
STYLEPATH = os.path.dirname(os.path.realpath(__file__)) + "/main.css"
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
TITLE = "logparse"
MAXLIST = 10
CMDNO = 3
MAILSUBJECT = "logparse from $hostname$"
VERSION = "v0.1"
# DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
DEG = 'C'

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')

# Get arguments
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)', required=False)
to = parser.parse_args().to

def __main__():
    logger.info("Beginning log analysis at " + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    global tempfile
    tempfile = open(SUMMARYPATH, 'w+')
    tempfile.write(header(HEADERPATH))
    opentag('div', 1, 'main')
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(SUMMARYPATH, MAILPATH)
    if (to != None):
        logger.debug("sending email")
        ms = subject(MAILSUBJECT)
        cmd = "cat " + MAILPATH + " | mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")


def writetitle(title):
    if (title == '' or '\n' in title):
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)

def writedata(subtitle, data = None): # write title and data to tempfile
    if (subtitle == ""):
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)

def opentag(tag, block = 0, id = None, cl = None): # write html opening tag
    if (block == 1):
        tempfile.write('\n')
    tempfile.write('<' + tag)
    if (id != None):
        tempfile.write(" id='" + id + "'")
    if (cl != None):
        tempfile.write(" class='" + cl + "'")
    tempfile.write('>')
    if (block == 1):
        tempfile.write('\n')

def closetag(tag, block = 0): # write html closing tag
    if (block == 0):
        tempfile.write("</" + tag + ">")
    else:
        tempfile.write("\n</" + tag + ">\n")

def tag(tag, block = 0, content = ""): # write html opening tag, content, and html closing tag
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)

def header(template): # return a parsed html header from file
    headercontent = open(template, 'r').read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent

def subject(template):
    r = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + r)
    return r

def hostname(): # get the hostname
    hnfile = open(HOSTNAMEPATH, 'r')
    hn = re.search('^(.*)\n*', hnfile.read()).group(1)
    return hn

def resolve(ip): # try to resolve an ip to hostname
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip) # succeeds if text contains ip
        hn = socket.gethostbyaddr(ip)[0].split(".")[0] # resolve ip to hostname
        logger.debug("found hostname " + hn)
        return(hn)
    except:
        logger.debug("failed to resolve hostname for " + ip)
        return(ip) # return ip if no hostname exists

def plural(noun, quantity): # return "1 noun" or "n nouns"
    if (quantity == 1):
        return(str(quantity) + " " + noun)
    else:
        return(str(quantity) + " " + noun + "s")

def parsesize(num, suffix='B'): # return human-readable size from number of bytes
    for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
        if abs(num) < 1024.0:
            return "%3.1f %s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)
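# illustrative examples: parsesize(1536) returns '1.5 KiB'; parsesize(3 * 1024**3) returns '3.0 GiB'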

def readlog(path = None, mode = 'r'): # read file, substituting known paths
    if (path == None):
        logger.error("no path provided")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        if (os.path.isfile(path) is False):
            logger.error(path + " does not exist")
            return ''
        else:
            return open(path, mode).read()
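# e.g. readlog('auth') expands to AUTHPATH (/var/log/auth.log) via the pathfilter dict defined below;
# a string that is not a known shortcut is treated as a literal path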

def writelog(path = None, content = "", mode = 'w'): # write content to a file, substituting known paths
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        file = open(path, mode)
        file.write(content)
        file.close()

def getusage(path): # Get disk usage statistics
    disk = os.statvfs(path)
    cap = float(disk.f_bsize*disk.f_blocks) # disk capacity
    alloc = float(disk.f_bsize*(disk.f_blocks-disk.f_bfree)) # size of path
    free = float(disk.f_bsize*disk.f_bfree) # free space on disk (blocks, not usable space)
    ratio = alloc / cap * 100 # percentage used
    return diskstat(cap, alloc, free, ratio)

def orderbyfreq(l): # order a list by the frequency of its elements and remove duplicates
    temp_l = l[:]
    l = list(set(l))
    l = [[i, temp_l.count(i)] for i in l] # add count of each element
    l.sort(key=lambda x:temp_l.count(x[0])) # sort by count
    l = [i[0] + ' (' + str(i[1]) + ')' for i in l] # put element and count into string
    l = l[::-1] # reverse
    return l
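# e.g. orderbyfreq(['a', 'b', 'a']) returns ['a (2)', 'b (1)'] (most frequent first)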

def addtag(l, tag): # add prefix and suffix tags to each item in a list
    l2 = ['<' + tag + '>' + i + '</' + tag + '>' for i in l]
    return l2

def truncl(input, limit): # truncate list
    if (len(input) > limit):
        more = str(len(input) - limit)
        output = input[:limit]
        output.append("+ " + more + " more")
        return(output)
    else:
        return(input)
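# e.g. truncl(['a', 'b', 'c', 'd'], 2) returns ['a', 'b', '+ 2 more']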

def mailprep(inputpath, outputpath, *stylesheet):
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    pm = premailer.Premailer(old, external_styles=STYLEPATH)
    MAILOUT = pm.transform()
    logger.info("converted stylesheet to inline tags")
    file = open(outputpath, 'w')
    file.write(MAILOUT)
    file.close()
    logger.info("written to temporary mail file")
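# premailer rewrites the rules from the external stylesheet into inline style="" attributes,
# since most mail clients ignore <style> blocks and linked stylesheets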



#
#
#

def sshd():
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
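    # matches auth.log lines of roughly this shape (illustrative example, not from a real log):
    #   Jan  1 00:00:01 host sshd[1234]: Accepted publickey for andrew from 192.168.1.2 port 53716 ssh2: RSA SHA256:...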
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = [] # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match) # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1): # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > MAXLIST: # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - MAXLIST - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")

#
#
#

def sudo():
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
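    # matches session and command lines like these in auth.log (illustrative examples):
    #   Jan  1 00:00:02 host sudo: pam_unix(sudo:session): session opened for user root by andrew(uid=0)
    #   Jan  1 00:00:02 host sudo:   andrew : TTY=pts/0 ; PWD=/home/andrew ; USER=root ; COMMAND=/usr/bin/apt-get update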
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches) # total number of sessions
    users = []
    data = []
    for match in umatches:
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, CMDNO)
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")

#
#
#

def cron():
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
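    # matches job lines like this in cron.log (illustrative example):
    #   Jan  1 00:05:01 host CRON[1234]: (andrew) CMD (/usr/bin/rsync -a /home/andrew /mnt/backup)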
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = sum(1 for line in matches)
    commands = []
    for match in matches:
        commands.append(str(match))
    # commands.append([str(match) for match in matches])
    logger.debug("found cron commands " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    if (len(matches) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, CMDNO)
        writedata("top cron commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished cron section")

#
#
#

def nameget():
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = sum(1 for i in failed)
    l_f = []
    for i in failed:
        l_f.append(i)
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = sum(1 for i in succ)
    l_s = []
    for i in succ:
        l_s.append(i)
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(orderbyfreq(l_s), CMDNO))
    writedata(str(n_f) + " failed", truncl(orderbyfreq(l_f), CMDNO))
    closetag('div', 1)
    logger.info("finished nameget section")

#
#
#

def httpd():
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))
    data_b = 0
    ips = []
    files = []
    useragents = []
    errors = []
    notfound = []
    unprivileged = []

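    # the regex below expects combined-format access log entries, roughly (illustrative example):
    #   192.168.1.2 - - [01/Jan/2018:00:00:00 +0000] "GET /index.html HTTP/1.1" 200 1234 "-" "Mozilla/5.0 (X11; Linux x86_64) Firefox/57.0"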
    for line in accesslog.split('\n'):
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            if type(error) is AttributeError:
                logger.debug("attributeerror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, CMDNO)
        writedata(str(a) + " requests", files)
    if (len(ips) > 0):
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, CMDNO)
        writedata(n_ip + " unique clients", ips)
    if (len(useragents) > 0):
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, CMDNO)
        writedata(n_ua + " unique devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")

#
#
#

def httpdsession():
    # logger.debug("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 'd', uptime)
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
    closetag('div', 1)
    # logger.info("finished httpd section")

#
#
#

def smbd():
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    files = glob.glob(SMBDDIR + "/log.*[!\.gz][!\.old]") # find list of logfiles
    logger.debug("found log files " + str(files))
    n_auths = 0 # total number of logins from all users
    sigma_auths = [] # contains users
    output = ""

    for file in files: # one log file for each client

        logger.debug("looking at file " + file)

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip)

        # count number of logins from each user
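        # matches audit lines containing e.g. "... sam authentication for user [andrew] succeeded ..." (illustrative)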
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            userhost = match + "@" + host
            sigma_auths.append(userhost)
            # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
            # if (exists == []):
            #     sigma_auths.append([userhost, 1])
            # else:
            #     sigma_auths[exists[0]][1] += 1
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else: # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, CMDNO)
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")

#
#
#

def postfix():
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
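    # the multi-line regex below expects consecutive mail.log entries roughly like (illustrative):
    #   ... postfix/qmgr[1234]: 0123456789: from=<andrew@example.com>, size=2048, nrcpt=1 (queue active)
    #   ... postfix/smtp[1235]: 0123456789: to=<someone@example.org>, relay=..., status=sent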
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []
    s = []
    size = 0
    for message in messages:
        r.append(message[2])
        s.append(message[0])
        size += int(message[1])
    # size = sum([int(x) for x in messages])
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        s = list(set(r)) # unique recipients
        if (len(s) > 1):
            r = orderbyfreq(r)
            r = truncl(r, CMDNO)
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")

#
#
#

def zfs():
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("zfs log is " + zfslog)
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
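    # ZFSPATH is assumed to hold output from zpool status/iostat (e.g. written by a cron job),
    # containing a "scrub repaired 0 in 3h12m with 0 errors on ..." line and a column header
    # separated from the pool's alloc/free figures by a row of dashes (illustrative assumption)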
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        logger.debug("error getting scrub data")
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")

#
#
#

def temp():
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        print(sensors.iter_detected_chips())
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        core_avg = sum(x[1] for x in coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
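    # the daemon replies on HDDTEMPPORT with records like "|/dev/sda|ST4000DM000|36|C|",
    # concatenated and separated by "||" (illustrative; parsed by the regex below)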
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost',HDDTEMPPORT))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    hddtemps = []
    for drive in re.split('\|{2}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            units = fields.group(3)
            hddtemps.append(drivetemp(name, temp, units))
        except:
            pass
    hddtotal = 0
    data = []
    for drive in hddtemps:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(hddtemps)) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    hddavg = hddtotal/float(len(hddtemps))
    logger.debug("avg disk temp is " + str(hddavg))
    data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (len(coretemps) > 0):
        writedata("cores", coretemps)
    if (len(hddtemps) > 0):
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")

#
#
#

def du():
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')
    contentnew = ""
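    # the alloc file holds one "path<TAB>bytes-used" line per monitored path, written at the
    # end of this function, so that the next run can report the change (delta) in usage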
    for p in DUPATHS:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")

#
#
#

timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

pathfilter = {"auth": AUTHPATH, "cron": CRONPATH, "sys": SYSPATH, "postfix": POSTFIXPATH, "smb": SMBDDIR, "zfs": ZFSPATH, "alloc": ALLOCPATH, "httpd": HTTPDDIR, "header": HEADERPATH}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())
pathpattern = re.compile("|".join(pathfilter.keys()))

varfilter = {"$title$": TITLE, "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))
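# header.html (loaded by header() above) is expected to contain these $...$ placeholders in an
# html skeleton whose <body> and <html> tags are closed at the end of __main__, e.g. (illustrative sketch):
#   <html><head><title>$title$ $version$ on $hostname$</title></head>
#   <body><h1>$title$</h1><p>generated at $time$ on $date$</p>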


__main__()