b416218f8efb5255d30d4a12f89243b5f1b50252
1#! /usr/bin/python
2
3import argparse, logging, os, shutil, re, subprocess, sys, requests, glob, socket, sensors, datetime, time, operator, premailer
4from sys import stdin
5from collections import namedtuple, defaultdict
6
7diskstat = namedtuple('diskstat', ['cap', 'alloc', 'free', 'ratio'])
8drivetemp = namedtuple('drivetemp', ['name', 'temp', 'units'])
9
10
# --- Configuration ---------------------------------------------------------

# Logfiles parsed by the individual report sections
AUTHPATH = "/var/log/auth.log"
CRONPATH = "/var/log/cron.log"
SYSPATH = "/var/log/syslog"
SMBDDIR = "/var/log/samba"
ZFSPATH = "/var/log/zpool.log"
ALLOCPATH = "/tmp/alloc"            # stores the previous run's disk-usage figures (du section)
POSTFIXPATH = "/var/log/mail.log"
HTTPDSTATUS = "http://localhost/server-status"
HTTPDDIR = "/var/log/apache2"
HOSTNAMEPATH = "/etc/hostname"
DUPATHS = ["/home/andrew", "/mnt/andrew"]   # paths monitored by the du section
HDDTEMPS = ["/dev/sda", "/dev/sdc", "/dev/sdd", "/dev/sde"]
HDDTEMPPORT = 7634                  # hddtemp daemon TCP port (temp section)

# Output files for the generated HTML report
SUMMARYPATH = "/mnt/andrew/temp/logparse-test.html"
OUTPUTPATH = "/mnt/andrew/temp/logparse-test2.html"
MAILPATH = "/mnt/andrew/temp/log-parse-test-3.html"
HEADERPATH = "header.html"          # HTML header template with $var$ placeholders
STYLEPATH = "main.css"              # stylesheet inlined into the mail copy by mailprep
MAILOUT = ""
HTMLOUT = ""
TXTOUT = ""
TITLE = "logparse"
MAXLIST = 10                        # max items shown per list before truncation
CMDNO = 3                           # max entries shown in "top commands" lists
MAILSUBJECT = "logparse from $hostname$"    # $hostname$ substituted by subject()
VERSION = "v0.1"
# DEG = u'\N{DEGREE SIGN}'.encode('utf-8')
DEG = 'C'                           # temperature unit label appended to readings

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('logparse')

# Get arguments
parser = argparse.ArgumentParser(description='grab logs of some common services and send them by email')
parser.add_argument('-t','--to', help='mail recipient (\"to\" address)',required=False)
to = parser.parse_args().to
48
def __main__():
    # Entry point: run every section, assemble the HTML report on disk, then
    # either leave it there or mail it to the -t/--to recipient.
    logger.info("Beginning log analysis at " + str(timenow))
    if (to == None):
        logger.info("no recipient address provided, outputting to stdout")
    else:
        logger.info("email will be sent to " + to)

    global tempfile     # report file handle shared by the opentag/closetag/tag helpers
    tempfile = open(SUMMARYPATH, 'w+')
    tempfile.write(header(HEADERPATH))
    opentag('div', 1, 'main')
    # each section function parses one service's logs and writes its own <div>
    sshd()
    sudo()
    cron()
    nameget()
    httpd()
    smbd()
    postfix()
    zfs()
    temp()
    du()
    for tag in ['div', 'body', 'html']:
        closetag(tag, 1)
    tempfile.close()
    mailprep(SUMMARYPATH, MAILPATH)     # inline the CSS so mail clients render it
    if (to != None):
        logger.debug("sending email")
        ms = subject(MAILSUBJECT)
        # NOTE(review): subject/recipient are interpolated into a shell string
        # passed to shell=True — verify these are trusted before reuse
        cmd = "cat " + MAILPATH + " | mail --debug-level=10 -a 'Content-type: text/html' -s '" + ms + "' " + to
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        logger.info("sent email")
81
82
def writetitle(title):
    """Write a section heading (h2) to the report, rejecting empty/multiline titles."""
    valid = title != '' and '\n' not in title
    if not valid:
        logger.error("invalid title")
        return
    logger.debug("writing title for " + title)
    tag('h2', 0, title)
89
def writedata(subtitle, data = None):
    """Write a subtitle and optional data to the report.

    With no data only the subtitle paragraph is written; a single datum is
    appended inline after the subtitle; multiple data become an unordered list.
    """
    if (subtitle == ""):
        # fixed: 'loggger' typo raised NameError whenever the subtitle was empty
        logger.warning("no subtitle provided.. skipping section")
        return

    if (data == None):
        logger.debug("no data provided.. just printing subtitle")
        tag('p', 0, subtitle)
    else:
        logger.debug("received data " + str(data))
        subtitle += ':'
        if (len(data) == 1):
            tag('p', 0, subtitle + ' ' + data[0])
        else:
            tag('p', 0, subtitle)
            opentag('ul', 1)
            for datum in data:
                logger.debug("printing datum " + datum)
                tag('li', 0, datum)
            closetag('ul', 1)
110
def opentag(tag, block = 0, id = None, cl = None):
    """Write an HTML opening tag to the report, optionally with id/class.

    Block-level tags (block=1) get a newline before and after.
    """
    markup = '<' + tag
    if id is not None:
        markup += " id='" + id + "'"
    if cl is not None:
        markup += " class='" + cl + "'"
    markup += '>'
    if block == 1:
        tempfile.write('\n' + markup + '\n')
    else:
        tempfile.write(markup)
122
def closetag(tag, block = 0):
    """Write an HTML closing tag; block-level tags get surrounding newlines."""
    markup = "</" + tag + ">"
    if block != 0:
        markup = "\n" + markup + "\n"
    tempfile.write(markup)
128
def tag(tag, block = 0, content = ""):
    # Write a complete element to the report: opening tag, content, closing tag.
    opentag(tag, block)
    tempfile.write(content)
    closetag(tag, block)
133
def header(template):
    """Return the HTML header read from the template file with $var$
    placeholders (title, date, hostname, ...) substituted via varfilter."""
    # fixed: the file handle from open().read() was never closed
    with open(template, 'r') as f:
        headercontent = f.read()
    headercontent = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], headercontent)
    return headercontent
138
def subject(template):
    """Return the mail subject line with $var$ placeholders filled in."""
    result = varpattern.sub(lambda m: varfilter[re.escape(m.group(0))], template)
    logger.debug("returning subject line " + result)
    return result
143
def hostname():
    """Return the machine's hostname read from HOSTNAMEPATH (first line,
    trailing newlines stripped)."""
    # fixed: the file handle was never closed
    with open(HOSTNAMEPATH, 'r') as hnfile:
        hn = re.search('^(.*)\n*', hnfile.read()).group(1)
    return hn
148
def resolve(ip):
    """Try to resolve an ip address to a short hostname; return the ip
    unchanged if it is not an ip or cannot be resolved."""
    logger.debug("trying to resolve ip " + ip)
    try:
        socket.inet_aton(ip)    # raises socket.error unless text contains an ip
        hn = socket.gethostbyaddr(ip)[0].split(".")[0]  # resolve ip to hostname
        logger.debug("found hostname " + hn)
        return(hn)
    # fixed: bare 'except:' also swallowed KeyboardInterrupt/SystemExit;
    # only the socket-level resolution failures are expected here
    except (socket.error, socket.herror, socket.gaierror):
        logger.debug("failed to resolve hostname for " + ip)
        return(ip)  # return ip if no hostname exists
159
def plural(noun, quantity):
    """Return e.g. "1 login" or "3 logins" depending on quantity."""
    suffix = "" if quantity == 1 else "s"
    return str(quantity) + " " + noun + suffix
165
def parsesize(num, suffix='B'):
    """Return a human-readable size string (e.g. "1.5 KiB") for a byte count."""
    prefixes = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']
    idx = 0
    while idx < len(prefixes) - 1 and abs(num) >= 1024.0:
        num /= 1024.0
        idx += 1
    if abs(num) < 1024.0:
        return "%3.1f %s%s" % (num, prefixes[idx], suffix)
    # beyond Zi: one more division and the Yi format (no space, per original)
    num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)
172
def readlog(path = None, mode = 'r'):
    """Read a logfile and return its contents, substituting known path
    aliases (e.g. 'auth' -> AUTHPATH) via pathfilter. Returns None and logs
    an error when no path is given."""
    if (path == None):
        logger.error("no path provided")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        # fixed: the file handle from open().read() was never closed
        with open(path, mode) as f:
            return f.read()
180
def writelog(path = None, content = "", mode = 'w'):
    """Write content to a file, substituting known path aliases via
    pathfilter. Logs an error and returns on invalid arguments."""
    if (path == None or content == None):
        logger.error("invalid usage of writelog")
        return
    else:
        path = pathpattern.sub(lambda m: pathfilter[re.escape(m.group(0))], path)
        # fixed: use a context manager (also avoids shadowing the 'file' builtin)
        with open(path, mode) as f:
            f.write(content)
190
def getusage(path):
    """Return a diskstat tuple (capacity, allocated, free, used-%) for the
    filesystem containing path. Sizes are in bytes; free counts all free
    blocks, not just those usable by unprivileged processes."""
    stats = os.statvfs(path)
    blocksize = float(stats.f_bsize)
    cap = blocksize * stats.f_blocks
    free = blocksize * stats.f_bfree
    alloc = cap - free
    return diskstat(cap, alloc, free, alloc / cap * 100)
198
def orderbyfreq(l):
    """Collapse duplicates in l and return "item (count)" strings ordered by
    frequency, most frequent first."""
    source = l[:]
    unique = list(set(l))
    counted = [[item, source.count(item)] for item in unique]
    counted.sort(key=lambda pair: source.count(pair[0]))    # ascending by count
    labelled = [pair[0] + ' (' + str(pair[1]) + ')' for pair in counted]
    return labelled[::-1]                                   # descending
207
def addtag(l, tag):
    """Wrap each item of l in <tag>...</tag> and return the new list."""
    prefix = '<' + tag + '>'
    suffix = '</' + tag + '>'
    return [prefix + item + suffix for item in l]
211
def truncl(input, limit):
    """Truncate a list to limit items, appending "+ n more" when items were
    dropped; lists at or under the limit are returned unchanged."""
    if len(input) <= limit:
        return(input)
    shown = input[:limit]
    shown.append("+ " + str(len(input) - limit) + " more")
    return(shown)
220
def mailprep(inputpath, outputpath, *stylesheet):
    """Inline the external CSS (STYLEPATH) into the HTML report at inputpath
    and write the mail-ready copy to outputpath."""
    logger.debug("converting stylesheet to inline tags")
    old = readlog(inputpath)
    pm = premailer.Premailer(old, external_styles=STYLEPATH)
    mailout = pm.transform()    # fixed: was assigned to a local named like the global MAILOUT
    logger.info("converted stylesheet to inline tags")
    # fixed: use a context manager (also avoids shadowing the 'file' builtin)
    with open(outputpath, 'w') as f:
        f.write(mailout)
    logger.info("written to temporary mail file")
231
232
233
234#
235#
236#
237
def sshd():
    # Parse auth.log for accepted ssh publickey logins and write a section
    # listing logins per user@host.
    logger.debug("starting sshd section")
    opentag('div', 1, 'sshd', 'section')
    matches = re.findall('.*sshd.*Accepted publickey for .* from .*', readlog('auth')) # get all logins
    users = []  # list of users with format [username, number of logins] for each item
    data = []
    num = sum(1 for x in matches) # total number of logins
    for match in matches:
        entry = re.search('^.*publickey\sfor\s(\w*)\sfrom\s(\S*)', match)   # [('user', 'ip')]

        user = entry.group(1)
        ip = entry.group(2)

        userhost = user + '@' + resolve(ip)
        # tally logins per user@host pair
        exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
        if (exists == []):
            users.append([userhost, 1])
        else:
            users[exists[0]][1] += 1

    writetitle('sshd')
    subtitle = plural('login', num) + ' from'
    if (len(users) == 1):   # if only one user, do not display no of logins for this user
        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
            if len(data) > MAXLIST:     # if there are lots of users, truncate them
                data.append('+ ' + str(len(users) - MAXLIST - 1) + " more")
                break
        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
        writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished sshd section")
274
275#
276#
277#
278
def sudo():
    """Parse auth.log for sudo sessions and the commands run, and write a
    section listing sessions per user plus the top sudo commands."""
    logger.debug("starting sudo section")
    opentag('div', 1, 'sudo', 'section')
    umatches = re.findall('.*sudo:session\): session opened.*', readlog('auth'))
    num = sum(1 for line in umatches)   # total number of sessions
    users = []  # [username, session count] pairs
    data = []
    for match in umatches:
        user = re.search('.*session opened for user root by (\S*)\(uid=.*\)', match).group(1)
        exists = [i for i, item in enumerate(users) if re.search(user, item[0])]
        if (exists == []):
            users.append([user, 1])
        else:
            users[exists[0]][1] += 1
    commands = []
    cmatches = re.findall('sudo:.*COMMAND\=(.*)', readlog('auth'))
    for cmd in cmatches:
        commands.append(cmd)
    logger.debug("found the following commands: " + str(commands))

    writetitle("sudo")
    subtitle = plural("sudo session", num) + " for"
    if (len(users) == 1):
        logger.debug("found " + str(num) + " sudo session(s) for user " + str(users[0]))
        subtitle += ' ' + users[0][0]
        writedata(subtitle)
    else:
        for user in users:
            data.append(user[0] + ' (' + str(user[1]) + ')')
        # fixed: this logged undefined name 'matches' (NameError); use the session count
        logger.debug("found " + str(num) + " sudo sessions for users " + str(data))
        writedata(subtitle, data)
    if (len(commands) > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, CMDNO)
        writedata("top sudo commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished sudo section")
317
318#
319#
320#
321
def cron():
    """Parse cron.log for executed commands and write a section with the
    job count and the top cron commands."""
    logger.debug("starting cron section")
    opentag('div', 1, 'cron', 'section')
    # every CMD line, excluding plain 'cd' invocations
    matches = re.findall('.*CMD\s*\(\s*(?!.*cd)(.*)\)', readlog('cron'))
    num = sum(1 for line in matches)
    commands = []
    for match in matches:
        commands.append(str(match))
    logger.debug("found cron command " + str(commands))
    logger.info("found " + str(num) + " cron jobs")
    subtitle = str(num) + " cron jobs run"
    writetitle("cron")
    writedata(subtitle)
    # fixed: original tested 'matches > 0', comparing the list itself to an int
    if (num > 0):
        commands = addtag(commands, 'code')
        commands = orderbyfreq(commands)
        commands = truncl(commands, CMDNO)
        writedata("top cron commands", [c for c in commands])
    closetag('div', 1)
    logger.info("finished cron section")
343
344#
345#
346#
347
def nameget():
    """Summarise nameget download successes and failures found in syslog."""
    logger.debug("starting nameget section")
    opentag('div', 1, 'nameget', 'section')
    syslog = readlog('sys')
    failed = re.findall('.*nameget.*downloading of (.*) from .*failed.*', syslog)
    n_f = sum(1 for i in failed)
    l_f = [i for i in failed]
    logger.debug("the following downloads failed: " + str(l_f))
    succ = re.findall('.*nameget.*downloaded\s(.*)', syslog)
    n_s = sum(1 for i in succ)
    l_s = [i for i in succ]
    # fixed: this line previously logged the *failed* list (l_f) by mistake
    logger.debug("the following downloads succeeded: " + str(l_s))
    logger.debug("found " + str(n_s) + " successful downloads, and " + str(n_f) + " failed attempts")
    writetitle("nameget")
    writedata(str(n_s) + " succeeded", truncl(orderbyfreq(l_s), CMDNO))
    writedata(str(n_f) + " failed", truncl(orderbyfreq(l_f), CMDNO))
    closetag('div', 1)
    logger.info("finished nameget section")
370
371#
372#
373#
374
def httpd():
    """Parse apache access and error logs and write a section summarising
    requests, clients, user agents, bytes transferred and error count."""
    logger.info("starting httpd section")
    opentag('div', 1, 'httpd', 'section')
    accesslog = readlog("httpd/access.log")
    a = len(accesslog.split('\n'))      # request count (lines in access log)
    errorlog = readlog("httpd/error.log")
    e = len(errorlog.split('\n'))       # error count (lines in error log)
    data_b = 0                          # total bytes transferred
    ips = []
    files = []
    useragents = []
    errors = []
    notfound = []
    unprivileged = []

    for line in accesslog.split('\n'):
        # groups: client ip, requested path, response size, referrer, user agent
        fields = re.search('^(\S*) .*GET (\/.*) HTTP/\d\.\d\" 200 (\d*) \"(.*)\".*\((.*)\;', line)
        try:
            ips.append(fields.group(1))
            files.append(fields.group(2))
            useragents.append(fields.group(5))
            logger.debug("transferred " + fields.group(3) + " bytes in this request")
            data_b += int(fields.group(3))
            logger.debug("data_b is now " + str(data_b))
        except Exception as error:
            if type(error) is AttributeError:
                # the line didn't match the regex (e.g. non-GET or non-200)
                logger.debug("attributeerrror: " + str(error))
            else:
                logger.warning("error processing httpd access log: " + str(error))
    logger.debug(str(data_b) + " bytes transferred")
    data_h = parsesize(data_b)
    writetitle("apache")

    logger.debug("httpd has transferred " + str(data_b) + " bytes in response to " + str(a) + " requests with " + str(e) + " errors")
    if (a > 0):
        logger.debug("found the following requests: " + str(files))
        files = addtag(files, 'code')
        files = orderbyfreq(files)
        files = truncl(files, CMDNO)
        writedata(str(a) + " requests", files)
    # fixed: 'ips != None' was always true for a list, emitting empty sections
    if (len(ips) > 0):
        logger.debug("found the following ips: " + str(ips))
        ips = addtag(ips, 'code')
        ips = orderbyfreq(ips)
        n_ip = str(len(ips))
        ips = truncl(ips, CMDNO)
        writedata(n_ip + " unique clients", ips)
    # fixed: same always-true comparison for useragents
    if (len(useragents) > 0):
        logger.debug("found the following useragents: " + str(useragents))
        useragents = addtag(useragents, 'code')
        useragents = orderbyfreq(useragents)
        n_ua = str(len(useragents))
        useragents = truncl(useragents, CMDNO)
        writedata(n_ua + " unique devices", useragents)

    writedata(data_h + " transferred")
    writedata(str(e) + " errors")

    closetag('div', 1)
    logger.info("finished httpd section")
435
436#
437#
438#
439
def httpdsession():
    """Return a one-line HTML summary of the running httpd session scraped
    from the apache server-status page (uptime, requests, traffic)."""
    opentag('div', 1, 'httpd', 'section')
    httpdlog = requests.get(HTTPDSTATUS).content
    uptime = re.search('.*uptime: (.*)<', httpdlog).group(1)
    # abbreviate the textual uptime units
    uptime = re.sub(' minute[s]', 'm', uptime)
    uptime = re.sub(' second[s]', 's', uptime)
    uptime = re.sub(' day[s]', 's', uptime)     # NOTE(review): 'd' was probably intended here — confirm
    uptime = re.sub(' month[s]', 'mo', uptime)
    accesses = re.search('.*accesses: (.*) - .*', httpdlog).group(1)
    traffic = re.search('.*Traffic: (.*)', httpdlog).group(1)
    # fixed: closetag was unreachable after the return, leaving the div open
    closetag('div', 1)
    return("<br /><strong>httpd session: </strong> up " + uptime + ", " + accesses + " requests, " + traffic + " transferred")
454
455#
456#
457#
458
def smbd():
    """Parse per-client samba logs for successful authentications and write
    a section listing logins per user@host."""
    logger.debug("starting smbd section")
    opentag('div', 1, 'smbd', 'section')
    files = glob.glob(SMBDDIR + "/log.*[!\.gz][!\.old]")    # current (unrotated) logfiles
    n_auths = 0         # total number of logins from all users
    sigma_auths = []    # one "user@host" string per login
    output = ""

    for file in files:  # one log file for each client

        # find the machine (ip or hostname) that this file represents
        ip = re.search('log\.(.*)', file).group(1)  # get ip or hostname from file path (/var/log/samba/log.host)
        host = resolve(ip)

        # record each successful authentication from this client
        matches = re.findall('.*sam authentication for user \[(.*)\] succeeded.*', readlog(file))
        for match in matches:
            userhost = match + "@" + host
            sigma_auths.append(userhost)
            n_auths += 1
    writetitle("samba")
    subtitle = plural("login", n_auths) + " from"
    if (len(sigma_auths) == 1):     # if only one user, do not display no of logins for this user
        # fixed: entries are plain strings, so [0][0] took the first *character*
        subtitle += ' ' + sigma_auths[0]
        writedata(subtitle)
    else:   # multiple users
        sigma_auths = orderbyfreq(sigma_auths)
        sigma_auths = truncl(sigma_auths, CMDNO)
        logger.debug("found " + str(n_auths) + " samba logins for users " + str(sigma_auths))
        writedata(subtitle, sigma_auths)
    closetag('div', 1)
    logger.info("finished smbd section")
496
497#
498#
499#
500
def postfix():
    """Parse the postfix mail log for sent messages and write a section with
    recipients, message count and total size."""
    logger.debug("starting postfix section")
    opentag('div', 1, 'postfix', 'section')
    # each match is a (sender, size, recipient) tuple
    messages = re.findall('.*from\=<(.*)>, size\=(\d*),.*\n.*to=<(.*)>', readlog('postfix'))
    r = []          # recipients, one entry per message
    size = 0        # total bytes sent
    for message in messages:
        r.append(message[2])
        size += int(message[1])
    # fixed: removed dead accumulation of senders into 's' — it was
    # unconditionally overwritten by list(set(r)) before any use
    size = parsesize(size)
    n = str(len(messages))
    writetitle("postfix")

    if (len(r) > 0):
        s = list(set(r))    # unique recipients
        if (len(s) > 1):
            r = orderbyfreq(r)
            r = truncl(r, CMDNO)
            writedata(n + " messages sent to", r)
        else:
            writedata(n + " messages sent to " + r[0])
    else:
        writedata(n + " messages sent")
    writedata("total of " + size)
    closetag('div', 1)
    logger.info("finished postfix section")
530
531#
532#
533#
534
def zfs():
    # Summarise zpool status from the zpool logfile: last scrub results and
    # iostat alloc/free figures for the first pool listed.
    logger.debug("starting zfs section")
    opentag('div', 1, 'zfs', 'section')
    zfslog = readlog('zfs')
    logger.debug("got zfs logfile\n" + zfslog + "---end log---")
    # pool name: first word on the line following the '---' separator
    pool = re.search('.*---\n(\w*)', zfslog).group(1)
    # scrub summary: repaired amount, error count, and date fragments
    scrub = re.search('.*scrub repaired (\d*) in \d*h\d*m with (\d*) errors on (\S*\s)(\S*)\s(\d+\s)', zfslog)
    # iostat line after the separator: alloc and free columns
    iostat = re.search('.*---\n\w*\s*(\S*)\s*(\S*)\s', zfslog)
    scrubrepairs = scruberrors = scrubdate = None
    try:
        scrubrepairs = scrub.group(1)
        scruberrors = scrub.group(2)
        # reassemble the captured date fragments into display order
        scrubdate = scrub.group(3) + scrub.group(5) + scrub.group(4)
    except:
        # scrub may never have run (scrub is None); leave the fields as None
        logger.debug("error getting scrub data")
    alloc = iostat.group(1)
    free = iostat.group(2)
    writetitle("zfs")
    if (scrubdate != None):
        subtitle = "Scrub of " + pool + " on " + scrubdate
        data = [scrubrepairs + " repaired", scruberrors + " errors", alloc + " used", free + " free"]
    else:
        subtitle = pool
        data = [alloc + " used", free + " free"]
    writedata(subtitle, data)
    closetag('div', 1)
    logger.info("finished zfs section")
562
563#
564#
565#
566
def temp():
    # Report CPU core/package, system, and hard-drive temperatures using
    # lm-sensors (via the 'sensors' module) and the hddtemp daemon.
    logger.debug("starting temp section")
    opentag('div', 1, 'temp', 'section')
    sensors.init()
    coretemps = []
    pkgtemp = 0
    systemp = 0
    try:
        print(sensors.iter_detected_chips())
        for chip in sensors.iter_detected_chips():
            for feature in chip:
                if "Core" in feature.label:
                    coretemps.append([feature.label, feature.get_value()])
                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
                if "CPUTIN" in feature.label:
                    pkgtemp = str(feature.get_value())
                    logger.debug("found cpu package at temperature " + pkgtemp)
                if "SYS" in feature.label:
                    systemp = feature.get_value()
                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
        # NOTE(review): 'reduce' as a builtin implies Python 2; also this
        # reduce only sums correctly for exactly two cores (the accumulator
        # becomes a float, so x[1] would fail on a third element) — verify
        core_avg = reduce(lambda x, y: x[1] + y[1], coretemps) / len(coretemps)
        logger.debug("average cpu temp is " + str(core_avg))
        coretemps.append(["avg", str(core_avg)])
        coretemps.append(["pkg", pkgtemp])
        coretemps = [x[0] + ": " + str(x[1]) + DEG for x in coretemps]
    finally:
        sensors.cleanup()

    # For this to work, `hddtemp` must be running in daemon mode.
    # Start it like this (bash): sudo hddtemp -d /dev/sda /dev/sdX...
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost',HDDTEMPPORT))
    output = s.recv(4096)
    output += s.recv(4096)
    s.close()
    hddtemps = []
    # daemon output entries are separated by '||'; each looks like
    # |/dev/sda|model|35|C| — name, temperature, units
    for drive in re.split('\|{2}', output):
        try:
            fields = re.search('\|*(/dev/sd.)\|.*\|(\d+)\|(.)', drive)
            name = fields.group(1)
            temp = float(fields.group(2))
            units = fields.group(3)
            hddtemps.append(drivetemp(name, temp, units))
        except:
            # non-matching fragment; skip it
            pass
    hddtotal = 0
    data = []
    for drive in hddtemps:
        data.append(drive.name + ': ' + str(drive.temp) + drive.units)
        logger.debug("found disk " + drive.name + " at " + str(drive.temp))
        hddtotal += drive.temp
    logger.debug("found " + str(len(hddtemps)) + " disks")
    logger.debug("sum of disk temps is " + str(hddtotal))
    # NOTE(review): ZeroDivisionError if no drives were parsed — confirm intended
    hddavg = hddtotal/float(len(hddtemps))
    logger.debug("avg disk temp is " + str(hddavg))
    data.append("avg: " + str(hddavg))
    writetitle("temperatures")
    if (systemp != 0):
        writedata("sys: " + str(systemp) + DEG)
    if (coretemps != ''):   # NOTE(review): list-vs-str compare, always true
        writedata("cores", coretemps)
    if (hddtemps != ''):    # NOTE(review): list-vs-str compare, always true
        writedata("disks", data)

    closetag('div', 1)
    logger.info("finished temp section")
633
634#
635#
636#
637
def du():
    # Report disk usage for each path in DUPATHS, plus the change since the
    # previous run (persisted in the 'alloc' file).
    logger.debug("starting du section")
    opentag('div', 1, 'du', 'section')
    out = []
    content = readlog('alloc')  # previous run's usage figures ("path\tbytes" lines)
    contentnew = ""
    for p in DUPATHS:
        alloc_f = getusage(p).alloc
        delta = None
        try:
            # look up this path's allocation from the previous run
            alloc_i = re.search(p + '\t(.*)\n', content).group(1)
            delta = alloc_f - float(alloc_i)
        except:
            # no previous record (or lookup failed) — report without a delta
            pass
        logger.debug("delta is " + str(delta))
        if (delta == None):
            out.append([p, "used " + parsesize(alloc_f)])
        else:
            out.append([p, "used " + parsesize(alloc_f), "delta " + parsesize(delta)])
        contentnew += (p + '\t' + str(alloc_f) + '\n')
    writelog('alloc', contentnew)   # persist current figures for the next run

    writetitle("du")
    logger.debug("disk usage data is " + str(out))
    for path in out:
        writedata(path[0], [p for p in path[1:]])

    closetag('div', 1)
    logger.info("finished du section")
667
668#
669#
670#
671
# Timestamps captured once at startup, used in the report header and subject
timenow = time.strftime("%H:%M:%S")
datenow = time.strftime("%x")

# Map short log aliases (used by readlog/writelog) to real filesystem paths;
# pathpattern matches any alias for substitution.
pathfilter = {"auth": AUTHPATH, "cron": CRONPATH, "sys": SYSPATH, "postfix": POSTFIXPATH, "smb": SMBDDIR, "zfs": ZFSPATH, "alloc": ALLOCPATH, "httpd": HTTPDDIR, "header": HEADERPATH}
pathfilter = dict((re.escape(k), v) for k, v in pathfilter.iteritems())  # NOTE: iteritems() => Python 2 only
pathpattern = re.compile("|".join(pathfilter.keys()))

# Map $placeholder$ tokens in the header template and mail subject to values
varfilter = {"$title$": TITLE, "$date$": datenow, "$time$": timenow, "$hostname$": hostname(), "$version$": VERSION}
varfilter = dict((re.escape(k), v) for k, v in varfilter.iteritems())
varpattern = re.compile("|".join(varfilter.keys()))


__main__()