From 30c8057ce7b8f949229279febe48c6caeb2cb1be Mon Sep 17 00:00:00 2001
From: Andrew Lorimer
Date: Wed, 28 Aug 2019 18:20:39 +1000
Subject: [PATCH 1/1] bugfixing in parsers

---
 logparse/formatting.py          | 40 +++++++++++---------
 logparse/parsers/cron.py        | 16 +++++---
 logparse/parsers/postfix.py     |  5 ++-
 logparse/parsers/sshd.py        | 25 +++++--------
 logparse/parsers/temperature.py | 66 +++++++++++++++++----------------
 5 files changed, 78 insertions(+), 74 deletions(-)

diff --git a/logparse/formatting.py b/logparse/formatting.py
index b2670f8..19eda99 100644
--- a/logparse/formatting.py
+++ b/logparse/formatting.py
@@ -28,6 +28,7 @@ CORNERCHARS_DOUBLE = ['╚', '╝', '╗', '╔']
 CORNERCHARS_SINGLE = ['└', '┘', '┐', '┌']
 LINECHARS_DOUBLE = ['║', '═']
 LINECHARS_SINGLE = ['│', '─']
+INDENT = "    "
 
 class Output:
     """
@@ -78,7 +79,7 @@ class PlaintextOutput(Output):
         self.append('\n'*2)
         for data in section.data:
             self.append(self._fmt_data(data.subtitle, data.items))
-            self.append('\n')
+        self.append('\n')
 
     def _fmt_data(self, subtitle, data = None):    # write title and data
         if (subtitle == ""):
@@ -97,20 +98,21 @@
         itemoutput = subtitle + '\n'
         for datum in data:
             datum = '• ' + datum
-            if len(datum) > config.prefs['linewidth']:
+            if len(datum) > config.prefs['linewidth'] - 3:
                 words = datum.split()
-                if max(map(len, words)) > config.prefs['linewidth']:
-                    raise ValueError("Content width is too small")
+                if max(map(len, words)) > config.prefs['linewidth'] - len(INDENT):
+                    continue
                 res, part, others = [], words[0], words[1:]
                 for word in others:
-                    if len(' ') + len(word) > config.prefs['linewidth'] - len(part):
+                    if 1 + len(word) > config.prefs['linewidth'] - len(part):
                         res.append(part)
                         part = word
                     else:
                         part += ' ' + word
                 if part:
                     res.append(part)
-                datum = '\n'.join(res)
+                datum = ('\n    ').join(res)
+                datum = INDENT + datum
             itemoutput += datum + '\n'
         return itemoutput
 
@@ -214,7 +216,7 @@ class Section:
         self.title = title
         self.data = []
 
-    def add_data(self, data):
+    def append_data(self, data):
        self.data.append(data)
 
 class Data:
     """
     Each section (parser) can have one or more Data() objects which are
     essentially glorified lists.
""" - def __init__(self, subtitle=None, items=[]): + def __init__(self, subtitle="", items=[]): self.subtitle = subtitle self.items = items @@ -230,16 +232,18 @@ class Data: if (len(self.items) > limit): more = str(len(self.items) - limit) self.items = self.items[:limit] - self.items..append("+ " + more + " more") + self.items.append("+ " + more + " more") - def orderbyfreq(self, l): # order a list by the frequency of its elements and remove duplicates - temp_l = l[:] - l = list(set(l)) - l = [[i, temp_l.count(i)] for i in l] # add count of each element - l.sort(key=lambda x:temp_l.count(x[0])) # sort by count - l = [i[0] + ' (' + str(i[1]) + ')' for i in l] # put element and count into string - l = l[::-1] # reverse - self.items = l + def orderbyfreq(self): # order a list by the frequency of its elements and remove duplicates +# temp = list(self.items)[:] +# logger.debug(self.items) +# self.items = list(set(self.items)) +# self.items = [[i, temp.count(i)] for i in self.items] # add count of each element +# self.items.sort(key=lambda x:temp.count(x[0])) # sort by count +# self.items = [i[0] + ' (' + str(i[1]) + ')' for i in self.items] # put element and count into string +# self.items = self.items[::-1] # reverse + unsorted = list(self.items) + self.items = [ "{0} ({1})".format(y, unsorted.count(y)) for y in sorted(set(unsorted), key = lambda x: -unsorted.count(x)) ] class PlaintextLine: @@ -298,7 +302,7 @@ class PlaintextBox: if len(line) > contentwidth: words = line.split() if max(map(len, words)) > contentwidth: - raise ValueError("Content width is too small") + continue res, part, others = [], words[0], words[1:] for word in others: if len(' ') + len(word) > contentwidth - len(part): diff --git a/logparse/parsers/cron.py b/logparse/parsers/cron.py index 3aba140..01a6135 100644 --- a/logparse/parsers/cron.py +++ b/logparse/parsers/cron.py @@ -24,12 +24,16 @@ def parse_log(): # commands.append([str(match)for match in matches]) #logger.debug("found cron command " + str(commands)) logger.info("Found " + str(num) + " cron jobs") - subtitle = str(num) + " cron jobs run" - section.add_data(Data(subtitle)) + jobs_data = Data(str(num) + " cron jobs run") + section.append_data(jobs_data) + if (len(matches) > 0): - commands = ("`{0}`".format(x) for x in commands) - commands = orderbyfreq(list(commands)) - commands = truncl(commands, config.prefs['maxcmd']) - section.add_data(Data("top cron commands", commands)) + logger.debug("Analysing cron commands") + cmd_data = Data("Top cron commands") + cmd_data.items = ("`{0}`".format(x) for x in commands) + cmd_data.orderbyfreq() + cmd_data.truncl(config.prefs['maxcmd']) + section.append_data(cmd_data) + logger.info("Finished cron section") return section diff --git a/logparse/parsers/postfix.py b/logparse/parsers/postfix.py index bee1809..ea4bac9 100644 --- a/logparse/parsers/postfix.py +++ b/logparse/parsers/postfix.py @@ -38,12 +38,13 @@ def parse_log(): rec_data.items = r rec_data.orderbyfreq() rec_data.truncl(config.prefs['maxlist']) + rec_data.subtitle = n + " messages sent to" else: rec_data.subtitle = n + " messages sent to " + r[0] section.append_data(rec_data) else: - section.append_data(Data(subtitle=n + " messages sent"))) + section.append_data(Data(subtitle=n + " messages sent")) logger.info("Found {0} messages sent to {1} recipients".format(n, str(len(r)))) - section.append_data(Data(subtitle="total of " + size)) + section.append_data(Data(subtitle="Total of " + size)) logger.info("Finished postfix section") return section diff --git 
diff --git a/logparse/parsers/sshd.py b/logparse/parsers/sshd.py
index 38b3064..f233a84 100644
--- a/logparse/parsers/sshd.py
+++ b/logparse/parsers/sshd.py
@@ -30,24 +30,17 @@ def parse_log():
             ip = entry.group(2)
 
             userhost = user + '@' + resolve(ip, fqdn=config.prefs['sshd']['resolve-domains'])
-            exists = [i for i, item in enumerate(users) if re.search(userhost, item[0])]
-            if (exists == []):
-                users.append([userhost, 1])
-            else:
-                users[exists[0]][1] += 1
+            users.append(userhost)
     logger.debug("Parsed list of authorised users")
 
-    auth_data = Data(subtitle=plural('login', num) + ' from')
-
-    if (len(users) == 1):   # if only one user, do not display no of logins for this user
-        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0][0])
-        auth_data.subtitle += ' ' + users[0][0]
-    else:
-        for user in users:
-            auth_data.items.append(user[0] + ' (' + str(user[1]) + ')')
-        auth_data.orderbyfreq()
-        auth_data.truncl(config.prefs['maxlist'])
-        logger.debug("found " + str(len(matches)) + " ssh logins for users " + str(data))
+    auth_data = Data(subtitle=plural('login', num) + ' from', items=users)
+
+    if (len(auth_data.items) == 1):   # if only one user, do not display no of logins for this user
+        logger.debug("found " + str(len(matches)) + " ssh logins for user " + users[0])
+        auth_data.subtitle += ' ' + auth_data.items[0]
+    auth_data.orderbyfreq()
+    auth_data.truncl(config.prefs['maxlist'])
+    logger.debug("Found " + str(len(matches)) + " ssh logins for users " + str(data))
     section.append_data(auth_data)
     logger.info("Finished sshd section")
     return section
diff --git a/logparse/parsers/temperature.py b/logparse/parsers/temperature.py
index 2680a44..dfebb9c 100644
--- a/logparse/parsers/temperature.py
+++ b/logparse/parsers/temperature.py
@@ -63,51 +63,56 @@ class HddtempClient:
     def get_drives(self) -> List[Drive]:    # Obtain data from telnet server
         try:
             with Telnet(self.host, self.port, timeout=self.timeout) as tn:
-                data = tn.read_all()
-            return self._parse(data.decode('ascii'))    # Return parsed data
+                raw_data = tn.read_all()
+            return self._parse(raw_data.decode('ascii'))    # Return parsed data
         except Exception as e:
             logger.warning("Couldn't read data from {0}:{1} - {2}".format(self.host, self.port, str(e)))
             return 1
 
 
 def parse_log():
+    logger.debug("Starting temp section")
     section = Section("temperatures")
-    # cpu temp
-    sensors.init()
+
     coretemps = []
     pkgtemp = 0
     systemp = 0
+
+    systemp_data = Data("Sys")
+    coretemp_data = Data("Cores")
+    pkgtemp_data = Data("Processor")
+
     try:
+
         for chip in sensors.iter_detected_chips():
             for feature in chip:
                 if "Core" in feature.label:
-                    coretemps.append([feature.label, feature.get_value()])
-                    logger.debug("found core " + feature.label + " at temp " + str(feature.get_value()))
+                    coretemp_data.items.append([feature.label, feature.get_value()])
+                    logger.debug("Found core " + feature.label + " at temp " + str(feature.get_value()))
                 if "CPUTIN" in feature.label:
-                    pkgtemp = str(feature.get_value())
-                    logger.debug("found cpu package at temperature " + pkgtemp)
+                    pkgtemp_data.items.append([feature.label, str(feature.get_value())])
+                    logger.debug("Found CPU package at temp " + str(feature.get_value()))
                 if "SYS" in feature.label:
-                    systemp = feature.get_value()
-                    logger.debug("found sys input " + feature.label + " at temp " + str(feature.get_value()))
-        logger.debug("Core temp data is: " + str(coretemps))
-#        core_avg = reduce(lambda x, y: x[1] + y[1], coretemps) / len(coretemps)
-        core_avg = sum(core[1] for core in coretemps) / len(coretemps)
-        logger.debug("average cpu temp is " + str(core_avg))
-        coretemps.append(["avg", str(core_avg)])
-        coretemps.append(["pkg", pkgtemp])
-        coretemps = [x[0] + ": " + str(x[1]) + DEG + CEL for x in coretemps]
+                    systemp_data.items.append([feature.label, str(feature.get_value())])
+                    logger.debug("Found sys input " + feature.label + " at temp " + str(feature.get_value()))
+
+        for temp_data in [systemp_data, coretemp_data, pkgtemp_data]:
+            if len(temp_data.items) > 1:
+                avg = sum(feature[1] for feature in temp_data.items) / len(temp_data.items)
+                logger.debug("Avg temp for {0} is {1} {2}{3}".format(temp_data.subtitle, str(avg), DEG, CEL))
+                temp_data.subtitle += " (avg {0}{1}{2}):".format(str(avg), DEG, CEL)
+                temp_data.items = ["{0}: {1}{2}{3}".format(feature[0], feature[1], DEG, CEL) for feature in temp_data.items]
+            else:
+                temp_data.items = [str(temp_data.items[0][1]) + DEG + CEL]
+            section.append_data(temp_data)
+
     finally:
+        logger.info("Finished reading onboard temperatures")
         sensors.cleanup()
 
-    if (systemp != 0):
-        output += writedata("sys: " + str(systemp) + DEG)
-    if (coretemps != ''):
-        output += writedata("cores", coretemps)
-
-    logger.info("Finished reading onboard temperatures")
 
     # drive temp
@@ -117,29 +122,26 @@
     received = ''
     sumtemp = 0.0
     data = ""
-    fields = []
+    hddtemp_data = Data("Disks")
 
     client = HddtempClient(host=config.prefs['hddtemp']['host'], port=int(config.prefs['hddtemp']['port']), sep=config.prefs['hddtemp']['separator'], timeout=int(config.prefs['hddtemp']['timeout']))
     drives = client.get_drives()
     logger.debug("Received drive info: " + str(drives))
+
     for drive in sorted(drives, key=lambda x: x.path):
         if drive.path in config.prefs['hddtemp']['drives']:
             sumtemp += drive.temperature
-            fields.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs['hddtemp']['show-model'] else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
+            hddtemp_data.items.append(("{0} ({1})".format(drive.path, drive.model) if config.prefs['hddtemp']['show-model'] else drive.path) + ": {0}{1}{2}".format(drive.temperature, DEG, drive.units))
         else:
             drives.remove(drive)
-            logger.debug("Ignoring drive {0} ({1})due to config".format(drive.path, drive.model))
+            logger.debug("Ignoring drive {0} ({1}) due to config".format(drive.path, drive.model))
     logger.debug("Sorted drive info: " + str(drives))
 
     hddavg = '{0:.1f}{1}{2}'.format(sumtemp/len(drives), DEG, drives[0].units)   # use units of first drive
     logger.debug("Sum of temperatures: {}; Number of drives: {}; => Avg disk temp is {}".format(str(sumtemp), str(len(drives)), hddavg))
-    fields.append("avg: " + str(hddavg))
+    hddtemp_data.subtitle += " (avg {0}{1}{2})".format(str(hddavg), DEG, CEL)
+    section.append_data(hddtemp_data)
 
-    if (config.prefs['hddtemp']['drives'] != ''):
-        output += writedata("disks", fields)
     logger.info("Finished processing drive temperatures")
-
-    output += closetag('div', 1)
     logger.info("Finished temp section")
-    return output
+    return section
-- 
2.47.0
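
Illustration (not part of the patch): a minimal sketch of how a parser might drive the revised Data/Section API after this change, now that orderbyfreq() and truncl() are methods on Data and sections are assembled with append_data(). The parser name, import form and sample items below are hypothetical.

# Hypothetical usage sketch for the refactored API above; not part of the patch.
from logparse.formatting import Section, Data

def parse_log():
    section = Section("example")

    cmd_data = Data("Top example commands")
    # Items would normally be parsed from a log file; placeholders used here.
    cmd_data.items = ["`rsync`", "`rsync`", "`certbot`", "`rsync`", "`apt`"]
    cmd_data.orderbyfreq()      # most frequent first, e.g. "`rsync` (3)"
    cmd_data.truncl(2)          # keep two entries and append "+ 1 more"
    section.append_data(cmd_data)

    return section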