# smbd.py
#
# Get login statistics for a samba server.
#
# NOTE: This file is now deprecated in favour of the newer journald mechanism
# used in smbd-journald.py. This parser is still functional but is slower and
# has fewer features. Please switch over if possible.
#
import glob
import logging
import re

from logparse.formatting import *
from logparse.util import readlog, resolve
from logparse import config
from logparse.load_parsers import Parser

+class Smbd(Parser):
-import logging
-logger = logging.getLogger(__name__)
+ def __init__(self):
+ super().__init__()
+ self.name = "smbd"
+ self.info = "Get login statistics for a samba server."
+ self.deprecated = True
+ self.successor = "smbd_journald"
-def parse_log():
- logger.debug("Starting smbd section")
- section = Section("smbd")
- files = glob.glob(config.prefs['logs']['smb'] + "/log.*[!\.gz][!\.old]") # find list of logfiles
- # for f in files:
+ def parse_log(self):
+ logger.debug("Starting smbd section")
+ section = Section("smbd")
+ files = glob.glob(config.prefs.get("logs", "smbd") + "/log.*[!\.gz][!\.old]") # find list of logfiles
+ # for f in files:
- # file_mod_time = os.stat(f).st_mtime
+ # file_mod_time = os.stat(f).st_mtime
- # Time in seconds since epoch for time, in which logfile can be unmodified.
- # should_time = time.time() - (30 * 60)
+ # Time in seconds since epoch for time, in which logfile can be unmodified.
+ # should_time = time.time() - (30 * 60)
- # Time in minutes since last modification of file
- # last_time = (time.time() - file_mod_time)
- # logger.debug(last_time)
+ # Time in minutes since last modification of file
+ # last_time = (time.time() - file_mod_time)
+ # logger.debug(last_time)
- # if (file_mod_time - should_time) < args.time:
- # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
- # else:
+ # if (file_mod_time - should_time) < args.time:
+ # print "CRITICAL: {} last modified {:.2f} minutes. Threshold set to 30 minutes".format(last_time, file, last_time)
+ # else:
- # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
- # files.remove(f)
- logger.debug("Found log files " + str(files))
- n_auths = 0 # total number of logins from all users
- sigma_auths = [] # contains users
+ # if (datetime.timedelta(datetime.datetime.now() - datetime.fromtimestamp(os.path.getmtime(f))).days > 7):
+ # files.remove(f)
+ logger.debug("Found log files " + str(files))
+ n_auths = 0 # total number of logins from all users
+ sigma_auths = [] # contains users
- for file in files: # one log file for each client
+ for file in files: # one log file for each client
- logger.debug("Looking at file " + file)
+ logger.debug("Looking at file " + file)
- # find the machine (ip or hostname) that this file represents
- ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
- host = resolve(ip, fqdn=config.prefs['smbd']['resolve-domains'])
- if (host == ip and (config.prefs['smbd']['resolve-domains'] or config.prefs['resolve-domains']) != 'ip'): # if ip has disappeared, fall back to a hostname from logfile
- newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
- if (len(set(newhost)) == 1): # all hosts in one file should be the same
- host = newhost[0].lower()
+ # find the machine (ip or hostname) that this file represents
+ ip = re.search('log\.(.*)', file).group(1) # get ip or hostname from file path (/var/log/samba/log.host)
+ host = resolve(ip, fqdn=config.prefs.get("smbd", "smbd-resolve-domains"))
+ if host == ip and (config.prefs.get("smbd", "smbd-resolve-domains") != "ip" or config.prefs.get("logparse", "resolve-domains") != "ip"): # if ip has disappeared, fall back to a hostname from logfile
+ newhost = re.findall('.*\]\@\[(.*)\]', readlog(file))
+ if (len(set(newhost)) == 1): # all hosts in one file should be the same
+ host = newhost[0].lower()
- # count number of logins from each user-host pair
- matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
- for match in matches:
- userhost = match + "@" + host
- sigma_auths.append(userhost)
- # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
- # if (exists == []):
- # sigma_auths.append([userhost, 1])
- # else:
- # sigma_auths[exists[0]][1] += 1
- n_auths += 1
- auth_data = Data(subtitle=plural("login", n_auths) + " from")
- if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
- auth_data.subtitle += ' ' + sigma_auths[0][0]
+ # count number of logins from each user-host pair
+ matches = re.findall('.*(?:authentication for user \[|connect to service .* initially as user )(\S*)(?:\] .*succeeded| \()', readlog(file))
+ for match in matches:
+ userhost = match + "@" + host
+ sigma_auths.append(userhost)
+ # exists = [i for i, item in enumerate(sigma_auths) if re.search(userhost, item[0])]
+ # if (exists == []):
+ # sigma_auths.append([userhost, 1])
+ # else:
+ # sigma_auths[exists[0]][1] += 1
+ n_auths += 1
+ auth_data = Data(subtitle=plural("login", n_auths) + " from")
+ if (len(sigma_auths) == 1): # if only one user, do not display no of logins for this user
+ auth_data.subtitle += ' ' + sigma_auths[0][0]
+ section.append_data(auth_data)
+ else: # multiple users
+ auth_data.items = sigma_auths
+ auth_data.orderbyfreq()
+ auth_data.truncl(config.prefs.getint("logparse", "maxlist"))
+ logger.debug("Found {0} samba logins".format(str(n_auths)))
section.append_data(auth_data)
- else: # multiple users
- auth_data.items = sigma_auths
- auth_data.orderbyfreq()
- auth_data.truncl(config.prefs['maxlist'])
- logger.debug("Found {0} samba logins".format(str(n_auths)))
- section.append_data(auth_data)
- logger.info("Finished smbd section")
- return section
+ logger.info("Finished smbd section")
+ return section