import logging
import gzip
import os
import re
import signal
import sys
from stat import ST_SIZE, ST_INO
import time
try:
    import bz2
    HAS_BZ2 = True
except ImportError:
    HAS_BZ2 = False
    bz2 = None

from .allowedhosts import AllowedHosts
from .constants import *
from .daemon import createdaemon
from .denyfileutil import Purge
from .filetracker import FileTracker
from .loginattempt import LoginAttempt
from . import plugin
from .regex import *
from .report import Report
from .restricted import Restricted
from .sync import Sync
from .util import die, is_true, parse_host, send_email, is_valid_ip_address
from .version import VERSION

debug = logging.getLogger("denyhosts").debug
info = logging.getLogger("denyhosts").info
error = logging.getLogger("denyhosts").error
warning = logging.getLogger("denyhosts").warning


class DenyHosts(object):
    def __init__(self, logfile, prefs, lock_file,
                 ignore_offset=0, first_time=0,
                 noemail=0, daemon=0, foreground=0):
        self.__denied_hosts = {}
        self.__prefs = prefs
        self.__lock_file = lock_file
        self.__first_time = first_time
        self.__noemail = noemail
        self.__report = Report(prefs.get("HOSTNAME_LOOKUP"), is_true(prefs['SYSLOG_REPORT']))
        self.__daemon = daemon
        self.__foreground = foreground
        self.__sync_server = prefs.get('SYNC_SERVER')
        self.__sync_upload = is_true(prefs.get("SYNC_UPLOAD"))
        self.__sync_download = is_true(prefs.get("SYNC_DOWNLOAD"))
        self.__iptables = prefs.get("IPTABLES")
        self.__blockport = prefs.get("BLOCKPORT")
        self.__pfctl = prefs.get("PFCTL_PATH")
        self.__pftable = prefs.get("PF_TABLE")
        self.__pftablefile = prefs.get("PF_TABLE_FILE")
        self.purge_counter = 0
        self.sync_counter = 0
        self.__sshd_format_regex = None
        self.__successful_entry_regex = None
        self.__failed_entry_regex_map = {}
        self.__failed_dovecot_entry_regex = None

        r = Restricted(prefs)
        self.__restricted = r.get_restricted()
        info("restricted: %s", self.__restricted)
        self.init_regex()

        try:
            self.file_tracker = FileTracker(self.__prefs.get('WORK_DIR'),
                                            logfile)
        except Exception as e:
            self.__lock_file.remove()
            die("Can't read: %s" % logfile, e)

        self.__allowed_hosts = AllowedHosts(self.__prefs)

        if ignore_offset:
            last_offset = 0
        else:
            last_offset = self.file_tracker.get_offset()

        if last_offset is not None:
            self.get_denied_hosts()
            info("Processing log file (%s) from offset (%ld)",
                 logfile,
                 last_offset)
            offset = self.process_log(logfile, last_offset)
            if offset != last_offset:
                self.file_tracker.save_offset(offset)
                last_offset = offset
        elif not daemon:
            info("Log file size has not changed.  Nothing to do.")

        if daemon and not foreground:
            info("launching DenyHosts daemon (version %s)..." % VERSION)

            # logging.getLogger().setLevel(logging.WARN)

            # remove lock file since createDaemon will
            # create a new pid.  A new lock
            # will be created when runDaemon is invoked
            self.__lock_file.remove()

            retcode = createdaemon()
            if retcode == 0:
                self.rundaemon(logfile, last_offset)
            else:
                die("Error creating daemon: %s (%d)" % (retcode[1], retcode[0]))
        elif foreground:
            info("launching DenyHosts (version %s)..." % VERSION)
            self.__lock_file.remove()
            self.rundaemon(logfile, last_offset)

    @staticmethod
    def killdaemon(signum=None, frame=None):
        debug("Received SIGTERM")
        info("DenyHosts daemon is shutting down")
        # signal handler

        # self.__lock_file.remove()
        # lock will be freed on SIGTERM by denyhosts.py
        # exception handler (SystemExit)
        sys.exit(0)

    @staticmethod
    def toggledebug(signum=None, frame=None):
        level = logging.getLogger().getEffectiveLevel()
        if level == logging.INFO:
            level = logging.DEBUG
            name = "DEBUG"
        else:
            level = logging.INFO
            name = "INFO"
        info("setting debug level to: %s", name)
        logging.getLogger().setLevel(level)

    def rundaemon(self, logfile, last_offset):
        # signal.signal(signal.SIGHUP, self.killdaemon)
        signal.signal(signal.SIGTERM, self.killdaemon)
        signal.signal(signal.SIGUSR1, self.toggledebug)
        info("DenyHosts daemon is now running, pid: %s", os.getpid())
        info("send daemon process a TERM signal to terminate cleanly")
        info("  eg.  kill -TERM %s", os.getpid())
        self.__lock_file.create()

        info("monitoring log: %s", logfile)
        daemon_sleep = self.__prefs.get('DAEMON_SLEEP')
        purge_time = self.__prefs.get('PURGE_DENY')
        sync_time = self.__prefs.get('SYNC_INTERVAL')
        info("sync_time: %s", str(sync_time))

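        # Purging and synchronization are scheduled as multiples of DAEMON_SLEEP:
        # sleepandpurge() increments a counter once per sleep cycle and runs the
        # purge/sync work whenever the counter reaches the corresponding ratio.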
        if purge_time:
            daemon_purge = self.__prefs.get('DAEMON_PURGE')
            daemon_purge = max(daemon_sleep, daemon_purge)
            # integer division so the counter comparison in sleepandpurge() can match
            purge_sleep_ratio = daemon_purge // daemon_sleep
            self.purge_counter = 0
            info("daemon_purge:      %ld", daemon_purge)
            info("daemon_sleep:      %ld", daemon_sleep)
            info("purge_sleep_ratio: %ld", purge_sleep_ratio)
        else:
            purge_sleep_ratio = None
            info("purging of %s is disabled", self.__prefs.get('HOSTS_DENY'))

        if sync_time and self.__sync_server:
            if sync_time < SYNC_MIN_INTERVAL:
                info("SYNC_INTERVAL (%d) should be at least %d",
                     sync_time,
                     SYNC_MIN_INTERVAL)
                sync_time = SYNC_MIN_INTERVAL
            sync_time = max(daemon_sleep, sync_time)
            info("sync_time:      : %ld", sync_time)
            sync_sleep_ratio = sync_time // daemon_sleep
            self.sync_counter = 0
            info("sync_sleep_ratio: %ld", sync_sleep_ratio)
        else:
            sync_sleep_ratio = None
            info("denyhosts synchronization disabled")

        self.daemonloop(logfile, last_offset, daemon_sleep,
                        purge_time, purge_sleep_ratio, sync_sleep_ratio)

    def daemonloop(self, logfile, last_offset, daemon_sleep,
                   purge_time, purge_sleep_ratio, sync_sleep_ratio):

        fp = open(logfile, "r")
        inode = os.fstat(fp.fileno())[ST_INO]

        while 1:

            try:
                curr_inode = os.stat(logfile)[ST_INO]
            except OSError:
                info("%s has been deleted", logfile)
                self.sleepandpurge(daemon_sleep,
                                   purge_time,
                                   purge_sleep_ratio)
                continue

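            # If the inode of the path no longer matches the inode of the open
            # file handle, the log was rotated out from under us: reopen it and
            # force a reread by making last_offset larger than any real offset.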
            if curr_inode != inode:
                info("%s has been rotated", logfile)
                inode = curr_inode
                try:
                    fp.close()
                except IOError:
                    pass

                fp = open(logfile, "r")
                # this ultimately forces offset (if not 0) to be < last_offset
                last_offset = sys.maxsize

            offset = os.fstat(fp.fileno())[ST_SIZE]
            if last_offset is None:
                last_offset = offset

            if offset > last_offset:
                # new data added to logfile
                debug("%s has additional data", logfile)

                self.get_denied_hosts()
                last_offset = self.process_log(logfile, last_offset)

                self.file_tracker.save_offset(last_offset)
            elif offset == 0:
                # log file rotated, nothing to do yet...
                # since there is no first_line
                debug("%s is empty.  File was rotated", logfile)
            elif offset < last_offset:
                # file was rotated or replaced and now has data
                debug("%s most likely rotated and now has data", logfile)
                last_offset = 0
                self.file_tracker.update_first_line()
                continue

            self.sleepandpurge(daemon_sleep, purge_time,
                               purge_sleep_ratio, sync_sleep_ratio)

    def sleepandpurge(self, sleep_time, purge_time,
                      purge_sleep_ratio=None, sync_sleep_ratio=None):
        time.sleep(sleep_time)
        if purge_time:
            self.purge_counter += 1
            if self.purge_counter == purge_sleep_ratio:
                try:
                    Purge(self.__prefs, purge_time)
                except Exception as e:
                    logging.getLogger().exception(e)
                    raise
                self.purge_counter = 0

        if sync_sleep_ratio:
            # debug("sync count: %d", self.sync_counter)
            self.sync_counter += 1
            if self.sync_counter == sync_sleep_ratio:
                try:
                    sync = Sync(self.__prefs)
                    if self.__sync_upload:
                        debug("sync upload")
                        sync.send_new_hosts()
                    if self.__sync_download:
                        debug("sync download")
                        new_hosts = sync.receive_new_hosts()
                        if new_hosts:
                            info("received new hosts: %s", str(new_hosts))
                            self.get_denied_hosts()
                            self.update_hosts_deny(new_hosts)
                    sync.xmlrpc_disconnect()
                except Exception as e:
                    logging.getLogger().exception(e)
                    raise
                self.sync_counter = 0

    def get_denied_hosts(self):
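        # Rebuild the in-memory map of hosts already present in HOSTS_DENY,
        # noting any entry that also appears in the allowed-hosts list so it
        # can be reported as a conflict below.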
        self.__denied_hosts = {}
        with open(self.__prefs.get('HOSTS_DENY'), 'r') as fhdh:
            line = fhdh.readline()
            while line:
                if line[0] not in ('#', '\n'):

                    idx = line.find('#')
                    if idx != -1:
                        line = line[:idx]
                    try:
                        host = parse_host(line)
                        self.__denied_hosts[host] = 0
                        if host in self.__allowed_hosts:
                            self.__allowed_hosts.add_warned_host(host)
                    except Exception:
                        pass
                line = fhdh.readline()

        new_warned_hosts = self.__allowed_hosts.get_new_warned_hosts()
        if new_warned_hosts:
            self.__allowed_hosts.save_warned_hosts()

            text = """WARNING: The following hosts appear in %s but should be
allowed based on your %s file""" % (self.__prefs.get("HOSTS_DENY"),
                                    os.path.join(self.__prefs.get("WORK_DIR"),
                                                 ALLOWED_HOSTS))
            self.__report.add_section(text, new_warned_hosts)
            self.__allowed_hosts.clear_warned_hosts()

    def update_hosts_deny(self, deny_hosts):
        if not deny_hosts:
            return None, None

        # info("keys: %s", str( self.__denied_hosts.keys()))
        new_hosts = [host for host in deny_hosts
                     if host not in self.__denied_hosts
                     and host not in self.__allowed_hosts]

        debug("new hosts: %s", str(new_hosts))

        try:
            fp = open(self.__prefs.get('HOSTS_DENY'), "a")
            status = 1
        except Exception as e:
            print(e)
            print("These hosts should be manually added to: %s" % self.__prefs.get('HOSTS_DENY'))
            # print(self.__prefs.get('HOSTS_DENY'))
            fp = sys.stdout
            status = 0

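        # When PURGE_DENY is set, each new entry is preceded by a DENY_DELIMITER
        # timestamp comment so that the purge utility can expire it later.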
        write_timestamp = self.__prefs.get('PURGE_DENY') is not None
        for host in new_hosts:
            block_service = self.__prefs.get('BLOCK_SERVICE')
            if block_service:
                block_service = "%s: " % block_service
                output = "%s%s%s" % (block_service, host, BSD_STYLE)
            else:
                output = "%s" % host

            if write_timestamp:
                fp.write("%s %s%s%s\n" % (DENY_DELIMITER,
                                          time.asctime(),
                                          ENTRY_DELIMITER,
                                          output))
            fp.write("%s\n" % output)

        if self.__iptables:
            debug("Trying to create iptables rules")
            try:
                for host in new_hosts:
                    my_host = str(host)
                    if self.__blockport is not None and ',' in self.__blockport:
                        new_rule = self.__iptables + " -I INPUT -p tcp -m multiport --dports " + self.__blockport + \
                                   " -s " + my_host + " -j DROP"
                    elif self.__blockport:
                        new_rule = self.__iptables + " -I INPUT -p tcp --dport " + self.__blockport + " -s " + \
                                   my_host + " -j DROP"
                    else:
                        new_rule = self.__iptables + " -I INPUT -s " + my_host + " -j DROP"
                    debug("Running iptables rule: %s", new_rule)
                    info("Creating new firewall rule %s", new_rule)
                    os.system(new_rule)

            except Exception as e:
                print(e)
                print("Unable to write new firewall rule.")

        if self.__pfctl and self.__pftable:
            debug("Trying to update PF table.")
            try:
                for host in new_hosts:
                    my_host = str(host)
                    new_rule = self.__pfctl + " -t " + self.__pftable + " -T add " + my_host
                    debug("Running PF update rule: %s", new_rule)
                    info("Creating new PF rule %s", new_rule)
                    os.system(new_rule)

            except Exception as e:
                print(e)
                print("Unable to write new PF rule.")
                debug("Unable to create PF rule. %s", e)
        if self.__pftablefile:
            debug("Trying to write host to PF table file %s", self.__pftablefile)
            try:
                pf_file = open(self.__pftablefile, "a")
                for host in new_hosts:
                    my_host = str(host)
                    pf_file.write("%s\n" % my_host)
                    info("Wrote new host %s to table file %s", my_host, self.__pftablefile)
                pf_file.close()
            except Exception as e:
                print(e)
                print("Unable to write new host to PF table file.")
                debug("Unable to write new host to PF table file %s", self.__pftablefile)

        if fp != sys.stdout:
            fp.close()

        return new_hosts, status

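    # Note: despite its name, is_valid() returns 1 when the matched line refers
    # to an *invalid* login attempt (the regex's "invalid" group matched, or the
    # pattern defines no such group), and 0 otherwise.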
    @staticmethod
    def is_valid(rx_match):
        invalid = 0
        try:
            if rx_match.group("invalid"):
                invalid = 1
        except Exception:
            invalid = 1
        return invalid

    def process_log(self, logfile, offset):
        try:
            if logfile.endswith(".gz"):
                fp = gzip.open(logfile)
            elif logfile.endswith(".bz2"):
                if HAS_BZ2:
                    fp = bz2.BZ2File(logfile, "r")
                else:
                    raise Exception("Cannot open bzip2 file (missing bz2 module)")
            else:
                fp = open(logfile, "r")
        except Exception as e:
            print("Could not open log file: %s" % logfile)
            print(e)
            return -1

        try:
            fp.seek(offset)
        except IOError:
            pass

        suspicious_always = is_true(self.__prefs.get('SUSPICIOUS_LOGIN_REPORT_ALLOWED_HOSTS'))

        login_attempt = LoginAttempt(self.__prefs,
                                     self.__allowed_hosts,
                                     suspicious_always,
                                     self.__first_time,
                                     1,  # fetch all
                                     self.__restricted)

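        # Each line is matched in order: the sshd prefix regex first, then the
        # fixed FAILED_ENTRY_REGEX patterns (falling back to the successful-login
        # regex), then the dovecot regex if enabled, then any user-defined regexes.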
        for line in fp:
            success = invalid = 0
            m = None
            sshd_m = self.__sshd_format_regex.match(line)
            if sshd_m:
                message = sshd_m.group('message')

                # did this line match any of the fixed failed regexes?
                for inc in FAILED_ENTRY_REGEX_RANGE:
                    rxx = self.__failed_entry_regex_map.get(inc)
                    if rxx is None:
                        continue
                    m = rxx.search(message)
                    if m:
                        invalid = self.is_valid(m)
                        break
                else:  # didn't match any of the failed regexes, was it successful?
                    m = self.__successful_entry_regex.match(message)
                    if m:
                        success = 1
            # otherwise, did the line match a failed dovecot login attempt?
            elif is_true(self.__prefs.get("DETECT_DOVECOT_LOGIN_ATTEMPTS")):
                rxx = self.__failed_dovecot_entry_regex
                m = rxx.search(line)
                if m:
                    # debug("matched (host=%s): %s", m.group("host"), rxx.pattern)
                    invalid = self.is_valid(m)
            # otherwise, did the line match one of the userdef regexes?
            elif not m:
                for rxx in self.__prefs.get('USERDEF_FAILED_ENTRY_REGEX'):
                    m = rxx.search(line)
                    if m:
                        # info("matched: %s" % rxx.pattern)
                        invalid = self.is_valid(m)
                        break

            if not m:
                # line isn't important
                continue

            try:
                user = m.group("user")
            except Exception:
                user = ""
            try:
                host = m.group("host")
            except Exception:
                error("regex pattern ( %s ) is missing 'host' group" % m.re.pattern)
                continue

            if not is_valid_ip_address(host):
                warning("got invalid host (%s), ignoring" % host)
                continue

            debug("user: %s - host: %s - success: %d - invalid: %d",
                  user,
                  host,
                  success,
                  invalid)
            login_attempt.add(user, host, success, invalid)

        offset = fp.tell()
        fp.close()

        login_attempt.save_all_stats()
        deny_hosts = login_attempt.get_deny_hosts()

        # print deny_hosts
        new_denied_hosts, status = self.update_hosts_deny(deny_hosts)
        if new_denied_hosts:
            if not status:
                msg = "WARNING: Could not add the following hosts to %s" % self.__prefs.get('HOSTS_DENY')
            else:
                msg = "Added the following hosts to %s" % self.__prefs.get('HOSTS_DENY')
                # run plugins since the rest has run correctly
                plugin_deny = self.__prefs.get('PLUGIN_DENY')
                # check if PLUGIN_DENY is uncommented and has content
                if plugin_deny:
                    # check if there's a ',' delimiting multiple plugins
                    if ',' in plugin_deny:
                        # split the plugin list on ','
                        m_plugin_deny = plugin_deny.split(',')
                        # loop through the plugins
                        for m_plugin in m_plugin_deny:
                            if m_plugin:
                                plugin.execute(m_plugin.strip(), new_denied_hosts)
                    else:
                        plugin.execute(plugin_deny, new_denied_hosts)

            self.__report.add_section(msg, new_denied_hosts)
            if self.__sync_server:
                self.sync_add_hosts(new_denied_hosts)

        new_suspicious_logins = login_attempt.get_new_suspicious_logins()
        if new_suspicious_logins:
            msg = "Observed the following suspicious login activity"
            self.__report.add_section(msg, list(new_suspicious_logins.keys()))

        if new_denied_hosts:
            info("new denied hosts: %s", str(new_denied_hosts))
        else:
            debug("no new denied hosts")

        if new_suspicious_logins:
            info("new suspicious logins: %s", str(list(new_suspicious_logins.keys())))
        else:
            debug("no new suspicious logins")

        if not self.__report.empty():
            if not self.__noemail:
                # send the report via email if configured
                send_email(self.__prefs, self.__report.get_report())
            elif not self.__daemon:
                # otherwise, if not in daemon mode, log the report to the console
                info(self.__report.get_report())
            self.__report.clear()

        return offset

    def sync_add_hosts(self, hosts):
        try:
            filename = os.path.join(self.__prefs.get("WORK_DIR"), SYNC_HOSTS)
            fp = open(filename, "a")
            for host in hosts:
                fp.write("%s\n" % host)
            fp.close()
            os.chmod(filename, 0o644)
        except Exception as e:
            error(str(e))

    def get_regex(self, name, default):
        val = self.__prefs.get(name)
        if not val:
            return default
        else:
            return re.compile(val)

    def init_regex(self):
        self.__sshd_format_regex = self.get_regex('SSHD_FORMAT_REGEX', SSHD_FORMAT_REGEX)

        self.__successful_entry_regex = self.get_regex('SUCCESSFUL_ENTRY_REGEX',
                                                       SUCCESSFUL_ENTRY_REGEX)

        self.__failed_entry_regex_map = {}
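        # The first failed-entry preference has no numeric suffix
        # (FAILED_ENTRY_REGEX); subsequent ones are FAILED_ENTRY_REGEX2,
        # FAILED_ENTRY_REGEX3, and so on, mirroring FAILED_ENTRY_REGEX_MAP.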
        for inc in FAILED_ENTRY_REGEX_RANGE:
            if inc == 1:
                _extra = ""
            else:
                _extra = "%i" % inc
            self.__failed_entry_regex_map[inc] = self.get_regex('FAILED_ENTRY_REGEX%s' % _extra,
                                                                FAILED_ENTRY_REGEX_MAP[inc])

        self.__failed_dovecot_entry_regex = self.get_regex('FAILED_DOVECOT_ENTRY_REGEX', FAILED_DOVECOT_ENTRY_REGEX)