# -*- coding: utf-8 -*-
from datetime import datetime
from haproxy import filters
from haproxy.logfile import Log
from haproxy.main import main
from time import sleep

import os
import unittest


class LogFileTest(unittest.TestCase):

    def tearDown(self):
        """Be sure to remove all pickle files so as not to keep stale files
        around.
        """
        path = 'haproxy/tests/files'
        for filename in os.listdir(path):
            if filename.endswith('.pickle'):
                os.remove('{0}/{1}'.format(path, filename))

    def test_from_main(self):
        log_path = 'haproxy/tests/files/small.log'
        data = {
            'start': None,
            'delta': None,
            'log': log_path,
            'commands': ['counter', ],
            'negate_filter': None,
            'filters': None,
            'list_commands': False,
            'list_filters': False,
        }
        logfile = main(data)

        self.assertEqual(logfile.logfile, log_path)

    def test_parsed(self):
        """Check that log files are parsed."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log'
        )
        self.assertTrue(log_file.cmd_counter() > 0)

    def test_total_lines(self):
        """Check that even if some lines are not valid, 'total_lines' counts
        all of them.
        """
        log_file = Log(
            logfile='haproxy/tests/files/2_ok_1_invalid.log'
        )
        self.assertEqual(log_file.total_lines, 3)

    def test_valid_and_invalid_lines(self):
        """Check that if some log lines cannot be parsed, both numbers are
        correctly reported.
        """
        log_file = Log(
            logfile='haproxy/tests/files/2_ok_1_invalid.log'
        )
        self.assertEqual(log_file.cmd_counter(), 2)
        self.assertEqual(log_file.cmd_counter_invalid(), 1)

    def test_lines_sorted(self):
        """Check that after parsing a log file, the valid log lines are kept
        sorted to ease further work on them.
        """
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )

        previous_date = log_file._valid_lines[0].accept_date
        for line in log_file._valid_lines[1:]:
            self.assertTrue(previous_date < line.accept_date)
            previous_date = line.accept_date
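
    # A minimal sketch of the same sortedness check written as a reusable
    # helper; it only relies on the accept_date attribute used in the test
    # above and is not part of the library API.
    @staticmethod
    def _is_sorted_by_accept_date(lines):
        """Return True if accept_date is non-decreasing across the lines."""
        return all(
            earlier.accept_date <= later.accept_date
            for earlier, later in zip(lines, lines[1:])
        )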

    def test_cmd_http_methods(self):
        """Check that the http methods command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        http_methods = log_file.cmd_http_methods()

        self.assertEqual(len(http_methods), 3)
        self.assertEqual(http_methods['GET'], 4)
        self.assertEqual(http_methods['POST'], 2)
        self.assertEqual(http_methods['HEAD'], 3)

    def test_cmd_ip_counter(self):
        """Check that the ip counter command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        ip_counter = log_file.cmd_ip_counter()

        self.assertEqual(len(ip_counter), 4)
        self.assertEqual(ip_counter['123.123.123.123'], 4)
        self.assertEqual(ip_counter['123.123.124.124'], 2)
        self.assertEqual(ip_counter['123.123.124.123'], 1)
        self.assertEqual(ip_counter['123.123.123.124'], 1)

    def test_cmd_status_codes(self):
        """Check that the status codes command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        status_codes = log_file.cmd_status_codes_counter()

        self.assertEqual(len(status_codes), 3)
        self.assertEqual(status_codes['404'], 3)
        self.assertEqual(status_codes['200'], 2)
        self.assertEqual(status_codes['300'], 4)

    def test_cmd_request_path_counter(self):
        """Check that the request path counter command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        path_counter = log_file.cmd_request_path_counter()

        self.assertEqual(len(path_counter), 5)
        self.assertEqual(path_counter['/hello'], 3)
        self.assertEqual(path_counter['/world'], 2)
        self.assertEqual(path_counter['/free'], 2)
        self.assertEqual(path_counter['/fra'], 1)
        self.assertEqual(path_counter['/freitag'], 1)

    def test_cmd_slow_requests(self):
        """Check that the slow requests command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        slow_requests = log_file.cmd_slow_requests()

        self.assertEqual(len(slow_requests), 5)
        slow_requests.sort()  # the analyzer returns them ordered by date
        self.assertEqual(slow_requests,
                         [1293, 2936, 2942, 20095, 29408, ])

    def test_cmd_counter_slow_requests(self):
        """Check that the slow requests counter command reports as expected.
        """
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        slow_requests = log_file.cmd_counter_slow_requests()

        self.assertEqual(slow_requests, 5)

    def test_cmd_server_load(self):
        """Check that the server load counter command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/small.log',
        )
        servers = log_file.cmd_server_load()

        self.assertEqual(len(servers), 3)
        self.assertEqual(servers['instance1'], 4)
        self.assertEqual(servers['instance2'], 3)
        self.assertEqual(servers['instance3'], 2)

    def test_cmd_queue_peaks(self):
        """Check that the queue peaks command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/queue.log',
        )
        peaks = log_file.cmd_queue_peaks()

        self.assertEqual(len(peaks), 4)
        self.assertEqual(peaks[0]['peak'], 4)
        self.assertEqual(peaks[0]['span'], 5)

        self.assertEqual(peaks[1]['peak'], 19)
        self.assertEqual(peaks[1]['span'], 5)

        self.assertEqual(peaks[2]['peak'], 49)
        self.assertEqual(peaks[2]['span'], 3)

        self.assertEqual(peaks[3]['peak'], 3)
        self.assertEqual(peaks[3]['span'], 1)

        self.assertTrue(peaks[0]['first'] < peaks[1]['first'])
        self.assertTrue(peaks[1]['first'] < peaks[2]['first'])
        self.assertTrue(peaks[2]['first'] < peaks[3]['first'])

        self.assertTrue(peaks[0]['last'] < peaks[1]['last'])
        self.assertTrue(peaks[1]['last'] < peaks[2]['last'])
        self.assertTrue(peaks[2]['last'] < peaks[3]['last'])
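
    # A minimal sketch, assuming each peak is a dict with the 'peak', 'span',
    # 'first' and 'last' keys asserted above; the rendering itself is an
    # illustration, not part of the library API.
    @staticmethod
    def _format_queue_peaks(peaks):
        """Render queue peaks as one human readable line per peak."""
        template = 'peak {peak} over {span} requests ({first} -> {last})'
        return '\n'.join(template.format(**peak) for peak in peaks)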

    def test_cmd_queue_peaks_no_end(self):
        """Check that the queue peaks command reports as expected when the
        last log request did not have any queue.
        """
        log_file = Log(
            logfile='haproxy/tests/files/queue_2.log',
        )
        peaks = log_file.cmd_queue_peaks()

        self.assertEqual(len(peaks), 3)
        self.assertEqual(peaks[0]['peak'], 4)
        self.assertEqual(peaks[0]['span'], 5)

        self.assertEqual(peaks[1]['peak'], 19)
        self.assertEqual(peaks[1]['span'], 5)

        self.assertEqual(peaks[2]['peak'], 49)
        self.assertEqual(peaks[2]['span'], 3)

        self.assertTrue(peaks[0]['first'] < peaks[1]['first'])
        self.assertTrue(peaks[1]['first'] < peaks[2]['first'])

        self.assertTrue(peaks[0]['last'] < peaks[1]['last'])
        self.assertTrue(peaks[1]['last'] < peaks[2]['last'])

    def test_cmd_top_ips(self):
        """Check that the top ips command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/top_ips.log',
        )
        top_ips = log_file.cmd_top_ips()

        self.assertEqual(len(top_ips), 10)
        self.assertEqual(top_ips[0], ('1.1.1.15', 6))
        self.assertEqual(top_ips[1], ('1.1.1.11', 5))

        # the 3rd and 4th have the same number of requests, so their
        # relative order is not guaranteed
        self.assertEqual(top_ips[2][1], 4)
        self.assertEqual(top_ips[3][1], 4)
        self.assertTrue(top_ips[2][0] in ('1.1.1.10', '1.1.1.19'))
        self.assertTrue(top_ips[3][0] in ('1.1.1.10', '1.1.1.19'))

        # the same applies to all the remaining entries
        other_ips = [
            '1.1.1.12',
            '1.1.1.13',
            '1.1.1.14',
            '1.1.1.16',
            '1.1.1.17',
            '1.1.1.18',
        ]
        for ip_info in top_ips[4:]:
            self.assertEqual(ip_info[1], 2)
            self.assertTrue(ip_info[0] in other_ips)

            # remove matches from other_ips to ensure every expected ip
            # shows up exactly once
            for position, current in enumerate(other_ips):
                if current == ip_info[0]:
                    del other_ips[position]
                    break

        self.assertEqual(other_ips, [])
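
    # A minimal sketch of how the unordered tail above could be compared in
    # one step: turn the (ip, count) tuples into a mapping and compare it
    # against a mapping of expected counts, so ordering no longer matters.
    # The helper is an illustration, not part of the library API.
    @staticmethod
    def _counts_by_key(top_entries):
        """Map each key of a top_* result to its request count."""
        return dict(top_entries)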

    def test_cmd_top_request_paths(self):
        """Check that the top request paths command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/top_paths.log',
        )
        top_paths = log_file.cmd_top_request_paths()

        self.assertEqual(len(top_paths), 10)
        self.assertEqual(top_paths[0], ('/14', 6))
        self.assertEqual(top_paths[1], ('/13', 4))

        # the 3rd, 4th and 5th have the same number of requests, so their
        # relative order is not guaranteed
        self.assertEqual(top_paths[2][1], 3)
        self.assertEqual(top_paths[3][1], 3)
        self.assertEqual(top_paths[4][1], 3)
        self.assertTrue(top_paths[2][0] in ('/12', '/15', '/11', ))
        self.assertTrue(top_paths[3][0] in ('/12', '/15', '/11', ))
        self.assertTrue(top_paths[4][0] in ('/12', '/15', '/11', ))

        # the same applies to all the remaining entries
        other_paths = [
            '/1',
            '/2',
            '/3',
            '/4',
            '/5',
            '/6',
            '/7',
            '/8',
            '/9',
        ]
        for path_info in top_paths[5:]:
            self.assertEqual(path_info[1], 2)
            self.assertTrue(path_info[0] in other_paths)

            # remove matches from other_paths to ensure no path is
            # reported twice
            for position, current in enumerate(other_paths):
                if current == path_info[0]:
                    del other_paths[position]
                    break

        # only 5 of the 9 candidate paths fit in the top 10, so 4 remain
        self.assertEqual(len(other_paths), 4)

    def test_cmd_connection_type(self):
        """Check that the connection type command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/connection.log',
        )
        ssl, non_ssl = log_file.cmd_connection_type()

        self.assertEqual(ssl, 7)
        self.assertEqual(non_ssl, 5)

    def test_cmd_requests_per_minute(self):
        """Check that the requests per minute command reports as expected."""
        log_file = Log(
            logfile='haproxy/tests/files/requests_per_minute.log',
        )
        requests = log_file.cmd_requests_per_minute()

        self.assertEqual(len(requests), 5)

        self.assertEqual(
            requests[0],
            (datetime(2013, 12, 11, 11, 2), 8)
        )
        self.assertEqual(
            requests[1],
            (datetime(2013, 12, 11, 11, 3), 3)
        )
        self.assertEqual(
            requests[2],
            (datetime(2013, 12, 11, 11, 13), 5)
        )
        self.assertEqual(
            requests[3],
            (datetime(2013, 12, 11, 11, 52), 7)
        )
        self.assertEqual(
            requests[4],
            (datetime(2013, 12, 11, 12, 2), 9)
        )
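
    # A minimal sketch, assuming the (datetime, count) tuples asserted above,
    # that renders the per-minute counts; the output format is an
    # illustration, not part of the library API.
    @staticmethod
    def _format_requests_per_minute(requests):
        """Render each (minute, count) tuple as a human readable line."""
        return '\n'.join(
            '{0}: {1}'.format(minute.strftime('%d/%b/%Y %H:%M'), count)
            for minute, count in requests
        )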

    def test_cmd_requests_per_minute_empty(self):
        """Check that the requests per minute command reports nothing if
        there are no valid lines for whatever reason.
        """
        log_file = Log(
            logfile='haproxy/tests/files/empty.log',
        )
        requests = log_file.cmd_requests_per_minute()

        self.assertEqual(None, requests)

    def test_negate_filter(self):
        """Check that reversing a filter output works as expected."""
        filter_func = filters.filter_ssl()
        log_file = Log(
            logfile='haproxy/tests/files/connection.log',
        )

        # total number of log lines
        self.assertEqual(log_file.cmd_counter(), 12)

        # only SSL lines
        only_ssl = log_file.filter(filter_func)
        self.assertEqual(only_ssl.cmd_counter(), 7)

        # non-SSL lines
        non_ssl = log_file.filter(filter_func, reverse=True)
        self.assertEqual(non_ssl.cmd_counter(), 5)

        # both subsets together should account for every line
        self.assertEqual(
            log_file.cmd_counter(),
            only_ssl.cmd_counter() + non_ssl.cmd_counter()
        )
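
    # A minimal sketch, built only on the filter API exercised above, that
    # splits a Log into the lines matching a filter and the remaining ones;
    # the helper itself is an illustration, not part of the library API.
    @staticmethod
    def _split_by_filter(log_file, filter_func):
        """Return the matching lines and the rest as two filtered logs."""
        matching = log_file.filter(filter_func)
        rest = log_file.filter(filter_func, reverse=True)
        return matching, rest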

    def test_cmd_print_empty(self):
        """Check that the print command prints an empty string if the log file
        is empty.
        """
        log_file = Log(
            logfile='haproxy/tests/files/empty.log',
        )
        data = log_file.cmd_print()
        self.assertEqual('', data)

    def test_cmd_print(self):
        """Check that the print command prints the valid lines."""
        log_file = Log(
            logfile='haproxy/tests/files/2_ok_1_invalid.log',
        )
        data = log_file.cmd_print()
        self.assertNotEqual('', data)

        lines = data.split('\n')
        self.assertEqual(len(lines), 3)

    def test_cmd_average_response_time(self):
        """Check that the average response time returns the expected output."""
        log_file = Log(
            logfile='haproxy/tests/files/average_response.log',
        )
        data = log_file.cmd_average_response_time()
        self.assertEqual(data, 105)

    def test_cmd_average_response_time_aborted(self):
        """Check that the average response time returns the expected output
        when there are aborted connections.
        """
        log_file = Log(
            logfile='haproxy/tests/files/average_response_aborted.log',
        )
        data = log_file.cmd_average_response_time()
        self.assertEqual(data, 110)

    def test_cmd_average_response_time_no_wait(self):
        """Check that the average response time returns the expected output
        when there are connections that took zero milliseconds to reply.
        """
        log_file = Log(
            logfile='haproxy/tests/files/average_response_no_wait.log',
        )
        data = log_file.cmd_average_response_time()
        self.assertEqual(data, 74)

    def test_cmd_average_response_time_empty_log(self):
        """Check that the average response time does not break if the log
        file has no valid lines.
        """
        log_file = Log(
            logfile='haproxy/tests/files/empty.log',
        )
        data = log_file.cmd_average_response_time()
        self.assertEqual(data, 0)

    def test_cmd_average_waiting_time(self):
        """Check that the average time waiting on queues returns the expected
        output.
        """
        log_file = Log(
            logfile='haproxy/tests/files/average_waiting.log',
        )
        data = log_file.cmd_average_waiting_time()
        self.assertEqual(data, 105)

    def test_cmd_average_waiting_time_empty_log(self):
        """Check that the average time waiting on queues does not break if
        the log file has no valid lines.
        """
        log_file = Log(
            logfile='haproxy/tests/files/empty.log',
        )
        data = log_file.cmd_average_waiting_time()
        self.assertEqual(data, 0)

    def test_cmd_average_waiting_time_aborted(self):
        """Check that the average time waiting on queues returns the expected
        output when there are aborted connections.
        """
        log_file = Log(
            logfile='haproxy/tests/files/average_waiting_aborted.log',
        )
        data = log_file.cmd_average_waiting_time()
        self.assertEqual(data, 110)

    def test_cmd_average_waiting_time_no_wait(self):
        """Check that the average time waiting on queues returns the expected
        output when there are requests that did not wait at all (i.e.
        time_wait_queues is 0).
        """
        log_file = Log(
            logfile='haproxy/tests/files/average_waiting_no_wait.log',
        )
        data = log_file.cmd_average_waiting_time()
        self.assertEqual(data, 52.5)
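
    # A minimal sketch composing the two APIs exercised in this file: filter
    # a Log first, then run an averaging command on the matching subset.
    # Only calls already used in these tests are assumed; the helper is an
    # illustration, not part of the library API.
    @staticmethod
    def _average_response_time_for(log_file, filter_func):
        """Average response time of the lines matching the given filter."""
        return log_file.filter(filter_func).cmd_average_response_time()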

    def test_pickle_exists(self):
        """Check that a pickle file is created after the first run."""
        filename = 'haproxy/tests/files/average_waiting_aborted.log'
        pickle_file = '{0}.pickle'.format(filename)

        # the pickle file does not exist yet
        self.assertFalse(os.path.exists(pickle_file))

        Log(logfile=filename)
        # it does exist after parsing the log file
        self.assertTrue(os.path.exists(pickle_file))

    def test_pickle_is_recreated(self):
        """Check that a pickle file is recreated if the log file is newer
        than the pickle file.
        """
        filename = 'haproxy/tests/files/average_waiting_aborted.log'
        pickle_file = '{0}.pickle'.format(filename)

        # create the pickle file and get its modification time
        Log(logfile=filename)
        old_pickle_time = os.path.getmtime(pickle_file)

        # a second (or any later) run does not change the modification time
        Log(logfile=filename)
        second_old_pickle_time = os.path.getmtime(pickle_file)
        self.assertEqual(old_pickle_time, second_old_pickle_time)

        # 'update' the original log file
        sleep(1)  # os.path.getmtime has a resolution of one second
        os.utime(filename, None)

        # the stale pickle file is discarded and a new one is created
        Log(logfile=filename)
        new_pickle_time = os.path.getmtime(pickle_file)

        self.assertTrue(new_pickle_time > old_pickle_time)
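
    # A minimal sketch (an assumption about usage, not library API) of how
    # the pickle cache could be invalidated by hand: removing the .pickle
    # file forces the next Log() call to parse the log file again.
    @staticmethod
    def _invalidate_pickle_cache(logfile):
        """Remove the pickle file kept next to the given log file, if any."""
        pickle_file = '{0}.pickle'.format(logfile)
        if os.path.exists(pickle_file):
            os.remove(pickle_file)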