1# logging.py
2# DNF Logging Subsystem.
3#
4# Copyright (C) 2013-2016 Red Hat, Inc.
5#
6# This copyrighted material is made available to anyone wishing to use,
7# modify, copy, or redistribute it subject to the terms and conditions of
8# the GNU General Public License v.2, or (at your option) any later version.
9# This program is distributed in the hope that it will be useful, but WITHOUT
10# ANY WARRANTY expressed or implied, including the implied warranties of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
12# Public License for more details.  You should have received a copy of the
13# GNU General Public License along with this program; if not, write to the
14# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
15# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
16# source code or documentation are not subject to the GNU General Public
17# License and may only be used or replicated with the express permission of
18# Red Hat, Inc.
19#
20
from __future__ import absolute_import
from __future__ import unicode_literals

import gzip
import logging
import logging.handlers
import os
import sys
import time
import warnings

import libdnf.repo
import libdnf.utils

import dnf.const
import dnf.exceptions
import dnf.lock
import dnf.util
35
# :api loggers are: 'dnf', 'dnf.plugin', 'dnf.rpm'

# Log-level ladder.  Values below logging.DEBUG (10) are DNF-specific levels;
# they get registered with the logging module by name in Logging.__init__.
SUPERCRITICAL = 100 # do not use this for logging
CRITICAL = logging.CRITICAL
ERROR = logging.ERROR
WARNING = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
DDEBUG = 8  # used by anaconda (pyanaconda/payload/dnfpayload.py)
SUBDEBUG = 6  # default level of the 'dnf.rpm' callbacks logger
TRACE = 4
ALL = 2  # everything, including more verbose librepo output
48
def only_once(func):
    """Decorator: the wrapped method runs on its first call only.

    After the first invocation the method is replaced, on the instance,
    by a do-nothing stand-in, so every later call is a silent no-op.
    """
    def _noop(*_args, **_kwargs):
        pass

    def _first_call_only(self, *args, **kwargs):
        func(self, *args, **kwargs)
        # Shadow the method on the instance so later lookups find _noop.
        setattr(self, func.__name__, _noop)
    return _first_call_only
57
58class _MaxLevelFilter(object):
59    def __init__(self, max_level):
60        self.max_level = max_level
61
62    def filter(self, record):
63        if record.levelno >= self.max_level:
64            return 0
65        return 1
66
# Maps the 'debuglevel'/'logfilelevel' configuration values (0-10) onto
# logging levels; consumed by _cfg_verbose_val2level below.
_VERBOSE_VAL_MAPPING = {
    0 : SUPERCRITICAL,
    1 : logging.INFO,
    2 : logging.INFO, # the default
    3 : logging.DEBUG,
    4 : logging.DEBUG,
    5 : logging.DEBUG,
    6 : logging.DEBUG, # verbose value
    7 : DDEBUG,
    8 : SUBDEBUG,
    9 : TRACE,
    10: ALL,   # more verbose librepo and hawkey
    }
80
def _cfg_verbose_val2level(cfg_errval):
    """Translate a verbosity config value (0-10) to a logging level."""
    assert 0 <= cfg_errval <= 10
    level = _VERBOSE_VAL_MAPPING.get(cfg_errval, TRACE)
    return level
84
85
# Both the DNF default and the verbose default are WARNING. Note that ERROR has
# no specific level.
# Maps the 'errorlevel' configuration value onto a logging level; values
# without an entry here fall back to WARNING in _cfg_err_val2level.
_ERR_VAL_MAPPING = {
    0: SUPERCRITICAL,
    1: logging.CRITICAL,
    2: logging.ERROR
    }
93
def _cfg_err_val2level(cfg_errval):
    """Translate an 'errorlevel' config value (0-10) to a logging level."""
    assert 0 <= cfg_errval <= 10
    try:
        return _ERR_VAL_MAPPING[cfg_errval]
    except KeyError:
        return logging.WARNING
97
98
def compression_namer(name):
    """Return the name a rotated log file gets after gzip compression."""
    return "{}.gz".format(name)
101
102
CHUNK_SIZE = 128 * 1024 # 128 KB


def compression_rotator(source, dest):
    """Gzip-compress ``source`` into ``dest`` and delete the original.

    Intended as the ``rotator`` callback of a rotating file handler; the
    data is streamed in CHUNK_SIZE pieces to keep memory usage flat.
    """
    with open(source, "rb") as plain, gzip.open(dest, 'wb') as compressed:
        for chunk in iter(lambda: plain.read(CHUNK_SIZE), b""):
            compressed.write(chunk)
    os.remove(source)
115
116
class MultiprocessRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler safe for concurrent use by multiple DNF processes.

    Rollover is serialized through a DNF log lock so two processes do not
    rotate the same log file at the same time.
    """

    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
        super(MultiprocessRotatingFileHandler, self).__init__(
            filename, mode, maxBytes, backupCount, encoding, delay)
        # Inter-process/inter-thread lock guarding doRollover().
        self.rotate_lock = dnf.lock.build_log_lock("/var/log/", True)

    def emit(self, record):
        # Retry until the record is written: lock contention is transient.
        while True:
            try:
                if self.shouldRollover(record):
                    with self.rotate_lock:
                        # Do rollover while preserving the mode of the new log file
                        mode = os.stat(self.baseFilename).st_mode
                        self.doRollover()
                        os.chmod(self.baseFilename, mode)
                logging.FileHandler.emit(self, record)
                return
            except (dnf.exceptions.ProcessLockError, dnf.exceptions.ThreadLockError):
                # Another process/thread holds the rotate lock; back off briefly.
                time.sleep(0.01)
            except Exception:
                # Any other failure goes through standard logging error
                # handling rather than raising into the caller.
                self.handleError(record)
                return
139
140
def _create_filehandler(logfile, log_size, log_rotate, log_compress):
    """Build a rotating file handler with local-time ISO-like timestamps.

    Creates the log file (and its directory) when missing.  When
    ``log_compress`` is set, rotated-away logs are gzipped.
    """
    if not os.path.exists(logfile):
        dnf.util.ensure_dir(os.path.dirname(logfile))
        dnf.util.touch(logfile)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s",
                                  "%Y-%m-%dT%H:%M:%S%z")
    formatter.converter = time.localtime
    handler = MultiprocessRotatingFileHandler(logfile, maxBytes=log_size, backupCount=log_rotate)
    handler.setFormatter(formatter)
    if log_compress:
        handler.rotator = compression_rotator
        handler.namer = compression_namer
    return handler
154
def _paint_mark(logger):
    # Write the session marker line (dnf.const.LOG_MARKER) at INFO level.
    logger.log(INFO, dnf.const.LOG_MARKER)
157
158
class Logging(object):
    """Wire up the DNF logging subsystem: console, file and warning loggers."""

    def __init__(self):
        self.stdout_handler = self.stderr_handler = None
        # Register the DNF-specific sub-DEBUG levels by name.
        logging.addLevelName(DDEBUG, "DDEBUG")
        logging.addLevelName(SUBDEBUG, "SUBDEBUG")
        logging.addLevelName(TRACE, "TRACE")
        logging.addLevelName(ALL, "ALL")
        # Route Python warnings through the 'py.warnings' logger.
        logging.captureWarnings(True)
        logging.raiseExceptions = False

    @only_once
    def _presetup(self):
        """Attach the stdout/stderr console handlers to the 'dnf' logger."""
        logger_dnf = logging.getLogger("dnf")
        logger_dnf.setLevel(TRACE)

        # setup stdout
        stdout = logging.StreamHandler(sys.stdout)
        stdout.setLevel(INFO)
        # Keep WARNING and above off stdout; they go to stderr below.
        stdout.addFilter(_MaxLevelFilter(logging.WARNING))
        logger_dnf.addHandler(stdout)
        self.stdout_handler = stdout

        # setup stderr
        stderr = logging.StreamHandler(sys.stderr)
        stderr.setLevel(WARNING)
        logger_dnf.addHandler(stderr)
        self.stderr_handler = stderr

    @only_once
    def _setup_file_loggers(self, logfile_level, logdir, log_size, log_rotate, log_compress):
        """Attach rotating file handlers for the dnf, librepo and rpm logs."""
        logger_dnf = logging.getLogger("dnf")
        logger_dnf.setLevel(TRACE)

        # setup file logger
        logfile = os.path.join(logdir, dnf.const.LOG)
        handler = _create_filehandler(logfile, log_size, log_rotate, log_compress)
        handler.setLevel(logfile_level)
        logger_dnf.addHandler(handler)

        # setup Python warnings
        logger_warnings = logging.getLogger("py.warnings")
        logger_warnings.addHandler(handler)

        # Librepo gets its own log file; the second addHandler argument is
        # True only at the most verbose ('ALL') log file level.
        logger_librepo = logging.getLogger("librepo")
        logger_librepo.setLevel(TRACE)
        logfile = os.path.join(logdir, dnf.const.LOG_LIBREPO)
        handler = _create_filehandler(logfile, log_size, log_rotate, log_compress)
        logger_librepo.addHandler(handler)
        libdnf.repo.LibrepoLog.addHandler(logfile, logfile_level <= ALL)

        # setup RPM callbacks logger
        logger_rpm = logging.getLogger("dnf.rpm")
        # Do not bubble RPM messages up to the 'dnf' logger's handlers.
        logger_rpm.propagate = False
        logger_rpm.setLevel(SUBDEBUG)
        logfile = os.path.join(logdir, dnf.const.LOG_RPM)
        handler = _create_filehandler(logfile, log_size, log_rotate, log_compress)
        logger_rpm.addHandler(handler)

    @only_once
    def _setup(self, verbose_level, error_level, logfile_level, logdir, log_size, log_rotate, log_compress):
        """Full setup: console + file handlers, levels and session markers."""
        self._presetup()

        self._setup_file_loggers(logfile_level, logdir, log_size, log_rotate, log_compress)

        logger_warnings = logging.getLogger("py.warnings")
        logger_warnings.addHandler(self.stderr_handler)

        # setup RPM callbacks logger
        logger_rpm = logging.getLogger("dnf.rpm")
        logger_rpm.addHandler(self.stdout_handler)
        logger_rpm.addHandler(self.stderr_handler)

        logger_dnf = logging.getLogger("dnf")
        # temporarily turn off stdout/stderr handlers:
        # the markers are logged at INFO, so raising the console handlers to
        # WARNING keeps them out of the console while the file handlers see them
        self.stdout_handler.setLevel(WARNING)
        self.stderr_handler.setLevel(WARNING)
        _paint_mark(logger_dnf)
        _paint_mark(logger_rpm)
        # bring std handlers to the preferred level
        self.stdout_handler.setLevel(verbose_level)
        self.stderr_handler.setLevel(error_level)

    def _setup_from_dnf_conf(self, conf, file_loggers_only=False):
        """Configure logging from a dnf configuration object.

        conf -- object with debuglevel, errorlevel, logfilelevel, logdir,
                log_size, log_rotate and log_compress attributes
        file_loggers_only -- when True, set up only the file loggers and
                             skip the console handler setup
        """
        verbose_level_r = _cfg_verbose_val2level(conf.debuglevel)
        error_level_r = _cfg_err_val2level(conf.errorlevel)
        logfile_level_r = _cfg_verbose_val2level(conf.logfilelevel)
        logdir = conf.logdir
        log_size = conf.log_size
        log_rotate = conf.log_rotate
        log_compress = conf.log_compress
        if file_loggers_only:
            return self._setup_file_loggers(logfile_level_r, logdir, log_size, log_rotate, log_compress)
        else:
            return self._setup(
                verbose_level_r, error_level_r, logfile_level_r, logdir, log_size, log_rotate, log_compress)
254
255
class Timer(object):
    """Measure and log elapsed wall-clock time of a named operation.

    Instantiate when the operation starts; call the instance when it ends.
    The duration is logged at DDEBUG level to the 'dnf' logger.
    """

    def __init__(self, what):
        self.what = what
        self.start = time.time()

    def __call__(self):
        elapsed_ms = (time.time() - self.start) * 1000
        logging.getLogger("dnf").log(
            DDEBUG, 'timer: %s: %d ms' % (self.what, elapsed_ms))
265
266
# Translate libdnf logger severity constants into DNF/stdlib logging levels.
# Both NOTICE and INFO collapse onto logging.INFO.
_LIBDNF_TO_DNF_LOGLEVEL_MAPPING = {
    libdnf.utils.Logger.Level_CRITICAL: CRITICAL,
    libdnf.utils.Logger.Level_ERROR: ERROR,
    libdnf.utils.Logger.Level_WARNING: WARNING,
    libdnf.utils.Logger.Level_NOTICE: INFO,
    libdnf.utils.Logger.Level_INFO: INFO,
    libdnf.utils.Logger.Level_DEBUG: DEBUG,
    libdnf.utils.Logger.Level_TRACE: TRACE
}
276
277
class LibdnfLoggerCB(libdnf.utils.Logger):
    """Bridge libdnf/librepo log messages into Python's logging loggers."""

    def __init__(self):
        super(LibdnfLoggerCB, self).__init__()
        self._dnf_logger = logging.getLogger("dnf")
        self._librepo_logger = logging.getLogger("librepo")

    def write(self, source, *args):
        """Log message.

        source -- integer, defines origin (libdnf, librepo, ...) of message, 0 - unknown

        Invoked with either (level, message) or (time, pid, level, message)
        as the extra arguments.
        """
        if len(args) == 2:
            level, message = args
        elif len(args) == 4:
            # Underscore names: a bare 'time' here would shadow the module.
            _timestamp, _pid, level, message = args
        else:
            # Previously an unexpected arity fell through and crashed below
            # with UnboundLocalError; fail early with a clear message.
            raise TypeError(
                "write() got %d extra arguments, expected 2 or 4" % len(args))
        if source == libdnf.utils.Logger.LOG_SOURCE_LIBREPO:
            self._librepo_logger.log(_LIBDNF_TO_DNF_LOGLEVEL_MAPPING[level], message)
        else:
            self._dnf_logger.log(_LIBDNF_TO_DNF_LOGLEVEL_MAPPING[level], message)
297
298
# Install the bridge at import time so libdnf/librepo messages flow into
# Python logging as soon as this module is loaded.
libdnfLoggerCB = LibdnfLoggerCB()
libdnf.utils.Log.setLogger(libdnfLoggerCB)
301