1# Copyright 2011 OpenStack Foundation.
2# Copyright 2010 United States Government as represented by the
3# Administrator of the National Aeronautics and Space Administration.
4# All Rights Reserved.
5#
6#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7#    not use this file except in compliance with the License. You may obtain
8#    a copy of the License at
9#
10#         http://www.apache.org/licenses/LICENSE-2.0
11#
12#    Unless required by applicable law or agreed to in writing, software
13#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15#    License for the specific language governing permissions and limitations
16#    under the License.
17
18"""OpenStack logging handler.
19
20This module adds to logging functionality by adding the option to specify
21a context object when calling the various log methods.  If the context object
22is not specified, default formatting is used. Additionally, an instance uuid
23may be passed as part of the log message, which is intended to make it easier
24for admins to find messages related to a specific instance.
25
26It also allows setting of formatting information through conf.
27
28"""
29
30import configparser
31import logging
32import logging.config
33import logging.handlers
34import os
35import platform
36import sys
37try:
38    import syslog
39except ImportError:
40    syslog = None
41
42from oslo_config import cfg
43from oslo_utils import importutils
44from oslo_utils import units
45
46from oslo_log._i18n import _
47from oslo_log import _options
48from oslo_log import formatters
49from oslo_log import handlers
50
# Re-export the standard logging levels so applications can reference
# them through oslo_log.log instead of importing logging directly.
CRITICAL = logging.CRITICAL
FATAL = logging.FATAL
ERROR = logging.ERROR
WARNING = logging.WARNING
WARN = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
NOTSET = logging.NOTSET
# Custom TRACE level defined by oslo.log's handlers module.
TRACE = handlers._TRACE

# Make the custom level render as "TRACE" in formatted output.
logging.addLevelName(TRACE, 'TRACE')

# Map log_rotate_interval_type option values onto the "when" codes
# accepted by logging.handlers.TimedRotatingFileHandler.
LOG_ROTATE_INTERVAL_MAPPING = {
    'seconds': 's',
    'minutes': 'm',
    'hours': 'h',
    'days': 'd',
    'weekday': 'w',
    'midnight': 'midnight'
}
71
72
73def _get_log_file_path(conf, binary=None):
74    logfile = conf.log_file
75    logdir = conf.log_dir
76
77    if logfile and not logdir:
78        return logfile
79
80    if logfile and logdir:
81        return os.path.join(logdir, logfile)
82
83    if logdir:
84        binary = binary or handlers._get_binary_name()
85        return '%s.log' % (os.path.join(logdir, binary),)
86
87    return None
88
89
90def _iter_loggers():
91    """Iterate on existing loggers."""
92
93    # Sadly, Logger.manager and Manager.loggerDict are not documented,
94    # but there is no logging public function to iterate on all loggers.
95
96    # The root logger is not part of loggerDict.
97    yield logging.getLogger()
98
99    manager = logging.Logger.manager
100    for logger in manager.loggerDict.values():
101        if isinstance(logger, logging.PlaceHolder):
102            continue
103        yield logger
104
105
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter adding a trace() method and a handlers property."""

    # Alias so callers can use the warn() spelling, which the stdlib
    # LoggerAdapter does not provide.
    warn = logging.LoggerAdapter.warning

    @property
    def handlers(self):
        # Expose the wrapped logger's handlers, since LoggerAdapter
        # itself does not forward this attribute.
        return self.logger.handlers

    def trace(self, msg, *args, **kwargs):
        """Log ``msg`` at the custom TRACE level."""
        self.log(TRACE, msg, *args, **kwargs)
116
117
class KeywordArgumentAdapter(BaseLoggerAdapter):
    """Logger adapter that moves keyword arguments into record extras.

    Every keyword passed to a log call is folded into the "extra"
    dictionary handed to the underlying logger, so the values are
    emitted with the log message and available to the format string.

    Special keywords:

    extra
      An existing dictionary of extra values to be passed to the
      logger. If present, the dictionary is copied and extended.
    resource
      A dictionary-like object containing a ``name`` key or ``type``
      and ``id`` keys.

    """

    def process(self, msg, kwargs):
        # Start from the extras fixed at construction time, then layer
        # on any per-call "extra" dictionary (copied, never mutated).
        extra = dict(self.extra)
        extra.update(kwargs.pop('extra', {}))
        # Sweep every remaining keyword (except exc_info, which the
        # underlying logger consumes itself) into the extras dict.
        for key in list(kwargs):
            if key != 'exc_info':
                extra[key] = kwargs.pop(key)
        # NOTE(dhellmann): The gap between when the adapter is called
        # and when the formatter needs the extra values is large enough
        # that we cannot get back to the original extra dictionary
        # easily, so a list of its keys is stashed alongside the values;
        # the keys end up as attributes of the LogRecord the formatter
        # sees, letting it distinguish original values from extras (see
        # JSONFormatter for an example). Sorting keeps unit tests sane.
        extra['extra_keys'] = sorted(extra)
        # Hand the merged extras back through the keyword arguments.
        kwargs['extra'] = extra

        # NOTE(jdg): We would like an easy way to add resource info to
        # logging, e.g. a header like 'volume-<uuid>'. Nova implemented
        # this but it is instance-specific, and context's resource_uuid
        # likewise only works for instances with the id set.
        resource = extra.get('resource', None)
        if resource:
            if resource.get('name', None):
                # Many OpenStack resources have a name entry in their db
                # ref of the form <resource_type>-<uuid>; use it as-is.
                # FIXME(jdg): the name format can be configured via a
                # conf entry elsewhere; consider allowing that here too.
                extra['resource'] = ('[' + resource.get('name', '')
                                     + '] ')
            else:
                # No usable name: callers without the full object ref
                # may pass a dict: resource={'type': volume, 'id': uuid}
                res_type = resource.get('type', None)
                res_id = resource.get('id', None)
                if res_type and res_id:
                    extra['resource'] = ('[' + res_type +
                                         '-' + res_id + '] ')

        return msg, kwargs
197
198
199def _create_logging_excepthook(product_name):
200    def logging_excepthook(exc_type, value, tb):
201        extra = {'exc_info': (exc_type, value, tb)}
202        getLogger(product_name).critical('Unhandled error', **extra)
203    return logging_excepthook
204
205
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        # Keep both pieces so __str__ can interpolate them lazily.
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
217
218
def _load_log_config(log_config_append):
    """(Re)load logging configuration from a fileConfig-format file.

    The file's mtime is memoized on the function itself as the
    ``old_time`` attribute so the file is only re-parsed when it
    changes.  NOTE: _mutate_hook() resets ``old_time`` to 0 to force a
    reload, so that attribute name is part of this function's contract.

    :param log_config_append: path to the logging config file
    :raises LogConfigError: if the file is missing or malformed
    """
    try:
        if not hasattr(_load_log_config, "old_time"):
            _load_log_config.old_time = 0
        new_time = os.path.getmtime(log_config_append)
        if _load_log_config.old_time != new_time:
            # Reset all existing loggers before reloading config as fileConfig
            # does not reset non-child loggers.
            for logger in _iter_loggers():
                logger.setLevel(logging.NOTSET)
                logger.handlers = []
                logger.propagate = 1
            logging.config.fileConfig(log_config_append,
                                      disable_existing_loggers=False)
            _load_log_config.old_time = new_time
    except (configparser.Error, KeyError, os.error) as exc:
        # Wrap parser/IO failures in one exception type for callers.
        raise LogConfigError(log_config_append, str(exc))
236
237
def _mutate_hook(conf, fresh):
    """Reconfigures oslo.log according to the mutated options.

    :param conf: the mutated configuration object
    :param fresh: mapping whose keys are (group, option-name) pairs for
        the options that changed
    """

    if (None, 'debug') in fresh:
        _refresh_root_level(conf.debug)

    # Reset the memoized mtime so _load_log_config re-reads the file
    # even if its modification time is unchanged.
    if (None, 'log-config-append') in fresh:
        _load_log_config.old_time = 0
    if conf.log_config_append:
        _load_log_config(conf.log_config_append)
248
249
def register_options(conf):
    """Register the command line and configuration options used by oslo.log.

    Also shares the config object with the formatters module and
    installs the mutate hook used for runtime option updates.

    :param conf: configuration object to register the options on
    """
    # Logging may happen before setup() runs (e.g. from oslo_config
    # itself).  Give the root logger a temporary stderr handler so those
    # early records are not dropped with "No handlers could be found".
    root = logging.getLogger(None)
    if not root.handlers:
        root.addHandler(logging.StreamHandler())

    for cli_opts in (_options.common_cli_opts,
                     _options.logging_cli_opts):
        conf.register_cli_opts(cli_opts)
    conf.register_opts(_options.generic_log_opts)
    conf.register_opts(_options.log_opts)
    formatters._store_global_conf(conf)

    conf.register_mutate_hook(_mutate_hook)
266
267
def setup(conf, product_name, version='unknown'):
    """Setup logging for the current application.

    If the log_config_append option is set, configuration is delegated
    entirely to that fileConfig-format file; otherwise handlers and
    formatters are built from the individual oslo.log options.  Either
    way, uncaught exceptions are routed to the product logger via
    sys.excepthook.

    :param conf: configuration object with oslo.log options registered
    :param product_name: logger name used for unhandled exceptions
    :param version: product version passed through to log formatting
    """
    if conf.log_config_append:
        _load_log_config(conf.log_config_append)
    else:
        _setup_logging_from_conf(conf, product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
275
276
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Set default values for the configuration options used by oslo.log.

    :param logging_context_format_string: new default for the
        logging_context_format_string option; None leaves it unchanged
    :param default_log_levels: new default for the default_log_levels
        option; None leaves it unchanged
    """
    # Both parameters are optional because default_log_levels was
    # introduced later in a backwards in-compatible change; callers may
    # pass either one alone.
    overrides = {}
    if default_log_levels is not None:
        overrides['default_log_levels'] = default_log_levels
    if logging_context_format_string is not None:
        overrides['logging_context_format_string'] = (
            logging_context_format_string)
    if overrides:
        cfg.set_defaults(_options.log_opts, **overrides)
292
293
def tempest_set_log_file(filename):
    """Provide an API for tempest to set the logging filename.

    .. warning:: Only Tempest should use this function.

    We don't want applications to set a default log file, so we don't
    want this in set_defaults(). Because tempest doesn't use a
    configuration file we don't have another convenient way to safely
    set the log file default.

    :param filename: default value for the log_file option
    """
    cfg.set_defaults(_options.logging_cli_opts, log_file=filename)
306
307
308def _find_facility(facility):
309    # NOTE(jd): Check the validity of facilities at run time as they differ
310    # depending on the OS and Python version being used.
311    valid_facilities = [f for f in
312                        ["LOG_KERN", "LOG_USER", "LOG_MAIL",
313                         "LOG_DAEMON", "LOG_AUTH", "LOG_SYSLOG",
314                         "LOG_LPR", "LOG_NEWS", "LOG_UUCP",
315                         "LOG_CRON", "LOG_AUTHPRIV", "LOG_FTP",
316                         "LOG_LOCAL0", "LOG_LOCAL1", "LOG_LOCAL2",
317                         "LOG_LOCAL3", "LOG_LOCAL4", "LOG_LOCAL5",
318                         "LOG_LOCAL6", "LOG_LOCAL7"]
319                        if getattr(syslog, f, None)]
320
321    facility = facility.upper()
322
323    if not facility.startswith("LOG_"):
324        facility = "LOG_" + facility
325
326    if facility not in valid_facilities:
327        raise TypeError(_('syslog facility must be one of: %s') %
328                        ', '.join("'%s'" % fac
329                                  for fac in valid_facilities))
330
331    return getattr(syslog, facility)
332
333
def _refresh_root_level(debug):
    """Set the level of the root logger.

    :param debug: when True the root level becomes DEBUG, otherwise
        INFO
    """
    target = logging.DEBUG if debug else logging.INFO
    getLogger(None).logger.setLevel(target)
345
346
def _setup_logging_from_conf(conf, project, version):
    """Configure the root logger's handlers and formatters from conf.

    Installs file, stderr, journal, Windows event log, and/or syslog
    handlers as configured, applies the context or JSON formatter to
    all of them, sets per-module default levels, and optionally
    installs a rate-limit filter.

    :param conf: configuration object with oslo.log options registered
    :param project: project name passed to the context formatter
    :param version: project version passed to the context formatter
    """
    log_root = getLogger(None).logger

    # Remove all handlers
    for handler in list(log_root.handlers):
        log_root.removeHandler(handler)

    logpath = _get_log_file_path(conf)
    if logpath:
        # On Windows, in-use files cannot be moved or deleted.
        if conf.watch_log_file and platform.system() == 'Linux':
            from oslo_log import watchers
            file_handler = watchers.FastWatchedFileHandler
            filelog = file_handler(logpath)
        elif conf.log_rotation_type.lower() == "interval":
            file_handler = logging.handlers.TimedRotatingFileHandler
            when = conf.log_rotate_interval_type.lower()
            interval_type = LOG_ROTATE_INTERVAL_MAPPING[when]
            # When weekday is configured, "when" has to be a value between
            # 'w0'-'w6' (w0 for Monday, w1 for Tuesday, and so on)'
            if interval_type == 'w':
                interval_type = interval_type + str(conf.log_rotate_interval)
            filelog = file_handler(logpath,
                                   when=interval_type,
                                   interval=conf.log_rotate_interval,
                                   backupCount=conf.max_logfile_count)
        elif conf.log_rotation_type.lower() == "size":
            file_handler = logging.handlers.RotatingFileHandler
            maxBytes = conf.max_logfile_size_mb * units.Mi
            filelog = file_handler(logpath,
                                   maxBytes=maxBytes,
                                   backupCount=conf.max_logfile_count)
        else:
            # Default: reopen the file if it is moved or removed out
            # from under us (e.g. by an external log rotator).
            file_handler = logging.handlers.WatchedFileHandler
            filelog = file_handler(logpath)

        log_root.addHandler(filelog)

    if conf.use_stderr:
        streamlog = handlers.ColorHandler()
        log_root.addHandler(streamlog)

    if conf.use_journal:
        if syslog is None:
            raise RuntimeError("syslog is not available on this platform")
        facility = _find_facility(conf.syslog_log_facility)
        journal = handlers.OSJournalHandler(facility=facility)
        log_root.addHandler(journal)

    if conf.use_eventlog:
        if platform.system() == 'Windows':
            eventlog = logging.handlers.NTEventLogHandler(project)
            log_root.addHandler(eventlog)
        else:
            raise RuntimeError(_("Windows Event Log is not available on this "
                                 "platform."))

    # if None of the above are True, then fall back to standard out
    if not logpath and not conf.use_stderr and not conf.use_journal:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = handlers.ColorHandler(sys.stdout)
        log_root.addHandler(streamlog)

    if conf.publish_errors:
        # Loaded by dotted path so oslo.messaging is only required when
        # publish_errors is actually enabled.
        handler = importutils.import_object(
            "oslo_messaging.notify.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)

    if conf.use_syslog:
        if syslog is None:
            raise RuntimeError("syslog is not available on this platform")
        facility = _find_facility(conf.syslog_log_facility)
        syslog_handler = handlers.OSSysLogHandler(facility=facility)
        log_root.addHandler(syslog_handler)

    # Apply the same formatter to every handler installed above.
    datefmt = conf.log_date_format
    if not conf.use_json:
        for handler in log_root.handlers:
            handler.setFormatter(formatters.ContextFormatter(project=project,
                                                             version=version,
                                                             datefmt=datefmt,
                                                             config=conf))
    else:
        for handler in log_root.handlers:
            handler.setFormatter(formatters.JSONFormatter(datefmt=datefmt))
    _refresh_root_level(conf.debug)

    # default_log_levels entries look like "module=LEVEL".
    for pair in conf.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        numeric_level = None
        try:
            # NOTE(harlowja): integer's are valid level names, and for some
            # libraries they have a lower level than DEBUG that is typically
            # defined at level 5, so to make that accessible, try to convert
            # this to a integer, and if not keep the original...
            numeric_level = int(level_name)
        except ValueError:  # nosec
            pass
        if numeric_level is not None:
            logger.setLevel(numeric_level)
        else:
            logger.setLevel(level_name)

    if conf.rate_limit_burst >= 1 and conf.rate_limit_interval >= 1:
        # Imported lazily: the filter is only needed when enabled.
        from oslo_log import rate_limit
        rate_limit.install_filter(conf.rate_limit_burst,
                                  conf.rate_limit_interval,
                                  conf.rate_limit_except)
458
459
# Cache of logger adapters handed out by getLogger(), keyed by
# (possibly rewritten) logger name.
_loggers = {}
461
462
def get_loggers():
    """Return a copy of the oslo loggers dictionary.

    The copy lets callers inspect the mapping without mutating the
    module-level cache used by getLogger().
    """
    return dict(_loggers)
466
467
def getLogger(name=None, project='unknown', version='unknown'):
    """Build a logger with the given name.

    :param name: The name for the logger. This is usually the module
                 name, ``__name__``.
    :type name: string
    :param project: The name of the project, to be injected into log
                    messages. For example, ``'nova'``.
    :type project: string
    :param version: The version of the project, to be injected into log
                    messages. For example, ``'2014.2'``.
    :type version: string
    """
    # NOTE(dhellmann): For backwards compatibility with the old oslo
    # namespace package logger configurations, and so one logger node
    # can control all oslo logging, "oslo_" prefixes are rewritten to
    # "oslo." before the lookup.
    if name and name.startswith('oslo_'):
        name = 'oslo.%s' % name[5:]
    try:
        return _loggers[name]
    except KeyError:
        adapter = KeywordArgumentAdapter(logging.getLogger(name),
                                         {'project': project,
                                          'version': version})
        _loggers[name] = adapter
        return adapter
493
494
def get_default_log_levels():
    """Return the Oslo Logging default log levels.

    A fresh list is built on every call so an application can change
    the value without affecting the default used in the log_opts
    configuration setup.
    """
    defaults = _options.DEFAULT_LOG_LEVELS
    return list(defaults)
503
504
def is_debug_enabled(conf):
    """Determine if debug logging mode is enabled.

    :param conf: configuration object exposing a ``debug`` attribute
    :returns: the value of ``conf.debug``
    """
    return conf.debug
508