# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# coding: utf-8
19"""TensorBoard functions that can be used to log various status during epoch."""
20
21import logging
22
23
class LogMetricsCallback(object):
    """Log metrics periodically in TensorBoard.

    This callback works almost the same as `callback.Speedometer`, but writes
    a TensorBoard event file for visualization. For more usage, please refer
    to https://github.com/awslabs/mxboard

    Parameters
    ----------
    logging_dir : str
        TensorBoard event file directory.
        After that, use `tensorboard --logdir=path/to/logs` to launch TensorBoard visualization.
    prefix : str
        Prefix for a metric name of `scalar` value.
        You might want to use this param to leverage TensorBoard plot feature,
        where TensorBoard plots different curves in one graph when they have same `name`.
        The following example shows the usage (how to compare a train and eval metric
        in a same graph).

    Examples
    --------
    >>> # log train and eval metrics under different directories.
    >>> training_log = 'logs/train'
    >>> evaluation_log = 'logs/eval'
    >>> # in this case, each training and evaluation metric pairs has same name,
    >>> # you can add a prefix to make it separate.
    >>> batch_end_callbacks = [mx.contrib.tensorboard.LogMetricsCallback(training_log)]
    >>> eval_end_callbacks = [mx.contrib.tensorboard.LogMetricsCallback(evaluation_log)]
    >>> # run
    >>> model.fit(train,
    >>>     ...
    >>>     batch_end_callback = batch_end_callbacks,
    >>>     eval_end_callback  = eval_end_callbacks)
    >>> # Then use `tensorboard --logdir=logs/` to launch TensorBoard visualization.
    """
    def __init__(self, logging_dir, prefix=None):
        self.prefix = prefix
        # Default to None so that a missing mxboard package leaves the callback
        # in a well-defined, inert state instead of an AttributeError later
        # (the original code never set the attribute on import failure).
        self.summary_writer = None
        try:
            # Imported lazily so the callback module loads even without mxboard.
            from mxboard import SummaryWriter
            self.summary_writer = SummaryWriter(logging_dir)
        except ImportError:
            logging.error('You can install mxboard via `pip install mxboard`.')

    def __call__(self, param):
        """Callback to log training speed and metrics in TensorBoard.

        Parameters
        ----------
        param : object
            Callback parameter; expected to expose `eval_metric` (with a
            `get_name_value()` method returning (name, value) pairs, or None)
            and an integer `epoch` used as the global step.
        """
        # No-op when mxboard was unavailable at construction time.
        if self.summary_writer is None:
            return
        if param.eval_metric is None:
            return
        name_value = param.eval_metric.get_name_value()
        for name, value in name_value:
            if self.prefix is not None:
                # Namespace the metric so train/eval curves can share a graph.
                name = '%s-%s' % (self.prefix, name)
            self.summary_writer.add_scalar(name, value, global_step=param.epoch)
73