<!
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
 regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
 "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
!>
import copy
import logging

# The surrounding generated script may already configure a logger; this module-level
# logger is a reasonable fallback so the classes below are usable on their own.
logger = logging.getLogger(__name__)


class TrainMetrics(object):

    def __init__(self, display=None, average_loss=1):
        # Instance-level map so separate TrainMetrics objects do not share state.
        self.metric_map = {}
        self.average_loss = average_loss
        self.display = display

    def process(self, batch_num, module, label):
        if self.display is None:
            return

        if self.average_loss == 1:
            if batch_num % self.display == 0:
                self.update_metrics(module, label, reset=True)
                self.print_metrics(batch_num)
        else:
            # Metrics must be printed 'average_loss' iterations from now.
            # Append a metric which will get updated starting now.
            if (batch_num + self.average_loss) % self.display == 0:
                self.append_one()

            # Less than 'average_loss' iterations away from a display step. Update metrics.
            if (batch_num + self.average_loss) % self.display \< self.average_loss:
                self.update_metrics(module, label)

            # At a display step. Print metrics.
            if batch_num % self.display == 0:
                self.print_metrics(batch_num, remove_heads=True)

    def add(self, metric):
        self.metric_map[metric.name] = [metric]

    def append_one(self):
        # Start a fresh copy of every metric; it accumulates from this iteration onwards.
        for lst in self.metric_map.values():
            last_element = lst[-1]
            new_element = copy.deepcopy(last_element)
            new_element.reset()
            lst.append(new_element)

    def update_metrics(self, module, label, reset=False):
        for lst in self.metric_map.values():
            for metric in lst:
                if reset:
                    metric.reset()
                module.update_metric(metric, label)

    def print_metrics(self, batch_num, remove_heads=False):
        total_loss = 0
        for lst in self.metric_map.values():
            total_loss += lst[0].get()[1]

        logger.info("Iteration %d, loss = %f", batch_num, total_loss)

        for lst in self.metric_map.values():
            if remove_heads:
                metric = lst.pop(0)
            else:
                metric = lst[0]

            logger.info("    %s", metric)


class TestMetrics(object):

    def __init__(self):
        # Instance-level list so separate TestMetrics objects do not share state.
        self.metrics = []

    def add(self, metric):
        self.metrics.append(metric)

    def score_and_print(self, module, itr, num_batch):
        for metric in self.metrics:
            metric.reset()
            module.score(itr, metric, num_batch=num_batch)
            logger.info("    %s", metric)

<if(display)>
display = <display>
<endif>
<if(average_loss)>
average_loss = <average_loss>
<endif>
train_metrics = TrainMetrics(<if(display)>display=display, <endif><if(average_loss)>average_loss=average_loss<endif>)
test_metrics = TestMetrics()
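
# Usage sketch (illustrative only): 'module', 'train_iter' and 'test_iter' are
# hypothetical names assumed to be defined by the surrounding generated training
# script; this shows how the objects created above are typically driven.
#
#     for batch_num, batch in enumerate(train_iter):
#         module.forward_backward(batch)
#         module.update()
#         train_metrics.process(batch_num, module, batch.label)
#     test_metrics.score_and_print(module, test_iter, num_batch=None)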