#ifndef OUTPUT_H_
#define OUTPUT_H_

#include <Eigen/Core>
#include <stdexcept>
#include <string>
#include "Config.h"

namespace MiniDNN
{


///
/// \defgroup Outputs Output Layers
///

///
/// \ingroup Outputs
///
/// The interface of the output layer of a neural network model. The output
/// layer is a special layer that associates the last hidden layer with the
/// target response variable.
///
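/// A concrete output layer derives from this class and implements evaluate(),
/// backprop_data(), loss(), and output_type(). The sketch below is not part of
/// the library; it shows how a hypothetical mean-squared-error regression
/// output could satisfy the interface, with the class name and the loss
/// scaling chosen purely for illustration.
///
/// \code
/// class ExampleMSEOutput: public Output
/// {
///     private:
///         Matrix m_din;   // derivative of the loss w.r.t. the input of this layer
///
///     public:
///         void evaluate(const Matrix& prev_layer_data, const Matrix& target)
///         {
///             const int nobs = prev_layer_data.cols();   // one observation per column
///             // Gradient of L = ||input - target||^2 / (2 * nobs) w.r.t. the input
///             m_din = (prev_layer_data - target) / Scalar(nobs);
///         }
///
///         const Matrix& backprop_data() const { return m_din; }
///
///         Scalar loss() const
///         {
///             // Recover L = ||input - target||^2 / (2 * nobs) from the cached m_din
///             return m_din.squaredNorm() * Scalar(m_din.cols()) / Scalar(2);
///         }
///
///         std::string output_type() const { return "ExampleMSEOutput"; }
/// };
/// \endcode
///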
class Output
{
    protected:
        typedef Eigen::Matrix<Scalar, Eigen::Dynamic, Eigen::Dynamic> Matrix;
        typedef Eigen::Matrix<Scalar, Eigen::Dynamic, 1> Vector;
        typedef Eigen::RowVectorXi IntegerVector;

    public:
        virtual ~Output() {}

        // Check the format of target data, e.g. in classification problems the
        // target data should be binary (either 0 or 1)
        virtual void check_target_data(const Matrix& target) {}

        // Another type of target data where each element is a class label
        // This version may not be sensible for regression tasks, so by default
        // we raise an exception
        virtual void check_target_data(const IntegerVector& target)
        {
            throw std::invalid_argument("[class Output]: This output type cannot take class labels as target data");
        }

        // A combination of the forward stage and the back-propagation stage for the output layer
        // The computed derivative of the input should be stored in this layer, and can be retrieved by
        // the backprop_data() function
        virtual void evaluate(const Matrix& prev_layer_data, const Matrix& target) = 0;

        // Evaluation for the other type of target data, where each element is a class label
        // This version may not be sensible for regression tasks, so by default
        // we raise an exception
        virtual void evaluate(const Matrix& prev_layer_data,
                              const IntegerVector& target)
        {
            throw std::invalid_argument("[class Output]: This output type cannot take class labels as target data");
        }

        // The derivative of the input of this layer, which is also the derivative
        // of the output of the previous layer
        virtual const Matrix& backprop_data() const = 0;

        // Return the loss function value after the evaluation
        // This function can be assumed to be called after evaluate(), so that it can make use of the
        // intermediate result to save some computation
        virtual Scalar loss() const = 0;

        // Return the output layer type. It is used to export the NN model.
        virtual std::string output_type() const = 0;
};
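
// Typical call sequence in a training step (a sketch; `output`, `target`, and
// `last_layer_output` are illustrative names, not defined in this header):
//
//     output.check_target_data(target);            // validate the target format once
//     output.evaluate(last_layer_output, target);  // forward pass + gradient of the loss
//     Scalar current_loss = output.loss();         // reuses the result cached by evaluate()
//     // output.backprop_data() then returns dL/d(input) for the last hidden layer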


} // namespace MiniDNN


#endif /* OUTPUT_H_ */