/*!
 * Copyright 2017-2019 XGBoost contributors
 */
#ifndef XGBOOST_OBJECTIVE_REGRESSION_LOSS_H_
#define XGBOOST_OBJECTIVE_REGRESSION_LOSS_H_

#include <dmlc/omp.h>
#include <xgboost/logging.h>
#include <algorithm>
#include "../common/math.h"

namespace xgboost {
namespace obj {

// common regressions
// linear regression
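// Squared error: L(predt, label) = 0.5 * (predt - label)^2, so the first-order
// gradient is (predt - label) and the second-order gradient (hessian) is 1.
// Predictions are used as-is and any real-valued label is accepted.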
struct LinearSquareLoss {
  // duplication is necessary, as __device__ specifier
  // cannot be made conditional on template parameter
  XGBOOST_DEVICE static bst_float PredTransform(bst_float x) { return x; }
  XGBOOST_DEVICE static bool CheckLabel(bst_float) { return true; }
  XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
    return predt - label;
  }
  XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float, bst_float) {
    return 1.0f;
  }
  template <typename T>
  static T PredTransform(T x) { return x; }
  template <typename T>
  static T FirstOrderGradient(T predt, T label) { return predt - label; }
  template <typename T>
  static T SecondOrderGradient(T predt, T label) { return T(1.0f); }
  static bst_float ProbToMargin(bst_float base_score) { return base_score; }
  static const char* LabelErrorMsg() { return ""; }
  static const char* DefaultEvalMetric() { return "rmse"; }

  static const char* Name() { return "reg:squarederror"; }
};

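// Squared log error: L(predt, label) = 0.5 * (log1p(predt) - log1p(label))^2.
// The gradient is (log1p(predt) - log1p(label)) / (predt + 1) and the hessian is
// (1 - log1p(predt) + log1p(label)) / (predt + 1)^2. Both clamp predt away from
// -1 so log1p stays defined, and the hessian is floored at 1e-6 to stay positive.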
struct SquaredLogError {
  XGBOOST_DEVICE static bst_float PredTransform(bst_float x) { return x; }
  XGBOOST_DEVICE static bool CheckLabel(bst_float label) {
    return label > -1;
  }
  XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
    predt = fmaxf(predt, -1 + 1e-6);  // ensure correct value for log1p
    return (std::log1p(predt) - std::log1p(label)) / (predt + 1);
  }
  XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
    predt = fmaxf(predt, -1 + 1e-6);
    float res = (-std::log1p(predt) + std::log1p(label) + 1) /
                std::pow(predt + 1, 2);
    res = fmaxf(res, 1e-6f);
    return res;
  }
  static bst_float ProbToMargin(bst_float base_score) { return base_score; }
  static const char* LabelErrorMsg() {
    return "label must be greater than -1 for rmsle so that log(label + 1) can be valid.";
  }
  static const char* DefaultEvalMetric() { return "rmsle"; }

  static const char* Name() { return "reg:squaredlogerror"; }
};

// logistic loss for probability regression task
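// Cross-entropy on a sigmoid-transformed margin: with p = sigmoid(x) and
// L(p, y) = -[y * log(p) + (1 - y) * log(1 - p)], the gradient w.r.t. the raw
// margin x is (p - y) and the hessian is p * (1 - p), clamped below by eps so
// it never vanishes. Labels may be any value in [0, 1], not just {0, 1}.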
struct LogisticRegression {
  // duplication is necessary, as __device__ specifier
  // cannot be made conditional on template parameter
  XGBOOST_DEVICE static bst_float PredTransform(bst_float x) { return common::Sigmoid(x); }
  XGBOOST_DEVICE static bool CheckLabel(bst_float x) { return x >= 0.0f && x <= 1.0f; }
  XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
    return predt - label;
  }
  XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float predt, bst_float) {
    const float eps = 1e-16f;
    return fmaxf(predt * (1.0f - predt), eps);
  }
  template <typename T>
  static T PredTransform(T x) { return common::Sigmoid(x); }
  template <typename T>
  static T FirstOrderGradient(T predt, T label) { return predt - label; }
  template <typename T>
  static T SecondOrderGradient(T predt, T label) {
    const T eps = T(1e-16f);
    return std::max(predt * (T(1.0f) - predt), eps);
  }
  static bst_float ProbToMargin(bst_float base_score) {
    CHECK(base_score > 0.0f && base_score < 1.0f)
        << "base_score must be in (0,1) for logistic loss, got: " << base_score;
    return -logf(1.0f / base_score - 1.0f);
  }
  static const char* LabelErrorMsg() {
    return "label must be in [0,1] for logistic regression";
  }
  static const char* DefaultEvalMetric() { return "rmse"; }

  static const char* Name() { return "reg:logistic"; }
};

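// Pseudo-Huber loss with delta = 1: with z = predt - label,
// L(z) = sqrt(1 + z^2) - 1, which behaves like 0.5 * z^2 near zero and like |z|
// for large residuals. The gradient is z / sqrt(1 + z^2) and the hessian is
// 1 / (1 + z^2)^(3/2).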
struct PseudoHuberError {
  XGBOOST_DEVICE static bst_float PredTransform(bst_float x) {
    return x;
  }
  XGBOOST_DEVICE static bool CheckLabel(bst_float) {
    return true;
  }
  XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
    const float z = predt - label;
    const float scale_sqrt = std::sqrt(1 + std::pow(z, 2));
    return z / scale_sqrt;
  }
  XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
    const float scale = 1 + std::pow(predt - label, 2);
    const float scale_sqrt = std::sqrt(scale);
    return 1 / (scale * scale_sqrt);
  }
  static bst_float ProbToMargin(bst_float base_score) {
    return base_score;
  }
  static const char* LabelErrorMsg() {
    return "";
  }
  static const char* DefaultEvalMetric() {
    return "mphe";
  }
  static const char* Name() {
    return "reg:pseudohubererror";
  }
};

// logistic loss for binary classification task
struct LogisticClassification : public LogisticRegression {
  static const char* DefaultEvalMetric() { return "logloss"; }
  static const char* Name() { return "binary:logistic"; }
};

// logistic loss, but predict un-transformed margin
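// Same loss as LogisticRegression, but predictions are reported as raw margins
// (no sigmoid in PredTransform), so the gradient routines apply the sigmoid
// themselves before computing (p - y) and p * (1 - p).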
struct LogisticRaw : public LogisticRegression {
  // duplication is necessary, as __device__ specifier
  // cannot be made conditional on template parameter
  XGBOOST_DEVICE static bst_float PredTransform(bst_float x) { return x; }
  XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
    predt = common::Sigmoid(predt);
    return predt - label;
  }
  XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float predt, bst_float) {
    const float eps = 1e-16f;
    predt = common::Sigmoid(predt);
    return fmaxf(predt * (1.0f - predt), eps);
  }
  template <typename T>
  static T PredTransform(T x) { return x; }
  template <typename T>
  static T FirstOrderGradient(T predt, T label) {
    predt = common::Sigmoid(predt);
    return predt - label;
  }
  template <typename T>
  static T SecondOrderGradient(T predt, T label) {
    const T eps = T(1e-16f);
    predt = common::Sigmoid(predt);
    return std::max(predt * (T(1.0f) - predt), eps);
  }
  static bst_float ProbToMargin(bst_float base_score) {
    return base_score;
  }
  static const char* DefaultEvalMetric() { return "logloss"; }

  static const char* Name() { return "binary:logitraw"; }
};
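
// Illustrative sketch only (not part of this header): an objective
// implementation would typically consume one of these loss policies as a
// template parameter and fill per-row gradient statistics, roughly along the
// lines below. The names ComputeGradients, preds, labels and out_gpair are
// placeholders for this sketch, not identifiers defined here.
//
//   template <typename Loss>
//   void ComputeGradients(const std::vector<bst_float>& preds,
//                         const std::vector<bst_float>& labels,
//                         std::vector<GradientPair>* out_gpair) {
//     for (size_t i = 0; i < preds.size(); ++i) {
//       bst_float p = Loss::PredTransform(preds[i]);
//       CHECK(Loss::CheckLabel(labels[i])) << Loss::LabelErrorMsg();
//       (*out_gpair)[i] = GradientPair(Loss::FirstOrderGradient(p, labels[i]),
//                                      Loss::SecondOrderGradient(p, labels[i]));
//     }
//   }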

}  // namespace obj
}  // namespace xgboost

#endif  // XGBOOST_OBJECTIVE_REGRESSION_LOSS_H_