/*!
 * Copyright 2015-2019 by Contributors
 * \file custom_metric.cc
 * \brief This is an example of defining a plugin for xgboost.
 *  This plugin defines an additional objective function.
 */
#include <xgboost/base.h>
#include <xgboost/parameter.h>
#include <xgboost/objective.h>
#include <xgboost/json.h>
#include <cmath>  // for std::exp and std::log

namespace xgboost {
namespace obj {

// This is a helper data structure for defining parameters.
// You do not have to use it.
// See http://dmlc-core.readthedocs.org/en/latest/parameter.html
// for an introduction to this module.
struct MyLogisticParam : public XGBoostParameter<MyLogisticParam> {
  float scale_neg_weight;
  // declare parameters
  DMLC_DECLARE_PARAMETER(MyLogisticParam) {
    DMLC_DECLARE_FIELD(scale_neg_weight).set_default(1.0f).set_lower_bound(0.0f)
        .describe("Scale the weight of negative examples by this factor");
  }
};

DMLC_REGISTER_PARAMETER(MyLogisticParam);

// Define a customized logistic regression objective in C++.
// Implement the interface.
class MyLogistic : public ObjFunction {
 public:
  void Configure(const std::vector<std::pair<std::string, std::string> >& args) override {
    param_.UpdateAllowUnknown(args);
  }
  void GetGradient(const HostDeviceVector<bst_float> &preds,
                   const MetaInfo &info,
                   int iter,
                   HostDeviceVector<GradientPair> *out_gpair) override {
    out_gpair->Resize(preds.Size());
    const std::vector<bst_float>& preds_h = preds.HostVector();
    std::vector<GradientPair>& out_gpair_h = out_gpair->HostVector();
    const std::vector<bst_float>& labels_h = info.labels_.HostVector();
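    // For the weighted logistic (binary cross-entropy) loss
    //   L = -w * [y * log(p) + (1 - y) * log(1 - p)],  with p = sigmoid(margin),
    // the derivatives with respect to the raw margin are
    //   dL/dmargin   = w * (p - y)        (first order)
    //   d2L/dmargin2 = w * p * (1 - p)    (second order)
    // which is exactly what the loop below fills into out_gpair.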
    for (size_t i = 0; i < preds_h.size(); ++i) {
      bst_float w = info.GetWeight(i);
      // scale the negative examples!
      if (labels_h[i] == 0.0f) w *= param_.scale_neg_weight;
      // logistic transformation
      bst_float p = 1.0f / (1.0f + std::exp(-preds_h[i]));
      // this is the gradient
      bst_float grad = (p - labels_h[i]) * w;
      // this is the second order gradient
      bst_float hess = p * (1.0f - p) * w;
      out_gpair_h.at(i) = GradientPair(grad, hess);
    }
  }
  const char* DefaultEvalMetric() const override {
    return "logloss";
  }
  void PredTransform(HostDeviceVector<bst_float> *io_preds) const override {
    // transform margin value to probability.
    std::vector<bst_float> &preds = io_preds->HostVector();
    for (auto& pred : preds) {
      pred = 1.0f / (1.0f + std::exp(-pred));
    }
  }
  bst_float ProbToMargin(bst_float base_score) const override {
    // transform probability to margin value: the inverse of the sigmoid (logit)
    return -std::log(1.0f / base_score - 1.0f);
  }

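  // SaveConfig()/LoadConfig() are the serialization hooks: they write this
  // objective's configuration (including MyLogisticParam) into the model's
  // JSON document and read it back.  The "name" entry should match the string
  // passed to XGBOOST_REGISTER_OBJECTIVE below, since that name is used to
  // re-create the objective when a saved model is loaded.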
  void SaveConfig(Json* p_out) const override {
    auto& out = *p_out;
    out["name"] = String("mylogistic");
    out["my_logistic_param"] = ToJson(param_);
  }

  void LoadConfig(Json const& in) override {
    FromJson(in["my_logistic_param"], &param_);
  }

 private:
  MyLogisticParam param_;
};

// Finally, register the objective function.
// After registration succeeds, you can use xgboost with objective=mylogistic.
XGBOOST_REGISTER_OBJECTIVE(MyLogistic, "mylogistic")
.describe("User defined logistic regression plugin")
.set_body([]() { return new MyLogistic(); });
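// Example (illustrative): once the plugin is compiled into the xgboost
// library, the objective is selected by the registered name, e.g. with
// training parameters such as
//   {"objective": "mylogistic", "scale_neg_weight": 2.0}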

}  // namespace obj
}  // namespace xgboost