/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#include <LightGBM/objective_function.h>

#include "binary_objective.hpp"
#include "multiclass_objective.hpp"
#include "rank_objective.hpp"
#include "regression_objective.hpp"
#include "xentropy_objective.hpp"

namespace LightGBM {

namespace {

/*!
 * \brief Creates the CPU implementation of the objective named by \a type.
 * \param type Objective name, e.g. "regression", "binary", "lambdarank".
 * \param config Configuration forwarded to the objective's constructor.
 * \return Heap-allocated objective (caller owns it), or nullptr when \a type
 *         is not a known built-in objective name. The special name "custom"
 *         must be handled by the caller and never reach this function.
 */
ObjectiveFunction* CreateCPUObjective(const std::string& type, const Config& config) {
  if (type == std::string("regression")) {
    return new RegressionL2loss(config);
  } else if (type == std::string("regression_l1")) {
    return new RegressionL1loss(config);
  } else if (type == std::string("quantile")) {
    return new RegressionQuantileloss(config);
  } else if (type == std::string("huber")) {
    return new RegressionHuberLoss(config);
  } else if (type == std::string("fair")) {
    return new RegressionFairLoss(config);
  } else if (type == std::string("poisson")) {
    return new RegressionPoissonLoss(config);
  } else if (type == std::string("binary")) {
    return new BinaryLogloss(config);
  } else if (type == std::string("lambdarank")) {
    return new LambdarankNDCG(config);
  } else if (type == std::string("rank_xendcg")) {
    return new RankXENDCG(config);
  } else if (type == std::string("multiclass")) {
    return new MulticlassSoftmax(config);
  } else if (type == std::string("multiclassova")) {
    return new MulticlassOVA(config);
  } else if (type == std::string("cross_entropy")) {
    return new CrossEntropy(config);
  } else if (type == std::string("cross_entropy_lambda")) {
    return new CrossEntropyLambda(config);
  } else if (type == std::string("mape")) {
    return new RegressionMAPELOSS(config);
  } else if (type == std::string("gamma")) {
    return new RegressionGammaLoss(config);
  } else if (type == std::string("tweedie")) {
    return new RegressionTweedieLoss(config);
  }
  return nullptr;  // unknown objective name; caller reports the fatal error
}

}  // anonymous namespace

/*!
 * \brief Factory: creates the objective selected by \a type for training.
 *
 * In cuda_exp builds every objective currently falls back to the CPU
 * implementation (with a warning); the dispatch table itself is shared with
 * the plain CPU path via CreateCPUObjective to avoid duplicating it.
 *
 * \param type Objective name from the configuration.
 * \param config Full configuration, forwarded to the objective's constructor.
 * \return Heap-allocated objective owned by the caller; nullptr for "custom"
 *         (gradients are supplied externally by the user).
 */
ObjectiveFunction* ObjectiveFunction::CreateObjectiveFunction(const std::string& type, const Config& config) {
  #ifdef USE_CUDA_EXP
  if (config.device_type == std::string("cuda_exp")) {
    if (type == std::string("custom")) {
      Log::Warning("Using customized objective with cuda_exp. This requires copying gradients from CPU to GPU, which can be slow.");
      return nullptr;
    }
    ObjectiveFunction* objective = CreateCPUObjective(type, config);
    if (objective != nullptr) {
      // No cuda_exp implementation exists for any objective yet, so every
      // known objective boosts on CPU; the message matches the per-type
      // warnings this function used to emit verbatim.
      Log::Warning("Objective %s is not implemented in cuda_exp version. Fall back to boosting on CPU.", type.c_str());
      return objective;
    }
  } else {
  #endif  // USE_CUDA_EXP
    if (type == std::string("custom")) {
      // Customized objective: the user provides gradients, no object needed.
      return nullptr;
    }
    ObjectiveFunction* objective = CreateCPUObjective(type, config);
    if (objective != nullptr) {
      return objective;
    }
  #ifdef USE_CUDA_EXP
  }
  #endif  // USE_CUDA_EXP
  Log::Fatal("Unknown objective type name: %s", type.c_str());
  return nullptr;
}
/*!
 * \brief Deserialization factory: rebuilds an objective from a saved model.
 *
 * \param str The objective section of a model file, formatted as
 *            "<name> <param> <param> ...". Token 0 selects the concrete
 *            class; the whole token list is handed to its string-loading
 *            constructor so it can restore its own parameters.
 * \return Heap-allocated objective owned by the caller; nullptr for "custom"
 *         (gradients are supplied externally by the user).
 */
ObjectiveFunction* ObjectiveFunction::CreateObjectiveFunction(const std::string& str) {
  auto tokens = Common::Split(str.c_str(), ' ');
  const auto& name = tokens[0];
  if (name == std::string("regression")) {
    return new RegressionL2loss(tokens);
  }
  if (name == std::string("regression_l1")) {
    return new RegressionL1loss(tokens);
  }
  if (name == std::string("quantile")) {
    return new RegressionQuantileloss(tokens);
  }
  if (name == std::string("huber")) {
    return new RegressionHuberLoss(tokens);
  }
  if (name == std::string("fair")) {
    return new RegressionFairLoss(tokens);
  }
  if (name == std::string("poisson")) {
    return new RegressionPoissonLoss(tokens);
  }
  if (name == std::string("binary")) {
    return new BinaryLogloss(tokens);
  }
  if (name == std::string("lambdarank")) {
    return new LambdarankNDCG(tokens);
  }
  if (name == std::string("rank_xendcg")) {
    return new RankXENDCG(tokens);
  }
  if (name == std::string("multiclass")) {
    return new MulticlassSoftmax(tokens);
  }
  if (name == std::string("multiclassova")) {
    return new MulticlassOVA(tokens);
  }
  if (name == std::string("cross_entropy")) {
    return new CrossEntropy(tokens);
  }
  if (name == std::string("cross_entropy_lambda")) {
    return new CrossEntropyLambda(tokens);
  }
  if (name == std::string("mape")) {
    return new RegressionMAPELOSS(tokens);
  }
  if (name == std::string("gamma")) {
    return new RegressionGammaLoss(tokens);
  }
  if (name == std::string("tweedie")) {
    return new RegressionTweedieLoss(tokens);
  }
  if (name == std::string("custom")) {
    // Customized objective: the user provides gradients, nothing to build.
    return nullptr;
  }
  Log::Fatal("Unknown objective type name: %s", name.c_str());
  return nullptr;
}

}  // namespace LightGBM