/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#include <LightGBM/objective_function.h>

#include "binary_objective.hpp"
#include "multiclass_objective.hpp"
#include "rank_objective.hpp"
#include "regression_objective.hpp"
#include "xentropy_objective.hpp"

#include "cuda/cuda_binary_objective.hpp"
#include "cuda/cuda_multiclass_objective.hpp"
#include "cuda/cuda_rank_objective.hpp"
#include "cuda/cuda_regression_objective.hpp"

namespace LightGBM {
ObjectiveFunction* ObjectiveFunction::CreateObjectiveFunction(const std::string& type, const Config& config) {
21
  #ifdef USE_CUDA_EXP
shiyu1994's avatar
shiyu1994 committed
22
  if (config.device_type == std::string("cuda_exp") && config.boosting == std::string("gbdt")) {
23
    if (type == std::string("regression")) {
24
      return new CUDARegressionL2loss(config);
25
    } else if (type == std::string("regression_l1")) {
26
      return new CUDARegressionL1loss(config);
27
28
29
30
    } else if (type == std::string("quantile")) {
      Log::Warning("Objective quantile is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new RegressionQuantileloss(config);
    } else if (type == std::string("huber")) {
31
      return new CUDARegressionHuberLoss(config);
32
    } else if (type == std::string("fair")) {
33
      return new CUDARegressionFairLoss(config);
34
    } else if (type == std::string("poisson")) {
35
      return new CUDARegressionPoissonLoss(config);
36
    } else if (type == std::string("binary")) {
37
      return new CUDABinaryLogloss(config);
38
    } else if (type == std::string("lambdarank")) {
39
      return new CUDALambdarankNDCG(config);
40
    } else if (type == std::string("rank_xendcg")) {
41
      return new CUDARankXENDCG(config);
42
    } else if (type == std::string("multiclass")) {
43
      return new CUDAMulticlassSoftmax(config);
44
    } else if (type == std::string("multiclassova")) {
45
      return new CUDAMulticlassOVA(config);
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
    } else if (type == std::string("cross_entropy")) {
      Log::Warning("Objective cross_entropy is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new CrossEntropy(config);
    } else if (type == std::string("cross_entropy_lambda")) {
      Log::Warning("Objective cross_entropy_lambda is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new CrossEntropyLambda(config);
    } else if (type == std::string("mape")) {
      Log::Warning("Objective mape is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new RegressionMAPELOSS(config);
    } else if (type == std::string("gamma")) {
      Log::Warning("Objective gamma is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new RegressionGammaLoss(config);
    } else if (type == std::string("tweedie")) {
      Log::Warning("Objective tweedie is not implemented in cuda_exp version. Fall back to boosting on CPU.");
      return new RegressionTweedieLoss(config);
    } else if (type == std::string("custom")) {
      Log::Warning("Using customized objective with cuda_exp. This requires copying gradients from CPU to GPU, which can be slow.");
      return nullptr;
    }
  } else {
  #endif  // USE_CUDA_EXP
    if (type == std::string("regression")) {
      return new RegressionL2loss(config);
    } else if (type == std::string("regression_l1")) {
      return new RegressionL1loss(config);
    } else if (type == std::string("quantile")) {
      return new RegressionQuantileloss(config);
    } else if (type == std::string("huber")) {
      return new RegressionHuberLoss(config);
    } else if (type == std::string("fair")) {
      return new RegressionFairLoss(config);
    } else if (type == std::string("poisson")) {
      return new RegressionPoissonLoss(config);
    } else if (type == std::string("binary")) {
      return new BinaryLogloss(config);
    } else if (type == std::string("lambdarank")) {
      return new LambdarankNDCG(config);
    } else if (type == std::string("rank_xendcg")) {
      return new RankXENDCG(config);
    } else if (type == std::string("multiclass")) {
      return new MulticlassSoftmax(config);
    } else if (type == std::string("multiclassova")) {
      return new MulticlassOVA(config);
    } else if (type == std::string("cross_entropy")) {
      return new CrossEntropy(config);
    } else if (type == std::string("cross_entropy_lambda")) {
      return new CrossEntropyLambda(config);
    } else if (type == std::string("mape")) {
      return new RegressionMAPELOSS(config);
    } else if (type == std::string("gamma")) {
      return new RegressionGammaLoss(config);
    } else if (type == std::string("tweedie")) {
      return new RegressionTweedieLoss(config);
    } else if (type == std::string("custom")) {
      return nullptr;
    }
  #ifdef USE_CUDA_EXP
Guolin Ke's avatar
Guolin Ke committed
103
  }
104
  #endif  // USE_CUDA_EXP
105
  Log::Fatal("Unknown objective type name: %s", type.c_str());
Guolin Ke's avatar
Guolin Ke committed
106
  return nullptr;
Guolin Ke's avatar
Guolin Ke committed
107
}
108
109

/*!
 * \brief Factory: re-create an objective function from its serialized model string.
 * \param str Objective line from a saved model; the first space-separated
 *        token is the objective name, the remaining tokens are its parameters.
 * \return Newly allocated objective (caller takes ownership); nullptr for
 *         "custom"; Log::Fatal (does not return) on an unknown objective name.
 */
ObjectiveFunction* ObjectiveFunction::CreateObjectiveFunction(const std::string& str) {
  auto strs = Common::Split(str.c_str(), ' ');
  auto type = strs[0];
  if (type == std::string("regression")) {
    return new RegressionL2loss(strs);
  } else if (type == std::string("regression_l1")) {
    return new RegressionL1loss(strs);
  } else if (type == std::string("quantile")) {
    return new RegressionQuantileloss(strs);
  } else if (type == std::string("huber")) {
    return new RegressionHuberLoss(strs);
  } else if (type == std::string("fair")) {
    return new RegressionFairLoss(strs);
  } else if (type == std::string("poisson")) {
    return new RegressionPoissonLoss(strs);
  } else if (type == std::string("binary")) {
    return new BinaryLogloss(strs);
  } else if (type == std::string("lambdarank")) {
    return new LambdarankNDCG(strs);
  } else if (type == std::string("rank_xendcg")) {
    return new RankXENDCG(strs);
  } else if (type == std::string("multiclass")) {
    return new MulticlassSoftmax(strs);
  } else if (type == std::string("multiclassova")) {
    return new MulticlassOVA(strs);
  } else if (type == std::string("cross_entropy")) {
    return new CrossEntropy(strs);
  } else if (type == std::string("cross_entropy_lambda")) {
    return new CrossEntropyLambda(strs);
  } else if (type == std::string("mape")) {
    return new RegressionMAPELOSS(strs);
  } else if (type == std::string("gamma")) {
    return new RegressionGammaLoss(strs);
  } else if (type == std::string("tweedie")) {
    return new RegressionTweedieLoss(strs);
  } else if (type == std::string("custom")) {
    return nullptr;
  }
  Log::Fatal("Unknown objective type name: %s", type.c_str());
  return nullptr;
}

}  // namespace LightGBM