Commit 4398906d authored by Guolin Ke's avatar Guolin Ke
Browse files

fix softmax objective.

parent 841a8987
@@ -175,7 +175,6 @@ public:
     if (weights_ == nullptr) {
       #pragma omp parallel for schedule(static) reduction(+:sum_loss)
       for (data_size_t i = 0; i < num_data_; ++i) {
-        std::vector<double> rec(num_class_);
         size_t idx = static_cast<size_t>(num_data_) * static_cast<int>(label_[i]) + i;
         double prob = 1.0f / (1.0f + std::exp(-sigmoid_ * score[idx]));
         if (prob < kEpsilon) { prob = kEpsilon; }
......
@@ -56,7 +56,7 @@ public:
       } else {
         gradients[idx] = static_cast<score_t>(p);
       }
-      hessians[idx] = static_cast<score_t>(p * (1.0f - p));
+      hessians[idx] = static_cast<score_t>(2.0f * p * (1.0f - p));
     }
   }
 } else {
@@ -77,7 +77,7 @@ public:
       } else {
         gradients[idx] = static_cast<score_t>(p * weights_[i]);
       }
-      hessians[idx] = static_cast<score_t>(p * (1.0f - p) * weights_[i]);
+      hessians[idx] = static_cast<score_t>(2.0f * p * (1.0f - p) * weights_[i]);
     }
   }
 }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment