Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
tianlh
LightGBM-DCU
Commits
85e90f21
Commit
85e90f21
authored
Oct 24, 2016
by
Hui Xue
Browse files
update for typo.
parent
1aefcd8a
Changes
7
Hide whitespace changes
Inline
Side-by-side
Showing
7 changed files
with
14 additions
and
14 deletions
+14
-14
src/boosting/gbdt.h
src/boosting/gbdt.h
+3
-3
src/boosting/score_updater.hpp
src/boosting/score_updater.hpp
+2
-2
src/io/dataset.cpp
src/io/dataset.cpp
+4
-4
src/io/dense_bin.hpp
src/io/dense_bin.hpp
+1
-1
src/io/ordered_sparse_bin.hpp
src/io/ordered_sparse_bin.hpp
+1
-1
src/metric/binary_metric.hpp
src/metric/binary_metric.hpp
+1
-1
src/treelearner/serial_tree_learner.cpp
src/treelearner/serial_tree_learner.cpp
+2
-2
No files found.
src/boosting/gbdt.h
View file @
85e90f21
...
...
@@ -107,7 +107,7 @@ private:
*/
void
UpdateScore
(
const
Tree
*
tree
);
/*!
* \brief Print
M
etric result of current iteration
* \brief Print
m
etric result of current iteration
* \param iter Current iteration
*/
void
OutputMetric
(
int
iter
);
...
...
@@ -116,11 +116,11 @@ private:
const
Dataset
*
train_data_
;
/*! \brief Config of gbdt */
const
GBDTConfig
*
gbdt_config_
;
/*! \brief Tree learner, will use t
i
hs class to learn trees */
/*! \brief Tree learner, will use th
i
s class to learn trees */
TreeLearner
*
tree_learner_
;
/*! \brief Objective function */
const
ObjectiveFunction
*
object_function_
;
/*! \brief Store and update traning data's score */
/*! \brief Store and update tra
i
ning data's score */
ScoreUpdater
*
train_score_updater_
;
/*! \brief Metrics for training data */
std
::
vector
<
const
Metric
*>
training_metrics_
;
...
...
src/boosting/score_updater.hpp
View file @
85e90f21
...
...
@@ -57,8 +57,8 @@ public:
* \brief Like AddScore(const Tree* tree), but only for part of data
* Used for prediction of training out-of-bag data
* \param tree Trained tree model
* \param data_indices Indices of data that w
ant
proccess
to
* \param data_cnt Number of data that w
ant
proccess
to
* \param data_indices Indices of data that w
ill be
proccess
ed
* \param data_cnt Number of data that w
ill be
proccess
ed
*/
inline
void
AddScore
(
const
Tree
*
tree
,
const
data_size_t
*
data_indices
,
data_size_t
data_cnt
)
{
...
...
src/io/dataset.cpp
View file @
85e90f21
...
...
@@ -31,12 +31,12 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
// create text parser
parser_
=
Parser
::
CreateParser
(
data_filename_
,
0
,
nullptr
);
if
(
parser_
==
nullptr
)
{
Log
::
Stderr
(
"cannot recogni
s
e input data format, filename: %s"
,
data_filename_
);
Log
::
Stderr
(
"cannot recogni
z
e input data format, filename: %s"
,
data_filename_
);
}
// create text reader
text_reader_
=
new
TextReader
<
data_size_t
>
(
data_filename
);
}
else
{
// only need to load initial score, other meta data will load from bin file
// only need to load initial score, other meta data will
be
load
ed
from bin file
metadata_
.
Init
(
init_score_filename
);
Log
::
Stdout
(
"will load data set from binary file"
);
parser_
=
nullptr
;
...
...
@@ -613,7 +613,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
size_t
size_of_metadata
=
*
(
reinterpret_cast
<
size_t
*>
(
buffer
));
// re-alloc
m
ate space if not enough
// re-allocate space if not enough
if
(
size_of_metadata
>
buffer_size
)
{
delete
[]
buffer
;
buffer_size
=
size_of_metadata
;
...
...
@@ -673,7 +673,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
Log
::
Stderr
(
"binary file format error at feature %d's size"
,
i
);
}
size_t
size_of_feature
=
*
(
reinterpret_cast
<
size_t
*>
(
buffer
));
// re-alloc
m
ate space if not enough
// re-allocate space if not enough
if
(
size_of_feature
>
buffer_size
)
{
delete
[]
buffer
;
buffer_size
=
size_of_feature
;
...
...
src/io/dense_bin.hpp
View file @
85e90f21
...
...
@@ -10,7 +10,7 @@
namespace
LightGBM
{
/*!
* \brief Used to
S
tore bins for dense feature
* \brief Used to
s
tore bins for dense feature
* Use template to reduce memory cost
*/
template
<
typename
VAL_T
>
...
...
src/io/ordered_sparse_bin.hpp
View file @
85e90f21
...
...
@@ -13,7 +13,7 @@
namespace
LightGBM
{
/*!
* \brief Ordered bin for sparse feature .
e
fficient for construct histogram, especially for sparse bin
* \brief Ordered bin for sparse feature .
E
fficient for construct histogram, especially for sparse bin
* There are 2 advantages for using ordered bin.
* 1. group the data by leaf, improve the cache hit.
* 2. only store the non-zero bin, which can speed up the histogram construction for sparse feature.
...
...
src/metric/binary_metric.hpp
View file @
85e90f21
...
...
@@ -225,7 +225,7 @@ public:
}
private:
/*! \brief Output frequen
tl
y */
/*! \brief Output frequen
c
y */
int
output_freq_
;
/*! \brief Number of data */
data_size_t
num_data_
;
...
...
src/treelearner/serial_tree_learner.cpp
View file @
85e90f21
...
...
@@ -91,7 +91,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
// initialize ordered gradients and hessians
ordered_gradients_
=
new
score_t
[
num_data_
];
ordered_hessians_
=
new
score_t
[
num_data_
];
// if has ordered bin, need allocat
a
a buffer to fast split
// if has ordered bin, need allocat
e
a buffer to fast split
if
(
has_ordered_bin_
)
{
is_data_in_leaf_
=
new
char
[
num_data_
];
}
...
...
@@ -331,7 +331,7 @@ void SerialTreeLearner::FindBestThresholds() {
// only has root leaf
if
(
larger_leaf_splits_
==
nullptr
||
larger_leaf_splits_
->
LeafIndex
()
<
0
)
continue
;
// construct histgr
o
ms for large leaf, we initialize larger leaf as the parent,
// construct hist
o
gr
a
ms for large leaf, we initialize larger leaf as the parent,
// so we can just subtract the smaller leaf's histograms
larger_leaf_histogram_array_
[
feature_index
].
Subtract
(
smaller_leaf_histogram_array_
[
feature_index
]);
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment