Commit 462612b4 authored by Nikita Titov's avatar Nikita Titov Committed by Guolin Ke
Browse files

fixed modifiers indent (#1997)

parent 8e286b38
......@@ -23,7 +23,7 @@ class Metric;
* and save the score to disk.
*/
class Application {
public:
public:
Application(int argc, char** argv);
/*! \brief Destructor */
......@@ -32,7 +32,7 @@ public:
/*! \brief To call this function to run application*/
inline void Run();
private:
private:
/*! \brief Load parameters from command line and config file*/
void LoadParameters(int argc, char** argv);
......
......@@ -27,7 +27,7 @@ enum MissingType {
/*! \brief Store data for one histogram bin */
struct HistogramBinEntry {
public:
public:
/*! \brief Sum of gradients on this bin */
double sum_gradients = 0.0f;
/*! \brief Sum of hessians on this bin */
......@@ -59,7 +59,7 @@ public:
/*! \brief This class used to convert feature values into bin,
* and store some meta information for bin*/
class BinMapper {
public:
public:
BinMapper();
BinMapper(const BinMapper& other);
explicit BinMapper(const void* memory);
......@@ -184,7 +184,7 @@ public:
}
}
private:
private:
/*! \brief Number of bins */
int num_bin_;
MissingType missing_type_;
......@@ -217,7 +217,7 @@ private:
* So we only use ordered bin for sparse situations.
*/
class OrderedBin {
public:
public:
/*! \brief virtual destructor */
virtual ~OrderedBin() {}
......@@ -265,7 +265,7 @@ public:
/*! \brief Iterator for one bin column */
class BinIterator {
public:
public:
/*!
* \brief Get bin data on specific row index
* \param idx Index of this data
......@@ -284,7 +284,7 @@ public:
* but it doesn't need to re-order operation, So it will be faster than OrderedBin for dense feature
*/
class Bin {
public:
public:
/*! \brief virtual destructor */
virtual ~Bin() {}
/*!
......
......@@ -20,7 +20,7 @@ struct PredictionEarlyStopInstance;
* \brief The interface for Boosting
*/
class LIGHTGBM_EXPORT Boosting {
public:
public:
/*! \brief virtual destructor */
virtual ~Boosting() {}
......@@ -294,7 +294,7 @@ public:
};
class GBDTBase : public Boosting {
public:
public:
virtual double GetLeafValue(int tree_idx, int leaf_idx) const = 0;
virtual void SetLeafValue(int tree_idx, int leaf_idx, double val) = 0;
};
......
......@@ -25,7 +25,7 @@ enum TaskType {
const int kDefaultNumLeaves = 31;
struct Config {
public:
public:
std::string ToString() const;
/*!
* \brief Get string value by specific name of key
......@@ -772,7 +772,7 @@ public:
static std::unordered_map<std::string, std::string> alias_table;
static std::unordered_set<std::string> parameter_set;
private:
private:
void CheckParamConflict();
void GetMembersFromString(const std::unordered_map<std::string, std::string>& params);
std::string SaveMembersToString() const;
......
......@@ -34,7 +34,7 @@ class DatasetLoader;
* 5. Initial score. optional. if existing, the model will boost from this score, otherwise will start from 0.
*/
class Metadata {
public:
public:
/*!
* \brief Null constructor
*/
......@@ -206,7 +206,7 @@ public:
/*! \brief Disable copy */
Metadata(const Metadata&) = delete;
private:
private:
/*! \brief Load initial scores from file */
void LoadInitialScore(const char* initscore_file);
/*! \brief Load weights from file */
......@@ -247,7 +247,7 @@ private:
/*! \brief Interface for Parser */
class Parser {
public:
public:
/*! \brief virtual destructor */
virtual ~Parser() {}
......@@ -276,7 +276,7 @@ public:
* which are used for training or validation
*/
class Dataset {
public:
public:
friend DatasetLoader;
LIGHTGBM_EXPORT Dataset();
......@@ -581,7 +581,7 @@ public:
/*! \brief Disable copy */
Dataset(const Dataset&) = delete;
private:
private:
std::string data_filename_;
/*! \brief Store used features */
std::vector<std::unique_ptr<FeatureGroup>> feature_groups_;
......
......@@ -6,7 +6,7 @@
namespace LightGBM {
class DatasetLoader {
public:
public:
LIGHTGBM_EXPORT DatasetLoader(const Config& io_config, const PredictFunction& predict_fun, int num_class, const char* filename);
LIGHTGBM_EXPORT ~DatasetLoader();
......@@ -28,7 +28,7 @@ public:
/*! \brief Disable copy */
DatasetLoader(const DatasetLoader&) = delete;
private:
private:
Dataset* LoadFromBinFile(const char* data_filename, const char* bin_filename, int rank, int num_machines, int* num_global_data, std::vector<data_size_t>* used_data_indices);
void SetHeader(const char* filename);
......
......@@ -16,7 +16,7 @@ class Dataset;
class DatasetLoader;
/*! \brief Using to store data and providing some operations on one feature group*/
class FeatureGroup {
public:
public:
friend Dataset;
friend DatasetLoader;
/*!
......@@ -214,7 +214,7 @@ public:
/*! \brief Disable copy */
FeatureGroup(const FeatureGroup&) = delete;
private:
private:
/*! \brief Number of features */
int num_feature_;
/*! \brief Bin mapper for sub features */
......
......@@ -204,13 +204,13 @@ public:
typedef std::initializer_list<std::pair<std::string, Type>> shape;
bool has_shape(const shape & types, std::string & err) const;
private:
private:
std::shared_ptr<JsonValue> m_ptr;
};
// Internal class hierarchy - JsonValue objects are not exposed to users of this API.
class JsonValue {
protected:
protected:
friend class Json;
friend class JsonInt;
friend class JsonDouble;
......
......@@ -18,7 +18,7 @@ namespace LightGBM {
* Metric is used to calculate metric result
*/
class Metric {
public:
public:
/*! \brief virtual destructor */
virtual ~Metric() {}
......@@ -57,7 +57,7 @@ public:
* \brief Static class, used to calculate DCG score
*/
class DCGCalculator {
public:
public:
static void DefaultEvalAt(std::vector<int>* eval_at);
static void DefaultLabelGain(std::vector<double>* label_gain);
/*!
......@@ -123,7 +123,7 @@ public:
*/
inline static double GetDiscount(data_size_t k) { return discount_[k]; }
private:
private:
/*! \brief store gains for different label */
static std::vector<double> label_gain_;
/*! \brief store discount score for different position */
......
......@@ -17,7 +17,7 @@ class Linkers;
/*! \brief The network structure for all_gather */
class BruckMap {
public:
public:
/*! \brief The communication times for one all gather operation */
int k;
/*! \brief in_ranks[i] means the incoming rank on i-th communication */
......@@ -51,7 +51,7 @@ enum RecursiveHalvingNodeType {
/*! \brief Network structure for recursive halving algorithm */
class RecursiveHalvingMap {
public:
public:
/*! \brief Communication times for one recursive halving algorithm */
int k;
/*! \brief Node type */
......@@ -84,7 +84,7 @@ public:
/*! \brief A static class that contains some collective communication algorithm */
class Network {
public:
public:
/*!
* \brief Initialize
* \param config Config of network setting
......@@ -256,7 +256,7 @@ public:
}
}
private:
private:
static void AllgatherBruck(char* input, const comm_size_t* block_start, const comm_size_t* block_len, char* output, comm_size_t all_size);
static void AllgatherRecursiveDoubling(char* input, const comm_size_t* block_start, const comm_size_t* block_len, char* output, comm_size_t all_size);
......
......@@ -11,7 +11,7 @@ namespace LightGBM {
* \brief The interface of Objective Function.
*/
class ObjectiveFunction {
public:
public:
/*! \brief virtual destructor */
virtual ~ObjectiveFunction() {}
......
......@@ -18,7 +18,7 @@ namespace LightGBM {
* \brief Tree model
*/
class Tree {
public:
public:
/*!
* \brief Constructor
* \param max_leaves The number of max leaves
......@@ -203,7 +203,7 @@ public:
void RecomputeMaxDepth();
private:
private:
std::string NumericalDecisionIfElse(int node) const;
std::string CategoricalDecisionIfElse(int node) const;
......
......@@ -21,7 +21,7 @@ class ObjectiveFunction;
* \brief Interface for tree learner
*/
class TreeLearner {
public:
public:
/*! \brief virtual destructor */
virtual ~TreeLearner() {}
......
......@@ -12,7 +12,7 @@ namespace LightGBM {
*/
template<typename VAL_T>
class ArrayArgs {
public:
public:
inline static size_t ArgMaxMT(const std::vector<VAL_T>& array) {
int num_threads = 1;
#pragma omp parallel
......
......@@ -41,7 +41,7 @@ enum class LogLevel: int {
* \brief A static Log class
*/
class Log {
public:
public:
/*!
* \brief Resets the minimal log level. It is INFO by default.
* \param level The new minimal log level.
......@@ -83,7 +83,7 @@ public:
throw std::runtime_error(std::string(str_buf));
}
private:
private:
static void Write(LogLevel level, const char* level_str, const char *format, va_list val) {
if (level <= GetLevel()) { // omit the message with low level
// write to STDOUT
......
......@@ -11,7 +11,7 @@
#include "log.h"
class ThreadExceptionHelper {
public:
public:
ThreadExceptionHelper() {
ex_ptr_ = nullptr;
}
......@@ -31,7 +31,8 @@ public:
if (ex_ptr_ != nullptr) { return; }
ex_ptr_ = std::current_exception();
}
private:
private:
std::exception_ptr ex_ptr_;
std::mutex lock_;
};
......
......@@ -18,7 +18,7 @@ namespace LightGBM {
* \brief A pipeline file reader, use 2 threads, one read block from file, the other process the block
*/
class PipelineReader {
public:
public:
/*!
* \brief Read data from a file, use pipeline methods
* \param filename Filename of data
......
......@@ -13,7 +13,7 @@ namespace LightGBM {
* \brief A wrapper for random generator
*/
class Random {
public:
public:
/*!
* \brief Constructor, with random seed
*/
......@@ -94,7 +94,7 @@ public:
return ret;
}
private:
private:
inline int RandInt16() {
x = (214013 * x + 2531011);
return static_cast<int>((x >> 16) & 0x7FFF);
......
......@@ -19,7 +19,7 @@ namespace LightGBM {
*/
template<typename INDEX_T>
class TextReader {
public:
public:
/*!
* \brief Constructor
* \param filename Filename of data
......@@ -306,7 +306,7 @@ public:
});
}
private:
private:
/*! \brief Filename of text data */
const char* filename_;
/*! \brief Cache the read text data */
......
......@@ -9,7 +9,7 @@
namespace LightGBM {
class Threading {
public:
public:
template<typename INDEX_T>
static inline void For(INDEX_T start, INDEX_T end, const std::function<void(int, INDEX_T, INDEX_T)>& inner_fun) {
int num_threads = 1;
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment