/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#include <LightGBM/dataset_loader.h>

#include <LightGBM/network.h>
#include <LightGBM/utils/array_args.h>
#include <LightGBM/utils/json11.h>
#include <LightGBM/utils/log.h>
#include <LightGBM/utils/openmp_wrapper.h>

#include <chrono>
#include <cstring>
#include <fstream>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

namespace LightGBM {

using json11::Json;

// Construct a loader bound to one IO configuration and one data file.
// io_config   : IO configuration; copied into config_ (seeds random_ too).
// predict_fun : prediction function, stored for later use by the loader.
// num_class   : number of classes of the target.
// filename    : path of the data file; header/column info is parsed here.
DatasetLoader::DatasetLoader(const Config& io_config, const PredictFunction& predict_fun, int num_class, const char* filename)
  :config_(io_config), random_(config_.data_random_seed), predict_fun_(predict_fun), num_class_(num_class) {
  label_idx_ = 0;
  weight_idx_ = NO_SPECIFIC;
  group_idx_ = NO_SPECIFIC;
  SetHeader(filename);
  // raw feature values are kept only when linear trees are requested
  store_raw_ = false;
  if (io_config.linear_tree) {
    store_raw_ = true;
  }
}

// No manual cleanup required; all members release their own resources.
DatasetLoader::~DatasetLoader() {
}

void DatasetLoader::SetHeader(const char* filename) {
Guolin Ke's avatar
Guolin Ke committed
36
  std::unordered_map<std::string, int> name2idx;
Guolin Ke's avatar
Guolin Ke committed
37
  std::string name_prefix("name:");
38
  if (filename != nullptr && CheckCanLoadFromBin(filename) == "") {
Guolin Ke's avatar
Guolin Ke committed
39
    TextReader<data_size_t> text_reader(filename, config_.header);
Guolin Ke's avatar
Guolin Ke committed
40

Guolin Ke's avatar
Guolin Ke committed
41
    // get column names
Guolin Ke's avatar
Guolin Ke committed
42
    if (config_.header) {
Guolin Ke's avatar
Guolin Ke committed
43
      std::string first_line = text_reader.first_line();
44
      feature_names_ = Common::Split(first_line.c_str(), "\t,");
45
46
47
48
49
50
51
52
53
54
55
56
    } else if (!config_.parser_config_file.empty()) {
      // support to get header from parser config, so could utilize following label name to id mapping logic.
      TextReader<data_size_t> parser_config_reader(config_.parser_config_file.c_str(), false);
      parser_config_reader.ReadAllLines();
      std::string parser_config_str = parser_config_reader.JoinedLines();
      if (!parser_config_str.empty()) {
        std::string header_in_parser_config = Common::GetFromParserConfig(parser_config_str, "header");
        if (!header_in_parser_config.empty()) {
          Log::Info("Get raw column names from parser config.");
          feature_names_ = Common::Split(header_in_parser_config.c_str(), "\t,");
        }
      }
Guolin Ke's avatar
Guolin Ke committed
57
58
    }

Guolin Ke's avatar
Guolin Ke committed
59
    // load label idx first
Guolin Ke's avatar
Guolin Ke committed
60
61
62
    if (config_.label_column.size() > 0) {
      if (Common::StartsWith(config_.label_column, name_prefix)) {
        std::string name = config_.label_column.substr(name_prefix.size());
Guolin Ke's avatar
Guolin Ke committed
63
64
65
66
67
68
69
70
71
72
        label_idx_ = -1;
        for (int i = 0; i < static_cast<int>(feature_names_.size()); ++i) {
          if (name == feature_names_[i]) {
            label_idx_ = i;
            break;
          }
        }
        if (label_idx_ >= 0) {
          Log::Info("Using column %s as label", name.c_str());
        } else {
73
74
          Log::Fatal("Could not find label column %s in data file \n"
                     "or data file doesn't contain header", name.c_str());
Guolin Ke's avatar
Guolin Ke committed
75
        }
Guolin Ke's avatar
Guolin Ke committed
76
      } else {
Guolin Ke's avatar
Guolin Ke committed
77
        if (!Common::AtoiAndCheck(config_.label_column.c_str(), &label_idx_)) {
78
79
80
          Log::Fatal("label_column is not a number,\n"
                     "if you want to use a column name,\n"
                     "please add the prefix \"name:\" to the column name");
Guolin Ke's avatar
Guolin Ke committed
81
82
        }
        Log::Info("Using column number %d as label", label_idx_);
Guolin Ke's avatar
Guolin Ke committed
83
84
      }
    }
Guolin Ke's avatar
Guolin Ke committed
85

86
87
88
89
90
91
92
93
94
    if (!config_.parser_config_file.empty()) {
      // if parser config file exists, feature names may be changed after customized parser applied.
      // clear here so could use default filled feature names during dataset construction.
      // may improve by saving real feature names defined in parser in the future.
      if (!feature_names_.empty()) {
        feature_names_.clear();
      }
    }

Guolin Ke's avatar
Guolin Ke committed
95
    if (!feature_names_.empty()) {
Guolin Ke's avatar
Guolin Ke committed
96
97
98
99
      // erase label column name
      feature_names_.erase(feature_names_.begin() + label_idx_);
      for (size_t i = 0; i < feature_names_.size(); ++i) {
        name2idx[feature_names_[i]] = static_cast<int>(i);
Guolin Ke's avatar
Guolin Ke committed
100
      }
Guolin Ke's avatar
Guolin Ke committed
101
102
103
    }

    // load ignore columns
Guolin Ke's avatar
Guolin Ke committed
104
105
106
    if (config_.ignore_column.size() > 0) {
      if (Common::StartsWith(config_.ignore_column, name_prefix)) {
        std::string names = config_.ignore_column.substr(name_prefix.size());
Guolin Ke's avatar
Guolin Ke committed
107
108
109
110
111
112
113
114
115
        for (auto name : Common::Split(names.c_str(), ',')) {
          if (name2idx.count(name) > 0) {
            int tmp = name2idx[name];
            ignore_features_.emplace(tmp);
          } else {
            Log::Fatal("Could not find ignore column %s in data file", name.c_str());
          }
        }
      } else {
Guolin Ke's avatar
Guolin Ke committed
116
        for (auto token : Common::Split(config_.ignore_column.c_str(), ',')) {
Guolin Ke's avatar
Guolin Ke committed
117
118
          int tmp = 0;
          if (!Common::AtoiAndCheck(token.c_str(), &tmp)) {
119
120
121
            Log::Fatal("ignore_column is not a number,\n"
                       "if you want to use a column name,\n"
                       "please add the prefix \"name:\" to the column name");
Guolin Ke's avatar
Guolin Ke committed
122
123
          }
          ignore_features_.emplace(tmp);
Guolin Ke's avatar
Guolin Ke committed
124
125
126
        }
      }
    }
Guolin Ke's avatar
Guolin Ke committed
127
    // load weight idx
Guolin Ke's avatar
Guolin Ke committed
128
129
130
    if (config_.weight_column.size() > 0) {
      if (Common::StartsWith(config_.weight_column, name_prefix)) {
        std::string name = config_.weight_column.substr(name_prefix.size());
Guolin Ke's avatar
Guolin Ke committed
131
132
133
134
135
136
        if (name2idx.count(name) > 0) {
          weight_idx_ = name2idx[name];
          Log::Info("Using column %s as weight", name.c_str());
        } else {
          Log::Fatal("Could not find weight column %s in data file", name.c_str());
        }
Guolin Ke's avatar
Guolin Ke committed
137
      } else {
Guolin Ke's avatar
Guolin Ke committed
138
        if (!Common::AtoiAndCheck(config_.weight_column.c_str(), &weight_idx_)) {
139
140
141
          Log::Fatal("weight_column is not a number,\n"
                     "if you want to use a column name,\n"
                     "please add the prefix \"name:\" to the column name");
Guolin Ke's avatar
Guolin Ke committed
142
143
        }
        Log::Info("Using column number %d as weight", weight_idx_);
Guolin Ke's avatar
Guolin Ke committed
144
      }
Guolin Ke's avatar
Guolin Ke committed
145
      ignore_features_.emplace(weight_idx_);
Guolin Ke's avatar
Guolin Ke committed
146
    }
Guolin Ke's avatar
Guolin Ke committed
147
    // load group idx
Guolin Ke's avatar
Guolin Ke committed
148
149
150
    if (config_.group_column.size() > 0) {
      if (Common::StartsWith(config_.group_column, name_prefix)) {
        std::string name = config_.group_column.substr(name_prefix.size());
Guolin Ke's avatar
Guolin Ke committed
151
152
153
154
155
156
157
        if (name2idx.count(name) > 0) {
          group_idx_ = name2idx[name];
          Log::Info("Using column %s as group/query id", name.c_str());
        } else {
          Log::Fatal("Could not find group/query column %s in data file", name.c_str());
        }
      } else {
Guolin Ke's avatar
Guolin Ke committed
158
        if (!Common::AtoiAndCheck(config_.group_column.c_str(), &group_idx_)) {
159
160
161
          Log::Fatal("group_column is not a number,\n"
                     "if you want to use a column name,\n"
                     "please add the prefix \"name:\" to the column name");
Guolin Ke's avatar
Guolin Ke committed
162
163
164
165
        }
        Log::Info("Using column number %d as group/query id", group_idx_);
      }
      ignore_features_.emplace(group_idx_);
Guolin Ke's avatar
Guolin Ke committed
166
167
    }
  }
Guolin Ke's avatar
Guolin Ke committed
168
169
170
  if (config_.categorical_feature.size() > 0) {
    if (Common::StartsWith(config_.categorical_feature, name_prefix)) {
      std::string names = config_.categorical_feature.substr(name_prefix.size());
171
172
173
174
175
      for (auto name : Common::Split(names.c_str(), ',')) {
        if (name2idx.count(name) > 0) {
          int tmp = name2idx[name];
          categorical_features_.emplace(tmp);
        } else {
Guolin Ke's avatar
Guolin Ke committed
176
          Log::Fatal("Could not find categorical_feature %s in data file", name.c_str());
177
178
179
        }
      }
    } else {
Guolin Ke's avatar
Guolin Ke committed
180
      for (auto token : Common::Split(config_.categorical_feature.c_str(), ',')) {
181
182
        int tmp = 0;
        if (!Common::AtoiAndCheck(token.c_str(), &tmp)) {
Guolin Ke's avatar
Guolin Ke committed
183
          Log::Fatal("categorical_feature is not a number,\n"
184
185
                     "if you want to use a column name,\n"
                     "please add the prefix \"name:\" to the column name");
186
187
188
189
190
        }
        categorical_features_.emplace(tmp);
      }
    }
  }
Guolin Ke's avatar
Guolin Ke committed
191
192
}

// Warn when the number of sampled rows is likely too small, relative to the
// full dataset, to build reliable bin mappers.
void CheckSampleSize(size_t sample_cnt, size_t num_data) {
  const double sampled_fraction = static_cast<double>(sample_cnt) / num_data;
  const bool fraction_too_low = sampled_fraction < 0.2f;
  const bool count_too_low = sample_cnt < 100000;
  if (fraction_too_low && count_too_low) {
    Log::Warning(
        "Using too small ``bin_construct_sample_cnt`` may encounter "
        "unexpected "
        "errors and poor accuracy.");
  }
}

// Load a training dataset from `filename`.
// rank / num_machines support distributed loading: when the data is not
// pre-partitioned, each machine keeps only its own shard of the rows.
// Three paths are dispatched on:
//  - one-pass: whole text file read into memory, sampled, then extracted;
//  - two-round (config_.two_round): sample from file first, then a second
//    pass extracts features without holding all text in memory;
//  - LightGBM binary file: deserialized directly via LoadFromBinFile.
// Returns a heap-allocated Dataset owned by the caller.
Dataset* DatasetLoader::LoadFromFile(const char* filename, int rank, int num_machines) {
  // don't support query id in data file when using distributed training
  if (num_machines > 1 && !config_.pre_partition) {
    if (group_idx_ > 0) {
      Log::Fatal("Using a query id without pre-partitioning the data file is not supported for distributed training.\n"
                 "Please use an additional query file or pre-partition the data");
    }
  }
  auto dataset = std::unique_ptr<Dataset>(new Dataset());
  if (store_raw_) {
    dataset->SetHasRaw(true);
  }
  data_size_t num_global_data = 0;
  std::vector<data_size_t> used_data_indices;
  auto bin_filename = CheckCanLoadFromBin(filename);
  bool is_load_from_binary = false;
  if (bin_filename.size() == 0) {
    dataset->parser_config_str_ = Parser::GenerateParserConfigStr(filename, config_.parser_config_file.c_str(), config_.header, label_idx_);
    auto parser = std::unique_ptr<Parser>(Parser::CreateParser(filename, config_.header, 0, label_idx_,
                                                               config_.precise_float_parser, dataset->parser_config_str_));
    if (parser == nullptr) {
      Log::Fatal("Could not recognize data format of %s", filename);
    }
    dataset->data_filename_ = filename;
    dataset->label_idx_ = label_idx_;
    dataset->metadata_.Init(filename);
    if (!config_.two_round) {
      // read data to memory
      auto text_data = LoadTextDataToMemory(filename, dataset->metadata_, rank, num_machines, &num_global_data, &used_data_indices);
      dataset->num_data_ = static_cast<data_size_t>(text_data.size());
      // sample data
      auto sample_data = SampleTextDataFromMemory(text_data);
      CheckSampleSize(sample_data.size(),
                      static_cast<size_t>(dataset->num_data_));
      // construct feature bin mappers & clear sample data
      ConstructBinMappersFromTextData(rank, num_machines, sample_data, parser.get(), dataset.get());
      std::vector<std::string>().swap(sample_data);
      if (dataset->has_raw()) {
        dataset->ResizeRaw(dataset->num_data_);
      }
      // initialize label
      dataset->metadata_.Init(dataset->num_data_, weight_idx_, group_idx_);
      // extract features
      ExtractFeaturesFromMemory(&text_data, parser.get(), dataset.get());
      text_data.clear();
    } else {
      // sample data from file
      auto sample_data = SampleTextDataFromFile(filename, dataset->metadata_, rank, num_machines, &num_global_data, &used_data_indices);
      if (used_data_indices.size() > 0) {
        dataset->num_data_ = static_cast<data_size_t>(used_data_indices.size());
      } else {
        dataset->num_data_ = num_global_data;
      }
      CheckSampleSize(sample_data.size(),
                      static_cast<size_t>(dataset->num_data_));
      // construct feature bin mappers & clear sample data
      ConstructBinMappersFromTextData(rank, num_machines, sample_data, parser.get(), dataset.get());
      std::vector<std::string>().swap(sample_data);
      if (dataset->has_raw()) {
        dataset->ResizeRaw(dataset->num_data_);
      }
      // initialize label
      dataset->metadata_.Init(dataset->num_data_, weight_idx_, group_idx_);
      Log::Info("Making second pass...");
      // extract features
      ExtractFeaturesFromFile(filename, parser.get(), used_data_indices, dataset.get());
    }
  } else {
    // load data from binary file
    is_load_from_binary = true;
    Log::Info("Load from binary file %s", bin_filename.c_str());
    dataset.reset(LoadFromBinFile(filename, bin_filename.c_str(), rank, num_machines, &num_global_data, &used_data_indices));

    // checks whether there's a initial score file when loaded from binary data files
    // the intial score file should with suffix ".bin.init"
    dataset->metadata_.LoadInitialScore(bin_filename);

    dataset->device_type_ = config_.device_type;
    dataset->gpu_device_id_ = config_.gpu_device_id;
    #ifdef USE_CUDA
    if (config_.device_type == std::string("cuda")) {
      dataset->CreateCUDAColumnData();
      dataset->metadata_.CreateCUDAMetadata(dataset->gpu_device_id_);
    } else {
      dataset->cuda_column_data_ = nullptr;
    }
    #endif  // USE_CUDA
  }
  // check meta data
  dataset->metadata_.CheckOrPartition(num_global_data, used_data_indices);
  // need to check training data
  CheckDataset(dataset.get(), is_load_from_binary);
  return dataset.release();
}

// Load a validation dataset from `filename`, aligned with `train_data`:
// bin mappers are NOT re-learned; the new dataset copies its feature
// definitions from the training dataset via CreateValid.
// Like LoadFromFile this supports one-pass, two-round and binary-file
// paths, but always loads the whole file (no distributed partitioning).
// Returns a heap-allocated Dataset owned by the caller.
Dataset* DatasetLoader::LoadFromFileAlignWithOtherDataset(const char* filename, const Dataset* train_data) {
  data_size_t num_global_data = 0;
  std::vector<data_size_t> used_data_indices;
  auto dataset = std::unique_ptr<Dataset>(new Dataset());
  if (store_raw_) {
    dataset->SetHasRaw(true);
  }
  auto bin_filename = CheckCanLoadFromBin(filename);
  if (bin_filename.size() == 0) {
    // reuse the parser configuration recorded on the training dataset
    auto parser = std::unique_ptr<Parser>(Parser::CreateParser(filename, config_.header, 0, label_idx_,
                                                               config_.precise_float_parser, train_data->parser_config_str_));
    if (parser == nullptr) {
      Log::Fatal("Could not recognize data format of %s", filename);
    }
    dataset->data_filename_ = filename;
    dataset->label_idx_ = label_idx_;
    dataset->metadata_.Init(filename);
    if (!config_.two_round) {
      // read data in memory
      auto text_data = LoadTextDataToMemory(filename, dataset->metadata_, 0, 1, &num_global_data, &used_data_indices);
      dataset->num_data_ = static_cast<data_size_t>(text_data.size());
      // initialize label
      dataset->metadata_.Init(dataset->num_data_, weight_idx_, group_idx_);
      dataset->CreateValid(train_data);
      if (dataset->has_raw()) {
        dataset->ResizeRaw(dataset->num_data_);
      }
      // extract features
      ExtractFeaturesFromMemory(&text_data, parser.get(), dataset.get());
      text_data.clear();
    } else {
      TextReader<data_size_t> text_reader(filename, config_.header);
      // Get number of lines of data file
      dataset->num_data_ = static_cast<data_size_t>(text_reader.CountLine());
      num_global_data = dataset->num_data_;
      // initialize label
      dataset->metadata_.Init(dataset->num_data_, weight_idx_, group_idx_);
      dataset->CreateValid(train_data);
      if (dataset->has_raw()) {
        dataset->ResizeRaw(dataset->num_data_);
      }
      // extract features
      ExtractFeaturesFromFile(filename, parser.get(), used_data_indices, dataset.get());
    }
  } else {
    // load data from binary file
    dataset.reset(LoadFromBinFile(filename, bin_filename.c_str(), 0, 1, &num_global_data, &used_data_indices));
    // checks whether there's a initial score file when loaded from binary data files
    // the intial score file should with suffix ".bin.init"
    dataset->metadata_.LoadInitialScore(bin_filename);
  }
  // not need to check validation data
  // check meta data
  dataset->metadata_.CheckOrPartition(num_global_data, used_data_indices);
  return dataset.release();
}

// Build a Dataset from an in-memory serialized *reference* (feature
// definitions only, no row data): validates the token and version, then
// reads the header and each size-prefixed feature-group definition.
// num_data overrides the row count stored in the header; metadata is sized
// from the config (weights/queries) and num_classes (init scores).
Dataset* DatasetLoader::LoadFromSerializedReference(const char* binary_data, size_t buffer_size, data_size_t num_data, int32_t num_classes) {
  auto dataset = std::unique_ptr<Dataset>(new Dataset(num_data));

  const char* read_ptr = binary_data;

  // validate the leading token that marks a serialized reference
  const size_t token_len = std::strlen(Dataset::binary_serialized_reference_token);
  const size_t aligned_token_len = VirtualFileWriter::AlignedSize(sizeof(char) * token_len);
  if (buffer_size < aligned_token_len) {
    Log::Fatal("Binary definition file error: token has the wrong size");
  }
  if (std::string(read_ptr, token_len) != std::string(Dataset::binary_serialized_reference_token)) {
    Log::Fatal("Input file is not LightGBM binary reference file");
  }
  read_ptr += aligned_token_len;

  // validate the serialization format version
  const size_t aligned_version_len = VirtualFileWriter::AlignedSize(Dataset::kSerializedReferenceVersionLength);
  std::string version(read_ptr, Dataset::kSerializedReferenceVersionLength);
  if (version != std::string(Dataset::serialized_reference_version)) {
    Log::Fatal("Unexpected version of serialized binary data: %s", version.c_str());
  }
  read_ptr += aligned_version_len;

  // header: a size prefix followed by the header payload
  const size_t header_len = *(reinterpret_cast<const size_t*>(read_ptr));
  read_ptr += sizeof(size_t);

  LoadHeaderFromMemory(dataset.get(), read_ptr);
  dataset->num_data_ = num_data;  // update to the given num_data
  read_ptr += header_len;

  // feature groups: each one is a size prefix followed by its definition
  for (int group = 0; group < dataset->num_groups_; ++group) {
    const size_t group_len = *(reinterpret_cast<const size_t*>(read_ptr));
    read_ptr += sizeof(size_t);
    dataset->feature_groups_.emplace_back(std::unique_ptr<FeatureGroup>(new FeatureGroup(read_ptr, num_data, group)));
    read_ptr += group_len;
  }
  dataset->feature_groups_.shrink_to_fit();

  // map each feature to its numeric index; -1 marks categorical features
  dataset->numeric_feature_map_ = std::vector<int>(dataset->num_features_, false);
  dataset->num_numeric_features_ = 0;
  for (int fidx = 0; fidx < dataset->num_features_; ++fidx) {
    if (dataset->FeatureBinMapper(fidx)->bin_type() == BinType::CategoricalBin) {
      dataset->numeric_feature_map_[fidx] = -1;
    } else {
      dataset->numeric_feature_map_[fidx] = dataset->num_numeric_features_;
      ++dataset->num_numeric_features_;
    }
  }

  const int has_weights = config_.weight_column.size() > 0;
  const int has_init_scores = num_classes > 0;
  const int has_queries = config_.group_column.size() > 0;
  dataset->metadata_.Init(num_data, has_weights, has_init_scores, has_queries, num_classes);

  Log::Info("Loaded reference dataset: %d features, %d num_data", dataset->num_features_, num_data);

  return dataset.release();
}

Dataset* DatasetLoader::LoadFromBinFile(const char* data_filename, const char* bin_filename,
                                        int rank, int num_machines, int* num_global_data,
                                        std::vector<data_size_t>* used_data_indices) {
Guolin Ke's avatar
Guolin Ke committed
420
  auto dataset = std::unique_ptr<Dataset>(new Dataset());
421
  auto reader = VirtualFileReader::Make(bin_filename);
Guolin Ke's avatar
Guolin Ke committed
422
  dataset->data_filename_ = data_filename;
423
  if (!reader->Init()) {
Guolin Ke's avatar
Guolin Ke committed
424
425
426
427
428
    Log::Fatal("Could not read binary data from %s", bin_filename);
  }

  // buffer to read binary file
  size_t buffer_size = 16 * 1024 * 1024;
Guolin Ke's avatar
Guolin Ke committed
429
  auto buffer = std::vector<char>(buffer_size);
430

431
432
  // check token
  size_t size_of_token = std::strlen(Dataset::binary_file_token);
433
434
435
436
  size_t read_cnt = reader->Read(
      buffer.data(),
      VirtualFileWriter::AlignedSize(sizeof(char) * size_of_token));
  if (read_cnt < sizeof(char) * size_of_token) {
437
438
439
    Log::Fatal("Binary file error: token has the wrong size");
  }
  if (std::string(buffer.data()) != std::string(Dataset::binary_file_token)) {
440
    Log::Fatal("Input file is not LightGBM binary file");
441
  }
Guolin Ke's avatar
Guolin Ke committed
442
443

  // read size of header
444
  read_cnt = reader->Read(buffer.data(), sizeof(size_t));
Guolin Ke's avatar
Guolin Ke committed
445

446
  if (read_cnt != sizeof(size_t)) {
Guolin Ke's avatar
Guolin Ke committed
447
448
449
    Log::Fatal("Binary file error: header has the wrong size");
  }

Guolin Ke's avatar
Guolin Ke committed
450
  size_t size_of_head = *(reinterpret_cast<size_t*>(buffer.data()));
Guolin Ke's avatar
Guolin Ke committed
451

452
  // re-allocate space if not enough
Guolin Ke's avatar
Guolin Ke committed
453
454
  if (size_of_head > buffer_size) {
    buffer_size = size_of_head;
Guolin Ke's avatar
Guolin Ke committed
455
    buffer.resize(buffer_size);
Guolin Ke's avatar
Guolin Ke committed
456
457
  }
  // read header
458
  read_cnt = reader->Read(buffer.data(), size_of_head);
Guolin Ke's avatar
Guolin Ke committed
459
460
461
462
463

  if (read_cnt != size_of_head) {
    Log::Fatal("Binary file error: header is incorrect");
  }
  // get header
Guolin Ke's avatar
Guolin Ke committed
464
  const char* mem_ptr = buffer.data();
465
  LoadHeaderFromMemory(dataset.get(), mem_ptr);
Guolin Ke's avatar
Guolin Ke committed
466
467

  // read size of meta data
468
  read_cnt = reader->Read(buffer.data(), sizeof(size_t));
Guolin Ke's avatar
Guolin Ke committed
469

470
  if (read_cnt != sizeof(size_t)) {
Guolin Ke's avatar
Guolin Ke committed
471
472
473
    Log::Fatal("Binary file error: meta data has the wrong size");
  }

Guolin Ke's avatar
Guolin Ke committed
474
  size_t size_of_metadata = *(reinterpret_cast<size_t*>(buffer.data()));
Guolin Ke's avatar
Guolin Ke committed
475
476
477
478

  // re-allocate space if not enough
  if (size_of_metadata > buffer_size) {
    buffer_size = size_of_metadata;
Guolin Ke's avatar
Guolin Ke committed
479
    buffer.resize(buffer_size);
Guolin Ke's avatar
Guolin Ke committed
480
481
  }
  //  read meta data
482
  read_cnt = reader->Read(buffer.data(), size_of_metadata);
Guolin Ke's avatar
Guolin Ke committed
483
484
485
486
487

  if (read_cnt != size_of_metadata) {
    Log::Fatal("Binary file error: meta data is incorrect");
  }
  // load meta data
Guolin Ke's avatar
Guolin Ke committed
488
  dataset->metadata_.LoadFromMemory(buffer.data());
Guolin Ke's avatar
Guolin Ke committed
489

490
491
  *num_global_data = dataset->num_data_;
  used_data_indices->clear();
Guolin Ke's avatar
Guolin Ke committed
492
  // sample local used data if need to partition
Guolin Ke's avatar
Guolin Ke committed
493
  if (num_machines > 1 && !config_.pre_partition) {
Guolin Ke's avatar
Guolin Ke committed
494
495
496
497
    const data_size_t* query_boundaries = dataset->metadata_.query_boundaries();
    if (query_boundaries == nullptr) {
      // if not contain query file, minimal sample unit is one record
      for (data_size_t i = 0; i < dataset->num_data_; ++i) {
Guolin Ke's avatar
Guolin Ke committed
498
        if (random_.NextShort(0, num_machines) == rank) {
499
          used_data_indices->push_back(i);
Guolin Ke's avatar
Guolin Ke committed
500
501
502
503
504
505
506
507
508
        }
      }
    } else {
      // if contain query file, minimal sample unit is one query
      data_size_t num_queries = dataset->metadata_.num_queries();
      data_size_t qid = -1;
      bool is_query_used = false;
      for (data_size_t i = 0; i < dataset->num_data_; ++i) {
        if (qid >= num_queries) {
509
510
          Log::Fatal("Current query exceeds the range of the query file,\n"
                     "please ensure the query file is correct");
Guolin Ke's avatar
Guolin Ke committed
511
512
513
514
        }
        if (i >= query_boundaries[qid + 1]) {
          // if is new query
          is_query_used = false;
Guolin Ke's avatar
Guolin Ke committed
515
          if (random_.NextShort(0, num_machines) == rank) {
Guolin Ke's avatar
Guolin Ke committed
516
517
518
519
520
            is_query_used = true;
          }
          ++qid;
        }
        if (is_query_used) {
521
          used_data_indices->push_back(i);
Guolin Ke's avatar
Guolin Ke committed
522
523
524
        }
      }
    }
525
    dataset->num_data_ = static_cast<data_size_t>((*used_data_indices).size());
Guolin Ke's avatar
Guolin Ke committed
526
  }
527
  dataset->metadata_.PartitionLabel(*used_data_indices);
Guolin Ke's avatar
Guolin Ke committed
528
  // read feature data
Guolin Ke's avatar
Guolin Ke committed
529
  for (int i = 0; i < dataset->num_groups_; ++i) {
Guolin Ke's avatar
Guolin Ke committed
530
    // read feature size
531
532
    read_cnt = reader->Read(buffer.data(), sizeof(size_t));
    if (read_cnt != sizeof(size_t)) {
Guolin Ke's avatar
Guolin Ke committed
533
534
      Log::Fatal("Binary file error: feature %d has the wrong size", i);
    }
Guolin Ke's avatar
Guolin Ke committed
535
    size_t size_of_feature = *(reinterpret_cast<size_t*>(buffer.data()));
Guolin Ke's avatar
Guolin Ke committed
536
537
538
    // re-allocate space if not enough
    if (size_of_feature > buffer_size) {
      buffer_size = size_of_feature;
Guolin Ke's avatar
Guolin Ke committed
539
      buffer.resize(buffer_size);
Guolin Ke's avatar
Guolin Ke committed
540
541
    }

542
    read_cnt = reader->Read(buffer.data(), size_of_feature);
Guolin Ke's avatar
Guolin Ke committed
543
544

    if (read_cnt != size_of_feature) {
545
      Log::Fatal("Binary file error: feature %d is incorrect, read count: %zu", i, read_cnt);
Guolin Ke's avatar
Guolin Ke committed
546
    }
Guolin Ke's avatar
Guolin Ke committed
547
    dataset->feature_groups_.emplace_back(std::unique_ptr<FeatureGroup>(
548
549
      new FeatureGroup(buffer.data(),
                       *num_global_data,
550
                       *used_data_indices, i)));
Guolin Ke's avatar
Guolin Ke committed
551
  }
Guolin Ke's avatar
Guolin Ke committed
552
  dataset->feature_groups_.shrink_to_fit();
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574

  // raw data
  dataset->numeric_feature_map_ = std::vector<int>(dataset->num_features_, false);
  dataset->num_numeric_features_ = 0;
  for (int i = 0; i < dataset->num_features_; ++i) {
    if (dataset->FeatureBinMapper(i)->bin_type() == BinType::CategoricalBin) {
      dataset->numeric_feature_map_[i] = -1;
    } else {
      dataset->numeric_feature_map_[i] = dataset->num_numeric_features_;
      ++dataset->num_numeric_features_;
    }
  }
  if (dataset->has_raw()) {
    dataset->ResizeRaw(dataset->num_data());
      size_t row_size = dataset->num_numeric_features_ * sizeof(float);
      if (row_size > buffer_size) {
        buffer_size = row_size;
        buffer.resize(buffer_size);
      }
    for (int i = 0; i < dataset->num_data(); ++i) {
      read_cnt = reader->Read(buffer.data(), row_size);
      if (read_cnt != row_size) {
575
        Log::Fatal("Binary file error: row %d of raw data is incorrect, read count: %zu", i, read_cnt);
576
577
578
579
580
581
582
583
584
585
586
587
588
      }
      mem_ptr = buffer.data();
      const float* tmp_ptr_raw_row = reinterpret_cast<const float*>(mem_ptr);
      for (int j = 0; j < dataset->num_features(); ++j) {
        int feat_ind = dataset->numeric_feature_map_[j];
        if (feat_ind >= 0) {
          dataset->raw_data_[feat_ind][i] = tmp_ptr_raw_row[feat_ind];
        }
      }
      mem_ptr += row_size;
    }
  }

Guolin Ke's avatar
Guolin Ke committed
589
  dataset->is_finish_load_ = true;
Guolin Ke's avatar
Guolin Ke committed
590
  return dataset.release();
Guolin Ke's avatar
Guolin Ke committed
591
592
}

/*!
 * \brief Construct a Dataset (bin mappers + feature groups) from sampled
 *        column values, either locally or cooperatively across machines.
 * \param sample_values    Per-column arrays of sampled non-zero values.
 * \param sample_indices   Per-column arrays of row indices matching sample_values.
 * \param num_col          Number of columns available on this machine.
 * \param num_per_col      Number of sampled values per column.
 * \param total_sample_size Number of sampled rows.
 * \param num_local_data   Number of data rows on this machine.
 * \param num_dist_data    Total number of rows across all machines.
 * \return Heap-allocated Dataset; ownership passes to the caller.
 */
Dataset* DatasetLoader::ConstructFromSampleData(double** sample_values,
                                                int** sample_indices,
                                                int num_col,
                                                const int* num_per_col,
                                                size_t total_sample_size,
                                                data_size_t num_local_data,
                                                int64_t num_dist_data) {
  CheckSampleSize(total_sample_size, static_cast<size_t>(num_dist_data));
  int num_total_features = num_col;
  if (Network::num_machines() > 1) {
    // machines may see different column counts; agree on the maximum
    num_total_features = Network::GlobalSyncUpByMax(num_total_features);
  }
  std::vector<std::unique_ptr<BinMapper>> bin_mappers(num_total_features);
  // fill feature_names_ if not header
  if (feature_names_.empty()) {
    for (int i = 0; i < num_col; ++i) {
      std::stringstream str_buf;
      str_buf << "Column_" << i;
      feature_names_.push_back(str_buf.str());
    }
  }
  if (!config_.max_bin_by_feature.empty()) {
    CHECK_EQ(static_cast<size_t>(num_col), config_.max_bin_by_feature.size());
    CHECK_GT(*(std::min_element(config_.max_bin_by_feature.begin(), config_.max_bin_by_feature.end())), 1);
  }

  // get forced split
  std::string forced_bins_path = config_.forcedbins_filename;
  std::vector<std::vector<double>> forced_bin_bounds = DatasetLoader::GetForcedBins(forced_bins_path, num_col, categorical_features_);

  // minimum sample count a bin must reach to survive pre-filtering,
  // scaled from min_data_in_leaf by the sampling ratio
  const data_size_t filter_cnt = static_cast<data_size_t>(
    static_cast<double>(config_.min_data_in_leaf * total_sample_size) / num_dist_data);
  if (Network::num_machines() == 1) {
    // if only one machine, find bin locally
    OMP_INIT_EX();
    #pragma omp parallel for schedule(guided)
    for (int i = 0; i < num_col; ++i) {
      OMP_LOOP_EX_BEGIN();
      if (ignore_features_.count(i) > 0) {
        bin_mappers[i] = nullptr;
        continue;
      }
      BinType bin_type = BinType::NumericalBin;
      if (categorical_features_.count(i)) {
        bin_type = BinType::CategoricalBin;
        // monotone constraints are undefined for unordered categories
        bool feat_is_unconstrained = ((config_.monotone_constraints.size() == 0) || (config_.monotone_constraints[i] == 0));
        if (!feat_is_unconstrained) {
            Log::Fatal("The output cannot be monotone with respect to categorical features");
        }
      }
      bin_mappers[i].reset(new BinMapper());
      if (config_.max_bin_by_feature.empty()) {
        bin_mappers[i]->FindBin(sample_values[i], num_per_col[i], total_sample_size,
                                config_.max_bin, config_.min_data_in_bin, filter_cnt, config_.feature_pre_filter,
                                bin_type, config_.use_missing, config_.zero_as_missing,
                                forced_bin_bounds[i]);
      } else {
        bin_mappers[i]->FindBin(sample_values[i], num_per_col[i], total_sample_size,
                                config_.max_bin_by_feature[i], config_.min_data_in_bin,
                                filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing,
                                config_.zero_as_missing, forced_bin_bounds[i]);
      }
      OMP_LOOP_EX_END();
    }
    OMP_THROW_EX();
  } else {
    // if have multi-machines, need to find bin distributed
    // different machines will find bin for different features
    int num_machines = Network::num_machines();
    int rank = Network::rank();
    // start and len will store the process feature indices for different machines
    // machine i will find bins for features in [ start[i], start[i] + len[i] )
    std::vector<int> start(num_machines);
    std::vector<int> len(num_machines);
    int step = (num_total_features + num_machines - 1) / num_machines;
    if (step < 1) { step = 1; }

    start[0] = 0;
    for (int i = 0; i < num_machines - 1; ++i) {
      len[i] = std::min(step, num_total_features - start[i]);
      start[i + 1] = start[i] + len[i];
    }
    len[num_machines - 1] = num_total_features - start[num_machines - 1];
    OMP_INIT_EX();
    #pragma omp parallel for schedule(guided)
    for (int i = 0; i < len[rank]; ++i) {
      OMP_LOOP_EX_BEGIN();
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
      }
      BinType bin_type = BinType::NumericalBin;
      if (categorical_features_.count(start[rank] + i)) {
        bin_type = BinType::CategoricalBin;
      }
      bin_mappers[i].reset(new BinMapper());
      // this machine may not actually have data for features beyond num_col
      if (num_col <= start[rank] + i) {
        continue;
      }
      // NOTE(review): forced_bin_bounds is indexed with the local i here while
      // sample_values uses start[rank] + i — confirm this offset is intended.
      if (config_.max_bin_by_feature.empty()) {
        bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
                                total_sample_size, config_.max_bin, config_.min_data_in_bin,
                                filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing, config_.zero_as_missing,
                                forced_bin_bounds[i]);
      } else {
        bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
                                total_sample_size, config_.max_bin_by_feature[start[rank] + i],
                                config_.min_data_in_bin, filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing,
                                config_.zero_as_missing, forced_bin_bounds[i]);
      }
      OMP_LOOP_EX_END();
    }
    OMP_THROW_EX();
    // serialize this machine's bin mappers into a flat buffer
    comm_size_t self_buf_size = 0;
    for (int i = 0; i < len[rank]; ++i) {
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
      }
      self_buf_size += static_cast<comm_size_t>(bin_mappers[i]->SizesInByte());
    }
    std::vector<char> input_buffer(self_buf_size);
    auto cp_ptr = input_buffer.data();
    for (int i = 0; i < len[rank]; ++i) {
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
      }
      bin_mappers[i]->CopyTo(cp_ptr);
      cp_ptr += bin_mappers[i]->SizesInByte();
      // free
      bin_mappers[i].reset(nullptr);
    }
    // exchange buffer sizes, then allgather all machines' serialized mappers
    std::vector<comm_size_t> size_len = Network::GlobalArray(self_buf_size);
    std::vector<comm_size_t> size_start(num_machines, 0);
    for (int i = 1; i < num_machines; ++i) {
      size_start[i] = size_start[i - 1] + size_len[i - 1];
    }
    comm_size_t total_buffer_size = size_start[num_machines - 1] + size_len[num_machines - 1];
    std::vector<char> output_buffer(total_buffer_size);
    // gather global feature bin mappers
    Network::Allgather(input_buffer.data(), size_start.data(), size_len.data(), output_buffer.data(), total_buffer_size);
    cp_ptr = output_buffer.data();
    // restore features bins from buffer
    for (int i = 0; i < num_total_features; ++i) {
      if (ignore_features_.count(i) > 0) {
        bin_mappers[i] = nullptr;
        continue;
      }
      bin_mappers[i].reset(new BinMapper());
      bin_mappers[i]->CopyFrom(cp_ptr);
      cp_ptr += bin_mappers[i]->SizesInByte();
    }
  }
  CheckCategoricalFeatureNumBin(bin_mappers, config_.max_bin, config_.max_bin_by_feature);
  auto dataset = std::unique_ptr<Dataset>(new Dataset(num_local_data));
  dataset->Construct(&bin_mappers, num_total_features, forced_bin_bounds, sample_indices, sample_values, num_per_col, num_col, total_sample_size, config_);
  if (dataset->has_raw()) {
    dataset->ResizeRaw(num_local_data);
  }
  dataset->set_feature_names(feature_names_);
  return dataset.release();
}
Guolin Ke's avatar
Guolin Ke committed
753
754
755
756


// ---- private functions ----

757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
/*!
 * \brief Deserialize the header section of a Dataset from an in-memory binary
 *        buffer. Fields are read strictly in the order they were serialized;
 *        mem_ptr advances by the (aligned) size of each field after reading it,
 *        so statement order here must exactly mirror the writer's layout.
 * \param dataset Dataset whose header fields are populated in place.
 * \param buffer  Start of the serialized header bytes.
 */
void DatasetLoader::LoadHeaderFromMemory(Dataset* dataset, const char* buffer) {
  // get header
  const char* mem_ptr = buffer;
  // fixed-size scalar fields, one after another
  dataset->num_data_ = *(reinterpret_cast<const data_size_t*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->num_data_));
  dataset->num_features_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->num_features_));
  dataset->num_total_features_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->num_total_features_));
  dataset->label_idx_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->label_idx_));
  dataset->max_bin_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->max_bin_));
  dataset->bin_construct_sample_cnt_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->bin_construct_sample_cnt_));
  dataset->min_data_in_bin_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->min_data_in_bin_));
  dataset->use_missing_ = *(reinterpret_cast<const bool*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->use_missing_));
  dataset->zero_as_missing_ = *(reinterpret_cast<const bool*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->zero_as_missing_));
  dataset->has_raw_ = *(reinterpret_cast<const bool*>(mem_ptr));

  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->has_raw_));
  // used_feature_map_: one int per total feature
  const int* tmp_feature_map = reinterpret_cast<const int*>(mem_ptr);
  dataset->used_feature_map_.clear();
  for (int i = 0; i < dataset->num_total_features_; ++i) {
    dataset->used_feature_map_.push_back(tmp_feature_map[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * dataset->num_total_features_);
  // num_groups
  dataset->num_groups_ = *(reinterpret_cast<const int*>(mem_ptr));
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(dataset->num_groups_));
  // real_feature_idx_
  const int* tmp_ptr_real_feature_idx_ = reinterpret_cast<const int*>(mem_ptr);
  dataset->real_feature_idx_.clear();
  for (int i = 0; i < dataset->num_features_; ++i) {
    dataset->real_feature_idx_.push_back(tmp_ptr_real_feature_idx_[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * dataset->num_features_);
  // feature2group
  const int* tmp_ptr_feature2group = reinterpret_cast<const int*>(mem_ptr);
  dataset->feature2group_.clear();
  for (int i = 0; i < dataset->num_features_; ++i) {
    dataset->feature2group_.push_back(tmp_ptr_feature2group[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * dataset->num_features_);
  // feature2subfeature
  const int* tmp_ptr_feature2subfeature = reinterpret_cast<const int*>(mem_ptr);
  dataset->feature2subfeature_.clear();
  for (int i = 0; i < dataset->num_features_; ++i) {
    dataset->feature2subfeature_.push_back(tmp_ptr_feature2subfeature[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * dataset->num_features_);
  // group_bin_boundaries: num_groups_ + 1 entries
  // NOTE: advanced by raw (unaligned) size here, unlike the int arrays above
  const uint64_t* tmp_ptr_group_bin_boundaries = reinterpret_cast<const uint64_t*>(mem_ptr);
  dataset->group_bin_boundaries_.clear();
  for (int i = 0; i < dataset->num_groups_ + 1; ++i) {
    dataset->group_bin_boundaries_.push_back(tmp_ptr_group_bin_boundaries[i]);
  }
  mem_ptr += sizeof(uint64_t) * (dataset->num_groups_ + 1);

  // group_feature_start_
  const int* tmp_ptr_group_feature_start = reinterpret_cast<const int*>(mem_ptr);
  dataset->group_feature_start_.clear();
  for (int i = 0; i < dataset->num_groups_; ++i) {
    dataset->group_feature_start_.push_back(tmp_ptr_group_feature_start[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * (dataset->num_groups_));

  // group_feature_cnt_
  const int* tmp_ptr_group_feature_cnt = reinterpret_cast<const int*>(mem_ptr);
  dataset->group_feature_cnt_.clear();
  for (int i = 0; i < dataset->num_groups_; ++i) {
    dataset->group_feature_cnt_.push_back(tmp_ptr_group_feature_cnt[i]);
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int) * (dataset->num_groups_));

  // max_bin_by_feature_: config overrides the serialized values when present,
  // but the buffer cursor is advanced past the serialized array either way
  if (!config_.max_bin_by_feature.empty()) {
    CHECK_EQ(static_cast<size_t>(dataset->num_total_features_), config_.max_bin_by_feature.size());
    CHECK_GT(*(std::min_element(config_.max_bin_by_feature.begin(), config_.max_bin_by_feature.end())), 1);
    dataset->max_bin_by_feature_.resize(dataset->num_total_features_);
    dataset->max_bin_by_feature_.assign(config_.max_bin_by_feature.begin(), config_.max_bin_by_feature.end());
  } else {
    const int32_t* tmp_ptr_max_bin_by_feature = reinterpret_cast<const int32_t*>(mem_ptr);
    dataset->max_bin_by_feature_.clear();
    for (int i = 0; i < dataset->num_total_features_; ++i) {
      dataset->max_bin_by_feature_.push_back(tmp_ptr_max_bin_by_feature[i]);
    }
  }
  mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int32_t) * (dataset->num_total_features_));
  // an all -1 array is the serialized form of "no per-feature override"
  if (ArrayArgs<int32_t>::CheckAll(dataset->max_bin_by_feature_, -1)) {
    dataset->max_bin_by_feature_.clear();
  }

  // get feature names: each stored as [length][chars], both aligned
  dataset->feature_names_.clear();
  for (int i = 0; i < dataset->num_total_features_; ++i) {
    int str_len = *(reinterpret_cast<const int*>(mem_ptr));
    mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int));
    std::stringstream str_buf;
    auto tmp_arr = reinterpret_cast<const char*>(mem_ptr);
    for (int j = 0; j < str_len; ++j) {
      char tmp_char = tmp_arr[j];
      str_buf << tmp_char;
    }
    mem_ptr += VirtualFileWriter::AlignedSize(sizeof(char) * str_len);
    dataset->feature_names_.emplace_back(str_buf.str());
  }
  // get forced_bin_bounds_: per feature, [count][doubles]; doubles advance unaligned
  dataset->forced_bin_bounds_ = std::vector<std::vector<double>>(dataset->num_total_features_, std::vector<double>());
  for (int i = 0; i < dataset->num_total_features_; ++i) {
    int num_bounds = *(reinterpret_cast<const int*>(mem_ptr));
    mem_ptr += VirtualFileWriter::AlignedSize(sizeof(int));
    dataset->forced_bin_bounds_[i] = std::vector<double>();
    const double* tmp_ptr_forced_bounds =
      reinterpret_cast<const double*>(mem_ptr);
    for (int j = 0; j < num_bounds; ++j) {
      double bound = tmp_ptr_forced_bounds[j];
      dataset->forced_bin_bounds_[i].push_back(bound);
    }
    mem_ptr += num_bounds * sizeof(double);
  }
}

882
void DatasetLoader::CheckDataset(const Dataset* dataset, bool is_load_from_binary) {
Guolin Ke's avatar
Guolin Ke committed
883
  if (dataset->num_data_ <= 0) {
Guolin Ke's avatar
Guolin Ke committed
884
    Log::Fatal("Data file %s is empty", dataset->data_filename_.c_str());
Guolin Ke's avatar
Guolin Ke committed
885
  }
886
887
  if (dataset->feature_names_.size() != static_cast<size_t>(dataset->num_total_features_)) {
    Log::Fatal("Size of feature name error, should be %d, got %d", dataset->num_total_features_,
888
               static_cast<int>(dataset->feature_names_.size()));
889
  }
Guolin Ke's avatar
Guolin Ke committed
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
  bool is_feature_order_by_group = true;
  int last_group = -1;
  int last_sub_feature = -1;
  // if features are ordered, not need to use hist_buf
  for (int i = 0; i < dataset->num_features_; ++i) {
    int group = dataset->feature2group_[i];
    int sub_feature = dataset->feature2subfeature_[i];
    if (group < last_group) {
      is_feature_order_by_group = false;
    } else if (group == last_group) {
      if (sub_feature <= last_sub_feature) {
        is_feature_order_by_group = false;
        break;
      }
    }
    last_group = group;
    last_sub_feature = sub_feature;
  }
  if (!is_feature_order_by_group) {
909
    Log::Fatal("Features in dataset should be ordered by group");
Guolin Ke's avatar
Guolin Ke committed
910
  }
911
912
913

  if (is_load_from_binary) {
    if (dataset->max_bin_ != config_.max_bin) {
914
915
      Log::Fatal("Dataset was constructed with parameter max_bin=%d. It cannot be changed to %d when loading from binary file.",
                 dataset->max_bin_, config_.max_bin);
916
917
    }
    if (dataset->min_data_in_bin_ != config_.min_data_in_bin) {
918
919
      Log::Fatal("Dataset was constructed with parameter min_data_in_bin=%d. It cannot be changed to %d when loading from binary file.",
                 dataset->min_data_in_bin_, config_.min_data_in_bin);
920
921
    }
    if (dataset->use_missing_ != config_.use_missing) {
922
923
      Log::Fatal("Dataset was constructed with parameter use_missing=%d. It cannot be changed to %d when loading from binary file.",
                 dataset->use_missing_, config_.use_missing);
924
925
    }
    if (dataset->zero_as_missing_ != config_.zero_as_missing) {
926
927
      Log::Fatal("Dataset was constructed with parameter zero_as_missing=%d. It cannot be changed to %d when loading from binary file.",
                 dataset->zero_as_missing_, config_.zero_as_missing);
928
929
    }
    if (dataset->bin_construct_sample_cnt_ != config_.bin_construct_sample_cnt) {
930
931
      Log::Fatal("Dataset was constructed with parameter bin_construct_sample_cnt=%d. It cannot be changed to %d when loading from binary file.",
                 dataset->bin_construct_sample_cnt_, config_.bin_construct_sample_cnt);
932
933
934
935
    }
    if ((dataset->max_bin_by_feature_.size() != config_.max_bin_by_feature.size()) ||
        !std::equal(dataset->max_bin_by_feature_.begin(), dataset->max_bin_by_feature_.end(),
            config_.max_bin_by_feature.begin())) {
936
      Log::Fatal("Parameter max_bin_by_feature cannot be changed when loading from binary file.");
937
938
    }

939
    if (config_.label_column != "") {
940
      Log::Warning("Parameter label_column works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
941
942
    }
    if (config_.weight_column != "") {
943
      Log::Warning("Parameter weight_column works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
944
945
    }
    if (config_.group_column != "") {
946
      Log::Warning("Parameter group_column works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
947
948
    }
    if (config_.ignore_column != "") {
949
      Log::Warning("Parameter ignore_column works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
950
    }
951
    if (config_.two_round) {
952
      Log::Warning("Parameter two_round works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
953
954
    }
    if (config_.header) {
955
      Log::Warning("Parameter header works only in case of loading data directly from text file. It will be ignored when loading from binary file.");
956
    }
957
  }
Guolin Ke's avatar
Guolin Ke committed
958
959
960
}

std::vector<std::string> DatasetLoader::LoadTextDataToMemory(const char* filename, const Metadata& metadata,
961
962
                                                             int rank, int num_machines, int* num_global_data,
                                                             std::vector<data_size_t>* used_data_indices) {
963
  TextReader<data_size_t> text_reader(filename, config_.header, config_.file_load_progress_interval_bytes);
Guolin Ke's avatar
Guolin Ke committed
964
  used_data_indices->clear();
Guolin Ke's avatar
Guolin Ke committed
965
  if (num_machines == 1 || config_.pre_partition) {
Guolin Ke's avatar
Guolin Ke committed
966
967
968
969
970
971
972
973
974
    // read all lines
    *num_global_data = text_reader.ReadAllLines();
  } else {  // need partition data
            // get query data
    const data_size_t* query_boundaries = metadata.query_boundaries();

    if (query_boundaries == nullptr) {
      // if not contain query data, minimal sample unit is one record
      *num_global_data = text_reader.ReadAndFilterLines([this, rank, num_machines](data_size_t) {
Guolin Ke's avatar
Guolin Ke committed
975
        if (random_.NextShort(0, num_machines) == rank) {
Guolin Ke's avatar
Guolin Ke committed
976
977
978
979
980
981
982
983
984
985
986
987
988
989
          return true;
        } else {
          return false;
        }
      }, used_data_indices);
    } else {
      // if contain query data, minimal sample unit is one query
      data_size_t num_queries = metadata.num_queries();
      data_size_t qid = -1;
      bool is_query_used = false;
      *num_global_data = text_reader.ReadAndFilterLines(
        [this, rank, num_machines, &qid, &query_boundaries, &is_query_used, num_queries]
      (data_size_t line_idx) {
        if (qid >= num_queries) {
990
991
          Log::Fatal("Current query exceeds the range of the query file,\n"
                     "please ensure the query file is correct");
Guolin Ke's avatar
Guolin Ke committed
992
993
994
995
        }
        if (line_idx >= query_boundaries[qid + 1]) {
          // if is new query
          is_query_used = false;
Guolin Ke's avatar
Guolin Ke committed
996
          if (random_.NextShort(0, num_machines) == rank) {
Guolin Ke's avatar
Guolin Ke committed
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
            is_query_used = true;
          }
          ++qid;
        }
        return is_query_used;
      }, used_data_indices);
    }
  }
  return std::move(text_reader.Lines());
}

std::vector<std::string> DatasetLoader::SampleTextDataFromMemory(const std::vector<std::string>& data) {
Guolin Ke's avatar
Guolin Ke committed
1009
  int sample_cnt = config_.bin_construct_sample_cnt;
1010
1011
  if (static_cast<size_t>(sample_cnt) > data.size()) {
    sample_cnt = static_cast<int>(data.size());
1012
  }
1013
  auto sample_indices = random_.Sample(static_cast<int>(data.size()), sample_cnt);
Guolin Ke's avatar
Guolin Ke committed
1014
  std::vector<std::string> out(sample_indices.size());
Guolin Ke's avatar
Guolin Ke committed
1015
1016
  for (size_t i = 0; i < sample_indices.size(); ++i) {
    const size_t idx = sample_indices[i];
Guolin Ke's avatar
Guolin Ke committed
1017
    out[i] = data[idx];
Guolin Ke's avatar
Guolin Ke committed
1018
1019
1020
1021
  }
  return out;
}

1022
1023
1024
std::vector<std::string> DatasetLoader::SampleTextDataFromFile(const char* filename, const Metadata& metadata,
                                                               int rank, int num_machines, int* num_global_data,
                                                               std::vector<data_size_t>* used_data_indices) {
Guolin Ke's avatar
Guolin Ke committed
1025
  const data_size_t sample_cnt = static_cast<data_size_t>(config_.bin_construct_sample_cnt);
1026
  TextReader<data_size_t> text_reader(filename, config_.header, config_.file_load_progress_interval_bytes);
Guolin Ke's avatar
Guolin Ke committed
1027
  std::vector<std::string> out_data;
Guolin Ke's avatar
Guolin Ke committed
1028
  if (num_machines == 1 || config_.pre_partition) {
Guolin Ke's avatar
Guolin Ke committed
1029
    *num_global_data = static_cast<data_size_t>(text_reader.SampleFromFile(&random_, sample_cnt, &out_data));
Guolin Ke's avatar
Guolin Ke committed
1030
1031
1032
1033
1034
1035
1036
  } else {  // need partition data
            // get query data
    const data_size_t* query_boundaries = metadata.query_boundaries();
    if (query_boundaries == nullptr) {
      // if not contain query file, minimal sample unit is one record
      *num_global_data = text_reader.SampleAndFilterFromFile([this, rank, num_machines]
      (data_size_t) {
Guolin Ke's avatar
Guolin Ke committed
1037
        if (random_.NextShort(0, num_machines) == rank) {
Guolin Ke's avatar
Guolin Ke committed
1038
1039
1040
1041
          return true;
        } else {
          return false;
        }
Guolin Ke's avatar
Guolin Ke committed
1042
      }, used_data_indices, &random_, sample_cnt, &out_data);
Guolin Ke's avatar
Guolin Ke committed
1043
1044
1045
1046
1047
1048
1049
1050
1051
    } else {
      // if contain query file, minimal sample unit is one query
      data_size_t num_queries = metadata.num_queries();
      data_size_t qid = -1;
      bool is_query_used = false;
      *num_global_data = text_reader.SampleAndFilterFromFile(
        [this, rank, num_machines, &qid, &query_boundaries, &is_query_used, num_queries]
      (data_size_t line_idx) {
        if (qid >= num_queries) {
1052
1053
          Log::Fatal("Query id exceeds the range of the query file, "
                     "please ensure the query file is correct");
Guolin Ke's avatar
Guolin Ke committed
1054
1055
1056
1057
        }
        if (line_idx >= query_boundaries[qid + 1]) {
          // if is new query
          is_query_used = false;
Guolin Ke's avatar
Guolin Ke committed
1058
          if (random_.NextShort(0, num_machines) == rank) {
Guolin Ke's avatar
Guolin Ke committed
1059
1060
1061
1062
1063
            is_query_used = true;
          }
          ++qid;
        }
        return is_query_used;
Guolin Ke's avatar
Guolin Ke committed
1064
      }, used_data_indices, &random_, sample_cnt, &out_data);
Guolin Ke's avatar
Guolin Ke committed
1065
1066
1067
1068
1069
    }
  }
  return out_data;
}

1070
1071
1072
void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines,
                                                    const std::vector<std::string>& sample_data,
                                                    const Parser* parser, Dataset* dataset) {
1073
  auto t1 = std::chrono::high_resolution_clock::now();
Guolin Ke's avatar
Guolin Ke committed
1074
  std::vector<std::vector<double>> sample_values;
Guolin Ke's avatar
Guolin Ke committed
1075
  std::vector<std::vector<int>> sample_indices;
Guolin Ke's avatar
Guolin Ke committed
1076
1077
  std::vector<std::pair<int, double>> oneline_features;
  double label;
Guolin Ke's avatar
Guolin Ke committed
1078
  for (int i = 0; i < static_cast<int>(sample_data.size()); ++i) {
Guolin Ke's avatar
Guolin Ke committed
1079
1080
1081
1082
    oneline_features.clear();
    // parse features
    parser->ParseOneLine(sample_data[i].c_str(), &oneline_features, &label);
    for (std::pair<int, double>& inner_data : oneline_features) {
1083
      if (static_cast<size_t>(inner_data.first) >= sample_values.size()) {
Guolin Ke's avatar
Guolin Ke committed
1084
1085
        sample_values.resize(inner_data.first + 1);
        sample_indices.resize(inner_data.first + 1);
1086
      }
Guolin Ke's avatar
Guolin Ke committed
1087
      if (std::fabs(inner_data.second) > kZeroThreshold || std::isnan(inner_data.second)) {
Guolin Ke's avatar
Guolin Ke committed
1088
1089
        sample_values[inner_data.first].emplace_back(inner_data.second);
        sample_indices[inner_data.first].emplace_back(i);
Guolin Ke's avatar
Guolin Ke committed
1090
1091
1092
1093
      }
    }
  }

Guolin Ke's avatar
Guolin Ke committed
1094
  dataset->feature_groups_.clear();
1095
1096
1097
1098
1099
  dataset->num_total_features_ = std::max(static_cast<int>(sample_values.size()), parser->NumFeatures());
  if (num_machines > 1) {
    dataset->num_total_features_ = Network::GlobalSyncUpByMax(dataset->num_total_features_);
  }
  if (!feature_names_.empty()) {
1100
    CHECK_EQ(dataset->num_total_features_, static_cast<int>(feature_names_.size()));
1101
  }
Guolin Ke's avatar
Guolin Ke committed
1102

Belinda Trotta's avatar
Belinda Trotta committed
1103
  if (!config_.max_bin_by_feature.empty()) {
1104
1105
    CHECK_EQ(static_cast<size_t>(dataset->num_total_features_), config_.max_bin_by_feature.size());
    CHECK_GT(*(std::min_element(config_.max_bin_by_feature.begin(), config_.max_bin_by_feature.end())), 1);
Belinda Trotta's avatar
Belinda Trotta committed
1106
1107
  }

1108
1109
  // get forced split
  std::string forced_bins_path = config_.forcedbins_filename;
1110
1111
  std::vector<std::vector<double>> forced_bin_bounds = DatasetLoader::GetForcedBins(forced_bins_path,
                                                                                    dataset->num_total_features_,
1112
1113
                                                                                    categorical_features_);

Guolin Ke's avatar
Guolin Ke committed
1114
  // check the range of label_idx, weight_idx and group_idx
1115
1116
1117
1118
1119
  // skip label check if user input parser config file,
  // because label id is got from raw features while dataset features are consistent with customized parser.
  if (dataset->parser_config_str_.empty()) {
    CHECK(label_idx_ >= 0 && label_idx_ <= dataset->num_total_features_);
  }
Guolin Ke's avatar
Guolin Ke committed
1120
1121
1122
1123
  CHECK(weight_idx_ < 0 || weight_idx_ < dataset->num_total_features_);
  CHECK(group_idx_ < 0 || group_idx_ < dataset->num_total_features_);

  // fill feature_names_ if not header
Guolin Ke's avatar
Guolin Ke committed
1124
  if (feature_names_.empty()) {
Guolin Ke's avatar
Guolin Ke committed
1125
1126
1127
1128
1129
1130
    for (int i = 0; i < dataset->num_total_features_; ++i) {
      std::stringstream str_buf;
      str_buf << "Column_" << i;
      feature_names_.push_back(str_buf.str());
    }
  }
1131
  dataset->set_feature_names(feature_names_);
Guolin Ke's avatar
Guolin Ke committed
1132
  std::vector<std::unique_ptr<BinMapper>> bin_mappers(dataset->num_total_features_);
Guolin Ke's avatar
Guolin Ke committed
1133
  const data_size_t filter_cnt = static_cast<data_size_t>(
Guolin Ke's avatar
Guolin Ke committed
1134
    static_cast<double>(config_.min_data_in_leaf* sample_data.size()) / dataset->num_data_);
Guolin Ke's avatar
Guolin Ke committed
1135
1136
1137
  // start find bins
  if (num_machines == 1) {
    // if only one machine, find bin locally
1138
    OMP_INIT_EX();
1139
    #pragma omp parallel for schedule(guided)
Guolin Ke's avatar
Guolin Ke committed
1140
    for (int i = 0; i < static_cast<int>(sample_values.size()); ++i) {
1141
      OMP_LOOP_EX_BEGIN();
Guolin Ke's avatar
Guolin Ke committed
1142
      if (ignore_features_.count(i) > 0) {
Guolin Ke's avatar
Guolin Ke committed
1143
        bin_mappers[i] = nullptr;
Guolin Ke's avatar
Guolin Ke committed
1144
1145
        continue;
      }
1146
1147
1148
1149
      BinType bin_type = BinType::NumericalBin;
      if (categorical_features_.count(i)) {
        bin_type = BinType::CategoricalBin;
      }
Guolin Ke's avatar
Guolin Ke committed
1150
      bin_mappers[i].reset(new BinMapper());
Belinda Trotta's avatar
Belinda Trotta committed
1151
1152
      if (config_.max_bin_by_feature.empty()) {
        bin_mappers[i]->FindBin(sample_values[i].data(), static_cast<int>(sample_values[i].size()),
1153
                                sample_data.size(), config_.max_bin, config_.min_data_in_bin,
1154
                                filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing, config_.zero_as_missing,
1155
                                forced_bin_bounds[i]);
Belinda Trotta's avatar
Belinda Trotta committed
1156
1157
      } else {
        bin_mappers[i]->FindBin(sample_values[i].data(), static_cast<int>(sample_values[i].size()),
1158
                                sample_data.size(), config_.max_bin_by_feature[i],
1159
                                config_.min_data_in_bin, filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing,
1160
                                config_.zero_as_missing, forced_bin_bounds[i]);
Belinda Trotta's avatar
Belinda Trotta committed
1161
      }
1162
      OMP_LOOP_EX_END();
Guolin Ke's avatar
Guolin Ke committed
1163
    }
1164
    OMP_THROW_EX();
Guolin Ke's avatar
Guolin Ke committed
1165
1166
  } else {
    // start and len will store the process feature indices for different machines
1167
    // machine i will find bins for features in [ start[i], start[i] + len[i] )
Guolin Ke's avatar
Guolin Ke committed
1168
1169
    std::vector<int> start(num_machines);
    std::vector<int> len(num_machines);
1170
    int step = (dataset->num_total_features_ + num_machines - 1) / num_machines;
Guolin Ke's avatar
Guolin Ke committed
1171
1172
1173
1174
    if (step < 1) { step = 1; }

    start[0] = 0;
    for (int i = 0; i < num_machines - 1; ++i) {
1175
      len[i] = std::min(step, dataset->num_total_features_ - start[i]);
Guolin Ke's avatar
Guolin Ke committed
1176
1177
      start[i + 1] = start[i] + len[i];
    }
1178
    len[num_machines - 1] = dataset->num_total_features_ - start[num_machines - 1];
1179
    OMP_INIT_EX();
1180
    #pragma omp parallel for schedule(guided)
1181
    for (int i = 0; i < len[rank]; ++i) {
1182
      OMP_LOOP_EX_BEGIN();
1183
1184
1185
1186
1187
1188
1189
1190
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
      }
      BinType bin_type = BinType::NumericalBin;
      if (categorical_features_.count(start[rank] + i)) {
        bin_type = BinType::CategoricalBin;
      }
      bin_mappers[i].reset(new BinMapper());
Nikita Titov's avatar
Nikita Titov committed
1191
      if (static_cast<int>(sample_values.size()) <= start[rank] + i) {
1192
1193
        continue;
      }
Belinda Trotta's avatar
Belinda Trotta committed
1194
      if (config_.max_bin_by_feature.empty()) {
1195
        bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
Belinda Trotta's avatar
Belinda Trotta committed
1196
                                static_cast<int>(sample_values[start[rank] + i].size()),
1197
                                sample_data.size(), config_.max_bin, config_.min_data_in_bin,
1198
                                filter_cnt, config_.feature_pre_filter, bin_type, config_.use_missing, config_.zero_as_missing,
1199
                                forced_bin_bounds[i]);
Belinda Trotta's avatar
Belinda Trotta committed
1200
      } else {
1201
        bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
Belinda Trotta's avatar
Belinda Trotta committed
1202
                                static_cast<int>(sample_values[start[rank] + i].size()),
1203
                                sample_data.size(), config_.max_bin_by_feature[i],
1204
                                config_.min_data_in_bin, filter_cnt, config_.feature_pre_filter, bin_type,
1205
                                config_.use_missing, config_.zero_as_missing, forced_bin_bounds[i]);
Belinda Trotta's avatar
Belinda Trotta committed
1206
      }
1207
      OMP_LOOP_EX_END();
1208
    }
1209
    OMP_THROW_EX();
1210
    comm_size_t self_buf_size = 0;
Guolin Ke's avatar
Guolin Ke committed
1211
    for (int i = 0; i < len[rank]; ++i) {
1212
1213
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
Guolin Ke's avatar
Guolin Ke committed
1214
      }
1215
      self_buf_size += static_cast<comm_size_t>(bin_mappers[i]->SizesInByte());
Guolin Ke's avatar
Guolin Ke committed
1216
    }
1217
1218
    std::vector<char> input_buffer(self_buf_size);
    auto cp_ptr = input_buffer.data();
Guolin Ke's avatar
Guolin Ke committed
1219
    for (int i = 0; i < len[rank]; ++i) {
Guolin Ke's avatar
Guolin Ke committed
1220
1221
1222
      if (ignore_features_.count(start[rank] + i) > 0) {
        continue;
      }
1223
1224
      bin_mappers[i]->CopyTo(cp_ptr);
      cp_ptr += bin_mappers[i]->SizesInByte();
1225
1226
      // free
      bin_mappers[i].reset(nullptr);
Guolin Ke's avatar
Guolin Ke committed
1227
    }
1228
1229
1230
1231
    std::vector<comm_size_t> size_len = Network::GlobalArray(self_buf_size);
    std::vector<comm_size_t> size_start(num_machines, 0);
    for (int i = 1; i < num_machines; ++i) {
      size_start[i] = size_start[i - 1] + size_len[i - 1];
Guolin Ke's avatar
Guolin Ke committed
1232
    }
1233
1234
    comm_size_t total_buffer_size = size_start[num_machines - 1] + size_len[num_machines - 1];
    std::vector<char> output_buffer(total_buffer_size);
Guolin Ke's avatar
Guolin Ke committed
1235
    // gather global feature bin mappers
1236
1237
    Network::Allgather(input_buffer.data(), size_start.data(), size_len.data(), output_buffer.data(), total_buffer_size);
    cp_ptr = output_buffer.data();
Guolin Ke's avatar
Guolin Ke committed
1238
    // restore features bins from buffer
1239
    for (int i = 0; i < dataset->num_total_features_; ++i) {
Guolin Ke's avatar
Guolin Ke committed
1240
      if (ignore_features_.count(i) > 0) {
Guolin Ke's avatar
Guolin Ke committed
1241
        bin_mappers[i] = nullptr;
Guolin Ke's avatar
Guolin Ke committed
1242
1243
        continue;
      }
Guolin Ke's avatar
Guolin Ke committed
1244
      bin_mappers[i].reset(new BinMapper());
1245
1246
      bin_mappers[i]->CopyFrom(cp_ptr);
      cp_ptr += bin_mappers[i]->SizesInByte();
Guolin Ke's avatar
Guolin Ke committed
1247
1248
    }
  }
1249
  CheckCategoricalFeatureNumBin(bin_mappers, config_.max_bin, config_.max_bin_by_feature);
1250
  dataset->Construct(&bin_mappers, dataset->num_total_features_, forced_bin_bounds, Common::Vector2Ptr<int>(&sample_indices).data(),
Guolin Ke's avatar
Guolin Ke committed
1251
                     Common::Vector2Ptr<double>(&sample_values).data(),
1252
                     Common::VectorSize<int>(sample_indices).data(), static_cast<int>(sample_indices.size()), sample_data.size(), config_);
1253
  if (dataset->has_raw()) {
1254
    dataset->ResizeRaw(static_cast<int>(sample_data.size()));
1255
  }
1256
1257
1258
1259

  auto t2 = std::chrono::high_resolution_clock::now();
  Log::Info("Construct bin mappers from text data time %.2f seconds",
            std::chrono::duration<double, std::milli>(t2 - t1) * 1e-3);
Guolin Ke's avatar
Guolin Ke committed
1260
1261
1262
}

/*! \brief Extract local features from memory */
Guolin Ke's avatar
Guolin Ke committed
1263
void DatasetLoader::ExtractFeaturesFromMemory(std::vector<std::string>* text_data, const Parser* parser, Dataset* dataset) {
Guolin Ke's avatar
Guolin Ke committed
1264
1265
  std::vector<std::pair<int, double>> oneline_features;
  double tmp_label = 0.0f;
Guolin Ke's avatar
Guolin Ke committed
1266
  auto& ref_text_data = *text_data;
1267
  std::vector<float> feature_row(dataset->num_features_);
1268
  if (!predict_fun_) {
1269
    OMP_INIT_EX();
Guolin Ke's avatar
Guolin Ke committed
1270
    // if doesn't need to prediction with initial model
1271
    #pragma omp parallel for schedule(static) private(oneline_features) firstprivate(tmp_label, feature_row)
Guolin Ke's avatar
Guolin Ke committed
1272
    for (data_size_t i = 0; i < dataset->num_data_; ++i) {
1273
      OMP_LOOP_EX_BEGIN();
Guolin Ke's avatar
Guolin Ke committed
1274
1275
1276
      const int tid = omp_get_thread_num();
      oneline_features.clear();
      // parser
Guolin Ke's avatar
Guolin Ke committed
1277
      parser->ParseOneLine(ref_text_data[i].c_str(), &oneline_features, &tmp_label);
Guolin Ke's avatar
Guolin Ke committed
1278
      // set label
1279
      dataset->metadata_.SetLabelAt(i, static_cast<label_t>(tmp_label));
Guolin Ke's avatar
Guolin Ke committed
1280
      // free processed line:
Guolin Ke's avatar
Guolin Ke committed
1281
      ref_text_data[i].clear();
Guolin Ke's avatar
Guolin Ke committed
1282
1283
      // shrink_to_fit will be very slow in linux, and seems not free memory, disable for now
      // text_reader_->Lines()[i].shrink_to_fit();
Guolin Ke's avatar
Guolin Ke committed
1284
      std::vector<bool> is_feature_added(dataset->num_features_, false);
Guolin Ke's avatar
Guolin Ke committed
1285
1286
      // push data
      for (auto& inner_data : oneline_features) {
1287
        if (inner_data.first >= dataset->num_total_features_) { continue; }
Guolin Ke's avatar
Guolin Ke committed
1288
1289
        int feature_idx = dataset->used_feature_map_[inner_data.first];
        if (feature_idx >= 0) {
Guolin Ke's avatar
Guolin Ke committed
1290
          is_feature_added[feature_idx] = true;
Guolin Ke's avatar
Guolin Ke committed
1291
          // if is used feature
Guolin Ke's avatar
Guolin Ke committed
1292
1293
1294
          int group = dataset->feature2group_[feature_idx];
          int sub_feature = dataset->feature2subfeature_[feature_idx];
          dataset->feature_groups_[group]->PushData(tid, sub_feature, i, inner_data.second);
1295
          if (dataset->has_raw()) {
1296
            feature_row[feature_idx] = static_cast<float>(inner_data.second);
1297
          }
Guolin Ke's avatar
Guolin Ke committed
1298
1299
        } else {
          if (inner_data.first == weight_idx_) {
1300
            dataset->metadata_.SetWeightAt(i, static_cast<label_t>(inner_data.second));
Guolin Ke's avatar
Guolin Ke committed
1301
1302
1303
1304
1305
          } else if (inner_data.first == group_idx_) {
            dataset->metadata_.SetQueryAt(i, static_cast<data_size_t>(inner_data.second));
          }
        }
      }
1306
1307
1308
1309
1310
1311
1312
1313
      if (dataset->has_raw()) {
        for (size_t j = 0; j < feature_row.size(); ++j) {
          int feat_ind = dataset->numeric_feature_map_[j];
          if (feat_ind >= 0) {
            dataset->raw_data_[feat_ind][i] = feature_row[j];
          }
        }
      }
Guolin Ke's avatar
Guolin Ke committed
1314
      dataset->FinishOneRow(tid, i, is_feature_added);
1315
      OMP_LOOP_EX_END();
Guolin Ke's avatar
Guolin Ke committed
1316
    }
1317
    OMP_THROW_EX();
Guolin Ke's avatar
Guolin Ke committed
1318
  } else {
1319
    OMP_INIT_EX();
Guolin Ke's avatar
Guolin Ke committed
1320
    // if need to prediction with initial model
1321
    std::vector<double> init_score(static_cast<size_t>(dataset->num_data_) * num_class_);
1322
    #pragma omp parallel for schedule(static) private(oneline_features) firstprivate(tmp_label, feature_row)
Guolin Ke's avatar
Guolin Ke committed
1323
    for (data_size_t i = 0; i < dataset->num_data_; ++i) {
1324
      OMP_LOOP_EX_BEGIN();
Guolin Ke's avatar
Guolin Ke committed
1325
1326
1327
      const int tid = omp_get_thread_num();
      oneline_features.clear();
      // parser
Guolin Ke's avatar
Guolin Ke committed
1328
      parser->ParseOneLine(ref_text_data[i].c_str(), &oneline_features, &tmp_label);
Guolin Ke's avatar
Guolin Ke committed
1329
      // set initial score
Guolin Ke's avatar
Guolin Ke committed
1330
1331
      std::vector<double> oneline_init_score(num_class_);
      predict_fun_(oneline_features, oneline_init_score.data());
1332
      for (int k = 0; k < num_class_; ++k) {
1333
        init_score[k * dataset->num_data_ + i] = static_cast<double>(oneline_init_score[k]);
Guolin Ke's avatar
Guolin Ke committed
1334
1335
      }
      // set label
1336
      dataset->metadata_.SetLabelAt(i, static_cast<label_t>(tmp_label));
Guolin Ke's avatar
Guolin Ke committed
1337
      // free processed line:
1338
      ref_text_data[i].clear();
Andrew Ziem's avatar
Andrew Ziem committed
1339
      // shrink_to_fit will be very slow in Linux, and seems not free memory, disable for now
Guolin Ke's avatar
Guolin Ke committed
1340
1341
      // text_reader_->Lines()[i].shrink_to_fit();
      // push data
Guolin Ke's avatar
Guolin Ke committed
1342
      std::vector<bool> is_feature_added(dataset->num_features_, false);
Guolin Ke's avatar
Guolin Ke committed
1343
      for (auto& inner_data : oneline_features) {
1344
        if (inner_data.first >= dataset->num_total_features_) { continue; }
Guolin Ke's avatar
Guolin Ke committed
1345
1346
        int feature_idx = dataset->used_feature_map_[inner_data.first];
        if (feature_idx >= 0) {
Guolin Ke's avatar
Guolin Ke committed
1347
          is_feature_added[feature_idx] = true;
Guolin Ke's avatar
Guolin Ke committed
1348
          // if is used feature
Guolin Ke's avatar
Guolin Ke committed
1349
1350
          int group = dataset->feature2group_[feature_idx];
          int sub_feature = dataset->feature2subfeature_[feature_idx];
1351
          dataset->feature_groups_[group]->PushData(tid, sub_feature, i, inner_data.second);
1352
          if (dataset->has_raw()) {
1353
            feature_row[feature_idx] = static_cast<float>(inner_data.second);
1354
          }
Guolin Ke's avatar
Guolin Ke committed
1355
1356
        } else {
          if (inner_data.first == weight_idx_) {
1357
            dataset->metadata_.SetWeightAt(i, static_cast<label_t>(inner_data.second));
Guolin Ke's avatar
Guolin Ke committed
1358
1359
1360
1361
1362
          } else if (inner_data.first == group_idx_) {
            dataset->metadata_.SetQueryAt(i, static_cast<data_size_t>(inner_data.second));
          }
        }
      }
Guolin Ke's avatar
Guolin Ke committed
1363
      dataset->FinishOneRow(tid, i, is_feature_added);
1364
1365
1366
1367
1368
1369
1370
1371
      if (dataset->has_raw()) {
        for (size_t j = 0; j < feature_row.size(); ++j) {
          int feat_ind = dataset->numeric_feature_map_[j];
          if (feat_ind >= 0) {
            dataset->raw_data_[feat_ind][i] = feature_row[j];
          }
        }
      }
1372
      OMP_LOOP_EX_END();
Guolin Ke's avatar
Guolin Ke committed
1373
    }
1374
    OMP_THROW_EX();
Guolin Ke's avatar
Guolin Ke committed
1375
    // metadata_ will manage space of init_score
1376
    dataset->metadata_.SetInitScore(init_score.data(), dataset->num_data_ * num_class_);
Guolin Ke's avatar
Guolin Ke committed
1377
  }
Guolin Ke's avatar
Guolin Ke committed
1378
  dataset->FinishLoad();
Guolin Ke's avatar
Guolin Ke committed
1379
  // text data can be free after loaded feature values
Guolin Ke's avatar
Guolin Ke committed
1380
  text_data->clear();
Guolin Ke's avatar
Guolin Ke committed
1381
1382
1383
}

/*! \brief Extract local features from file */
1384
1385
void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* parser,
                                            const std::vector<data_size_t>& used_data_indices, Dataset* dataset) {
1386
  std::vector<double> init_score;
1387
  if (predict_fun_) {
1388
    init_score = std::vector<double>(static_cast<size_t>(dataset->num_data_) * num_class_);
Guolin Ke's avatar
Guolin Ke committed
1389
1390
1391
1392
1393
1394
  }
  std::function<void(data_size_t, const std::vector<std::string>&)> process_fun =
    [this, &init_score, &parser, &dataset]
  (data_size_t start_idx, const std::vector<std::string>& lines) {
    std::vector<std::pair<int, double>> oneline_features;
    double tmp_label = 0.0f;
1395
    std::vector<float> feature_row(dataset->num_features_);
1396
    OMP_INIT_EX();
1397
    #pragma omp parallel for schedule(static) private(oneline_features) firstprivate(tmp_label, feature_row)
Guolin Ke's avatar
Guolin Ke committed
1398
    for (data_size_t i = 0; i < static_cast<data_size_t>(lines.size()); ++i) {
1399
      OMP_LOOP_EX_BEGIN();
Guolin Ke's avatar
Guolin Ke committed
1400
1401
1402
1403
1404
      const int tid = omp_get_thread_num();
      oneline_features.clear();
      // parser
      parser->ParseOneLine(lines[i].c_str(), &oneline_features, &tmp_label);
      // set initial score
Guolin Ke's avatar
Guolin Ke committed
1405
      if (!init_score.empty()) {
Guolin Ke's avatar
Guolin Ke committed
1406
1407
        std::vector<double> oneline_init_score(num_class_);
        predict_fun_(oneline_features, oneline_init_score.data());
1408
        for (int k = 0; k < num_class_; ++k) {
1409
          init_score[k * dataset->num_data_ + start_idx + i] = static_cast<double>(oneline_init_score[k]);
Guolin Ke's avatar
Guolin Ke committed
1410
1411
1412
        }
      }
      // set label
1413
      dataset->metadata_.SetLabelAt(start_idx + i, static_cast<label_t>(tmp_label));
Guolin Ke's avatar
Guolin Ke committed
1414
      std::vector<bool> is_feature_added(dataset->num_features_, false);
Guolin Ke's avatar
Guolin Ke committed
1415
1416
      // push data
      for (auto& inner_data : oneline_features) {
1417
        if (inner_data.first >= dataset->num_total_features_) { continue; }
Guolin Ke's avatar
Guolin Ke committed
1418
1419
        int feature_idx = dataset->used_feature_map_[inner_data.first];
        if (feature_idx >= 0) {
Guolin Ke's avatar
Guolin Ke committed
1420
          is_feature_added[feature_idx] = true;
Guolin Ke's avatar
Guolin Ke committed
1421
          // if is used feature
Guolin Ke's avatar
Guolin Ke committed
1422
1423
1424
          int group = dataset->feature2group_[feature_idx];
          int sub_feature = dataset->feature2subfeature_[feature_idx];
          dataset->feature_groups_[group]->PushData(tid, sub_feature, start_idx + i, inner_data.second);
1425
          if (dataset->has_raw()) {
1426
            feature_row[feature_idx] = static_cast<float>(inner_data.second);
1427
          }
Guolin Ke's avatar
Guolin Ke committed
1428
1429
        } else {
          if (inner_data.first == weight_idx_) {
1430
            dataset->metadata_.SetWeightAt(start_idx + i, static_cast<label_t>(inner_data.second));
Guolin Ke's avatar
Guolin Ke committed
1431
1432
1433
1434
1435
          } else if (inner_data.first == group_idx_) {
            dataset->metadata_.SetQueryAt(start_idx + i, static_cast<data_size_t>(inner_data.second));
          }
        }
      }
1436
1437
1438
1439
1440
1441
1442
1443
      if (dataset->has_raw()) {
        for (size_t j = 0; j < feature_row.size(); ++j) {
          int feat_ind = dataset->numeric_feature_map_[j];
          if (feat_ind >= 0) {
            dataset->raw_data_[feat_ind][i] = feature_row[j];
          }
        }
      }
Guolin Ke's avatar
Guolin Ke committed
1444
      dataset->FinishOneRow(tid, i, is_feature_added);
1445
      OMP_LOOP_EX_END();
Guolin Ke's avatar
Guolin Ke committed
1446
    }
1447
    OMP_THROW_EX();
Guolin Ke's avatar
Guolin Ke committed
1448
  };
1449
  TextReader<data_size_t> text_reader(filename, config_.header, config_.file_load_progress_interval_bytes);
Guolin Ke's avatar
Guolin Ke committed
1450
  if (!used_data_indices.empty()) {
Guolin Ke's avatar
Guolin Ke committed
1451
1452
1453
1454
1455
1456
1457
1458
    // only need part of data
    text_reader.ReadPartAndProcessParallel(used_data_indices, process_fun);
  } else {
    // need full data
    text_reader.ReadAllAndProcessParallel(process_fun);
  }

  // metadata_ will manage space of init_score
Guolin Ke's avatar
Guolin Ke committed
1459
  if (!init_score.empty()) {
1460
    dataset->metadata_.SetInitScore(init_score.data(), dataset->num_data_ * num_class_);
Guolin Ke's avatar
Guolin Ke committed
1461
  }
Guolin Ke's avatar
Guolin Ke committed
1462
  dataset->FinishLoad();
Guolin Ke's avatar
Guolin Ke committed
1463
1464
1465
}

/*! \brief Check can load from binary file */
1466
std::string DatasetLoader::CheckCanLoadFromBin(const char* filename) {
Guolin Ke's avatar
Guolin Ke committed
1467
1468
1469
  std::string bin_filename(filename);
  bin_filename.append(".bin");

1470
  auto reader = VirtualFileReader::Make(bin_filename.c_str());
Guolin Ke's avatar
Guolin Ke committed
1471

1472
  if (!reader->Init()) {
1473
    bin_filename = std::string(filename);
1474
1475
    reader = VirtualFileReader::Make(bin_filename.c_str());
    if (!reader->Init()) {
1476
      Log::Fatal("Cannot open data file %s", bin_filename.c_str());
1477
    }
1478
  }
1479
1480
1481
1482
1483

  size_t buffer_size = 256;
  auto buffer = std::vector<char>(buffer_size);
  // read size of token
  size_t size_of_token = std::strlen(Dataset::binary_file_token);
1484
  size_t read_cnt = reader->Read(buffer.data(), size_of_token);
1485
1486
  if (read_cnt == size_of_token
      && std::string(buffer.data()) == std::string(Dataset::binary_file_token)) {
1487
    return bin_filename;
Guolin Ke's avatar
Guolin Ke committed
1488
  } else {
1489
    return std::string();
Guolin Ke's avatar
Guolin Ke committed
1490
1491
1492
  }
}

1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
std::vector<std::vector<double>> DatasetLoader::GetForcedBins(std::string forced_bins_path, int num_total_features,
                                                              const std::unordered_set<int>& categorical_features) {
  std::vector<std::vector<double>> forced_bins(num_total_features, std::vector<double>());
  if (forced_bins_path != "") {
    std::ifstream forced_bins_stream(forced_bins_path.c_str());
    if (forced_bins_stream.fail()) {
      Log::Warning("Could not open %s. Will ignore.", forced_bins_path.c_str());
    } else {
      std::stringstream buffer;
      buffer << forced_bins_stream.rdbuf();
      std::string err;
Guolin Ke's avatar
Guolin Ke committed
1504
      Json forced_bins_json = Json::parse(buffer.str(), &err);
1505
1506
1507
1508
      CHECK(forced_bins_json.is_array());
      std::vector<Json> forced_bins_arr = forced_bins_json.array_items();
      for (size_t i = 0; i < forced_bins_arr.size(); ++i) {
        int feature_num = forced_bins_arr[i]["feature"].int_value();
Nikita Titov's avatar
Nikita Titov committed
1509
        CHECK_LT(feature_num, num_total_features);
1510
        if (categorical_features.count(feature_num)) {
1511
          Log::Warning("Feature %d is categorical. Will ignore forced bins for this feature.", feature_num);
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
        } else {
          std::vector<Json> bounds_arr = forced_bins_arr[i]["bin_upper_bound"].array_items();
          for (size_t j = 0; j < bounds_arr.size(); ++j) {
            forced_bins[feature_num].push_back(bounds_arr[j].number_value());
          }
        }
      }
      // remove duplicates
      for (int i = 0; i < num_total_features; ++i) {
        auto new_end = std::unique(forced_bins[i].begin(), forced_bins[i].end());
        forced_bins[i].erase(new_end, forced_bins[i].end());
      }
    }
  }
  return forced_bins;
}

1529
1530
1531
1532
1533
1534
1535
void DatasetLoader::CheckCategoricalFeatureNumBin(
  const std::vector<std::unique_ptr<BinMapper>>& bin_mappers,
  const int max_bin, const std::vector<int>& max_bin_by_feature) const {
  bool need_warning = false;
  if (bin_mappers.size() < 1024) {
    for (size_t i = 0; i < bin_mappers.size(); ++i) {
      const int max_bin_for_this_feature = max_bin_by_feature.empty() ? max_bin : max_bin_by_feature[i];
1536
      if (bin_mappers[i] != nullptr && bin_mappers[i]->bin_type() == BinType::CategoricalBin && bin_mappers[i]->num_bin() > max_bin_for_this_feature) {
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
        need_warning = true;
        break;
      }
    }
  } else {
    const int num_threads = OMP_NUM_THREADS();
    std::vector<bool> thread_need_warning(num_threads, false);
    Threading::For<size_t>(0, bin_mappers.size(), 1,
      [&bin_mappers, &thread_need_warning, &max_bin_by_feature, max_bin] (int thread_index, size_t start, size_t end) {
        for (size_t i = start; i < end; ++i) {
          thread_need_warning[thread_index] = false;
          const int max_bin_for_this_feature = max_bin_by_feature.empty() ? max_bin : max_bin_by_feature[i];
1549
          if (bin_mappers[i] != nullptr && bin_mappers[i]->bin_type() == BinType::CategoricalBin && bin_mappers[i]->num_bin() > max_bin_for_this_feature) {
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
            thread_need_warning[thread_index] = true;
            break;
          }
        }
      });
    for (int thread_index = 0; thread_index < num_threads; ++thread_index) {
      if (thread_need_warning[thread_index]) {
        need_warning = true;
        break;
      }
    }
  }

  if (need_warning) {
    Log::Warning("Categorical features with more bins than the configured maximum bin number found.");
    Log::Warning("For categorical features, max_bin and max_bin_by_feature may be ignored with a large number of categories.");
  }
}

1569
}  // namespace LightGBM