# coding: utf-8
"""Distributed training with LightGBM and dask.distributed.

This module enables you to perform distributed training with LightGBM on
dask.Array and dask.DataFrame collections.

It is based on dask-lightgbm, which was based on dask-xgboost.
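
Examples
--------
A minimal usage sketch (illustrative only, not an exhaustive example), assuming a
local ``dask.distributed`` cluster and synthetic Dask arrays::

    import dask.array as da
    from distributed import Client, LocalCluster

    from lightgbm.dask import DaskLGBMRegressor

    cluster = LocalCluster(n_workers=2)
    client = Client(cluster)

    X = da.random.random((1000, 10), chunks=(100, 10))
    y = da.random.random((1000,), chunks=(100,))

    model = DaskLGBMRegressor(n_estimators=10)
    model.fit(X, y)
    predictions = model.predict(X)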
"""
import socket
from collections import defaultdict
from copy import deepcopy
from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union
from urllib.parse import urlparse

import numpy as np
import scipy.sparse as ss

from .basic import _choose_param_value, _ConfigAliases, _LIB, _log_warning, _safe_call, LightGBMError
from .compat import (PANDAS_INSTALLED, pd_DataFrame, pd_Series, concat,
                     SKLEARN_INSTALLED, LGBMNotFittedError,
                     DASK_INSTALLED, dask_DataFrame, dask_Array, dask_Series, delayed, Client, default_client, get_worker, wait)
from .sklearn import (
    _lgbmmodel_doc_fit,
    _lgbmmodel_doc_predict,
    LGBMClassifier,
    LGBMModel,
    LGBMRegressor,
    LGBMRanker
)

_DaskCollection = Union[dask_Array, dask_DataFrame, dask_Series]
_DaskMatrixLike = Union[dask_Array, dask_DataFrame]
_DaskPart = Union[np.ndarray, pd_DataFrame, pd_Series, ss.spmatrix]
_PredictionDtype = Union[Type[np.float32], Type[np.float64], Type[np.int32], Type[np.int64]]


def _get_dask_client(client: Optional[Client]) -> Client:
    """Choose a Dask client to use.

    Parameters
    ----------
    client : dask.distributed.Client or None
        Dask client.

    Returns
    -------
    client : dask.distributed.Client
        A Dask client.
    """
    if client is None:
        return default_client()
    else:
        return client


def _find_open_port(worker_ip: str, local_listen_port: int, ports_to_skip: Iterable[int]) -> int:
    """Find an open port.

    This function tries to find a free port on the machine it's run on. It is intended to
    be run once on each Dask worker, sequentially.

    Parameters
    ----------
    worker_ip : str
        IP address for the Dask worker.
    local_listen_port : int
        First port to try when searching for open ports.
    ports_to_skip : Iterable[int]
        An iterable of integers referring to ports that should be skipped. Since multiple Dask
        workers can run on the same physical machine, this method may be called multiple times
        on the same machine. ``ports_to_skip`` is used to ensure that LightGBM doesn't try to use
        the same port for two worker processes running on the same machine.

    Returns
    -------
    port : int
        A free port on the machine referenced by ``worker_ip``.
    """
    max_tries = 1000
    out_port = None
    found_port = False
    for i in range(max_tries):
        out_port = local_listen_port + i
        if out_port in ports_to_skip:
            continue
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.bind((worker_ip, out_port))
            found_port = True
            break
        # if unavailable, you'll get OSError: Address already in use
        except OSError:
            continue
    if not found_port:
        msg = "LightGBM tried %s:%d-%d and could not create a connection. Try setting local_listen_port to a different value."
        raise RuntimeError(msg % (worker_ip, local_listen_port, out_port))
    return out_port


def _find_ports_for_workers(client: Client, worker_addresses: Iterable[str], local_listen_port: int) -> Dict[str, int]:
    """Find an open port on each worker.

    LightGBM distributed training uses TCP sockets by default, and this method is used to
    identify open ports on each worker so LightGBM can reliably create those sockets.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    worker_addresses : Iterable[str]
        An iterable of addresses for workers in the cluster. These are strings of the form ``<protocol>://<host>:<port>``.
    local_listen_port : int
        First port to try when searching for open ports.

    Returns
    -------
    result : Dict[str, int]
        Dictionary where keys are worker addresses and values are an open port for LightGBM to use.
    """
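    # ask each worker, one at a time, for a port it can bind; the set of ports
    # already assigned is passed along so that two workers on the same machine
    # never get the same port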
    lightgbm_ports = set()
    worker_ip_to_port = {}
    for worker_address in worker_addresses:
        port = client.submit(
            func=_find_open_port,
            workers=[worker_address],
            worker_ip=urlparse(worker_address).hostname,
            local_listen_port=local_listen_port,
            ports_to_skip=lightgbm_ports
        ).result()
        lightgbm_ports.add(port)
        worker_ip_to_port[worker_address] = port

    return worker_ip_to_port


def _concat(seq: List[_DaskPart]) -> _DaskPart:
    if isinstance(seq[0], np.ndarray):
        return np.concatenate(seq, axis=0)
    elif isinstance(seq[0], (pd_DataFrame, pd_Series)):
        return concat(seq, axis=0)
    elif isinstance(seq[0], ss.spmatrix):
        return ss.vstack(seq, format='csr')
    else:
        raise TypeError('Data must be one of: numpy arrays, pandas dataframes, sparse matrices (from scipy). Got %s.' % str(type(seq[0])))


def _train_part(
    params: Dict[str, Any],
    model_factory: Type[LGBMModel],
    list_of_parts: List[Dict[str, _DaskPart]],
    worker_address_to_port: Dict[str, int],
    return_model: bool,
    time_out: int = 120,
    **kwargs: Any
) -> Optional[LGBMModel]:
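    # build the 'machines' string (one host:port pair per worker) and tell
    # this worker which port it should listen on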
    local_worker_address = get_worker().address
    machine_list = ','.join([
        '%s:%d' % (urlparse(worker_address).hostname, port)
        for worker_address, port
        in worker_address_to_port.items()
    ])
    network_params = {
        'machines': machine_list,
        'local_listen_port': worker_address_to_port[local_worker_address],
        'time_out': time_out,
        'num_machines': len(worker_address_to_port)
    }
    params.update(network_params)

    is_ranker = issubclass(model_factory, LGBMRanker)

    # Concatenate many parts into one
    data = _concat([x['data'] for x in list_of_parts])
    label = _concat([x['label'] for x in list_of_parts])

    if 'weight' in list_of_parts[0]:
        weight = _concat([x['weight'] for x in list_of_parts])
    else:
        weight = None

    if 'group' in list_of_parts[0]:
        group = _concat([x['group'] for x in list_of_parts])
    else:
        group = None

    try:
        model = model_factory(**params)
        if is_ranker:
            model.fit(data, label, sample_weight=weight, group=group, **kwargs)
        else:
            model.fit(data, label, sample_weight=weight, **kwargs)

    finally:
        _safe_call(_LIB.LGBM_NetworkFree())

    return model if return_model else None


def _split_to_parts(data: _DaskCollection, is_matrix: bool) -> List[_DaskPart]:
    parts = data.to_delayed()
    if isinstance(parts, np.ndarray):
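        # dask.Array.to_delayed() returns a 2-D grid of delayed chunk objects;
        # feature matrices must have a single column of chunks, and labels must
        # be effectively 1-D, so the grid can be flattened into a list below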
        if is_matrix:
            assert parts.shape[1] == 1
        else:
            assert parts.ndim == 1 or parts.shape[1] == 1
        parts = parts.flatten().tolist()
    return parts


def _train(
    client: Client,
    data: _DaskMatrixLike,
    label: _DaskCollection,
    params: Dict[str, Any],
    model_factory: Type[LGBMModel],
    sample_weight: Optional[_DaskCollection] = None,
    group: Optional[_DaskCollection] = None,
    **kwargs: Any
) -> LGBMModel:
    """Inner train routine.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    data : Dask Array or Dask DataFrame of shape = [n_samples, n_features]
        Input feature matrix.
    label : Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]
        The target values (class labels in classification, real numbers in regression).
    params : dict
        Parameters passed to the constructor of the local underlying model.
    model_factory : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker class
        Class of the local underlying model.
    sample_weight : Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)
        Weights of training data.
    group : Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)
        Group/query data.
        Only used in the learning-to-rank task.
        sum(group) = n_samples.
        For example, if you have a 100-document dataset with ``group = [10, 20, 40, 10, 10, 10]``, that means that you have 6 groups,
        where the first 10 records are in the first group, records 11-30 are in the second group, records 31-70 are in the third group, etc.
    **kwargs
        Other parameters passed to ``fit`` method of the local underlying model.

    Returns
    -------
    model : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker
        The fitted underlying model.
    """
    params = deepcopy(params)

    params = _choose_param_value(
        main_param_name="local_listen_port",
        params=params,
        default_value=12400
    )

    params = _choose_param_value(
        main_param_name="tree_learner",
        params=params,
        default_value="data"
    )
    allowed_tree_learners = {
        'data',
        'data_parallel',
        'feature',
        'feature_parallel',
        'voting',
        'voting_parallel'
    }
    if params["tree_learner"] not in allowed_tree_learners:
        _log_warning('Parameter tree_learner set to %s, which is not allowed. Using "data" as default' % params['tree_learner'])
        params['tree_learner'] = 'data'

    if params['tree_learner'] not in {'data', 'data_parallel'}:
        _log_warning(
            'Support for tree_learner %s in lightgbm.dask is experimental and may break in a future release.\n'
            'Use "data" for a stable, well-tested interface.' % params['tree_learner']
        )

    # Some passed-in parameters can be removed:
    #   * 'machines': constructed automatically from Dask worker list
    #   * 'num_machines': set automatically from Dask worker list
    #   * 'num_threads': overridden to match nthreads on each Dask process
    for param_alias in _ConfigAliases.get('machines', 'num_machines', 'num_threads'):
        params.pop(param_alias, None)

    # Split arrays/dataframes into parts. Arrange parts into dicts to enforce co-locality
    data_parts = _split_to_parts(data=data, is_matrix=True)
    label_parts = _split_to_parts(data=label, is_matrix=False)
    parts = [{'data': x, 'label': y} for (x, y) in zip(data_parts, label_parts)]

    if sample_weight is not None:
        weight_parts = _split_to_parts(data=sample_weight, is_matrix=False)
        for i in range(len(parts)):
            parts[i]['weight'] = weight_parts[i]

    if group is not None:
        group_parts = _split_to_parts(data=group, is_matrix=False)
        for i in range(len(parts)):
            parts[i]['group'] = group_parts[i]

    # Start computation in the background
    parts = list(map(delayed, parts))
    parts = client.compute(parts)
    wait(parts)

    for part in parts:
        if part.status == 'error':
            return part  # trigger error locally

    # Find locations of all parts and map them to particular Dask workers
    key_to_part_dict = {part.key: part for part in parts}
    who_has = client.who_has(parts)
    worker_map = defaultdict(list)
    for key, workers in who_has.items():
        worker_map[next(iter(workers))].append(key_to_part_dict[key])

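    # only one worker's fitted model is returned to the caller; the others
    # participate in training and then return None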
    master_worker = next(iter(worker_map))
    worker_ncores = client.ncores()

    # find an open port on each worker. note that multiple workers can run
    # on the same machine, so this needs to ensure that each one gets its
    # own port
    worker_address_to_port = _find_ports_for_workers(
        client=client,
        worker_addresses=worker_map.keys(),
        local_listen_port=params["local_listen_port"]
    )

    # Tell each worker to train on the parts that it has locally
    futures_classifiers = [
        client.submit(
            _train_part,
            model_factory=model_factory,
            params={**params, 'num_threads': worker_ncores[worker]},
            list_of_parts=list_of_parts,
            worker_address_to_port=worker_address_to_port,
            time_out=params.get('time_out', 120),
            return_model=(worker == master_worker),
            **kwargs
        )
        for worker, list_of_parts in worker_map.items()
    ]

    results = client.gather(futures_classifiers)
    results = [v for v in results if v]
    return results[0]


def _predict_part(
    part: _DaskPart,
    model: LGBMModel,
    raw_score: bool,
    pred_proba: bool,
    pred_leaf: bool,
    pred_contrib: bool,
    **kwargs: Any
) -> _DaskPart:

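    # an empty chunk may be passed in by map_blocks() / map_partitions();
    # return an empty result without calling into LightGBM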
    if part.shape[0] == 0:
        result = np.array([])
    elif pred_proba:
        result = model.predict_proba(
            part,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )
    else:
        result = model.predict(
            part,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )

    # dask.DataFrame.map_partitions() expects each call to return a pandas DataFrame or Series
    if isinstance(part, pd_DataFrame):
        if pred_proba or pred_contrib or pred_leaf:
            result = pd_DataFrame(result, index=part.index)
        else:
            result = pd_Series(result, index=part.index, name='predictions')

    return result


def _predict(
    model: LGBMModel,
    data: _DaskMatrixLike,
    raw_score: bool = False,
    pred_proba: bool = False,
    pred_leaf: bool = False,
    pred_contrib: bool = False,
    dtype: _PredictionDtype = np.float32,
    **kwargs: Any
) -> dask_Array:
    """Inner predict routine.

    Parameters
    ----------
    model : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker
        Fitted underlying model.
    data : Dask Array or Dask DataFrame of shape = [n_samples, n_features]
        Input feature matrix.
    raw_score : bool, optional (default=False)
        Whether to predict raw scores.
    pred_proba : bool, optional (default=False)
        Should method return results of ``predict_proba`` (``pred_proba=True``) or ``predict`` (``pred_proba=False``).
    pred_leaf : bool, optional (default=False)
        Whether to predict leaf index.
    pred_contrib : bool, optional (default=False)
        Whether to predict feature contributions.
    dtype : np.dtype, optional (default=np.float32)
        Dtype of the output.
    **kwargs
        Other parameters passed to ``predict`` or ``predict_proba`` method.

    Returns
    -------
    predicted_result : Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]
        The predicted values.
    X_leaves : Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]
        If ``pred_leaf=True``, the predicted leaf of every tree for each sample.
    X_SHAP_values : Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]
        If ``pred_contrib=True``, the feature contributions for each sample.
    """
    if not all((DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED)):
        raise LightGBMError('dask, pandas and scikit-learn are required for lightgbm.dask')
    if isinstance(data, dask_DataFrame):
        return data.map_partitions(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        ).values
    elif isinstance(data, dask_Array):
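        # predict_proba() returns one column per class; for other prediction
        # types the column axis is dropped from the blockwise output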
        if pred_proba:
            kwargs['chunks'] = (data.chunks[0], (model.n_classes_,))
        else:
            kwargs['drop_axis'] = 1
        return data.map_blocks(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            dtype=dtype,
            **kwargs
        )
    else:
        raise TypeError('Data must be either Dask Array or Dask DataFrame. Got %s.' % str(type(data)))


class _DaskLGBMModel:

    @property
    def client_(self) -> Client:
        """:obj:`dask.distributed.Client`: Dask client.

        The Dask client can be passed in the constructor or updated
        with ``model.set_params(client=client)``.
        """
        if not getattr(self, "fitted_", False):
            raise LGBMNotFittedError('Cannot access property client_ before calling fit().')

        return _get_dask_client(client=self.client)

    def _lgb_getstate(self) -> Dict[Any, Any]:
        """Remove un-picklable attributes before serialization."""
        client = self.__dict__.pop("client", None)
        self._other_params.pop("client", None)
        out = deepcopy(self.__dict__)
        out.update({"client": None})
        self.client = client
        return out

    def _fit(
        self,
        model_factory: Type[LGBMModel],
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        group: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "_DaskLGBMModel":
        if not all((DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED)):
            raise LightGBMError('dask, pandas and scikit-learn are required for lightgbm.dask')

        params = self.get_params(deep=True)
        params.pop("client", None)

        model = _train(
            client=_get_dask_client(self.client),
            data=X,
            label=y,
            params=params,
            model_factory=model_factory,
            sample_weight=sample_weight,
            group=group,
            **kwargs
        )

        self.set_params(**model.get_params())
        self._copy_extra_params(model, self)

        return self

    def _to_local(self, model_factory: Type[LGBMModel]) -> LGBMModel:
        params = self.get_params()
        params.pop("client", None)
        model = model_factory(**params)
        self._copy_extra_params(self, model)
        model._other_params.pop("client", None)
        return model

    @staticmethod
    def _copy_extra_params(source: Union["_DaskLGBMModel", LGBMModel], dest: Union["_DaskLGBMModel", LGBMModel]) -> None:
        params = source.get_params()
        attributes = source.__dict__
        extra_param_names = set(attributes.keys()).difference(params.keys())
        for name in extra_param_names:
            setattr(dest, name, attributes[name])


class DaskLGBMClassifier(LGBMClassifier, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMClassifier."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMClassifier.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMClassifier.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMClassifier":
        """Docstring is inherited from the lightgbm.LGBMClassifier.fit."""
        return self._fit(
            model_factory=LGBMClassifier,
            X=X,
            y=y,
            sample_weight=sample_weight,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMClassifier does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMClassifier support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMClassifier.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            dtype=self.classes_.dtype,
            **kwargs
        )

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]"
    )

    def predict_proba(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict_proba."""
        return _predict(
            model=self.to_local(),
            data=X,
            pred_proba=True,
            **kwargs
        )

    predict_proba.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted probability for each class for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_probability",
        predicted_result_shape="Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]"
    )

    def to_local(self) -> LGBMClassifier:
        """Create regular version of lightgbm.LGBMClassifier from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMClassifier
            Local underlying model.
        """
        return self._to_local(LGBMClassifier)


class DaskLGBMRegressor(LGBMRegressor, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMRegressor."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMRegressor.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMRegressor.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMRegressor":
        """Docstring is inherited from the lightgbm.LGBMRegressor.fit."""
        return self._fit(
            model_factory=LGBMRegressor,
            X=X,
            y=y,
            sample_weight=sample_weight,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMRegressor does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMRegressor support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMRegressor.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMRegressor.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            **kwargs
        )

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1]"
    )

    def to_local(self) -> LGBMRegressor:
        """Create regular version of lightgbm.LGBMRegressor from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRegressor
            Local underlying model.
        """
        return self._to_local(LGBMRegressor)


class DaskLGBMRanker(LGBMRanker, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMRanker."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMRanker.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMRanker.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        init_score: Optional[_DaskCollection] = None,
        group: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMRanker":
        """Docstring is inherited from the lightgbm.LGBMRanker.fit."""
        if init_score is not None:
            raise RuntimeError('init_score is not currently supported in lightgbm.dask')

        return self._fit(
            model_factory=LGBMRanker,
            X=X,
            y=y,
            sample_weight=sample_weight,
            group=group,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMRanker does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('group :'):])

    _base_doc = (_base_doc[:_base_doc.find('eval_set :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMRanker support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMRanker.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMRanker.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            **kwargs
        )

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1]"
    )

    def to_local(self) -> LGBMRanker:
        """Create regular version of lightgbm.LGBMRanker from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRanker
            Local underlying model.
        """
        return self._to_local(LGBMRanker)