# coding: utf-8
"""Distributed training with LightGBM and dask.distributed.

This module enables you to perform distributed training with LightGBM on
dask.Array and dask.DataFrame collections.

It is based on dask-lightgbm, which was based on dask-xgboost.
"""
import socket
from collections import defaultdict
from copy import deepcopy
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type, Union
from urllib.parse import urlparse

import numpy as np
import scipy.sparse as ss

from .basic import _LIB, LightGBMError, _choose_param_value, _ConfigAliases, _log_warning, _safe_call
from .compat import (DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED, Client, LGBMNotFittedError, concat,
                     dask_Array, dask_DataFrame, dask_Series, default_client, delayed, get_worker, pd_DataFrame,
                     pd_Series, wait)
from .sklearn import LGBMClassifier, LGBMModel, LGBMRanker, LGBMRegressor, _lgbmmodel_doc_fit, _lgbmmodel_doc_predict

_DaskCollection = Union[dask_Array, dask_DataFrame, dask_Series]
_DaskMatrixLike = Union[dask_Array, dask_DataFrame]
_DaskPart = Union[np.ndarray, pd_DataFrame, pd_Series, ss.spmatrix]
_PredictionDtype = Union[Type[np.float32], Type[np.float64], Type[np.int32], Type[np.int64]]


def _get_dask_client(client: Optional[Client]) -> Client:
    """Choose a Dask client to use.

    Parameters
    ----------
    client : dask.distributed.Client or None
        Dask client.

    Returns
    -------
    client : dask.distributed.Client
        A Dask client.
    """
    if client is None:
        return default_client()
    else:
        return client


def _find_open_port(worker_ip: str, local_listen_port: int, ports_to_skip: Iterable[int]) -> int:
    """Find an open port.

    This function tries to find a free port on the machine it's run on. It is intended to
    be run once on each Dask worker, sequentially, so that ports already assigned to other
    workers on the same machine can be skipped.

    Parameters
    ----------
    worker_ip : str
        IP address for the Dask worker.
    local_listen_port : int
        First port to try when searching for open ports.
    ports_to_skip : Iterable[int]
        An iterable of integers referring to ports that should be skipped. Since multiple Dask
        workers can run on the same physical machine, this method may be called multiple times
        on the same machine. ``ports_to_skip`` is used to ensure that LightGBM doesn't try to use
        the same port for two worker processes running on the same machine.

    Returns
    -------
    port : int
        A free port on the machine referenced by ``worker_ip``.
    """
    max_tries = 1000
    found_port = False
    for i in range(max_tries):
        out_port = local_listen_port + i
        if out_port in ports_to_skip:
            continue
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.bind((worker_ip, out_port))
            found_port = True
            break
        # if unavailable, you'll get OSError: Address already in use
        except OSError:
            continue
    if not found_port:
        msg = "LightGBM tried %s:%d-%d and could not create a connection. Try setting local_listen_port to a different value."
        raise RuntimeError(msg % (worker_ip, local_listen_port, out_port))
    return out_port
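
# Example of the search above (values illustrative): if another worker process on
# the same host has already claimed port 12400, a call like
# ``_find_open_port('127.0.0.1', 12400, ports_to_skip={12400})`` skips 12400 and
# returns the next free port, e.g. 12401.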


def _find_ports_for_workers(client: Client, worker_addresses: Iterable[str], local_listen_port: int) -> Dict[str, int]:
    """Find an open port on each worker.

    LightGBM distributed training uses TCP sockets by default, and this method is used to
    identify open ports on each worker so LightGBM can reliably create those sockets.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    worker_addresses : Iterable[str]
        An iterable of addresses for workers in the cluster. These are strings of the form ``<protocol>://<host>:<port>``.
    local_listen_port : int
        First port to try when searching for open ports.

    Returns
    -------
    result : Dict[str, int]
        Dictionary where keys are worker addresses and values are an open port for LightGBM to use.
    """
    lightgbm_ports: Set[int] = set()
    worker_ip_to_port = {}
    for worker_address in worker_addresses:
        port = client.submit(
            func=_find_open_port,
            workers=[worker_address],
            worker_ip=urlparse(worker_address).hostname,
            local_listen_port=local_listen_port,
            ports_to_skip=lightgbm_ports
        ).result()
        lightgbm_ports.add(port)
        worker_ip_to_port[worker_address] = port
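
    # At this point ``worker_ip_to_port`` maps each worker address to the port
    # chosen for it, e.g. (addresses and ports illustrative):
    #   {'tcp://10.0.0.1:40000': 12400, 'tcp://10.0.0.2:40000': 12401}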

    return worker_ip_to_port


def _concat(seq: List[_DaskPart]) -> _DaskPart:
    if isinstance(seq[0], np.ndarray):
        return np.concatenate(seq, axis=0)
    elif isinstance(seq[0], (pd_DataFrame, pd_Series)):
        return concat(seq, axis=0)
    elif isinstance(seq[0], ss.spmatrix):
        return ss.vstack(seq, format='csr')
    else:
        raise TypeError('Data must be one of: numpy arrays, pandas dataframes, sparse matrices (from scipy). Got %s.' % str(type(seq[0])))


def _train_part(
    params: Dict[str, Any],
    model_factory: Type[LGBMModel],
    list_of_parts: List[Dict[str, _DaskPart]],
    worker_address_to_port: Dict[str, int],
    return_model: bool,
    time_out: int = 120,
    **kwargs: Any
) -> Optional[LGBMModel]:
    local_worker_address = get_worker().address
    machine_list = ','.join([
        '%s:%d' % (urlparse(worker_address).hostname, port)
        for worker_address, port
        in worker_address_to_port.items()
    ])
    network_params = {
        'machines': machine_list,
        'local_listen_port': worker_address_to_port[local_worker_address],
        'time_out': time_out,
        'num_machines': len(worker_address_to_port)
    }
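    # For illustration (addresses hypothetical): with two workers this yields
    # something like machines='10.0.0.1:12400,10.0.0.2:12401', num_machines=2,
    # and local_listen_port equal to this worker's own entry in the mapping.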
    params.update(network_params)

    is_ranker = issubclass(model_factory, LGBMRanker)

    # Concatenate many parts into one
    data = _concat([x['data'] for x in list_of_parts])
    label = _concat([x['label'] for x in list_of_parts])

    if 'weight' in list_of_parts[0]:
        weight = _concat([x['weight'] for x in list_of_parts])
    else:
        weight = None

    if 'group' in list_of_parts[0]:
        group = _concat([x['group'] for x in list_of_parts])
    else:
        group = None

    try:
        model = model_factory(**params)
        if is_ranker:
            model.fit(data, label, sample_weight=weight, group=group, **kwargs)
        else:
            model.fit(data, label, sample_weight=weight, **kwargs)

    finally:
        _safe_call(_LIB.LGBM_NetworkFree())

    return model if return_model else None


def _split_to_parts(data: _DaskCollection, is_matrix: bool) -> List[_DaskPart]:
    parts = data.to_delayed()
    if isinstance(parts, np.ndarray):
        if is_matrix:
            assert parts.shape[1] == 1
        else:
            assert parts.ndim == 1 or parts.shape[1] == 1
        parts = parts.flatten().tolist()
    return parts


def _train(
    client: Client,
    data: _DaskMatrixLike,
    label: _DaskCollection,
    params: Dict[str, Any],
    model_factory: Type[LGBMModel],
    sample_weight: Optional[_DaskCollection] = None,
    group: Optional[_DaskCollection] = None,
    **kwargs: Any
) -> LGBMModel:
    """Inner train routine.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    data : Dask Array or Dask DataFrame of shape = [n_samples, n_features]
        Input feature matrix.
    label : Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]
        The target values (class labels in classification, real numbers in regression).
    params : dict
        Parameters passed to constructor of the local underlying model.
    model_factory : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker class
        Class of the local underlying model.
    sample_weight : Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)
        Weights of training data.
    group : Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)
        Group/query data.
        Only used in the learning-to-rank task.
        sum(group) = n_samples.
        For example, if you have a 100-document dataset with ``group = [10, 20, 40, 10, 10, 10]``, that means that you have 6 groups,
        where the first 10 records are in the first group, records 11-30 are in the second group, records 31-70 are in the third group, etc.
    **kwargs
        Other parameters passed to ``fit`` method of the local underlying model.

    Returns
    -------
    model : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker class
        The fitted underlying model.
    """
    params = deepcopy(params)

    params = _choose_param_value(
        main_param_name="local_listen_port",
        params=params,
        default_value=12400
    )

    params = _choose_param_value(
        main_param_name="tree_learner",
        params=params,
        default_value="data"
    )
    allowed_tree_learners = {
        'data',
        'data_parallel',
        'feature',
        'feature_parallel',
        'voting',
        'voting_parallel'
    }
    if params["tree_learner"] not in allowed_tree_learners:
        _log_warning('Parameter tree_learner set to %s, which is not allowed. Using "data" as default' % params['tree_learner'])
        params['tree_learner'] = 'data'

    if params['tree_learner'] not in {'data', 'data_parallel'}:
        _log_warning(
            'Support for tree_learner %s in lightgbm.dask is experimental and may break in a future release. \n'
            'Use "data" for a stable, well-tested interface.' % params['tree_learner']
        )

    # Some passed-in parameters can be removed:
    #   * 'machines': constructed automatically from Dask worker list
    #   * 'num_machines': set automatically from Dask worker list
    #   * 'num_threads': overridden to match nthreads on each Dask process
    for param_alias in _ConfigAliases.get('machines', 'num_machines', 'num_threads'):
        params.pop(param_alias, None)

    # Split arrays/dataframes into parts. Arrange parts into dicts to enforce co-locality
    data_parts = _split_to_parts(data=data, is_matrix=True)
    label_parts = _split_to_parts(data=label, is_matrix=False)
    parts = [{'data': x, 'label': y} for (x, y) in zip(data_parts, label_parts)]
    n_parts = len(parts)

    if sample_weight is not None:
        weight_parts = _split_to_parts(data=sample_weight, is_matrix=False)
        for i in range(n_parts):
            parts[i]['weight'] = weight_parts[i]

    if group is not None:
        group_parts = _split_to_parts(data=group, is_matrix=False)
        for i in range(n_parts):
            parts[i]['group'] = group_parts[i]

    # Start computation in the background
    parts = list(map(delayed, parts))
    parts = client.compute(parts)
    wait(parts)

    for part in parts:
        if part.status == 'error':  # type: ignore
            return part  # trigger error locally

    # Find locations of all parts and map them to particular Dask workers
    key_to_part_dict = {part.key: part for part in parts}  # type: ignore
    who_has = client.who_has(parts)
    worker_map = defaultdict(list)
    for key, workers in who_has.items():
        worker_map[next(iter(workers))].append(key_to_part_dict[key])
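
    # ``worker_map`` now groups part futures by the worker that holds them, e.g.
    # (addresses purely illustrative):
    #   {'tcp://10.0.0.1:40000': [part_0, part_2], 'tcp://10.0.0.2:40000': [part_1]}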

    master_worker = next(iter(worker_map))
    worker_ncores = client.ncores()

    # find an open port on each worker. note that multiple workers can run
    # on the same machine, so this needs to ensure that each one gets its
    # own port
    worker_address_to_port = _find_ports_for_workers(
        client=client,
        worker_addresses=worker_map.keys(),
        local_listen_port=params["local_listen_port"]
    )

    # Tell each worker to train on the parts that it has locally
    futures_classifiers = [
        client.submit(
            _train_part,
            model_factory=model_factory,
            params={**params, 'num_threads': worker_ncores[worker]},
            list_of_parts=list_of_parts,
            worker_address_to_port=worker_address_to_port,
            time_out=params.get('time_out', 120),
            return_model=(worker == master_worker),
            **kwargs
        )
        for worker, list_of_parts in worker_map.items()
    ]

    results = client.gather(futures_classifiers)
    results = [v for v in results if v]
    return results[0]


def _predict_part(
    part: _DaskPart,
    model: LGBMModel,
    raw_score: bool,
    pred_proba: bool,
    pred_leaf: bool,
    pred_contrib: bool,
    **kwargs: Any
) -> _DaskPart:
    if part.shape[0] == 0:
        result = np.array([])
    elif pred_proba:
        result = model.predict_proba(
            part,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )
    else:
        result = model.predict(
            part,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )

    # dask.DataFrame.map_partitions() expects each call to return a pandas DataFrame or Series
    if isinstance(part, pd_DataFrame):
        if pred_proba or pred_contrib or pred_leaf:
            result = pd_DataFrame(result, index=part.index)
        else:
            result = pd_Series(result, index=part.index, name='predictions')

    return result


def _predict(
    model: LGBMModel,
    data: _DaskMatrixLike,
    raw_score: bool = False,
    pred_proba: bool = False,
    pred_leaf: bool = False,
    pred_contrib: bool = False,
    dtype: _PredictionDtype = np.float32,
    **kwargs: Any
) -> dask_Array:
    """Inner predict routine.

    Parameters
    ----------
    model : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker class
        Fitted underlying model.
    data : Dask Array or Dask DataFrame of shape = [n_samples, n_features]
        Input feature matrix.
    raw_score : bool, optional (default=False)
        Whether to predict raw scores.
    pred_proba : bool, optional (default=False)
        Whether the method should return the result of ``predict_proba`` (``pred_proba=True``) or ``predict`` (``pred_proba=False``).
    pred_leaf : bool, optional (default=False)
        Whether to predict leaf index.
    pred_contrib : bool, optional (default=False)
        Whether to predict feature contributions.
    dtype : np.dtype, optional (default=np.float32)
        Dtype of the output.
    **kwargs
        Other parameters passed to ``predict`` or ``predict_proba`` method.

    Returns
    -------
    predicted_result : Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]
        The predicted values.
    X_leaves : Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]
        If ``pred_leaf=True``, the predicted leaf of every tree for each sample.
    X_SHAP_values : Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]
        If ``pred_contrib=True``, the feature contributions for each sample.
    """
    if not all((DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED)):
        raise LightGBMError('dask, pandas and scikit-learn are required for lightgbm.dask')
    if isinstance(data, dask_DataFrame):
        return data.map_partitions(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        ).values
    elif isinstance(data, dask_Array):
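        # ``predict_proba`` output has ``n_classes_`` columns per block, so new
        # chunk sizes are supplied; plain ``predict`` is treated as one value
        # per row, so the column axis of each block is dropped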
        if pred_proba:
            kwargs['chunks'] = (data.chunks[0], (model.n_classes_,))
        else:
            kwargs['drop_axis'] = 1
        return data.map_blocks(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            dtype=dtype,
            **kwargs
        )
    else:
        raise TypeError('Data must be either Dask Array or Dask DataFrame. Got %s.' % str(type(data)))


class _DaskLGBMModel:

    @property
    def client_(self) -> Client:
        """:obj:`dask.distributed.Client`: Dask client.

        The Dask client can be passed in the constructor or updated later
        with ``model.set_params(client=client)``.
        """
        if not getattr(self, "fitted_", False):
            raise LGBMNotFittedError('Cannot access property client_ before calling fit().')

        return _get_dask_client(client=self.client)

    def _lgb_getstate(self) -> Dict[Any, Any]:
        """Remove un-picklable attributes before serialization."""
        client = self.__dict__.pop("client", None)
        self._other_params.pop("client", None)
        out = deepcopy(self.__dict__)
        out.update({"client": None})
        self.client = client
        return out
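
    # Illustrative sketch (not part of the public API): because ``__getstate__``
    # drops the client, a fitted estimator can be pickled, and a client must be
    # re-attached after loading, e.g.
    #
    #   import pickle
    #   payload = pickle.dumps(dask_model)      # client is not serialized
    #   restored = pickle.loads(payload)
    #   restored.set_params(client=Client())    # attach a fresh client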

    def _fit(
        self,
        model_factory: Type[LGBMModel],
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        group: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "_DaskLGBMModel":
        if not all((DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED)):
            raise LightGBMError('dask, pandas and scikit-learn are required for lightgbm.dask')

        params = self.get_params(True)
        params.pop("client", None)

        model = _train(
            client=_get_dask_client(self.client),
            data=X,
            label=y,
            params=params,
            model_factory=model_factory,
            sample_weight=sample_weight,
            group=group,
            **kwargs
        )

        self.set_params(**model.get_params())
        self._copy_extra_params(model, self)

        return self

    def _to_local(self, model_factory: Type[LGBMModel]) -> LGBMModel:
        params = self.get_params()
        params.pop("client", None)
        model = model_factory(**params)
        self._copy_extra_params(self, model)
        model._other_params.pop("client", None)
        return model

    @staticmethod
    def _copy_extra_params(source: Union["_DaskLGBMModel", LGBMModel], dest: Union["_DaskLGBMModel", LGBMModel]) -> None:
        params = source.get_params()
        attributes = source.__dict__
        extra_param_names = set(attributes.keys()).difference(params.keys())
        for name in extra_param_names:
            setattr(dest, name, attributes[name])


class DaskLGBMClassifier(LGBMClassifier, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMClassifier."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMClassifier.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMClassifier.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMClassifier":
        """Docstring is inherited from the lightgbm.LGBMClassifier.fit."""
        return self._fit(
            model_factory=LGBMClassifier,
            X=X,
            y=y,
            sample_weight=sample_weight,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMClassifier does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMClassifier support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMClassifier.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            dtype=self.classes_.dtype,
            **kwargs
        )

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]"
    )

    def predict_proba(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict_proba."""
        return _predict(
            model=self.to_local(),
            data=X,
            pred_proba=True,
            **kwargs
        )

    predict_proba.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted probability for each class for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_probability",
        predicted_result_shape="Dask Array of shape = [n_samples] or shape = [n_samples, n_classes]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees] or shape = [n_samples, n_trees * n_classes]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1] or shape = [n_samples, (n_features + 1) * n_classes]"
    )

    def to_local(self) -> LGBMClassifier:
        """Create regular version of lightgbm.LGBMClassifier from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMClassifier
            Local underlying model.
        """
        return self._to_local(LGBMClassifier)
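
    # For example (sketch; ``dask_clf`` and ``X_local`` are hypothetical names):
    # a fitted DaskLGBMClassifier can be converted for single-machine scoring
    # without a Dask cluster:
    #
    #   local_clf = dask_clf.to_local()
    #   local_preds = local_clf.predict(X_local)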


class DaskLGBMRegressor(LGBMRegressor, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMRegressor."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMRegressor.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMRegressor.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMRegressor":
        """Docstring is inherited from the lightgbm.LGBMRegressor.fit."""
        return self._fit(
            model_factory=LGBMRegressor,
            X=X,
            y=y,
            sample_weight=sample_weight,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMRegressor does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMRegressor support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMRegressor.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMRegressor.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            **kwargs
        )

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1]"
    )

    def to_local(self) -> LGBMRegressor:
        """Create regular version of lightgbm.LGBMRegressor from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRegressor
            Local underlying model.
        """
        return self._to_local(LGBMRegressor)


class DaskLGBMRanker(LGBMRanker, _DaskLGBMModel):
    """Distributed version of lightgbm.LGBMRanker."""

    def __init__(
        self,
        boosting_type: str = 'gbdt',
        num_leaves: int = 31,
        max_depth: int = -1,
        learning_rate: float = 0.1,
        n_estimators: int = 100,
        subsample_for_bin: int = 200000,
        objective: Optional[Union[Callable, str]] = None,
        class_weight: Optional[Union[dict, str]] = None,
        min_split_gain: float = 0.,
        min_child_weight: float = 1e-3,
        min_child_samples: int = 20,
        subsample: float = 1.,
        subsample_freq: int = 0,
        colsample_bytree: float = 1.,
        reg_alpha: float = 0.,
        reg_lambda: float = 0.,
        random_state: Optional[Union[int, np.random.RandomState]] = None,
        n_jobs: int = -1,
        silent: bool = True,
        importance_type: str = 'split',
        client: Optional[Client] = None,
        **kwargs: Any
    ):
        """Docstring is inherited from the lightgbm.LGBMRanker.__init__."""
        self.client = client
        super().__init__(
            boosting_type=boosting_type,
            num_leaves=num_leaves,
            max_depth=max_depth,
            learning_rate=learning_rate,
            n_estimators=n_estimators,
            subsample_for_bin=subsample_for_bin,
            objective=objective,
            class_weight=class_weight,
            min_split_gain=min_split_gain,
            min_child_weight=min_child_weight,
            min_child_samples=min_child_samples,
            subsample=subsample,
            subsample_freq=subsample_freq,
            colsample_bytree=colsample_bytree,
            reg_alpha=reg_alpha,
            reg_lambda=reg_lambda,
            random_state=random_state,
            n_jobs=n_jobs,
            silent=silent,
            importance_type=importance_type,
            **kwargs
        )

    _base_doc = LGBMRanker.__init__.__doc__
    _before_kwargs, _kwargs, _after_kwargs = _base_doc.partition('**kwargs')
    _base_doc = (
        _before_kwargs
        + 'client : dask.distributed.Client or None, optional (default=None)\n'
        + ' ' * 12 + 'Dask client. If ``None``, ``distributed.default_client()`` will be used at runtime. The Dask client used by this class will not be saved if the model object is pickled.\n'
        + ' ' * 8 + _kwargs + _after_kwargs
    )

    # the note on custom objective functions in LGBMModel.__init__ is not
    # currently relevant for the Dask estimators
    __init__.__doc__ = _base_doc[:_base_doc.find('Note\n')]

    def __getstate__(self) -> Dict[Any, Any]:
        return self._lgb_getstate()

    def fit(
        self,
        X: _DaskMatrixLike,
        y: _DaskCollection,
        sample_weight: Optional[_DaskCollection] = None,
        init_score: Optional[_DaskCollection] = None,
        group: Optional[_DaskCollection] = None,
        **kwargs: Any
    ) -> "DaskLGBMRanker":
        """Docstring is inherited from the lightgbm.LGBMRanker.fit."""
        if init_score is not None:
            raise RuntimeError('init_score is not currently supported in lightgbm.dask')

        return self._fit(
            model_factory=LGBMRanker,
            X=X,
            y=y,
            sample_weight=sample_weight,
            group=group,
            **kwargs
        )

    _base_doc = _lgbmmodel_doc_fit.format(
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        y_shape="Dask Array, Dask DataFrame or Dask Series of shape = [n_samples]",
        sample_weight_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)",
        group_shape="Dask Array, Dask DataFrame, Dask Series of shape = [n_samples] or None, optional (default=None)"
    )

    # DaskLGBMRanker does not support init_score, evaluation data, or early stopping
    _base_doc = (_base_doc[:_base_doc.find('init_score :')]
                 + _base_doc[_base_doc.find('group :'):])

    _base_doc = (_base_doc[:_base_doc.find('eval_set :')]
                 + _base_doc[_base_doc.find('verbose :'):])

    # DaskLGBMRanker support for callbacks and init_model is not tested
    fit.__doc__ = (
        _base_doc[:_base_doc.find('callbacks :')]
        + '**kwargs\n'
        + ' ' * 12 + 'Other parameters passed through to ``LGBMRanker.fit()``.\n'
    )

    def predict(self, X: _DaskMatrixLike, **kwargs: Any) -> dask_Array:
        """Docstring is inherited from the lightgbm.LGBMRanker.predict."""
        return _predict(model=self.to_local(), data=X, **kwargs)

    predict.__doc__ = _lgbmmodel_doc_predict.format(
        description="Return the predicted value for each sample.",
        X_shape="Dask Array or Dask DataFrame of shape = [n_samples, n_features]",
        output_name="predicted_result",
        predicted_result_shape="Dask Array of shape = [n_samples]",
        X_leaves_shape="Dask Array of shape = [n_samples, n_trees]",
        X_SHAP_values_shape="Dask Array of shape = [n_samples, n_features + 1]"
    )

    def to_local(self) -> LGBMRanker:
        """Create regular version of lightgbm.LGBMRanker from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRanker
            Local underlying model.
        """
        return self._to_local(LGBMRanker)