# coding: utf-8
"""Distributed training with LightGBM and Dask.distributed.

This module enables you to perform distributed training with LightGBM on
Dask.Array and Dask.DataFrame collections.

It is based on dask-lightgbm, which was based on dask-xgboost.
"""
import socket
from collections import defaultdict
from copy import deepcopy
from typing import Dict, Iterable
from urllib.parse import urlparse

import numpy as np
import pandas as pd
import scipy.sparse as ss

from dask import array as da
from dask import dataframe as dd
from dask import delayed
from dask.distributed import Client, default_client, get_worker, wait

from .basic import _choose_param_value, _ConfigAliases, _LIB, _log_warning, _safe_call, LightGBMError
from .compat import DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED
from .sklearn import LGBMClassifier, LGBMRegressor, LGBMRanker


def _find_open_port(worker_ip: str, local_listen_port: int, ports_to_skip: Iterable[int]) -> int:
    """Find an open port.

    This function tries to find a free port on the machine it's run on. It is intended to
    be run once on each Dask worker, sequentially.

    Parameters
    ----------
    worker_ip : str
        IP address for the Dask worker.
    local_listen_port : int
        First port to try when searching for open ports.
    ports_to_skip : Iterable[int]
        An iterable of integers referring to ports that should be skipped. Since multiple Dask
        workers can run on the same physical machine, this method may be called multiple times
        on the same machine. ``ports_to_skip`` is used to ensure that LightGBM doesn't try to use
        the same port for two worker processes running on the same machine.

    Returns
    -------
    result : int
        A free port on the machine referenced by ``worker_ip``.
    """
    max_tries = 1000
    out_port = None
    found_port = False
    for i in range(max_tries):
        out_port = local_listen_port + i
        if out_port in ports_to_skip:
            continue
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.bind((worker_ip, out_port))
            found_port = True
            break
        # if unavailable, you'll get OSError: Address already in use
        except OSError:
            continue
    if not found_port:
        msg = "LightGBM tried %s:%d-%d and could not create a connection. Try setting local_listen_port to a different value."
        raise RuntimeError(msg % (worker_ip, local_listen_port, out_port))
    return out_port
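

# Illustration (not part of the library API): a local call might look like
#
#   port = _find_open_port('127.0.0.1', local_listen_port=12400, ports_to_skip=set())
#
# which returns 12400 if that port is free, otherwise the first free port
# above it that is not in ports_to_skip.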


def _find_ports_for_workers(client: Client, worker_addresses: Iterable[str], local_listen_port: int) -> Dict[str, int]:
    """Find an open port on each worker.

    LightGBM distributed training uses TCP sockets by default, and this method is used to
    identify open ports on each worker so LightGBM can reliably create those sockets.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    worker_addresses : Iterable[str]
        An iterable of addresses for workers in the cluster. These are strings of the form ``<protocol>://<host>:port``
    local_listen_port : int
        First port to try when searching for open ports.

    Returns
    -------
    result : Dict[str, int]
        Dictionary where keys are worker addresses and values are an open port for LightGBM to use.
    """
    lightgbm_ports = set()
    worker_ip_to_port = {}
    for worker_address in worker_addresses:
        port = client.submit(
            func=_find_open_port,
            workers=[worker_address],
            worker_ip=urlparse(worker_address).hostname,
            local_listen_port=local_listen_port,
            ports_to_skip=lightgbm_ports
        ).result()
        lightgbm_ports.add(port)
        worker_ip_to_port[worker_address] = port

    return worker_ip_to_port
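

# Illustration with hypothetical addresses: for a two-worker cluster the
# mapping above might look like
#
#   {'tcp://10.0.0.1:34567': 12400, 'tcp://10.0.0.2:45678': 12401}
#
# Each assigned port is added to ports_to_skip, so every worker receives a
# distinct port even when several workers share one machine.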


def _concat(seq):
    if isinstance(seq[0], np.ndarray):
        return np.concatenate(seq, axis=0)
    elif isinstance(seq[0], (pd.DataFrame, pd.Series)):
        return pd.concat(seq, axis=0)
    elif isinstance(seq[0], ss.spmatrix):
        return ss.vstack(seq, format='csr')
    else:
        raise TypeError('Data must be one of: numpy arrays, pandas dataframes, sparse matrices (from scipy). Got %s.' % str(type(seq[0])))
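

# For example, _concat stitches the per-chunk pieces gathered on a worker
# back into one local collection:
#
#   _concat([np.array([[1, 2]]), np.array([[3, 4]])])   # ndarray, shape (2, 2)
#   _concat([pd.Series([1.0]), pd.Series([2.0])])       # Series of length 2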


def _train_part(params, model_factory, list_of_parts, worker_address_to_port, return_model,
                time_out=120, **kwargs):
    local_worker_address = get_worker().address
    machine_list = ','.join([
        '%s:%d' % (urlparse(worker_address).hostname, port)
        for worker_address, port
        in worker_address_to_port.items()
    ])
    network_params = {
        'machines': machine_list,
        'local_listen_port': worker_address_to_port[local_worker_address],
        'time_out': time_out,
        'num_machines': len(worker_address_to_port)
    }
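    # With two workers, machine_list has a form like
    # '10.0.0.1:12400,10.0.0.2:12400' (hypothetical IPs): the comma-delimited
    # 'machines' string LightGBM expects for socket-based distributed training.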
    params.update(network_params)

    is_ranker = issubclass(model_factory, LGBMRanker)

    # Concatenate many parts into one
    data = _concat([x['data'] for x in list_of_parts])
    label = _concat([x['label'] for x in list_of_parts])

    if 'weight' in list_of_parts[0]:
        weight = _concat([x['weight'] for x in list_of_parts])
    else:
        weight = None

    if 'group' in list_of_parts[0]:
        group = _concat([x['group'] for x in list_of_parts])
    else:
        group = None

    try:
        model = model_factory(**params)
        if is_ranker:
            model.fit(data, y=label, sample_weight=weight, group=group, **kwargs)
        else:
            model.fit(data, y=label, sample_weight=weight, **kwargs)

    finally:
        _safe_call(_LIB.LGBM_NetworkFree())

    return model if return_model else None
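

# Note: with LightGBM's data-parallel training, every worker holds an
# equivalent fitted model once training finishes, so only one worker (the
# "master" chosen in _train below) returns its copy; the others return None.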


def _split_to_parts(data, is_matrix):
    parts = data.to_delayed()
    if isinstance(parts, np.ndarray):
        if is_matrix:
            assert parts.shape[1] == 1
        else:
            assert parts.ndim == 1 or parts.shape[1] == 1
        parts = parts.flatten().tolist()
    return parts
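

# Illustration: for a Dask array of shape (300, 10) with chunks=(100, 10),
# _split_to_parts returns a flat list of 3 Delayed objects, one per row-wise
# chunk; labels and weights are split the same way so the parts can be zipped.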


def _train(client, data, label, params, model_factory, sample_weight=None, group=None, **kwargs):
    """Inner train routine.

    Parameters
    ----------
    client : dask.distributed.Client
        Dask client.
    data : dask array of shape = [n_samples, n_features]
        Input feature matrix.
    label : dask array of shape = [n_samples]
        The target values (class labels in classification, real numbers in regression).
    params : dict
        Parameters passed to constructor of the local underlying model.
    model_factory : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker class
        Class of the local underlying model.
    sample_weight : array-like of shape = [n_samples] or None, optional (default=None)
        Weights of training data.
    group : array-like or None, optional (default=None)
        Group/query data.
        Only used in the learning-to-rank task.
        sum(group) = n_samples.
        For example, if you have a 100-document dataset with ``group = [10, 20, 40, 10, 10, 10]``, that means that you have 6 groups,
        where the first 10 records are in the first group, records 11-30 are in the second group, records 31-70 are in the third group, etc.
    """
    params = deepcopy(params)

    params = _choose_param_value(
        main_param_name="local_listen_port",
        params=params,
        default_value=12400
    )

    params = _choose_param_value(
        main_param_name="tree_learner",
        params=params,
        default_value="data"
    )
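    # e.g. a user-supplied alias such as tree_learner_type='voting' is
    # resolved onto the main name 'tree_learner' here, and the default
    # 'data' applies when neither the parameter nor an alias was passed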
    allowed_tree_learners = {
        'data',
        'data_parallel',
        'feature',
        'feature_parallel',
        'voting',
        'voting_parallel'
    }
    if params["tree_learner"] not in allowed_tree_learners:
        _log_warning('Parameter tree_learner set to %s, which is not allowed. Using "data" as default' % params['tree_learner'])
        params['tree_learner'] = 'data'

    if params['tree_learner'] not in {'data', 'data_parallel'}:
        _log_warning(
            'Support for tree_learner %s in lightgbm.dask is experimental and may break in a future release. \n'
            'Use "data" for a stable, well-tested interface.' % params['tree_learner']
        )

    # Some passed-in parameters can be removed:
    #   * 'machines': constructed automatically from Dask worker list
    #   * 'machine_list_filename': not relevant for the Dask interface
    #   * 'num_machines': set automatically from Dask worker list
    #   * 'num_threads': overridden to match nthreads on each Dask process
    for param_name in ['machines', 'machine_list_filename', 'num_machines', 'num_threads']:
        for param_alias in _ConfigAliases.get(param_name):
            params.pop(param_alias, None)

    # Split arrays/dataframes into parts. Arrange parts into dicts to enforce co-locality
    data_parts = _split_to_parts(data=data, is_matrix=True)
    label_parts = _split_to_parts(data=label, is_matrix=False)
    parts = [{'data': x, 'label': y} for (x, y) in zip(data_parts, label_parts)]

    if sample_weight is not None:
        weight_parts = _split_to_parts(data=sample_weight, is_matrix=False)
        for i in range(len(parts)):
            parts[i]['weight'] = weight_parts[i]

    if group is not None:
        group_parts = _split_to_parts(data=group, is_matrix=False)
        for i in range(len(parts)):
            parts[i]['group'] = group_parts[i]

    # Start computation in the background
    parts = list(map(delayed, parts))
    parts = client.compute(parts)
    wait(parts)

    for part in parts:
        if part.status == 'error':
            return part  # trigger error locally

    # Find locations of all parts and map them to particular Dask workers
    key_to_part_dict = {part.key: part for part in parts}
    who_has = client.who_has(parts)
    worker_map = defaultdict(list)
    for key, workers in who_has.items():
        worker_map[next(iter(workers))].append(key_to_part_dict[key])
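
    # at this point each key of worker_map is a worker address and each value
    # is the list of parts resident on that worker, so the training tasks
    # submitted below operate only on worker-local data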

    master_worker = next(iter(worker_map))
    worker_ncores = client.ncores()

    # find an open port on each worker. note that multiple workers can run
    # on the same machine, so this needs to ensure that each one gets its
    # own port
    worker_address_to_port = _find_ports_for_workers(
        client=client,
        worker_addresses=worker_map.keys(),
        local_listen_port=params["local_listen_port"]
    )

    # Tell each worker to train on the parts that it has locally
    futures_classifiers = [
        client.submit(
            _train_part,
            model_factory=model_factory,
            params={**params, 'num_threads': worker_ncores[worker]},
            list_of_parts=list_of_parts,
            worker_address_to_port=worker_address_to_port,
            time_out=params.get('time_out', 120),
            return_model=(worker == master_worker),
            **kwargs
        )
        for worker, list_of_parts in worker_map.items()
    ]

    results = client.gather(futures_classifiers)
    results = [v for v in results if v]
    return results[0]


def _predict_part(part, model, raw_score, pred_proba, pred_leaf, pred_contrib, **kwargs):
    data = part.values if isinstance(part, pd.DataFrame) else part

    if data.shape[0] == 0:
        result = np.array([])
    elif pred_proba:
        result = model.predict_proba(
            data,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )
    else:
        result = model.predict(
            data,
            raw_score=raw_score,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        )

    if isinstance(part, pd.DataFrame):
        if pred_proba or pred_contrib:
            result = pd.DataFrame(result, index=part.index)
        else:
            result = pd.Series(result, index=part.index, name='predictions')

    return result


def _predict(model, data, raw_score=False, pred_proba=False, pred_leaf=False, pred_contrib=False,
             dtype=np.float32, **kwargs):
    """Inner predict routine.

    Parameters
    ----------
    model : lightgbm.LGBMClassifier, lightgbm.LGBMRegressor, or lightgbm.LGBMRanker
        Fitted underlying model used to make predictions.
    data : dask array of shape = [n_samples, n_features]
        Input feature matrix.
    raw_score : bool, optional (default=False)
        Whether to predict raw scores.
    pred_proba : bool, optional (default=False)
        Should method return results of ``predict_proba`` (``pred_proba=True``) or ``predict`` (``pred_proba=False``).
    pred_leaf : bool, optional (default=False)
        Whether to predict leaf index.
    pred_contrib : bool, optional (default=False)
        Whether to predict feature contributions.
    dtype : np.dtype
        Dtype of the output.
    kwargs : dict
        Other parameters passed to ``predict`` or ``predict_proba`` method.
    """
    if isinstance(data, dd._Frame):
        return data.map_partitions(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            **kwargs
        ).values
    elif isinstance(data, da.Array):
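        # map_blocks needs output shape metadata: predict_proba yields
        # n_classes_ columns per row chunk (set via 'chunks' below), while
        # predict yields a single value per row, so the column axis is dropped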
        if pred_proba:
            kwargs['chunks'] = (data.chunks[0], (model.n_classes_,))
        else:
            kwargs['drop_axis'] = 1
        return data.map_blocks(
            _predict_part,
            model=model,
            raw_score=raw_score,
            pred_proba=pred_proba,
            pred_leaf=pred_leaf,
            pred_contrib=pred_contrib,
            dtype=dtype,
            **kwargs
        )
    else:
        raise TypeError('Data must be either Dask array or dataframe. Got %s.' % str(type(data)))


class _LGBMModel:
    def __init__(self):
        if not all((DASK_INSTALLED, PANDAS_INSTALLED, SKLEARN_INSTALLED)):
            raise LightGBMError('dask, pandas and scikit-learn are required for lightgbm.dask')

    def _fit(self, model_factory, X, y=None, sample_weight=None, group=None, client=None, **kwargs):
        """Docstring is inherited from the LGBMModel."""
        if client is None:
            client = default_client()

        params = self.get_params(True)

        model = _train(
            client=client,
            data=X,
            label=y,
            params=params,
            model_factory=model_factory,
            sample_weight=sample_weight,
            group=group,
            **kwargs
        )

        self.set_params(**model.get_params())
        self._copy_extra_params(model, self)

        return self

    def _to_local(self, model_factory):
        model = model_factory(**self.get_params())
        self._copy_extra_params(self, model)
        return model

    @staticmethod
    def _copy_extra_params(source, dest):
        params = source.get_params()
        attributes = source.__dict__
        extra_param_names = set(attributes.keys()).difference(params.keys())
        for name in extra_param_names:
            setattr(dest, name, attributes[name])
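

# _copy_extra_params carries over instance attributes that are not
# constructor parameters (e.g. the fitted booster set during training), so
# the distributed wrapper exposes the same fitted state as the local model.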


class DaskLGBMClassifier(LGBMClassifier, _LGBMModel):
    """Distributed version of lightgbm.LGBMClassifier."""

    def fit(self, X, y=None, sample_weight=None, client=None, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMClassifier.fit."""
        return self._fit(
            model_factory=LGBMClassifier,
            X=X,
            y=y,
            sample_weight=sample_weight,
            client=client,
            **kwargs
        )

    fit.__doc__ = LGBMClassifier.fit.__doc__

    def predict(self, X, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            dtype=self.classes_.dtype,
            **kwargs
        )

    predict.__doc__ = LGBMClassifier.predict.__doc__

    def predict_proba(self, X, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMClassifier.predict_proba."""
        return _predict(
            model=self.to_local(),
            data=X,
            pred_proba=True,
            **kwargs
        )

    predict_proba.__doc__ = LGBMClassifier.predict_proba.__doc__

    def to_local(self):
        """Create regular version of lightgbm.LGBMClassifier from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMClassifier
        """
        return self._to_local(LGBMClassifier)
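

# Minimal usage sketch (illustrative, not part of this module): the Dask
# estimators follow the scikit-learn fit/predict pattern, but on Dask
# collections and against a running dask.distributed cluster.
#
#   from dask.distributed import Client
#   import dask.array as da
#
#   client = Client('tcp://scheduler:8786')  # hypothetical scheduler address
#   X = da.random.random((1000, 20), chunks=(100, 20))
#   y = da.random.randint(0, 2, size=(1000,), chunks=(100,))
#
#   clf = DaskLGBMClassifier(n_estimators=10)
#   clf.fit(X, y, client=client)
#   preds = clf.predict(X)        # Dask array of class predictions
#   local_model = clf.to_local()  # plain lightgbm.LGBMClassifier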


class DaskLGBMRegressor(LGBMRegressor, _LGBMModel):
    """Docstring is inherited from the lightgbm.LGBMRegressor."""

    def fit(self, X, y=None, sample_weight=None, client=None, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMRegressor.fit."""
        return self._fit(
            model_factory=LGBMRegressor,
            X=X,
            y=y,
            sample_weight=sample_weight,
            client=client,
            **kwargs
        )

    fit.__doc__ = LGBMRegressor.fit.__doc__

    def predict(self, X, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMRegressor.predict."""
        return _predict(
            model=self.to_local(),
            data=X,
            **kwargs
        )

    predict.__doc__ = LGBMRegressor.predict.__doc__

    def to_local(self):
        """Create regular version of lightgbm.LGBMRegressor from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRegressor
        """
        return self._to_local(LGBMRegressor)


class DaskLGBMRanker(LGBMRanker, _LGBMModel):
    """Docstring is inherited from the lightgbm.LGBMRanker."""

    def fit(self, X, y=None, sample_weight=None, init_score=None, group=None, client=None, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMRanker.fit."""
        if init_score is not None:
            raise RuntimeError('init_score is not currently supported in lightgbm.dask')

        return self._fit(
            model_factory=LGBMRanker,
            X=X,
            y=y,
            sample_weight=sample_weight,
            group=group,
            client=client,
            **kwargs
        )

    fit.__doc__ = LGBMRanker.fit.__doc__

    def predict(self, X, **kwargs):
        """Docstring is inherited from the lightgbm.LGBMRanker.predict."""
        return _predict(model=self.to_local(), data=X, **kwargs)

    predict.__doc__ = LGBMRanker.predict.__doc__

    def to_local(self):
        """Create regular version of lightgbm.LGBMRanker from the distributed version.

        Returns
        -------
        model : lightgbm.LGBMRanker
        """
        return self._to_local(LGBMRanker)
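

# Illustrative note on the ranker's group argument: with 100 rows and
# group = [10, 20, 40, 10, 10, 10] there are 6 query groups and
# sum(group) == n_samples. In this interface, group is passed as a Dask
# collection and split into parts alongside X and y (see _train), e.g.
#
#   rnk = DaskLGBMRanker()
#   rnk.fit(X, y, group=group_dask_array, client=client)  # hypothetical data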