"src/treelearner/cuda/cuda_single_gpu_tree_learner.hpp" did not exist on "b857ee10cc9a913e6dedd15c2475765d1e923c7b"
test_lgb.plot.interpretation.R 2.88 KB
Newer Older
# Verbosity level used for all LightGBM calls in this test file.
# Defaults to -1 (silent) unless overridden via the
# LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(
    Sys.getenv("LIGHTGBM_TEST_VERBOSITY", "-1")
)

context("lgb.plot.interpretation")

# Logistic (sigmoid) function: maps any real number into (0, 1).
# Inverse of .logit(). Vectorized over `x`.
.sigmoid <- function(x) {
    1.0 / (1.0 + exp(-x))
}
# Logit (log-odds) function: maps a probability in (0, 1) onto the
# real line. Inverse of .sigmoid(). Used below to convert a mean
# label into an initial score. Vectorized over `x`.
.logit <- function(x) {
    log(x / (1.0 - x))
}

test_that("lgb.plot.interepretation works as expected for binary classification", {
    data(agaricus.train, package = "lightgbm")
    train <- agaricus.train
    dtrain <- lgb.Dataset(train$data, label = train$label)
    # Initialize scores at the log-odds of the base rate so boosting
    # starts from the marginal label distribution.
    set_field(
        dataset = dtrain
        , field_name = "init_score"
        , data = rep(
            .logit(mean(train$label))
            , length(train$label)
        )
    )
    data(agaricus.test, package = "lightgbm")
    test <- agaricus.test
    params <- list(
        objective = "binary"
        , learning_rate = 0.01
        , num_leaves = 63L
        , max_depth = -1L
        , min_data_in_leaf = 1L
        , min_sum_hessian_in_leaf = 1.0
        , verbosity = VERBOSITY
    )
    model <- lgb.train(
        params = params
        , data = dtrain
        , nrounds = 3L
    )
    num_trees <- 5L
    tree_interpretation <- lgb.interprete(
        model = model
        , data = test$data
        , idxset = seq_len(num_trees)
    )
    # Plotting should run without error using the default cex
    expect_true({
        lgb.plot.interpretation(
            tree_interpretation_dt = tree_interpretation[[1L]]
            , top_n = 5L
        )
        TRUE
    })

    # should also work when you explicitly pass cex
    plot_res <- lgb.plot.interpretation(
        tree_interpretation_dt = tree_interpretation[[1L]]
        , top_n = 5L
        , cex = 0.95
    )
    # the function plots for its side effect and returns NULL
    expect_null(plot_res)
})

test_that("lgb.plot.interepretation works as expected for multiclass classification", {
    data(iris)

    # We must convert factors to numeric
    # They must be starting from number 0 to use multiclass
    # For instance: 0, 1, 2, 3, 4, 5...
    iris$Species <- as.numeric(as.factor(iris$Species)) - 1L

    # Create imbalanced training data (20, 30, 40 examples for classes 0, 1, 2)
    train <- as.matrix(iris[c(1L:20L, 51L:80L, 101L:140L), ])
    # The 10 last samples of each class are for validation
    test <- as.matrix(iris[c(41L:50L, 91L:100L, 141L:150L), ])
    dtrain <- lgb.Dataset(data = train[, 1L:4L], label = train[, 5L])
    dtest <- lgb.Dataset.create.valid(dtrain, data = test[, 1L:4L], label = test[, 5L])
    params <- list(
        objective = "multiclass"
        , metric = "multi_logloss"
        , num_class = 3L
        , learning_rate = 0.00001
        , min_data = 1L
    )
    model <- lgb.train(
        params = params
        , data = dtrain
        , nrounds = 3L
        , verbose = VERBOSITY
    )
    num_trees <- 5L
    tree_interpretation <- lgb.interprete(
        model = model
        , data = test[, 1L:4L]
        , idxset = seq_len(num_trees)
    )
    plot_res <- lgb.plot.interpretation(
        tree_interpretation_dt = tree_interpretation[[1L]]
        , top_n = 5L
    )
    # the function plots for its side effect and returns NULL
    expect_null(plot_res)
})