# Verbosity passed to every LightGBM call in these tests. Defaults to -1
# (silent) unless overridden through the LIGHTGBM_TEST_VERBOSITY
# environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("Case weights are respected")

test_that("Gamma regression reacts on 'weight'", {
  n <- 100L
  set.seed(87L)
  X <- matrix(runif(2L * n), ncol = 2L)
  y <- X[, 1L] + X[, 2L] + runif(n)
  X_pred <- X[1L:5L, ]

  params <- list(objective = "gamma")

  # Train a short gamma-objective model with the given case weights and
  # return predictions for the first five rows. `weight = NULL` (the
  # lgb.Dataset default) gives the unweighted fit.
  fit_predict <- function(weight = NULL) {
    dtrain <- lgb.Dataset(
      X
      , label = y
      , weight = weight
    )
    bst <- lgb.train(
      params = params
      , data = dtrain
      , nrounds = 4L
      , verbose = VERBOSITY
    )
    predict(bst, X_pred)
  }

  pred_unweighted <- fit_predict()
  pred_weighted_1 <- fit_predict(rep(1.0, n))
  pred_weighted_2 <- fit_predict(rep(2.0, n))
  pred_weighted <- fit_predict(seq(0.0, 1.0, length.out = n))

  # Constant weights must not change the fit: weighting every observation
  # by the same positive constant rescales the loss without moving its
  # optimum, so predictions match the unweighted model exactly.
  expect_equal(pred_unweighted, pred_weighted_1)
  expect_equal(pred_weighted_1, pred_weighted_2)
  # Genuinely non-constant weights must change at least one prediction.
  expect_false(all(pred_unweighted == pred_weighted))
})