{
  "results": {
    "cola": {
      "mcc": -0.044109532013985,
      "mcc_stderr": 0.031207420387242075
    },
    "mnli": {
      "acc": 0.3782985226693836,
      "acc_stderr": 0.0048953668826959605
    },
    "mnli_mismatched": {
      "acc": 0.37489829129373475,
      "acc_stderr": 0.004882398521925481
    },
    "mrpc": {
      "acc": 0.6299019607843137,
      "acc_stderr": 0.023933029030729185,
      "f1": 0.7560581583198709,
      "f1_stderr": 0.019295620717095126
    },
    "qnli": {
      "acc": 0.513454146073586,
      "acc_stderr": 0.006762960839582666
    },
    "qqp": {
      "acc": 0.5035864457086322,
      "acc_stderr": 0.002486636660034357,
      "f1": 0.5414038936111873,
      "f1_stderr": 0.0028706161186533327
    },
    "rte": {
      "acc": 0.6389891696750902,
      "acc_stderr": 0.028910281676964168
    },
    "sst": {
      "acc": 0.768348623853211,
      "acc_stderr": 0.014295110726150017
    },
    "wnli": {
      "acc": 0.4788732394366197,
      "acc_stderr": 0.05970805879899505
    }
  },
  "versions": {
    "cola": 0,
    "mnli": 0,
    "mnli_mismatched": 0,
    "mrpc": 0,
    "qnli": 0,
    "qqp": 0,
    "rte": 0,
    "sst": 0,
    "wnli": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}