Commit 4a0b0d6e authored by lintangsutawika

Merge branch 'gakada-big-refactor-merge' into big-refactor

Parents: 6ae376e3, c490f165
{
"results": {
"truthfulqa_mc": {
"mc1": 0.2533659730722154,
"mc1_stderr": 0.01522589934082683,
"mc2": 0.4179977378869182,
"mc2_stderr": 0.014601549068840484
},
"webqs": {
"acc": 0.013779527559055118,
"acc_stderr": 0.002586718737195641
},
"logiqa": {
"acc": 0.1889400921658986,
"acc_stderr": 0.01535436463822078,
"acc_norm": 0.2565284178187404,
"acc_norm_stderr": 0.017129443327887562
},
"squad2": {
"exact": 4.169123220752969,
"f1": 6.5956997780058355,
"HasAns_exact": 2.192982456140351,
"HasAns_f1": 7.05309437656277,
"NoAns_exact": 6.139613120269134,
"NoAns_f1": 6.139613120269134,
"best_exact": 50.07159100480081,
"best_f1": 50.07159100480081
},
"headqa_es": {
"acc": 0.24434719183078046,
"acc_stderr": 0.008207488987159709,
"acc_norm": 0.2830051057622174,
"acc_norm_stderr": 0.008604004902114394
},
"headqa_en": {
"acc": 0.26440554339897887,
"acc_stderr": 0.008423643607316284,
"acc_norm": 0.30488694383661563,
"acc_norm_stderr": 0.008793112278191295
},
"triviaqa": {
"acc": 0.026783346592415803,
"acc_stderr": 0.001517985028991893
}
},
"versions": {
"truthfulqa_mc": 1,
"webqs": 0,
"logiqa": 0,
"squad2": 1,
"headqa_es": 0,
"headqa_en": 0,
"triviaqa": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
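For reference, result files like the one above are produced with this repository's evaluation harness. A minimal sketch of a reproduction, assuming the pre-refactor `simple_evaluate` entry point whose keyword arguments mirror the `config` block dumped into each file; the exact signature varies across harness versions, and the output filename is only a guess based on the headings further down:

```python
import json

from lm_eval import evaluator

# Keyword names mirror the "config" block above; they may differ slightly
# between harness versions, so treat this as a sketch rather than a recipe.
results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=bigscience/bloom-1b1,use_accelerate=True",
    tasks=["truthfulqa_mc", "webqs", "logiqa", "squad2",
           "headqa_es", "headqa_en", "triviaqa"],
    num_fewshot=0,
    batch_size="auto",
    device="cuda:0",
    no_cache=True,
)

# Hypothetical filename, following the naming convention of the headings below.
with open("bloom-1b1_question_answering_0-shot.json", "w") as f:
    json.dump(results, f, indent=2)
```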
{
"results": {
"drop": {
"em": 0.03313758389261745,
"em_stderr": 0.0018330841858875643,
"f1": 0.08634542785234882,
"f1_stderr": 0.0022136353860709133
},
"coqa": {
"f1": 0.4557083534540516,
"f1_stderr": 0.01876948425119881,
"em": 0.3298333333333334,
"em_stderr": 0.019473215823053027
},
"race": {
"acc": 0.3263157894736842,
"acc_stderr": 0.014510987877134932
}
},
"versions": {
"drop": 1,
"coqa": 1,
"race": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xcopa_id": {
"acc": 0.648,
"acc_stderr": 0.02138004238594605
},
"xcopa_ht": {
"acc": 0.53,
"acc_stderr": 0.022342748192502843
},
"xcopa_it": {
"acc": 0.508,
"acc_stderr": 0.02238020883492804
},
"xcopa_et": {
"acc": 0.506,
"acc_stderr": 0.022381462412439324
},
"xcopa_ta": {
"acc": 0.57,
"acc_stderr": 0.02216263442665284
},
"xcopa_th": {
"acc": 0.532,
"acc_stderr": 0.022337186479044296
},
"xcopa_sw": {
"acc": 0.544,
"acc_stderr": 0.022296238348407056
},
"xcopa_zh": {
"acc": 0.594,
"acc_stderr": 0.02198396209008634
},
"xcopa_qu": {
"acc": 0.512,
"acc_stderr": 0.02237662679792717
},
"xcopa_tr": {
"acc": 0.53,
"acc_stderr": 0.02234274819250285
},
"xcopa_vi": {
"acc": 0.624,
"acc_stderr": 0.021683827539286115
}
},
"versions": {
"xcopa_id": 0,
"xcopa_ht": 0,
"xcopa_it": 0,
"xcopa_et": 0,
"xcopa_ta": 0,
"xcopa_th": 0,
"xcopa_sw": 0,
"xcopa_zh": 0,
"xcopa_qu": 0,
"xcopa_tr": 0,
"xcopa_vi": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xnli_sw": {
"acc": 0.3528942115768463,
"acc_stderr": 0.0067520304764183674
},
"xnli_bg": {
"acc": 0.3413173652694611,
"acc_stderr": 0.006699490620395283
},
"xnli_el": {
"acc": 0.3403193612774451,
"acc_stderr": 0.006694754901092155
},
"xnli_hi": {
"acc": 0.4231536926147705,
"acc_stderr": 0.006980774514705842
},
"xnli_th": {
"acc": 0.3375249500998004,
"acc_stderr": 0.00668131870192652
},
"xnli_ar": {
"acc": 0.3393213572854291,
"acc_stderr": 0.006689986106838006
},
"xnli_de": {
"acc": 0.3964071856287425,
"acc_stderr": 0.0069114198150005334
},
"xnli_ru": {
"acc": 0.40459081836327343,
"acc_stderr": 0.006934900899149144
},
"xnli_vi": {
"acc": 0.44451097804391215,
"acc_stderr": 0.00702107269988888
},
"xnli_tr": {
"acc": 0.34790419161676644,
"acc_stderr": 0.006729921818907745
},
"xnli_ur": {
"acc": 0.37325349301397204,
"acc_stderr": 0.0068339592620100505
},
"xnli_fr": {
"acc": 0.47145708582834334,
"acc_stderr": 0.007053191822382807
},
"xnli_en": {
"acc": 0.5147704590818363,
"acc_stderr": 0.007061629189884944
},
"xnli_es": {
"acc": 0.47984031936127747,
"acc_stderr": 0.00705896771560341
},
"xnli_zh": {
"acc": 0.36227544910179643,
"acc_stderr": 0.006791418670232308
}
},
"versions": {
"xnli_sw": 0,
"xnli_bg": 0,
"xnli_el": 0,
"xnli_hi": 0,
"xnli_th": 0,
"xnli_ar": 0,
"xnli_de": 0,
"xnli_ru": 0,
"xnli_vi": 0,
"xnli_tr": 0,
"xnli_ur": 0,
"xnli_fr": 0,
"xnli_en": 0,
"xnli_es": 0,
"xnli_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xstory_cloze_te": {
"acc": 0.5638649900727994,
"acc_stderr": 0.012761730431435764
},
"xstory_cloze_ar": {
"acc": 0.528788881535407,
"acc_stderr": 0.012845779070719484
},
"xstory_cloze_zh": {
"acc": 0.5804103242885507,
"acc_stderr": 0.01269964226820075
},
"xstory_cloze_ru": {
"acc": 0.4824619457313038,
"acc_stderr": 0.012859207453266304
},
"xstory_cloze_en": {
"acc": 0.6254136333553938,
"acc_stderr": 0.012455787254852474
},
"xstory_cloze_id": {
"acc": 0.5790866975512905,
"acc_stderr": 0.012705145598630695
},
"xstory_cloze_my": {
"acc": 0.4619457313037723,
"acc_stderr": 0.012829804720321695
},
"xstory_cloze_sw": {
"acc": 0.5056254136333554,
"acc_stderr": 0.012866310923072511
},
"xstory_cloze_es": {
"acc": 0.5830575777630708,
"acc_stderr": 0.01268835412160781
},
"xstory_cloze_hi": {
"acc": 0.5552614162806089,
"acc_stderr": 0.012788295970207786
},
"xstory_cloze_eu": {
"acc": 0.5433487756452681,
"acc_stderr": 0.012818676452481956
}
},
"versions": {
"xstory_cloze_te": 0,
"xstory_cloze_ar": 0,
"xstory_cloze_zh": 0,
"xstory_cloze_ru": 0,
"xstory_cloze_en": 0,
"xstory_cloze_id": 0,
"xstory_cloze_my": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_es": 0,
"xstory_cloze_hi": 0,
"xstory_cloze_eu": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_ru": {
"acc": 0.5428571428571428,
"acc_stderr": 0.028112788378274862
},
"xwinograd_en": {
"acc": 0.6997849462365592,
"acc_stderr": 0.009507809437511165
},
"xwinograd_jp": {
"acc": 0.5286757038581856,
"acc_stderr": 0.016127677684108978
},
"xwinograd_fr": {
"acc": 0.6626506024096386,
"acc_stderr": 0.05221260262032129
},
"xwinograd_zh": {
"acc": 0.6924603174603174,
"acc_stderr": 0.02057614603593188
},
"xwinograd_pt": {
"acc": 0.6311787072243346,
"acc_stderr": 0.02980804663449022
}
},
"versions": {
"xwinograd_ru": 0,
"xwinograd_en": 0,
"xwinograd_jp": 0,
"xwinograd_fr": 0,
"xwinograd_zh": 0,
"xwinograd_pt": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b1",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
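The raw `.json` result files appear back to back in this commit. If you need to work with such a concatenated stream, the standard library's `json.JSONDecoder.raw_decode` can split it without any extra dependencies; a minimal sketch (the filename is hypothetical):

```python
import json

def iter_json_objects(text):
    """Yield each top-level JSON object from a concatenated stream."""
    decoder = json.JSONDecoder()
    idx = 0
    while idx < len(text):
        # Skip any whitespace separating adjacent objects.
        while idx < len(text) and text[idx].isspace():
            idx += 1
        if idx >= len(text):
            break
        obj, idx = decoder.raw_decode(text, idx)
        yield obj

# Example: collect accuracy per task across all result blobs in one file.
with open("all_results.json") as f:
    for blob in iter_json_objects(f.read()):
        for task, metrics in blob["results"].items():
            if "acc" in metrics:
                print(task, metrics["acc"])
```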
# bloom-1b7
## bloom-1b7_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |23.55|± | 1.24|
| | |acc_norm|26.79|± | 1.29|
|arc_easy | 0|acc |56.31|± | 1.02|
| | |acc_norm|48.11|± | 1.03|
|boolq | 1|acc |61.77|± | 0.85|
|copa | 0|acc |70.00|± | 4.61|
|hellaswag | 0|acc |37.62|± | 0.48|
| | |acc_norm|46.56|± | 0.50|
|mc_taco | 0|em |12.54| | |
| | |f1 |47.46| | |
|openbookqa | 0|acc |21.40|± | 1.84|
| | |acc_norm|30.00|± | 2.05|
|piqa | 0|acc |68.77|± | 1.08|
| | |acc_norm|70.08|± | 1.07|
|prost | 0|acc |23.52|± | 0.31|
| | |acc_norm|26.70|± | 0.32|
|swag | 0|acc |45.32|± | 0.35|
| | |acc_norm|61.15|± | 0.34|
|winogrande | 0|acc |57.14|± | 1.39|
|wsc273 | 0|acc |72.89|± | 2.70|
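The table values are the JSON fractions scaled to percentages (e.g. the `arc_challenge` acc of 0.2354948805460751 in the JSON blob later in this commit becomes the 23.55 above), and the `Stderr` column is the reported standard error scaled the same way. A quick arithmetic check, including an approximate 95% confidence interval (a normal-approximation sketch, not something the harness itself reports):

```python
# arc_challenge (bloom-1b7) from the JSON blob later in this commit.
acc = 0.2354948805460751
stderr = 0.012399451855004752

print(f"{acc * 100:.2f} ± {stderr * 100:.2f}")        # 23.55 ± 1.24, as in the table
lo, hi = acc - 1.96 * stderr, acc + 1.96 * stderr
print(f"95% CI ≈ [{lo * 100:.2f}, {hi * 100:.2f}]")   # ≈ [21.12, 25.98]
```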
## bloom-1b7_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 1.29|± | 0.31|
## bloom-1b7_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 1.49|± | 0.12|
| | |f1 | 4.31|± | 0.15|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.74|± | 0.37|
|math_prealgebra | 1|acc | 0.23|± | 0.16|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |24.29|± | 0.79|
| | |acc_norm|24.62|± | 0.79|
## bloom-1b7_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |48.75|± | 1.12|
|pawsx_en| 0|acc |48.90|± | 1.12|
|pawsx_es| 0|acc |51.30|± | 1.12|
|pawsx_fr| 0|acc |46.20|± | 1.12|
|pawsx_ja| 0|acc |44.70|± | 1.11|
|pawsx_ko| 0|acc |45.80|± | 1.11|
|pawsx_zh| 0|acc |45.40|± | 1.11|
## bloom-1b7_question_answering_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|------------|----:|---|-----:|
|headqa_en | 0|acc |27.75|± | 0.86|
| | |acc_norm |32.57|± | 0.90|
|headqa_es | 0|acc |25.42|± | 0.83|
| | |acc_norm |29.58|± | 0.87|
|logiqa | 0|acc |21.66|± | 1.62|
| | |acc_norm |28.11|± | 1.76|
|squad2 | 1|exact | 1.80| | |
| | |f1 | 4.38| | |
| | |HasAns_exact| 2.40| | |
| | |HasAns_f1 | 7.56| | |
| | |NoAns_exact | 1.21| | |
| | |NoAns_f1 | 1.21| | |
| | |best_exact |50.07| | |
| | |best_f1 |50.07| | |
|triviaqa | 1|acc | 3.14|± | 0.16|
|truthfulqa_mc| 1|mc1 |24.48|± | 1.51|
| | |mc2 |41.32|± | 1.44|
|webqs | 0|acc | 1.28|± | 0.25|
## bloom-1b7_reading_comprehension_0-shot.json
|Task|Version|Metric|Value| |Stderr|
|----|------:|------|----:|---|-----:|
|coqa| 1|f1 |53.55|± | 1.89|
| | |em |40.90|± | 2.03|
|drop| 1|em | 0.69|± | 0.08|
| | |f1 | 6.89|± | 0.16|
|race| 1|acc |33.21|± | 1.46|
## bloom-1b7_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 47.4|± | 2.24|
|xcopa_ht| 0|acc | 50.4|± | 2.24|
|xcopa_id| 0|acc | 63.2|± | 2.16|
|xcopa_it| 0|acc | 52.6|± | 2.24|
|xcopa_qu| 0|acc | 50.6|± | 2.24|
|xcopa_sw| 0|acc | 51.8|± | 2.24|
|xcopa_ta| 0|acc | 56.6|± | 2.22|
|xcopa_th| 0|acc | 53.2|± | 2.23|
|xcopa_tr| 0|acc | 52.8|± | 2.23|
|xcopa_vi| 0|acc | 65.8|± | 2.12|
|xcopa_zh| 0|acc | 61.4|± | 2.18|
## bloom-1b7_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.57|± | 0.67|
|xnli_bg| 0|acc |35.43|± | 0.68|
|xnli_de| 0|acc |40.58|± | 0.69|
|xnli_el| 0|acc |33.99|± | 0.67|
|xnli_en| 0|acc |50.14|± | 0.71|
|xnli_es| 0|acc |47.82|± | 0.71|
|xnli_fr| 0|acc |48.18|± | 0.71|
|xnli_hi| 0|acc |43.95|± | 0.70|
|xnli_ru| 0|acc |39.32|± | 0.69|
|xnli_sw| 0|acc |34.51|± | 0.67|
|xnli_th| 0|acc |33.37|± | 0.67|
|xnli_tr| 0|acc |34.93|± | 0.67|
|xnli_ur| 0|acc |40.50|± | 0.69|
|xnli_vi| 0|acc |46.23|± | 0.70|
|xnli_zh| 0|acc |36.21|± | 0.68|
## bloom-1b7_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |55.00|± | 1.28|
|xstory_cloze_en| 0|acc |64.66|± | 1.23|
|xstory_cloze_es| 0|acc |60.82|± | 1.26|
|xstory_cloze_eu| 0|acc |54.93|± | 1.28|
|xstory_cloze_hi| 0|acc |56.78|± | 1.27|
|xstory_cloze_id| 0|acc |59.76|± | 1.26|
|xstory_cloze_my| 0|acc |47.25|± | 1.28|
|xstory_cloze_ru| 0|acc |50.36|± | 1.29|
|xstory_cloze_sw| 0|acc |52.28|± | 1.29|
|xstory_cloze_te| 0|acc |56.52|± | 1.28|
|xstory_cloze_zh| 0|acc |58.24|± | 1.27|
## bloom-1b7_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |74.71|± | 0.90|
|xwinograd_fr| 0|acc |68.67|± | 5.12|
|xwinograd_jp| 0|acc |54.12|± | 1.61|
|xwinograd_pt| 0|acc |63.50|± | 2.97|
|xwinograd_ru| 0|acc |52.38|± | 2.82|
|xwinograd_zh| 0|acc |69.64|± | 2.05|
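The markdown tables in this section are rendered from the raw JSON files that follow (the harness ships a `make_table` helper for this). A minimal standalone equivalent, with the caveat that metrics the harness already reports as percentages, such as the `squad2` scores, would need to be left unscaled:

```python
import json

def results_to_markdown(path):
    """Render a harness result file as a markdown table like those above."""
    with open(path) as f:
        data = json.load(f)
    rows = ["|Task|Version|Metric|Value| |Stderr|",
            "|----|------:|------|----:|---|-----:|"]
    for task in sorted(data["results"]):
        metrics, first = data["results"][task], True
        for name, value in metrics.items():
            if name.endswith("_stderr"):
                continue
            stderr = metrics.get(name + "_stderr")
            rows.append("|{}|{}|{}|{:.2f}|{}|{}|".format(
                task if first else "",
                data["versions"][task] if first else "",
                name,
                value * 100,  # NB: squad2-style metrics are already percentages
                "±" if stderr is not None else "",
                "{:.2f}".format(stderr * 100) if stderr is not None else ""))
            first = False
    return "\n".join(rows)

print(results_to_markdown("bloom-1b7_xwinograd_0-shot.json"))
```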
{
"results": {
"mc_taco": {
"em": 0.12537537537537538,
"f1": 0.47458014393437276
},
"arc_easy": {
"acc": 0.5631313131313131,
"acc_stderr": 0.010177672928157678,
"acc_norm": 0.4810606060606061,
"acc_norm_stderr": 0.010252420496894487
},
"boolq": {
"acc": 0.617737003058104,
"acc_stderr": 0.008499149690449272
},
"piqa": {
"acc": 0.6877040261153428,
"acc_stderr": 0.010812581599154424,
"acc_norm": 0.7007616974972797,
"acc_norm_stderr": 0.010684130673134581
},
"copa": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814
},
"prost": {
"acc": 0.23521562766865928,
"acc_stderr": 0.003098672944164254,
"acc_norm": 0.2669726729291204,
"acc_norm_stderr": 0.00323196492387981
},
"hellaswag": {
"acc": 0.37621987651862177,
"acc_stderr": 0.004834461997944872,
"acc_norm": 0.46564429396534557,
"acc_norm_stderr": 0.004977988452502641
},
"swag": {
"acc": 0.4532140357892632,
"acc_stderr": 0.0035195819088979174,
"acc_norm": 0.6114665600319904,
"acc_norm_stderr": 0.003446127007510879
},
"openbookqa": {
"acc": 0.214,
"acc_stderr": 0.01835979750238702,
"acc_norm": 0.3,
"acc_norm_stderr": 0.020514426225628046
},
"wsc273": {
"acc": 0.7289377289377289,
"acc_stderr": 0.02695226692070332
},
"arc_challenge": {
"acc": 0.2354948805460751,
"acc_stderr": 0.012399451855004752,
"acc_norm": 0.26791808873720135,
"acc_norm_stderr": 0.012942030195136423
},
"winogrande": {
"acc": 0.5714285714285714,
"acc_stderr": 0.013908353814606709
}
},
"versions": {
"mc_taco": 0,
"arc_easy": 0,
"boolq": 1,
"piqa": 0,
"copa": 0,
"prost": 0,
"hellaswag": 0,
"swag": 0,
"openbookqa": 0,
"wsc273": 0,
"arc_challenge": 0,
"winogrande": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"gsm8k": {
"acc": 0.01288855193328279,
"acc_stderr": 0.00310690126649963
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"math_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_geometry": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_prealgebra": {
"acc": 0.002296211251435132,
"acc_stderr": 0.001622733136934626
},
"math_precalc": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_num_theory": {
"acc": 0.007407407407407408,
"acc_stderr": 0.003693382168437264
},
"gsm8k": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_counting_and_prob": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_intermediate_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"drop": {
"em": 0.014890939597315436,
"em_stderr": 0.0012403460245602655,
"f1": 0.043118708053691356,
"f1_stderr": 0.0015424950823374804
},
"mathqa": {
"acc": 0.24288107202680068,
"acc_stderr": 0.00785017752394654,
"acc_norm": 0.24623115577889448,
"acc_norm_stderr": 0.007886624866001843
}
},
"versions": {
"math_algebra": 1,
"math_geometry": 1,
"math_prealgebra": 1,
"math_precalc": 1,
"math_num_theory": 1,
"gsm8k": 0,
"math_counting_and_prob": 1,
"mathqa": 0,
"math_intermediate_algebra": 1,
"drop": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7,use_accelerate=True",
"num_fewshot": 5,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"pawsx_ja": {
"acc": 0.447,
"acc_stderr": 0.011120131683767737
},
"pawsx_es": {
"acc": 0.513,
"acc_stderr": 0.011179355482070377
},
"pawsx_fr": {
"acc": 0.462,
"acc_stderr": 0.01115079235234166
},
"pawsx_de": {
"acc": 0.4875,
"acc_stderr": 0.011179640744835734
},
"pawsx_ko": {
"acc": 0.458,
"acc_stderr": 0.011143612073516636
},
"pawsx_zh": {
"acc": 0.454,
"acc_stderr": 0.0111357084193598
},
"pawsx_en": {
"acc": 0.489,
"acc_stderr": 0.011180429374603775
}
},
"versions": {
"pawsx_ja": 0,
"pawsx_es": 0,
"pawsx_fr": 0,
"pawsx_de": 0,
"pawsx_ko": 0,
"pawsx_zh": 0,
"pawsx_en": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"webqs": {
"acc": 0.012795275590551181,
"acc_stderr": 0.0024938680596856277
},
"headqa_en": {
"acc": 0.2775346462436178,
"acc_stderr": 0.008552884316239918,
"acc_norm": 0.32567469000729393,
"acc_norm_stderr": 0.008951013596145294
},
"logiqa": {
"acc": 0.21658986175115208,
"acc_stderr": 0.016156860583178303,
"acc_norm": 0.28110599078341014,
"acc_norm_stderr": 0.017632374626460005
},
"squad2": {
"exact": 1.8024088267497684,
"f1": 4.382884035952938,
"HasAns_exact": 2.395411605937922,
"HasAns_f1": 7.563762172548798,
"NoAns_exact": 1.2111017661900756,
"NoAns_f1": 1.2111017661900756,
"best_exact": 50.07159100480081,
"best_f1": 50.07207926399809
},
"headqa_es": {
"acc": 0.25419401896425964,
"acc_stderr": 0.008316509290190668,
"acc_norm": 0.29576951130561635,
"acc_norm_stderr": 0.008717251898361422
},
"triviaqa": {
"acc": 0.0313798285158667,
"acc_stderr": 0.0016392014864795154
},
"truthfulqa_mc": {
"mc1": 0.24479804161566707,
"mc1_stderr": 0.015051869486715006,
"mc2": 0.41318090310186134,
"mc2_stderr": 0.014436426641105374
}
},
"versions": {
"webqs": 0,
"headqa_en": 0,
"logiqa": 0,
"squad2": 1,
"headqa_es": 0,
"triviaqa": 1,
"truthfulqa_mc": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"race": {
"acc": 0.3320574162679426,
"acc_stderr": 0.014575582129545914
},
"drop": {
"em": 0.006921140939597316,
"em_stderr": 0.0008490247804930383,
"f1": 0.06890939597315439,
"f1_stderr": 0.001599412572762237
},
"coqa": {
"f1": 0.53547064794564,
"f1_stderr": 0.018874554692662703,
"em": 0.4089999999999999,
"em_stderr": 0.020308360980593812
}
},
"versions": {
"race": 1,
"drop": 1,
"coqa": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xcopa_ta": {
"acc": 0.566,
"acc_stderr": 0.022187215803029008
},
"xcopa_vi": {
"acc": 0.658,
"acc_stderr": 0.02123614719989926
},
"xcopa_id": {
"acc": 0.632,
"acc_stderr": 0.021588982568353544
},
"xcopa_th": {
"acc": 0.532,
"acc_stderr": 0.022337186479044296
},
"xcopa_et": {
"acc": 0.474,
"acc_stderr": 0.02235279165091416
},
"xcopa_tr": {
"acc": 0.528,
"acc_stderr": 0.022347949832668086
},
"xcopa_it": {
"acc": 0.526,
"acc_stderr": 0.022352791650914153
},
"xcopa_qu": {
"acc": 0.506,
"acc_stderr": 0.022381462412439324
},
"xcopa_sw": {
"acc": 0.518,
"acc_stderr": 0.02236856511738799
},
"xcopa_zh": {
"acc": 0.614,
"acc_stderr": 0.021793529219281165
},
"xcopa_ht": {
"acc": 0.504,
"acc_stderr": 0.022382357781962132
}
},
"versions": {
"xcopa_ta": 0,
"xcopa_vi": 0,
"xcopa_id": 0,
"xcopa_th": 0,
"xcopa_et": 0,
"xcopa_tr": 0,
"xcopa_it": 0,
"xcopa_qu": 0,
"xcopa_sw": 0,
"xcopa_zh": 0,
"xcopa_ht": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xnli_ar": {
"acc": 0.33572854291417165,
"acc_stderr": 0.006672543485924256
},
"xnli_bg": {
"acc": 0.35429141716566864,
"acc_stderr": 0.006758076124936787
},
"xnli_de": {
"acc": 0.4057884231536926,
"acc_stderr": 0.006938168852153333
},
"xnli_el": {
"acc": 0.3399201596806387,
"acc_stderr": 0.006692851356332763
},
"xnli_en": {
"acc": 0.5013972055888224,
"acc_stderr": 0.00706468484603215
},
"xnli_es": {
"acc": 0.4782435129740519,
"acc_stderr": 0.007058021171781048
},
"xnli_fr": {
"acc": 0.4818363273453094,
"acc_stderr": 0.00706004932457986
},
"xnli_hi": {
"acc": 0.43952095808383235,
"acc_stderr": 0.007012840595506878
},
"xnli_ru": {
"acc": 0.3932135728542914,
"acc_stderr": 0.006901709305026883
},
"xnli_sw": {
"acc": 0.34510978043912177,
"acc_stderr": 0.006717185937609568
},
"xnli_th": {
"acc": 0.3337325349301397,
"acc_stderr": 0.006662666282522685
},
"xnli_tr": {
"acc": 0.34930139720558884,
"acc_stderr": 0.006736193945189081
},
"xnli_ur": {
"acc": 0.40499001996007983,
"acc_stderr": 0.0069359949748568975
},
"xnli_vi": {
"acc": 0.4622754491017964,
"acc_stderr": 0.007044575556505713
},
"xnli_zh": {
"acc": 0.36207584830339323,
"acc_stderr": 0.00679060994209357
}
},
"versions": {
"xnli_ar": 0,
"xnli_bg": 0,
"xnli_de": 0,
"xnli_el": 0,
"xnli_en": 0,
"xnli_es": 0,
"xnli_fr": 0,
"xnli_hi": 0,
"xnli_ru": 0,
"xnli_sw": 0,
"xnli_th": 0,
"xnli_tr": 0,
"xnli_ur": 0,
"xnli_vi": 0,
"xnli_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xstory_cloze_es": {
"acc": 0.6082064857710126,
"acc_stderr": 0.012562199063960647
},
"xstory_cloze_eu": {
"acc": 0.5493050959629384,
"acc_stderr": 0.012804412720126671
},
"xstory_cloze_hi": {
"acc": 0.5678358702845797,
"acc_stderr": 0.01274815386459759
},
"xstory_cloze_ar": {
"acc": 0.5499669093315684,
"acc_stderr": 0.01280271359821983
},
"xstory_cloze_sw": {
"acc": 0.5228325612177366,
"acc_stderr": 0.012853702384870852
},
"xstory_cloze_ru": {
"acc": 0.5036399735274653,
"acc_stderr": 0.012866784348289235
},
"xstory_cloze_my": {
"acc": 0.47253474520185307,
"acc_stderr": 0.012847698270388216
},
"xstory_cloze_en": {
"acc": 0.6465916611515553,
"acc_stderr": 0.012301695486460658
},
"xstory_cloze_te": {
"acc": 0.5651886168100596,
"acc_stderr": 0.01275729746335297
},
"xstory_cloze_zh": {
"acc": 0.5823957643944407,
"acc_stderr": 0.012691211382848636
},
"xstory_cloze_id": {
"acc": 0.5976174718729318,
"acc_stderr": 0.012619516819528711
}
},
"versions": {
"xstory_cloze_es": 0,
"xstory_cloze_eu": 0,
"xstory_cloze_hi": 0,
"xstory_cloze_ar": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_ru": 0,
"xstory_cloze_my": 0,
"xstory_cloze_en": 0,
"xstory_cloze_te": 0,
"xstory_cloze_zh": 0,
"xstory_cloze_id": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_zh": {
"acc": 0.6964285714285714,
"acc_stderr": 0.02050145785082139
},
"xwinograd_jp": {
"acc": 0.5411887382690302,
"acc_stderr": 0.016099361618063936
},
"xwinograd_pt": {
"acc": 0.6349809885931559,
"acc_stderr": 0.02974318401093692
},
"xwinograd_fr": {
"acc": 0.6867469879518072,
"acc_stderr": 0.051219942106581456
},
"xwinograd_ru": {
"acc": 0.5238095238095238,
"acc_stderr": 0.02818462259599846
},
"xwinograd_en": {
"acc": 0.7470967741935484,
"acc_stderr": 0.009016698607546121
}
},
"versions": {
"xwinograd_zh": 0,
"xwinograd_jp": 0,
"xwinograd_pt": 0,
"xwinograd_fr": 0,
"xwinograd_ru": 0,
"xwinograd_en": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-1b7",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# bloom-3b
## bloom-3b_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |27.99|± | 1.31|
| | |acc_norm|30.55|± | 1.35|
|arc_easy | 0|acc |59.47|± | 1.01|
| | |acc_norm|53.24|± | 1.02|
|boolq | 1|acc |61.62|± | 0.85|
|copa | 0|acc |74.00|± | 4.41|
|hellaswag | 0|acc |41.26|± | 0.49|
| | |acc_norm|52.72|± | 0.50|
|mc_taco | 0|em |11.94| | |
| | |f1 |49.57| | |
|openbookqa | 0|acc |21.60|± | 1.84|
| | |acc_norm|32.20|± | 2.09|
|piqa | 0|acc |70.84|± | 1.06|
| | |acc_norm|70.51|± | 1.06|
|prost | 0|acc |22.69|± | 0.31|
| | |acc_norm|26.36|± | 0.32|
|swag | 0|acc |47.36|± | 0.35|
| | |acc_norm|64.59|± | 0.34|
|winogrande | 0|acc |58.72|± | 1.38|
|wsc273 | 0|acc |76.92|± | 2.55|
## bloom-3b_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 1.21|± | 0.3|
## bloom-3b_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 2.10|± | 0.15|
| | |f1 | 4.63|± | 0.17|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.21|± | 0.21|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.19|± | 0.19|
|math_prealgebra | 1|acc | 0.11|± | 0.11|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |25.26|± | 0.80|
| | |acc_norm|25.06|± | 0.79|
## bloom-3b_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc | 54.6|± | 1.11|
|pawsx_en| 0|acc | 56.8|± | 1.11|
|pawsx_es| 0|acc | 56.4|± | 1.11|
|pawsx_fr| 0|acc | 47.6|± | 1.12|
|pawsx_ja| 0|acc | 44.6|± | 1.11|
|pawsx_ko| 0|acc | 46.3|± | 1.12|
|pawsx_zh| 0|acc | 47.1|± | 1.12|
## bloom-3b_question_answering_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|------------|----:|---|-----:|
|headqa_en | 0|acc |28.41|± | 0.86|
| | |acc_norm |33.37|± | 0.90|
|headqa_es | 0|acc |26.44|± | 0.84|
| | |acc_norm |31.00|± | 0.88|
|logiqa | 0|acc |20.74|± | 1.59|
| | |acc_norm |29.19|± | 1.78|
|squad2 | 1|exact | 6.91| | |
| | |f1 |11.51| | |
| | |HasAns_exact|11.10| | |
| | |HasAns_f1 |20.31| | |
| | |NoAns_exact | 2.74| | |
| | |NoAns_f1 | 2.74| | |
| | |best_exact |50.07| | |
| | |best_f1 |50.08| | |
|triviaqa | 1|acc | 4.15|± | 0.19|
|truthfulqa_mc| 1|mc1 |23.26|± | 1.48|
| | |mc2 |40.57|± | 1.44|
|webqs | 0|acc | 1.67|± | 0.28|
## bloom-3b_reading_comprehension_0-shot.json
|Task|Version|Metric|Value| |Stderr|
|----|------:|------|----:|---|-----:|
|coqa| 1|f1 |61.50|± | 1.77|
| | |em |46.07|± | 2.02|
|drop| 1|em | 1.94|± | 0.14|
| | |f1 | 8.88|± | 0.20|
|race| 1|acc |35.22|± | 1.48|
## bloom-3b_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 49.2|± | 2.24|
|xcopa_ht| 0|acc | 50.2|± | 2.24|
|xcopa_id| 0|acc | 69.2|± | 2.07|
|xcopa_it| 0|acc | 51.6|± | 2.24|
|xcopa_qu| 0|acc | 50.6|± | 2.24|
|xcopa_sw| 0|acc | 51.4|± | 2.24|
|xcopa_ta| 0|acc | 58.0|± | 2.21|
|xcopa_th| 0|acc | 52.6|± | 2.24|
|xcopa_tr| 0|acc | 53.4|± | 2.23|
|xcopa_vi| 0|acc | 68.8|± | 2.07|
|xcopa_zh| 0|acc | 62.0|± | 2.17|
## bloom-3b_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.43|± | 0.67|
|xnli_bg| 0|acc |37.90|± | 0.69|
|xnli_de| 0|acc |40.40|± | 0.69|
|xnli_el| 0|acc |33.21|± | 0.67|
|xnli_en| 0|acc |53.41|± | 0.70|
|xnli_es| 0|acc |49.08|± | 0.71|
|xnli_fr| 0|acc |49.18|± | 0.71|
|xnli_hi| 0|acc |45.55|± | 0.70|
|xnli_ru| 0|acc |41.40|± | 0.70|
|xnli_sw| 0|acc |35.83|± | 0.68|
|xnli_th| 0|acc |33.39|± | 0.67|
|xnli_tr| 0|acc |33.81|± | 0.67|
|xnli_ur| 0|acc |40.00|± | 0.69|
|xnli_vi| 0|acc |46.51|± | 0.70|
|xnli_zh| 0|acc |37.43|± | 0.68|
## bloom-3b_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |56.59|± | 1.28|
|xstory_cloze_en| 0|acc |66.78|± | 1.21|
|xstory_cloze_es| 0|acc |64.13|± | 1.23|
|xstory_cloze_eu| 0|acc |55.66|± | 1.28|
|xstory_cloze_hi| 0|acc |57.58|± | 1.27|
|xstory_cloze_id| 0|acc |60.82|± | 1.26|
|xstory_cloze_my| 0|acc |46.59|± | 1.28|
|xstory_cloze_ru| 0|acc |50.69|± | 1.29|
|xstory_cloze_sw| 0|acc |53.01|± | 1.28|
|xstory_cloze_te| 0|acc |58.17|± | 1.27|
|xstory_cloze_zh| 0|acc |60.89|± | 1.26|
## bloom-3b_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |79.10|± | 0.84|
|xwinograd_fr| 0|acc |71.08|± | 5.01|
|xwinograd_jp| 0|acc |56.62|± | 1.60|
|xwinograd_pt| 0|acc |70.34|± | 2.82|
|xwinograd_ru| 0|acc |53.65|± | 2.81|
|xwinograd_zh| 0|acc |73.61|± | 1.97|
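Across the three model sizes, xwinograd_en rises from 69.98 (bloom-1b1) to 74.71 (bloom-1b7) to 79.10 (bloom-3b). One informal way to ask whether such a gap exceeds the reported noise is a two-sample z-score built from the stderrs; a rough sketch that ignores the pairing of identical test items across models, so it is conservative at best:

```python
# xwinograd_en accuracy and stderr for bloom-1b7 from the JSON blob above;
# the bloom-3b figures are taken from the table above (79.10 ± 0.84).
acc_1b7, se_1b7 = 0.7470967741935484, 0.009016698607546121
acc_3b,  se_3b  = 0.7910,             0.0084

z = (acc_3b - acc_1b7) / (se_1b7**2 + se_3b**2) ** 0.5
print(f"z ≈ {z:.1f}")  # ≈ 3.6, well clear of the ~1.96 threshold
```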
{
"results": {
"openbookqa": {
"acc": 0.216,
"acc_stderr": 0.01842190906141194,
"acc_norm": 0.322,
"acc_norm_stderr": 0.020916668330019882
},
"wsc273": {
"acc": 0.7692307692307693,
"acc_stderr": 0.02554658323673352
},
"swag": {
"acc": 0.47355793262021395,
"acc_stderr": 0.0035301452338806077,
"acc_norm": 0.6459062281315605,
"acc_norm_stderr": 0.003381229133319312
},
"mc_taco": {
"em": 0.11936936936936937,
"f1": 0.4957122298258418
},
"winogrande": {
"acc": 0.5872138910812944,
"acc_stderr": 0.0138370606486821
},
"prost": {
"acc": 0.22688941076003416,
"acc_stderr": 0.003059856614352936,
"acc_norm": 0.26361016225448336,
"acc_norm_stderr": 0.0032189046983713957
},
"arc_challenge": {
"acc": 0.27986348122866894,
"acc_stderr": 0.013119040897725922,
"acc_norm": 0.3054607508532423,
"acc_norm_stderr": 0.013460080478002498
},
"arc_easy": {
"acc": 0.5946969696969697,
"acc_stderr": 0.010074093589739182,
"acc_norm": 0.5324074074074074,
"acc_norm_stderr": 0.010238210368801902
},
"piqa": {
"acc": 0.7083786724700761,
"acc_stderr": 0.010604441527428793,
"acc_norm": 0.705114254624592,
"acc_norm_stderr": 0.010639030620156982
},
"copa": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078
},
"boolq": {
"acc": 0.6162079510703364,
"acc_stderr": 0.008505584729104964
},
"hellaswag": {
"acc": 0.41256721768571997,
"acc_stderr": 0.004912900450370833,
"acc_norm": 0.527185819557857,
"acc_norm_stderr": 0.0049824003689396615
}
},
"versions": {
"openbookqa": 0,
"wsc273": 0,
"swag": 0,
"mc_taco": 0,
"winogrande": 0,
"prost": 0,
"arc_challenge": 0,
"arc_easy": 0,
"piqa": 0,
"copa": 0,
"boolq": 1,
"hellaswag": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-3b,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"gsm8k": {
"acc": 0.012130401819560273,
"acc_stderr": 0.0030152942428909486
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bigscience/bloom-3b,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}