Unverified commit 29f12dd9, authored by Lintang Sutawika, committed by GitHub

Merge branch 'big-refactor' into benchmark-scripts

parents e37698df 4168c05f
{
"results": {
"xstory_cloze_ar": {
"acc": 0.48510919920582396,
"acc_stderr": 0.012861417842074004
},
"xstory_cloze_en": {
"acc": 0.7789543348775645,
"acc_stderr": 0.010678457581809239
},
"xstory_cloze_es": {
"acc": 0.6604897418927862,
"acc_stderr": 0.01218627614665943
},
"xstory_cloze_eu": {
"acc": 0.5109199205823958,
"acc_stderr": 0.012864056278255038
},
"xstory_cloze_hi": {
"acc": 0.5168762409000662,
"acc_stderr": 0.012859793919977606
},
"xstory_cloze_id": {
"acc": 0.5519523494374586,
"acc_stderr": 0.012797478885304742
},
"xstory_cloze_my": {
"acc": 0.48378557246856385,
"acc_stderr": 0.01286035780505586
},
"xstory_cloze_ru": {
"acc": 0.5724685638649901,
"acc_stderr": 0.012731259626982528
},
"xstory_cloze_sw": {
"acc": 0.4990072799470549,
"acc_stderr": 0.012867099955422935
},
"xstory_cloze_te": {
"acc": 0.5294506949040371,
"acc_stderr": 0.012844785490017004
},
"xstory_cloze_zh": {
"acc": 0.5956320317670417,
"acc_stderr": 0.012629580396570932
}
},
"versions": {
"xstory_cloze_ar": 0,
"xstory_cloze_en": 0,
"xstory_cloze_es": 0,
"xstory_cloze_eu": 0,
"xstory_cloze_hi": 0,
"xstory_cloze_id": 0,
"xstory_cloze_my": 0,
"xstory_cloze_ru": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_te": 0,
"xstory_cloze_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_en": {
"acc": 0.8666666666666667,
"acc_stderr": 0.0070514325016347275
},
"xwinograd_fr": {
"acc": 0.6626506024096386,
"acc_stderr": 0.05221260262032129
},
"xwinograd_jp": {
"acc": 0.602711157455683,
"acc_stderr": 0.015809751560314552
},
"xwinograd_pt": {
"acc": 0.6692015209125475,
"acc_stderr": 0.02906762615931534
},
"xwinograd_ru": {
"acc": 0.6952380952380952,
"acc_stderr": 0.025976599352305375
},
"xwinograd_zh": {
"acc": 0.7162698412698413,
"acc_stderr": 0.02010051064884106
}
},
"versions": {
"xwinograd_en": 0,
"xwinograd_fr": 0,
"xwinograd_jp": 0,
"xwinograd_pt": 0,
"xwinograd_ru": 0,
"xwinograd_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# opt-1.3b
## opt-1.3b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |23.12|± | 1.23|
| | |acc_norm|29.44|± | 1.33|
|arc_easy | 0|acc |57.03|± | 1.02|
| | |acc_norm|50.93|± | 1.03|
|hendrycksTest-abstract_algebra | 0|acc |26.00|± | 4.41|
| | |acc_norm|25.00|± | 4.35|
|hendrycksTest-anatomy | 0|acc |22.96|± | 3.63|
| | |acc_norm|21.48|± | 3.55|
|hendrycksTest-astronomy | 0|acc |23.68|± | 3.46|
| | |acc_norm|34.21|± | 3.86|
|hendrycksTest-business_ethics | 0|acc |34.00|± | 4.76|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-clinical_knowledge | 0|acc |19.25|± | 2.43|
| | |acc_norm|25.28|± | 2.67|
|hendrycksTest-college_biology | 0|acc |26.39|± | 3.69|
| | |acc_norm|27.78|± | 3.75|
|hendrycksTest-college_chemistry | 0|acc |28.00|± | 4.51|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |33.00|± | 4.73|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_mathematics | 0|acc |19.00|± | 3.94|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_medicine | 0|acc |17.92|± | 2.92|
| | |acc_norm|23.70|± | 3.24|
|hendrycksTest-college_physics | 0|acc |27.45|± | 4.44|
| | |acc_norm|30.39|± | 4.58|
|hendrycksTest-computer_security | 0|acc |29.00|± | 4.56|
| | |acc_norm|38.00|± | 4.88|
|hendrycksTest-conceptual_physics | 0|acc |21.70|± | 2.69|
| | |acc_norm|20.43|± | 2.64|
|hendrycksTest-econometrics | 0|acc |24.56|± | 4.05|
| | |acc_norm|23.68|± | 4.00|
|hendrycksTest-electrical_engineering | 0|acc |25.52|± | 3.63|
| | |acc_norm|28.97|± | 3.78|
|hendrycksTest-elementary_mathematics | 0|acc |19.58|± | 2.04|
| | |acc_norm|24.87|± | 2.23|
|hendrycksTest-formal_logic | 0|acc |29.37|± | 4.07|
| | |acc_norm|26.98|± | 3.97|
|hendrycksTest-global_facts | 0|acc |16.00|± | 3.68|
| | |acc_norm|18.00|± | 3.86|
|hendrycksTest-high_school_biology | 0|acc |20.97|± | 2.32|
| | |acc_norm|26.77|± | 2.52|
|hendrycksTest-high_school_chemistry | 0|acc |24.63|± | 3.03|
| | |acc_norm|30.54|± | 3.24|
|hendrycksTest-high_school_computer_science | 0|acc |27.00|± | 4.46|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-high_school_european_history | 0|acc |24.24|± | 3.35|
| | |acc_norm|27.27|± | 3.48|
|hendrycksTest-high_school_geography | 0|acc |22.22|± | 2.96|
| | |acc_norm|28.28|± | 3.21|
|hendrycksTest-high_school_government_and_politics| 0|acc |20.73|± | 2.93|
| | |acc_norm|23.83|± | 3.07|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.23|± | 2.31|
| | |acc_norm|29.23|± | 2.31|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|28.89|± | 2.76|
|hendrycksTest-high_school_microeconomics | 0|acc |21.43|± | 2.67|
| | |acc_norm|30.25|± | 2.98|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|25.17|± | 3.54|
|hendrycksTest-high_school_psychology | 0|acc |22.57|± | 1.79|
| | |acc_norm|24.22|± | 1.84|
|hendrycksTest-high_school_statistics | 0|acc |25.46|± | 2.97|
| | |acc_norm|27.78|± | 3.05|
|hendrycksTest-high_school_us_history | 0|acc |25.00|± | 3.04|
| | |acc_norm|25.49|± | 3.06|
|hendrycksTest-high_school_world_history | 0|acc |26.58|± | 2.88|
| | |acc_norm|27.85|± | 2.92|
|hendrycksTest-human_aging | 0|acc |35.43|± | 3.21|
| | |acc_norm|29.15|± | 3.05|
|hendrycksTest-human_sexuality | 0|acc |40.46|± | 4.30|
| | |acc_norm|31.30|± | 4.07|
|hendrycksTest-international_law | 0|acc |17.36|± | 3.46|
| | |acc_norm|47.93|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |28.70|± | 4.37|
| | |acc_norm|39.81|± | 4.73|
|hendrycksTest-logical_fallacies | 0|acc |19.02|± | 3.08|
| | |acc_norm|28.83|± | 3.56|
|hendrycksTest-machine_learning | 0|acc |27.68|± | 4.25|
| | |acc_norm|27.68|± | 4.25|
|hendrycksTest-management | 0|acc |20.39|± | 3.99|
| | |acc_norm|27.18|± | 4.41|
|hendrycksTest-marketing | 0|acc |28.21|± | 2.95|
| | |acc_norm|33.76|± | 3.10|
|hendrycksTest-medical_genetics | 0|acc |27.00|± | 4.46|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-miscellaneous | 0|acc |28.35|± | 1.61|
| | |acc_norm|28.74|± | 1.62|
|hendrycksTest-moral_disputes | 0|acc |27.17|± | 2.39|
| | |acc_norm|30.35|± | 2.48|
|hendrycksTest-moral_scenarios | 0|acc |23.80|± | 1.42|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |29.41|± | 2.61|
| | |acc_norm|39.54|± | 2.80|
|hendrycksTest-philosophy | 0|acc |23.79|± | 2.42|
| | |acc_norm|30.87|± | 2.62|
|hendrycksTest-prehistory | 0|acc |24.07|± | 2.38|
| | |acc_norm|21.60|± | 2.29|
|hendrycksTest-professional_accounting | 0|acc |25.89|± | 2.61|
| | |acc_norm|26.24|± | 2.62|
|hendrycksTest-professional_law | 0|acc |26.01|± | 1.12|
| | |acc_norm|28.03|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |24.63|± | 2.62|
| | |acc_norm|22.43|± | 2.53|
|hendrycksTest-professional_psychology | 0|acc |23.69|± | 1.72|
| | |acc_norm|25.49|± | 1.76|
|hendrycksTest-public_relations | 0|acc |25.45|± | 4.17|
| | |acc_norm|19.09|± | 3.76|
|hendrycksTest-security_studies | 0|acc |32.24|± | 2.99|
| | |acc_norm|26.53|± | 2.83|
|hendrycksTest-sociology | 0|acc |33.83|± | 3.35|
| | |acc_norm|34.33|± | 3.36|
|hendrycksTest-us_foreign_policy | 0|acc |32.00|± | 4.69|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-virology | 0|acc |34.34|± | 3.70|
| | |acc_norm|30.12|± | 3.57|
|hendrycksTest-world_religions | 0|acc |34.50|± | 3.65|
| | |acc_norm|33.92|± | 3.63|
|lambada_openai | 0|ppl | 6.64|± | 0.17|
| | |acc |57.93|± | 0.69|
|logiqa | 0|acc |22.27|± | 1.63|
| | |acc_norm|27.19|± | 1.75|
|piqa | 0|acc |71.71|± | 1.05|
| | |acc_norm|72.47|± | 1.04|
|sciq | 0|acc |84.50|± | 1.15|
| | |acc_norm|76.50|± | 1.34|
|winogrande | 0|acc |59.75|± | 1.38|
|wsc | 0|acc |38.46|± | 4.79|
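
The table above is rendered from the raw dump reproduced below. A minimal sketch of how a table in this format can be regenerated from such a file; the file name, and the assumption that every metric key has a matching `*_stderr` companion, come from the dump shown here rather than from any harness API:

```python
import json

# Hypothetical path; point this at the raw results dump.
with open("opt-1.3b.json") as f:
    data = json.load(f)

rows = []
for task in sorted(data["results"]):
    metrics = data["results"][task]
    version = data["versions"].get(task, "")
    first = True
    for name, value in metrics.items():
        if name.endswith("_stderr"):
            continue  # stderr values are paired with their metric below
        stderr = metrics.get(f"{name}_stderr", 0.0)
        scale = 1 if name == "ppl" else 100  # ppl is reported raw, acc as a percentage
        rows.append((task if first else "", version if first else "",
                     name, value * scale, stderr * scale))
        first = False

print("| Task |Version| Metric |Value| |Stderr|")
print("|------|------:|--------|----:|---|-----:|")
for task, version, metric, value, err in rows:
    print(f"|{task}|{version}|{metric}|{value:.2f}|± |{err:.2f}|")
```

Column widths and alignment are left to the markdown renderer, so the output is equivalent to, but not byte-identical with, the tables in this file.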
{
"results": {
"hendrycksTest-high_school_chemistry": {
"acc": 0.24630541871921183,
"acc_stderr": 0.03031509928561773,
"acc_norm": 0.3054187192118227,
"acc_norm_stderr": 0.03240661565868408
},
"hendrycksTest-international_law": {
"acc": 0.17355371900826447,
"acc_stderr": 0.0345727283691767,
"acc_norm": 0.4793388429752066,
"acc_norm_stderr": 0.04560456086387235
},
"hendrycksTest-abstract_algebra": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"hendrycksTest-anatomy": {
"acc": 0.22962962962962963,
"acc_stderr": 0.03633384414073465,
"acc_norm": 0.21481481481481482,
"acc_norm_stderr": 0.03547854198560826
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.19576719576719576,
"acc_stderr": 0.0204357309715418,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.022261817692400158
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.21428571428571427,
"acc_stderr": 0.02665353159671549,
"acc_norm": 0.3025210084033613,
"acc_norm_stderr": 0.02983796238829193
},
"hendrycksTest-college_chemistry": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-college_biology": {
"acc": 0.2638888888888889,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03745554791462457
},
"hendrycksTest-high_school_psychology": {
"acc": 0.22568807339449543,
"acc_stderr": 0.017923087667803057,
"acc_norm": 0.24220183486238533,
"acc_norm_stderr": 0.01836817630659862
},
"hendrycksTest-philosophy": {
"acc": 0.2379421221864952,
"acc_stderr": 0.024185150647818707,
"acc_norm": 0.3086816720257235,
"acc_norm_stderr": 0.026236965881153256
},
"hendrycksTest-professional_law": {
"acc": 0.26010430247718386,
"acc_stderr": 0.011204382887823836,
"acc_norm": 0.2803129074315515,
"acc_norm_stderr": 0.011471555944958614
},
"hendrycksTest-college_computer_science": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-business_ethics": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"hendrycksTest-machine_learning": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697626,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697624
},
"hendrycksTest-public_relations": {
"acc": 0.2545454545454545,
"acc_stderr": 0.041723430387053825,
"acc_norm": 0.19090909090909092,
"acc_norm_stderr": 0.03764425585984925
},
"hendrycksTest-logical_fallacies": {
"acc": 0.1901840490797546,
"acc_stderr": 0.030833491146281224,
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"hendrycksTest-professional_psychology": {
"acc": 0.2369281045751634,
"acc_stderr": 0.017201662169789782,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.017630827375148383
},
"hendrycksTest-nutrition": {
"acc": 0.29411764705882354,
"acc_stderr": 0.02609016250427904,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.02799672318063145
},
"arc_challenge": {
"acc": 0.23122866894197952,
"acc_stderr": 0.012320858834772276,
"acc_norm": 0.29436860068259385,
"acc_norm_stderr": 0.013318528460539426
},
"hendrycksTest-college_medicine": {
"acc": 0.1791907514450867,
"acc_stderr": 0.02924251305906328,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.03242414757483098
},
"hendrycksTest-professional_accounting": {
"acc": 0.25886524822695034,
"acc_stderr": 0.026129572527180848,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.026244920349843
},
"hendrycksTest-jurisprudence": {
"acc": 0.28703703703703703,
"acc_stderr": 0.043733130409147614,
"acc_norm": 0.39814814814814814,
"acc_norm_stderr": 0.047323326159788154
},
"winogrande": {
"acc": 0.5974743488555643,
"acc_stderr": 0.01378286683170305
},
"hendrycksTest-high_school_us_history": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"hendrycksTest-econometrics": {
"acc": 0.24561403508771928,
"acc_stderr": 0.04049339297748142,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281336
},
"hendrycksTest-professional_medicine": {
"acc": 0.24632352941176472,
"acc_stderr": 0.02617343857052,
"acc_norm": 0.22426470588235295,
"acc_norm_stderr": 0.02533684856333237
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.19245283018867926,
"acc_stderr": 0.024262979839372277,
"acc_norm": 0.2528301886792453,
"acc_norm_stderr": 0.02674989977124124
},
"hendrycksTest-high_school_geography": {
"acc": 0.2222222222222222,
"acc_stderr": 0.029620227874790482,
"acc_norm": 0.2828282828282828,
"acc_norm_stderr": 0.03208779558786751
},
"hendrycksTest-high_school_physics": {
"acc": 0.2251655629139073,
"acc_stderr": 0.034104352820089376,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.035433042343899844
},
"hendrycksTest-astronomy": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03459777606810537,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.038607315993160925
},
"hendrycksTest-medical_genetics": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"hendrycksTest-formal_logic": {
"acc": 0.29365079365079366,
"acc_stderr": 0.040735243221471276,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.039701582732351734
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2170212765957447,
"acc_stderr": 0.026947483121496217,
"acc_norm": 0.20425531914893616,
"acc_norm_stderr": 0.026355158413349417
},
"hendrycksTest-electrical_engineering": {
"acc": 0.25517241379310346,
"acc_stderr": 0.03632984052707842,
"acc_norm": 0.2896551724137931,
"acc_norm_stderr": 0.03780019230438014
},
"hendrycksTest-virology": {
"acc": 0.3433734939759036,
"acc_stderr": 0.03696584317010601,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.0357160923005348
},
"piqa": {
"acc": 0.7170837867247007,
"acc_stderr": 0.010508949177489683,
"acc_norm": 0.7247007616974973,
"acc_norm_stderr": 0.01042142927736953
},
"hendrycksTest-high_school_statistics": {
"acc": 0.25462962962962965,
"acc_stderr": 0.02971127586000535,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03054674526495318
},
"hendrycksTest-college_physics": {
"acc": 0.27450980392156865,
"acc_stderr": 0.04440521906179327,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.045766654032077636
},
"hendrycksTest-high_school_biology": {
"acc": 0.20967741935483872,
"acc_stderr": 0.02315787934908352,
"acc_norm": 0.267741935483871,
"acc_norm_stderr": 0.02518900666021238
},
"hendrycksTest-world_religions": {
"acc": 0.34502923976608185,
"acc_stderr": 0.036459813773888065,
"acc_norm": 0.3391812865497076,
"acc_norm_stderr": 0.036310534964889056
},
"hendrycksTest-marketing": {
"acc": 0.28205128205128205,
"acc_stderr": 0.02948036054954119,
"acc_norm": 0.33760683760683763,
"acc_norm_stderr": 0.030980296992618558
},
"hendrycksTest-security_studies": {
"acc": 0.3224489795918367,
"acc_stderr": 0.029923100563683913,
"acc_norm": 0.2653061224489796,
"acc_norm_stderr": 0.028263889943784603
},
"arc_easy": {
"acc": 0.5702861952861953,
"acc_stderr": 0.010157908005763676,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.01025802414786065
},
"hendrycksTest-high_school_world_history": {
"acc": 0.26582278481012656,
"acc_stderr": 0.028756799629658335,
"acc_norm": 0.27848101265822783,
"acc_norm_stderr": 0.029178682304842555
},
"hendrycksTest-human_aging": {
"acc": 0.3542600896860987,
"acc_stderr": 0.03210062154134986,
"acc_norm": 0.2914798206278027,
"acc_norm_stderr": 0.030500283176545902
},
"sciq": {
"acc": 0.845,
"acc_stderr": 0.01145015747079947,
"acc_norm": 0.765,
"acc_norm_stderr": 0.013414729030247121
},
"hendrycksTest-moral_disputes": {
"acc": 0.27167630057803466,
"acc_stderr": 0.02394851290546836,
"acc_norm": 0.30346820809248554,
"acc_norm_stderr": 0.024752411960917212
},
"hendrycksTest-management": {
"acc": 0.20388349514563106,
"acc_stderr": 0.03989139859531771,
"acc_norm": 0.27184466019417475,
"acc_norm_stderr": 0.044052680241409216
},
"hendrycksTest-computer_security": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"hendrycksTest-moral_scenarios": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"wsc": {
"acc": 0.38461538461538464,
"acc_stderr": 0.0479366886807504
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.20725388601036268,
"acc_stderr": 0.029252823291803627,
"acc_norm": 0.23834196891191708,
"acc_norm_stderr": 0.030748905363909895
},
"hendrycksTest-human_sexuality": {
"acc": 0.40458015267175573,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.31297709923664124,
"acc_norm_stderr": 0.04066962905677698
},
"hendrycksTest-high_school_european_history": {
"acc": 0.24242424242424243,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.0347769116216366
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2923076923076923,
"acc_stderr": 0.023060438380857733,
"acc_norm": 0.2923076923076923,
"acc_norm_stderr": 0.02306043838085774
},
"hendrycksTest-college_mathematics": {
"acc": 0.19,
"acc_stderr": 0.03942772444036623,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-miscellaneous": {
"acc": 0.2835249042145594,
"acc_stderr": 0.016117318166832272,
"acc_norm": 0.28735632183908044,
"acc_norm_stderr": 0.0161824107306827
},
"logiqa": {
"acc": 0.2227342549923195,
"acc_stderr": 0.01632005404616512,
"acc_norm": 0.271889400921659,
"acc_norm_stderr": 0.01745171600943683
},
"hendrycksTest-prehistory": {
"acc": 0.24074074074074073,
"acc_stderr": 0.023788583551658537,
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.022899162918445796
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.21851851851851853,
"acc_stderr": 0.025195752251823793,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.027634907264178544
},
"lambada_openai": {
"ppl": 6.644056379058006,
"ppl_stderr": 0.1717099929921861,
"acc": 0.5792742091985251,
"acc_stderr": 0.00687786642328006
},
"hendrycksTest-global_facts": {
"acc": 0.16,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.18,
"acc_norm_stderr": 0.03861229196653695
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621503
},
"hendrycksTest-sociology": {
"acc": 0.3383084577114428,
"acc_stderr": 0.033455630703391914,
"acc_norm": 0.34328358208955223,
"acc_norm_stderr": 0.03357379665433431
}
},
"versions": {
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-nutrition": 0,
"arc_challenge": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-jurisprudence": 0,
"winogrande": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-virology": 0,
"piqa": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-security_studies": 0,
"arc_easy": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-human_aging": 0,
"sciq": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-management": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-moral_scenarios": 0,
"wsc": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-miscellaneous": 0,
"logiqa": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-high_school_mathematics": 0,
"lambada_openai": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-sociology": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-1.3b,use_accelerate=True",
"num_fewshot": 0,
"batch_size": 16,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# opt-125m
## opt-125m.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |18.94|± | 1.15|
| | |acc_norm|22.78|± | 1.23|
|arc_easy | 0|acc |43.52|± | 1.02|
| | |acc_norm|39.98|± | 1.01|
|hendrycksTest-abstract_algebra | 0|acc |17.00|± | 3.78|
| | |acc_norm|20.00|± | 4.02|
|hendrycksTest-anatomy | 0|acc |24.44|± | 3.71|
| | |acc_norm|21.48|± | 3.55|
|hendrycksTest-astronomy | 0|acc |20.39|± | 3.28|
| | |acc_norm|33.55|± | 3.84|
|hendrycksTest-business_ethics | 0|acc |35.00|± | 4.79|
| | |acc_norm|23.00|± | 4.23|
|hendrycksTest-clinical_knowledge | 0|acc |18.87|± | 2.41|
| | |acc_norm|27.92|± | 2.76|
|hendrycksTest-college_biology | 0|acc |22.92|± | 3.51|
| | |acc_norm|23.61|± | 3.55|
|hendrycksTest-college_chemistry | 0|acc |28.00|± | 4.51|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-college_computer_science | 0|acc |27.00|± | 4.46|
| | |acc_norm|22.00|± | 4.16|
|hendrycksTest-college_mathematics | 0|acc |16.00|± | 3.68|
| | |acc_norm|25.00|± | 4.35|
|hendrycksTest-college_medicine | 0|acc |24.28|± | 3.27|
| | |acc_norm|26.59|± | 3.37|
|hendrycksTest-college_physics | 0|acc |28.43|± | 4.49|
| | |acc_norm|23.53|± | 4.22|
|hendrycksTest-computer_security | 0|acc |31.00|± | 4.65|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-conceptual_physics | 0|acc |26.38|± | 2.88|
| | |acc_norm|17.45|± | 2.48|
|hendrycksTest-econometrics | 0|acc |31.58|± | 4.37|
| | |acc_norm|28.95|± | 4.27|
|hendrycksTest-electrical_engineering | 0|acc |24.14|± | 3.57|
| | |acc_norm|30.34|± | 3.83|
|hendrycksTest-elementary_mathematics | 0|acc |24.87|± | 2.23|
| | |acc_norm|25.13|± | 2.23|
|hendrycksTest-formal_logic | 0|acc |30.95|± | 4.13|
| | |acc_norm|28.57|± | 4.04|
|hendrycksTest-global_facts | 0|acc |19.00|± | 3.94|
| | |acc_norm|22.00|± | 4.16|
|hendrycksTest-high_school_biology | 0|acc |24.84|± | 2.46|
| | |acc_norm|29.03|± | 2.58|
|hendrycksTest-high_school_chemistry | 0|acc |17.24|± | 2.66|
| | |acc_norm|24.63|± | 3.03|
|hendrycksTest-high_school_computer_science | 0|acc |21.00|± | 4.09|
| | |acc_norm|25.00|± | 4.35|
|hendrycksTest-high_school_european_history | 0|acc |24.85|± | 3.37|
| | |acc_norm|31.52|± | 3.63|
|hendrycksTest-high_school_geography | 0|acc |22.73|± | 2.99|
| | |acc_norm|28.79|± | 3.23|
|hendrycksTest-high_school_government_and_politics| 0|acc |24.87|± | 3.12|
| | |acc_norm|30.05|± | 3.31|
|hendrycksTest-high_school_macroeconomics | 0|acc |26.15|± | 2.23|
| | |acc_norm|24.87|± | 2.19|
|hendrycksTest-high_school_mathematics | 0|acc |17.04|± | 2.29|
| | |acc_norm|23.33|± | 2.58|
|hendrycksTest-high_school_microeconomics | 0|acc |23.53|± | 2.76|
| | |acc_norm|29.83|± | 2.97|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|21.19|± | 3.34|
|hendrycksTest-high_school_psychology | 0|acc |25.14|± | 1.86|
| | |acc_norm|25.69|± | 1.87|
|hendrycksTest-high_school_statistics | 0|acc |24.54|± | 2.93|
| | |acc_norm|27.78|± | 3.05|
|hendrycksTest-high_school_us_history | 0|acc |24.51|± | 3.02|
| | |acc_norm|26.47|± | 3.10|
|hendrycksTest-high_school_world_history | 0|acc |26.16|± | 2.86|
| | |acc_norm|28.69|± | 2.94|
|hendrycksTest-human_aging | 0|acc |36.32|± | 3.23|
| | |acc_norm|26.01|± | 2.94|
|hendrycksTest-human_sexuality | 0|acc |32.82|± | 4.12|
| | |acc_norm|32.06|± | 4.09|
|hendrycksTest-international_law | 0|acc |14.88|± | 3.25|
| | |acc_norm|38.02|± | 4.43|
|hendrycksTest-jurisprudence | 0|acc |15.74|± | 3.52|
| | |acc_norm|37.04|± | 4.67|
|hendrycksTest-logical_fallacies | 0|acc |26.38|± | 3.46|
| | |acc_norm|30.06|± | 3.60|
|hendrycksTest-machine_learning | 0|acc |24.11|± | 4.06|
| | |acc_norm|27.68|± | 4.25|
|hendrycksTest-management | 0|acc |17.48|± | 3.76|
| | |acc_norm|27.18|± | 4.41|
|hendrycksTest-marketing | 0|acc |33.76|± | 3.10|
| | |acc_norm|35.90|± | 3.14|
|hendrycksTest-medical_genetics | 0|acc |29.00|± | 4.56|
| | |acc_norm|38.00|± | 4.88|
|hendrycksTest-miscellaneous | 0|acc |27.46|± | 1.60|
| | |acc_norm|26.18|± | 1.57|
|hendrycksTest-moral_disputes | 0|acc |28.32|± | 2.43|
| | |acc_norm|29.48|± | 2.45|
|hendrycksTest-moral_scenarios | 0|acc |23.80|± | 1.42|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |28.76|± | 2.59|
| | |acc_norm|34.97|± | 2.73|
|hendrycksTest-philosophy | 0|acc |20.90|± | 2.31|
| | |acc_norm|31.19|± | 2.63|
|hendrycksTest-prehistory | 0|acc |27.47|± | 2.48|
| | |acc_norm|21.91|± | 2.30|
|hendrycksTest-professional_accounting | 0|acc |25.89|± | 2.61|
| | |acc_norm|26.24|± | 2.62|
|hendrycksTest-professional_law | 0|acc |25.23|± | 1.11|
| | |acc_norm|27.38|± | 1.14|
|hendrycksTest-professional_medicine | 0|acc |25.00|± | 2.63|
| | |acc_norm|22.79|± | 2.55|
|hendrycksTest-professional_psychology | 0|acc |23.86|± | 1.72|
| | |acc_norm|24.84|± | 1.75|
|hendrycksTest-public_relations | 0|acc |29.09|± | 4.35|
| | |acc_norm|25.45|± | 4.17|
|hendrycksTest-security_studies | 0|acc |33.47|± | 3.02|
| | |acc_norm|25.71|± | 2.80|
|hendrycksTest-sociology | 0|acc |27.86|± | 3.17|
| | |acc_norm|24.88|± | 3.06|
|hendrycksTest-us_foreign_policy | 0|acc |28.00|± | 4.51|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-virology | 0|acc |31.93|± | 3.63|
| | |acc_norm|27.71|± | 3.48|
|hendrycksTest-world_religions | 0|acc |28.65|± | 3.47|
| | |acc_norm|32.16|± | 3.58|
|lambada_openai | 0|ppl |26.02|± | 0.94|
| | |acc |37.90|± | 0.68|
|logiqa | 0|acc |22.73|± | 1.64|
| | |acc_norm|27.96|± | 1.76|
|piqa | 0|acc |63.00|± | 1.13|
| | |acc_norm|62.02|± | 1.13|
|sciq | 0|acc |75.10|± | 1.37|
| | |acc_norm|66.90|± | 1.49|
|winogrande | 0|acc |50.28|± | 1.41|
|wsc | 0|acc |36.54|± | 4.74|
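
The 57 `hendrycksTest-*` subtasks are reported individually, so an aggregate MMLU-style score has to be computed separately. A small sketch, assuming a raw dump in the layout shown below; this is an unweighted macro-average over subtasks, since the per-subject sample counts are not stored in these files:

```python
import json

# Hypothetical path to the raw dump for this model.
with open("opt-125m.json") as f:
    results = json.load(f)["results"]

# Collect only the MMLU (hendrycksTest) subtasks.
mmlu = {task: m for task, m in results.items() if task.startswith("hendrycksTest-")}

acc = sum(m["acc"] for m in mmlu.values()) / len(mmlu)
acc_norm = sum(m["acc_norm"] for m in mmlu.values()) / len(mmlu)

print(f"hendrycksTest subtasks: {len(mmlu)}")
print(f"macro-avg acc:      {acc * 100:.2f}%")
print(f"macro-avg acc_norm: {acc_norm * 100:.2f}%")
```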
{
"results": {
"lambada_openai": {
"ppl": 26.02103896885129,
"ppl_stderr": 0.9418773107503278,
"acc": 0.3790025228022511,
"acc_stderr": 0.006758931440268226
},
"hendrycksTest-college_biology": {
"acc": 0.22916666666666666,
"acc_stderr": 0.03514697467862388,
"acc_norm": 0.2361111111111111,
"acc_norm_stderr": 0.03551446610810826
},
"hendrycksTest-professional_law": {
"acc": 0.2522816166883963,
"acc_stderr": 0.01109278905687524,
"acc_norm": 0.2737940026075619,
"acc_norm_stderr": 0.011388612167979395
},
"hendrycksTest-virology": {
"acc": 0.3192771084337349,
"acc_stderr": 0.03629335329947861,
"acc_norm": 0.27710843373493976,
"acc_norm_stderr": 0.03484331592680587
},
"winogrande": {
"acc": 0.5027624309392266,
"acc_stderr": 0.014052271211616445
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.23529411764705882,
"acc_stderr": 0.027553614467863804,
"acc_norm": 0.29831932773109243,
"acc_norm_stderr": 0.029719142876342863
},
"hendrycksTest-college_physics": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"hendrycksTest-computer_security": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"hendrycksTest-conceptual_physics": {
"acc": 0.26382978723404255,
"acc_stderr": 0.02880998985410297,
"acc_norm": 0.17446808510638298,
"acc_norm_stderr": 0.024809442335503966
},
"hendrycksTest-high_school_world_history": {
"acc": 0.2616033755274262,
"acc_stderr": 0.028609516716994934,
"acc_norm": 0.2869198312236287,
"acc_norm_stderr": 0.02944377302259469
},
"hendrycksTest-sociology": {
"acc": 0.27860696517412936,
"acc_stderr": 0.031700561834973086,
"acc_norm": 0.24875621890547264,
"acc_norm_stderr": 0.030567675938916707
},
"sciq": {
"acc": 0.751,
"acc_stderr": 0.013681600278702308,
"acc_norm": 0.669,
"acc_norm_stderr": 0.014888272588203941
},
"hendrycksTest-professional_accounting": {
"acc": 0.25886524822695034,
"acc_stderr": 0.026129572527180848,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.026244920349843007
},
"hendrycksTest-high_school_biology": {
"acc": 0.24838709677419354,
"acc_stderr": 0.024580028921481003,
"acc_norm": 0.2903225806451613,
"acc_norm_stderr": 0.025822106119415905
},
"hendrycksTest-college_computer_science": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"hendrycksTest-econometrics": {
"acc": 0.3157894736842105,
"acc_stderr": 0.04372748290278007,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.042663394431593935
},
"piqa": {
"acc": 0.6300326441784548,
"acc_stderr": 0.011264415223415284,
"acc_norm": 0.6202393906420022,
"acc_norm_stderr": 0.011323483504715843
},
"hendrycksTest-formal_logic": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04040610178208841
},
"hendrycksTest-astronomy": {
"acc": 0.20394736842105263,
"acc_stderr": 0.03279000406310051,
"acc_norm": 0.3355263157894737,
"acc_norm_stderr": 0.038424985593952694
},
"hendrycksTest-high_school_european_history": {
"acc": 0.24848484848484848,
"acc_stderr": 0.033744026441394036,
"acc_norm": 0.3151515151515151,
"acc_norm_stderr": 0.0362773057502241
},
"hendrycksTest-high_school_geography": {
"acc": 0.22727272727272727,
"acc_stderr": 0.02985751567338641,
"acc_norm": 0.2878787878787879,
"acc_norm_stderr": 0.03225883512300993
},
"hendrycksTest-moral_disputes": {
"acc": 0.2832369942196532,
"acc_stderr": 0.02425790170532337,
"acc_norm": 0.2947976878612717,
"acc_norm_stderr": 0.024547617794803835
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.26153846153846155,
"acc_stderr": 0.022282141204204426,
"acc_norm": 0.24871794871794872,
"acc_norm_stderr": 0.021916957709213796
},
"arc_easy": {
"acc": 0.4351851851851852,
"acc_stderr": 0.010173216430370906,
"acc_norm": 0.3998316498316498,
"acc_norm_stderr": 0.010051788039412935
},
"hendrycksTest-marketing": {
"acc": 0.33760683760683763,
"acc_stderr": 0.030980296992618558,
"acc_norm": 0.358974358974359,
"acc_norm_stderr": 0.03142616993791924
},
"hendrycksTest-nutrition": {
"acc": 0.2875816993464052,
"acc_stderr": 0.02591780611714716,
"acc_norm": 0.34967320261437906,
"acc_norm_stderr": 0.027305308076274702
},
"hendrycksTest-management": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.27184466019417475,
"acc_norm_stderr": 0.044052680241409216
},
"hendrycksTest-prehistory": {
"acc": 0.27469135802469136,
"acc_stderr": 0.024836057868294677,
"acc_norm": 0.2191358024691358,
"acc_norm_stderr": 0.02301670564026219
},
"hendrycksTest-public_relations": {
"acc": 0.2909090909090909,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.041723430387053825
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.1724137931034483,
"acc_stderr": 0.026577672183036586,
"acc_norm": 0.24630541871921183,
"acc_norm_stderr": 0.030315099285617732
},
"hendrycksTest-high_school_psychology": {
"acc": 0.25137614678899084,
"acc_stderr": 0.01859920636028741,
"acc_norm": 0.25688073394495414,
"acc_norm_stderr": 0.018732492928342472
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.24867724867724866,
"acc_stderr": 0.02226181769240015,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643895
},
"hendrycksTest-electrical_engineering": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.30344827586206896,
"acc_norm_stderr": 0.038312260488503336
},
"hendrycksTest-moral_scenarios": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-logical_fallacies": {
"acc": 0.26380368098159507,
"acc_stderr": 0.03462419931615625,
"acc_norm": 0.3006134969325153,
"acc_norm_stderr": 0.0360251131880677
},
"hendrycksTest-professional_medicine": {
"acc": 0.25,
"acc_stderr": 0.026303648393696036,
"acc_norm": 0.22794117647058823,
"acc_norm_stderr": 0.025483081468029804
},
"hendrycksTest-global_facts": {
"acc": 0.19,
"acc_stderr": 0.039427724440366255,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322674
},
"hendrycksTest-abstract_algebra": {
"acc": 0.17,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036844
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.18867924528301888,
"acc_stderr": 0.02407999513006222,
"acc_norm": 0.2792452830188679,
"acc_norm_stderr": 0.027611163402399715
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.21,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"hendrycksTest-college_mathematics": {
"acc": 0.16,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"hendrycksTest-high_school_physics": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008936,
"acc_norm": 0.2119205298013245,
"acc_norm_stderr": 0.03336767086567978
},
"logiqa": {
"acc": 0.22734254992319508,
"acc_stderr": 0.016439067675117734,
"acc_norm": 0.27956989247311825,
"acc_norm_stderr": 0.01760290918682245
},
"hendrycksTest-anatomy": {
"acc": 0.24444444444444444,
"acc_stderr": 0.037125378336148665,
"acc_norm": 0.21481481481481482,
"acc_norm_stderr": 0.03547854198560826
},
"hendrycksTest-college_chemistry": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"hendrycksTest-professional_psychology": {
"acc": 0.238562091503268,
"acc_stderr": 0.0172423858287796,
"acc_norm": 0.24836601307189543,
"acc_norm_stderr": 0.01747948700136476
},
"hendrycksTest-security_studies": {
"acc": 0.3346938775510204,
"acc_stderr": 0.030209235226242307,
"acc_norm": 0.2571428571428571,
"acc_norm_stderr": 0.027979823538744546
},
"hendrycksTest-international_law": {
"acc": 0.1487603305785124,
"acc_stderr": 0.03248470083807193,
"acc_norm": 0.38016528925619836,
"acc_norm_stderr": 0.04431324501968431
},
"hendrycksTest-human_aging": {
"acc": 0.3632286995515695,
"acc_stderr": 0.032277904428505,
"acc_norm": 0.2600896860986547,
"acc_norm_stderr": 0.029442495585857473
},
"hendrycksTest-jurisprudence": {
"acc": 0.1574074074074074,
"acc_stderr": 0.035207039905179614,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024931
},
"hendrycksTest-philosophy": {
"acc": 0.2090032154340836,
"acc_stderr": 0.023093140398374224,
"acc_norm": 0.31189710610932475,
"acc_norm_stderr": 0.02631185807185416
},
"hendrycksTest-miscellaneous": {
"acc": 0.27458492975734355,
"acc_stderr": 0.015959829933084032,
"acc_norm": 0.26181353767560667,
"acc_norm_stderr": 0.015720838678445266
},
"hendrycksTest-business_ethics": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"wsc": {
"acc": 0.36538461538461536,
"acc_stderr": 0.0474473339327792
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.17037037037037037,
"acc_stderr": 0.022922554863074974,
"acc_norm": 0.23333333333333334,
"acc_norm_stderr": 0.02578787422095931
},
"hendrycksTest-medical_genetics": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"hendrycksTest-high_school_us_history": {
"acc": 0.24509803921568626,
"acc_stderr": 0.030190282453501936,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.03096451792692341
},
"hendrycksTest-human_sexuality": {
"acc": 0.3282442748091603,
"acc_stderr": 0.04118438565806298,
"acc_norm": 0.32061068702290074,
"acc_norm_stderr": 0.04093329229834278
},
"arc_challenge": {
"acc": 0.189419795221843,
"acc_stderr": 0.011450705115910769,
"acc_norm": 0.22781569965870307,
"acc_norm_stderr": 0.012256708602326914
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.24870466321243523,
"acc_stderr": 0.031195840877700304,
"acc_norm": 0.3005181347150259,
"acc_norm_stderr": 0.03308818594415751
},
"hendrycksTest-machine_learning": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952687,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697624
},
"hendrycksTest-world_religions": {
"acc": 0.28654970760233917,
"acc_stderr": 0.03467826685703826,
"acc_norm": 0.3216374269005848,
"acc_norm_stderr": 0.03582529442573122
},
"hendrycksTest-college_medicine": {
"acc": 0.24277456647398843,
"acc_stderr": 0.0326926380614177,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.03368762932259431
},
"hendrycksTest-high_school_statistics": {
"acc": 0.24537037037037038,
"acc_stderr": 0.029346665094372948,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03054674526495318
}
},
"versions": {
"lambada_openai": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-virology": 0,
"winogrande": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-sociology": 0,
"sciq": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-econometrics": 0,
"piqa": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"arc_easy": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-nutrition": 0,
"hendrycksTest-management": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-high_school_physics": 0,
"logiqa": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-business_ethics": 0,
"wsc": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-human_sexuality": 0,
"arc_challenge": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_statistics": 0
},
"config": {
"model": "gpt2",
"model_args": "pretrained=facebook/opt-125m",
"num_fewshot": 0,
"batch_size": 64,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# opt-13b
## opt-13b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |32.94|± | 1.37|
| | |acc_norm|35.75|± | 1.40|
|arc_easy | 0|acc |67.09|± | 0.96|
| | |acc_norm|61.78|± | 1.00|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-anatomy | 0|acc |26.67|± | 3.82|
| | |acc_norm|23.70|± | 3.67|
|hendrycksTest-astronomy | 0|acc |31.58|± | 3.78|
| | |acc_norm|38.16|± | 3.95|
|hendrycksTest-business_ethics | 0|acc |29.00|± | 4.56|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-clinical_knowledge | 0|acc |24.15|± | 2.63|
| | |acc_norm|30.19|± | 2.83|
|hendrycksTest-college_biology | 0|acc |20.83|± | 3.40|
| | |acc_norm|21.53|± | 3.44|
|hendrycksTest-college_chemistry | 0|acc |24.00|± | 4.29|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-college_computer_science | 0|acc |30.00|± | 4.61|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-college_mathematics | 0|acc |21.00|± | 4.09|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_medicine | 0|acc |25.43|± | 3.32|
| | |acc_norm|22.54|± | 3.19|
|hendrycksTest-college_physics | 0|acc |20.59|± | 4.02|
| | |acc_norm|23.53|± | 4.22|
|hendrycksTest-computer_security | 0|acc |23.00|± | 4.23|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-conceptual_physics | 0|acc |24.26|± | 2.80|
| | |acc_norm|20.00|± | 2.61|
|hendrycksTest-econometrics | 0|acc |35.09|± | 4.49|
| | |acc_norm|29.82|± | 4.30|
|hendrycksTest-electrical_engineering | 0|acc |31.72|± | 3.88|
| | |acc_norm|31.72|± | 3.88|
|hendrycksTest-elementary_mathematics | 0|acc |24.07|± | 2.20|
| | |acc_norm|26.72|± | 2.28|
|hendrycksTest-formal_logic | 0|acc |28.57|± | 4.04|
| | |acc_norm|23.81|± | 3.81|
|hendrycksTest-global_facts | 0|acc |24.00|± | 4.29|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-high_school_biology | 0|acc |25.16|± | 2.47|
| | |acc_norm|28.39|± | 2.56|
|hendrycksTest-high_school_chemistry | 0|acc |23.65|± | 2.99|
| | |acc_norm|32.51|± | 3.30|
|hendrycksTest-high_school_computer_science | 0|acc |30.00|± | 4.61|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-high_school_european_history | 0|acc |25.45|± | 3.40|
| | |acc_norm|29.70|± | 3.57|
|hendrycksTest-high_school_geography | 0|acc |28.79|± | 3.23|
| | |acc_norm|30.30|± | 3.27|
|hendrycksTest-high_school_government_and_politics| 0|acc |29.53|± | 3.29|
| | |acc_norm|25.39|± | 3.14|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.49|± | 2.31|
| | |acc_norm|29.49|± | 2.31|
|hendrycksTest-high_school_mathematics | 0|acc |27.41|± | 2.72|
| | |acc_norm|30.00|± | 2.79|
|hendrycksTest-high_school_microeconomics | 0|acc |26.05|± | 2.85|
| | |acc_norm|32.77|± | 3.05|
|hendrycksTest-high_school_physics | 0|acc |24.50|± | 3.51|
| | |acc_norm|22.52|± | 3.41|
|hendrycksTest-high_school_psychology | 0|acc |27.52|± | 1.91|
| | |acc_norm|24.40|± | 1.84|
|hendrycksTest-high_school_statistics | 0|acc |29.63|± | 3.11|
| | |acc_norm|29.17|± | 3.10|
|hendrycksTest-high_school_us_history | 0|acc |23.04|± | 2.96|
| | |acc_norm|27.94|± | 3.15|
|hendrycksTest-high_school_world_history | 0|acc |25.74|± | 2.85|
| | |acc_norm|29.54|± | 2.97|
|hendrycksTest-human_aging | 0|acc |29.15|± | 3.05|
| | |acc_norm|24.66|± | 2.89|
|hendrycksTest-human_sexuality | 0|acc |32.06|± | 4.09|
| | |acc_norm|31.30|± | 4.07|
|hendrycksTest-international_law | 0|acc |28.10|± | 4.10|
| | |acc_norm|43.80|± | 4.53|
|hendrycksTest-jurisprudence | 0|acc |29.63|± | 4.41|
| | |acc_norm|43.52|± | 4.79|
|hendrycksTest-logical_fallacies | 0|acc |31.90|± | 3.66|
| | |acc_norm|30.06|± | 3.60|
|hendrycksTest-machine_learning | 0|acc |27.68|± | 4.25|
| | |acc_norm|24.11|± | 4.06|
|hendrycksTest-management | 0|acc |33.01|± | 4.66|
| | |acc_norm|32.04|± | 4.62|
|hendrycksTest-marketing | 0|acc |29.06|± | 2.97|
| | |acc_norm|29.91|± | 3.00|
|hendrycksTest-medical_genetics | 0|acc |27.00|± | 4.46|
| | |acc_norm|39.00|± | 4.90|
|hendrycksTest-miscellaneous | 0|acc |32.31|± | 1.67|
| | |acc_norm|28.99|± | 1.62|
|hendrycksTest-moral_disputes | 0|acc |30.35|± | 2.48|
| | |acc_norm|30.92|± | 2.49|
|hendrycksTest-moral_scenarios | 0|acc |27.26|± | 1.49|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |29.41|± | 2.61|
| | |acc_norm|36.93|± | 2.76|
|hendrycksTest-philosophy | 0|acc |25.40|± | 2.47|
| | |acc_norm|31.83|± | 2.65|
|hendrycksTest-prehistory | 0|acc |25.31|± | 2.42|
| | |acc_norm|20.37|± | 2.24|
|hendrycksTest-professional_accounting | 0|acc |27.30|± | 2.66|
| | |acc_norm|28.37|± | 2.69|
|hendrycksTest-professional_law | 0|acc |27.12|± | 1.14|
| | |acc_norm|28.10|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |26.47|± | 2.68|
| | |acc_norm|26.84|± | 2.69|
|hendrycksTest-professional_psychology | 0|acc |26.96|± | 1.80|
| | |acc_norm|26.96|± | 1.80|
|hendrycksTest-public_relations | 0|acc |27.27|± | 4.27|
| | |acc_norm|20.00|± | 3.83|
|hendrycksTest-security_studies | 0|acc |36.73|± | 3.09|
| | |acc_norm|28.98|± | 2.90|
|hendrycksTest-sociology | 0|acc |28.36|± | 3.19|
| | |acc_norm|28.36|± | 3.19|
|hendrycksTest-us_foreign_policy | 0|acc |31.00|± | 4.65|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-virology | 0|acc |33.73|± | 3.68|
| | |acc_norm|24.70|± | 3.36|
|hendrycksTest-world_religions | 0|acc |33.92|± | 3.63|
| | |acc_norm|39.77|± | 3.75|
|lambada_openai | 0|ppl | 4.04|± | 0.09|
| | |acc |68.64|± | 0.65|
|logiqa | 0|acc |22.73|± | 1.64|
| | |acc_norm|28.57|± | 1.77|
|piqa | 0|acc |75.95|± | 1.00|
| | |acc_norm|76.82|± | 0.98|
|sciq | 0|acc |90.80|± | 0.91|
| | |acc_norm|86.60|± | 1.08|
|winogrande | 0|acc |65.19|± | 1.34|
|wsc | 0|acc |60.58|± | 4.82|
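
To see where a larger model actually gains over a smaller one, the per-task accuracies of two dumps can be diffed directly. A sketch under the assumption that both raw JSON files sit next to each other; the file names are placeholders:

```python
import json

def load_acc(path):
    """Return {task: acc} from a results dump in the format shown in this repo."""
    with open(path) as f:
        results = json.load(f)["results"]
    return {task: m["acc"] for task, m in results.items() if "acc" in m}

small = load_acc("opt-125m.json")  # hypothetical paths
large = load_acc("opt-13b.json")

# Rank shared tasks by improvement, largest gains first.
common = sorted(set(small) & set(large),
                key=lambda t: large[t] - small[t], reverse=True)
for task in common[:10]:
    delta = (large[task] - small[task]) * 100
    print(f"{task:55s} {small[task]*100:6.2f} -> {large[task]*100:6.2f}  ({delta:+.2f})")
```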
{
"results": {
"hendrycksTest-nutrition": {
"acc": 0.29411764705882354,
"acc_stderr": 0.02609016250427904,
"acc_norm": 0.369281045751634,
"acc_norm_stderr": 0.02763417668960266
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.24150943396226415,
"acc_stderr": 0.02634148037111837,
"acc_norm": 0.3018867924528302,
"acc_norm_stderr": 0.02825420034443865
},
"hendrycksTest-high_school_us_history": {
"acc": 0.23039215686274508,
"acc_stderr": 0.02955429260569507,
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.031493281045079556
},
"hendrycksTest-public_relations": {
"acc": 0.2727272727272727,
"acc_stderr": 0.04265792110940588,
"acc_norm": 0.2,
"acc_norm_stderr": 0.03831305140884601
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2545454545454545,
"acc_stderr": 0.0340150671524904,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.03567969772268047
},
"hendrycksTest-moral_scenarios": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249588,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-anatomy": {
"acc": 0.26666666666666666,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"hendrycksTest-international_law": {
"acc": 0.2809917355371901,
"acc_stderr": 0.04103203830514512,
"acc_norm": 0.4380165289256198,
"acc_norm_stderr": 0.045291468044357915
},
"hendrycksTest-management": {
"acc": 0.3300970873786408,
"acc_stderr": 0.0465614711001235,
"acc_norm": 0.32038834951456313,
"acc_norm_stderr": 0.0462028408228004
},
"winogrande": {
"acc": 0.6519337016574586,
"acc_stderr": 0.013388004531086047
},
"hendrycksTest-college_mathematics": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"wsc": {
"acc": 0.6057692307692307,
"acc_stderr": 0.04815154775990711
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.23645320197044334,
"acc_stderr": 0.02989611429173355,
"acc_norm": 0.3251231527093596,
"acc_norm_stderr": 0.03295797566311271
},
"hendrycksTest-college_biology": {
"acc": 0.20833333333333334,
"acc_stderr": 0.03396116205845334,
"acc_norm": 0.2152777777777778,
"acc_norm_stderr": 0.034370793441061344
},
"hendrycksTest-college_chemistry": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"hendrycksTest-human_aging": {
"acc": 0.2914798206278027,
"acc_stderr": 0.030500283176545906,
"acc_norm": 0.24663677130044842,
"acc_norm_stderr": 0.028930413120910877
},
"hendrycksTest-computer_security": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"hendrycksTest-electrical_engineering": {
"acc": 0.31724137931034485,
"acc_stderr": 0.038783523721386215,
"acc_norm": 0.31724137931034485,
"acc_norm_stderr": 0.038783523721386215
},
"lambada_openai": {
"ppl": 4.037871980255364,
"ppl_stderr": 0.08695582970269694,
"acc": 0.6863962740151368,
"acc_stderr": 0.006463833164285201
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03114144782353603,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03099866630456052
},
"hendrycksTest-miscellaneous": {
"acc": 0.3231162196679438,
"acc_stderr": 0.016723726512343048,
"acc_norm": 0.28991060025542786,
"acc_norm_stderr": 0.01622501794477096
},
"hendrycksTest-human_sexuality": {
"acc": 0.32061068702290074,
"acc_stderr": 0.04093329229834278,
"acc_norm": 0.31297709923664124,
"acc_norm_stderr": 0.04066962905677697
},
"hendrycksTest-global_facts": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"hendrycksTest-marketing": {
"acc": 0.2905982905982906,
"acc_stderr": 0.029745048572674047,
"acc_norm": 0.29914529914529914,
"acc_norm_stderr": 0.029996951858349483
},
"hendrycksTest-college_computer_science": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"hendrycksTest-professional_accounting": {
"acc": 0.2730496453900709,
"acc_stderr": 0.026577860943307847,
"acc_norm": 0.28368794326241137,
"acc_norm_stderr": 0.026891709428343957
},
"hendrycksTest-college_physics": {
"acc": 0.20588235294117646,
"acc_stderr": 0.040233822736177455,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.042207736591714534
},
"hendrycksTest-professional_law": {
"acc": 0.2711864406779661,
"acc_stderr": 0.011354581451622985,
"acc_norm": 0.28096479791395046,
"acc_norm_stderr": 0.011479684550077683
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.24074074074074073,
"acc_stderr": 0.022019080012217904,
"acc_norm": 0.2671957671957672,
"acc_norm_stderr": 0.022789673145776578
},
"hendrycksTest-medical_genetics": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"hendrycksTest-sociology": {
"acc": 0.2835820895522388,
"acc_stderr": 0.03187187537919797,
"acc_norm": 0.2835820895522388,
"acc_norm_stderr": 0.031871875379197986
},
"hendrycksTest-security_studies": {
"acc": 0.3673469387755102,
"acc_stderr": 0.03086214492108756,
"acc_norm": 0.2897959183673469,
"acc_norm_stderr": 0.029043088683304328
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"hendrycksTest-professional_psychology": {
"acc": 0.2696078431372549,
"acc_stderr": 0.017952449196987862,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.017952449196987862
},
"hendrycksTest-high_school_world_history": {
"acc": 0.25738396624472576,
"acc_stderr": 0.02845882099146029,
"acc_norm": 0.29535864978902954,
"acc_norm_stderr": 0.029696338713422882
},
"hendrycksTest-jurisprudence": {
"acc": 0.2962962962962963,
"acc_stderr": 0.044143436668549335,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.04792898170907062
},
"hendrycksTest-machine_learning": {
"acc": 0.2767857142857143,
"acc_stderr": 0.042466243366976256,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952688
},
"hendrycksTest-world_religions": {
"acc": 0.3391812865497076,
"acc_stderr": 0.036310534964889056,
"acc_norm": 0.39766081871345027,
"acc_norm_stderr": 0.0375363895576169
},
"sciq": {
"acc": 0.908,
"acc_stderr": 0.009144376393151103,
"acc_norm": 0.866,
"acc_norm_stderr": 0.01077776229836969
},
"piqa": {
"acc": 0.7595212187159956,
"acc_stderr": 0.009971345364651073,
"acc_norm": 0.7682263329706203,
"acc_norm_stderr": 0.009845143772794041
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2948717948717949,
"acc_stderr": 0.023119362758232294,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.02311936275823228
},
"hendrycksTest-professional_medicine": {
"acc": 0.2647058823529412,
"acc_stderr": 0.026799562024887657,
"acc_norm": 0.26838235294117646,
"acc_norm_stderr": 0.026917481224377215
},
"arc_challenge": {
"acc": 0.3293515358361775,
"acc_stderr": 0.013734057652635474,
"acc_norm": 0.3575085324232082,
"acc_norm_stderr": 0.01400549427591657
},
"hendrycksTest-high_school_physics": {
"acc": 0.24503311258278146,
"acc_stderr": 0.035118075718047245,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008937
},
"hendrycksTest-college_medicine": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2254335260115607,
"acc_norm_stderr": 0.03186209851641143
},
"hendrycksTest-high_school_biology": {
"acc": 0.25161290322580643,
"acc_stderr": 0.024685979286239963,
"acc_norm": 0.2838709677419355,
"acc_norm_stderr": 0.025649381063029258
},
"hendrycksTest-prehistory": {
"acc": 0.25308641975308643,
"acc_stderr": 0.024191808600713002,
"acc_norm": 0.2037037037037037,
"acc_norm_stderr": 0.022409674547304168
},
"hendrycksTest-high_school_geography": {
"acc": 0.2878787878787879,
"acc_stderr": 0.03225883512300992,
"acc_norm": 0.30303030303030304,
"acc_norm_stderr": 0.03274287914026867
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2425531914893617,
"acc_stderr": 0.02802022627120022,
"acc_norm": 0.2,
"acc_norm_stderr": 0.026148818018424506
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.29533678756476683,
"acc_stderr": 0.03292296639155137,
"acc_norm": 0.2538860103626943,
"acc_norm_stderr": 0.03141024780565318
},
"hendrycksTest-philosophy": {
"acc": 0.2540192926045016,
"acc_stderr": 0.02472386150477169,
"acc_norm": 0.3183279742765273,
"acc_norm_stderr": 0.026457225067811025
},
"arc_easy": {
"acc": 0.6708754208754208,
"acc_stderr": 0.009642048058060987,
"acc_norm": 0.6178451178451179,
"acc_norm_stderr": 0.009970747281292424
},
"hendrycksTest-high_school_psychology": {
"acc": 0.27522935779816515,
"acc_stderr": 0.0191490937431552,
"acc_norm": 0.24403669724770644,
"acc_norm_stderr": 0.018415286351416416
},
"hendrycksTest-moral_disputes": {
"acc": 0.30346820809248554,
"acc_stderr": 0.024752411960917212,
"acc_norm": 0.3092485549132948,
"acc_norm_stderr": 0.02488314057007176
},
"hendrycksTest-logical_fallacies": {
"acc": 0.31901840490797545,
"acc_stderr": 0.03661997551073836,
"acc_norm": 0.3006134969325153,
"acc_norm_stderr": 0.03602511318806771
},
"hendrycksTest-econometrics": {
"acc": 0.3508771929824561,
"acc_stderr": 0.044895393502707,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537315
},
"hendrycksTest-astronomy": {
"acc": 0.3157894736842105,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.3815789473684211,
"acc_norm_stderr": 0.039531733777491945
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"hendrycksTest-virology": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.2469879518072289,
"acc_norm_stderr": 0.03357351982064536
},
"hendrycksTest-formal_logic": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04040610178208841,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.038095238095238106
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.2605042016806723,
"acc_stderr": 0.02851025151234193,
"acc_norm": 0.3277310924369748,
"acc_norm_stderr": 0.030489911417673227
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.3,
"acc_norm_stderr": 0.027940457136228412
},
"logiqa": {
"acc": 0.22734254992319508,
"acc_stderr": 0.01643906767511774,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.017719247798458293
},
"hendrycksTest-business_ethics": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
}
},
"versions": {
"hendrycksTest-nutrition": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-management": 0,
"winogrande": 0,
"hendrycksTest-college_mathematics": 0,
"wsc": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-electrical_engineering": 0,
"lambada_openai": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-world_religions": 0,
"sciq": 0,
"piqa": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-professional_medicine": 0,
"arc_challenge": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-philosophy": 0,
"arc_easy": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-high_school_mathematics": 0,
"logiqa": 0,
"hendrycksTest-business_ethics": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-13b,use_accelerate=True,device_map_option=sequential",
"num_fewshot": 0,
"batch_size": 16,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
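The markdown tables throughout this file pair up with raw result dumps like the JSON above: each metric value is the JSON field scaled to a percentage (perplexity is left as-is), rounded to two decimals and shown next to its `*_stderr` field. A minimal stdlib-only sketch of that mapping follows; it is not the harness's own table writer, and `results.json` is a placeholder path.

```python
import json

# Minimal sketch: print rows resembling the markdown tables in this file from a
# result dump like the JSON above. "results.json" is a placeholder path.
with open("results.json") as f:
    dump = json.load(f)

print("| Task |Version| Metric |Value| |Stderr|")
print("|------|------:|--------|----:|---|-----:|")
for task in sorted(dump["results"]):
    version = dump["versions"].get(task, "")
    metrics = dump["results"][task]
    for name, value in metrics.items():
        if name.endswith("_stderr"):
            continue  # stderr values are shown beside their metric instead
        stderr = metrics.get(f"{name}_stderr")
        scale = 1 if name == "ppl" else 100  # ppl is reported as-is, acc as a percentage
        err = f"{stderr * scale:.2f}" if stderr is not None else ""
        print(f"|{task}|{version}|{name}|{value * scale:.2f}|±|{err}|")
```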
# opt-2.7b
## opt-2.7b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |26.79|± | 1.29|
| | |acc_norm|31.23|± | 1.35|
|arc_easy | 0|acc |60.77|± | 1.00|
| | |acc_norm|54.29|± | 1.02|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|22.00|± | 4.16|
|hendrycksTest-anatomy | 0|acc |24.44|± | 3.71|
| | |acc_norm|24.44|± | 3.71|
|hendrycksTest-astronomy | 0|acc |25.00|± | 3.52|
| | |acc_norm|34.87|± | 3.88|
|hendrycksTest-business_ethics | 0|acc |25.00|± | 4.35|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-clinical_knowledge | 0|acc |22.26|± | 2.56|
| | |acc_norm|29.43|± | 2.80|
|hendrycksTest-college_biology | 0|acc |26.39|± | 3.69|
| | |acc_norm|24.31|± | 3.59|
|hendrycksTest-college_chemistry | 0|acc |21.00|± | 4.09|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_computer_science | 0|acc |34.00|± | 4.76|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-college_mathematics | 0|acc |20.00|± | 4.02|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_medicine | 0|acc |21.97|± | 3.16|
| | |acc_norm|23.70|± | 3.24|
|hendrycksTest-college_physics | 0|acc |36.27|± | 4.78|
| | |acc_norm|28.43|± | 4.49|
|hendrycksTest-computer_security | 0|acc |25.00|± | 4.35|
| | |acc_norm|35.00|± | 4.79|
|hendrycksTest-conceptual_physics | 0|acc |26.81|± | 2.90|
| | |acc_norm|22.55|± | 2.73|
|hendrycksTest-econometrics | 0|acc |27.19|± | 4.19|
| | |acc_norm|24.56|± | 4.05|
|hendrycksTest-electrical_engineering | 0|acc |33.79|± | 3.94|
| | |acc_norm|35.17|± | 3.98|
|hendrycksTest-elementary_mathematics | 0|acc |25.93|± | 2.26|
| | |acc_norm|27.78|± | 2.31|
|hendrycksTest-formal_logic | 0|acc |31.75|± | 4.16|
| | |acc_norm|26.19|± | 3.93|
|hendrycksTest-global_facts | 0|acc |25.00|± | 4.35|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-high_school_biology | 0|acc |21.94|± | 2.35|
| | |acc_norm|26.13|± | 2.50|
|hendrycksTest-high_school_chemistry | 0|acc |19.70|± | 2.80|
| | |acc_norm|30.05|± | 3.23|
|hendrycksTest-high_school_computer_science | 0|acc |19.00|± | 3.94|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-high_school_european_history | 0|acc |26.06|± | 3.43|
| | |acc_norm|27.88|± | 3.50|
|hendrycksTest-high_school_geography | 0|acc |19.19|± | 2.81|
| | |acc_norm|28.79|± | 3.23|
|hendrycksTest-high_school_government_and_politics| 0|acc |24.87|± | 3.12|
| | |acc_norm|25.39|± | 3.14|
|hendrycksTest-high_school_macroeconomics | 0|acc |26.67|± | 2.24|
| | |acc_norm|27.44|± | 2.26|
|hendrycksTest-high_school_mathematics | 0|acc |22.59|± | 2.55|
| | |acc_norm|28.15|± | 2.74|
|hendrycksTest-high_school_microeconomics | 0|acc |20.59|± | 2.63|
| | |acc_norm|26.89|± | 2.88|
|hendrycksTest-high_school_physics | 0|acc |23.18|± | 3.45|
| | |acc_norm|27.15|± | 3.63|
|hendrycksTest-high_school_psychology | 0|acc |26.79|± | 1.90|
| | |acc_norm|24.40|± | 1.84|
|hendrycksTest-high_school_statistics | 0|acc |24.54|± | 2.93|
| | |acc_norm|29.17|± | 3.10|
|hendrycksTest-high_school_us_history | 0|acc |29.90|± | 3.21|
| | |acc_norm|26.47|± | 3.10|
|hendrycksTest-high_school_world_history | 0|acc |24.89|± | 2.81|
| | |acc_norm|25.74|± | 2.85|
|hendrycksTest-human_aging | 0|acc |32.29|± | 3.14|
| | |acc_norm|25.11|± | 2.91|
|hendrycksTest-human_sexuality | 0|acc |38.17|± | 4.26|
| | |acc_norm|28.24|± | 3.95|
|hendrycksTest-international_law | 0|acc |21.49|± | 3.75|
| | |acc_norm|40.50|± | 4.48|
|hendrycksTest-jurisprudence | 0|acc |31.48|± | 4.49|
| | |acc_norm|47.22|± | 4.83|
|hendrycksTest-logical_fallacies | 0|acc |23.93|± | 3.35|
| | |acc_norm|26.99|± | 3.49|
|hendrycksTest-machine_learning | 0|acc |29.46|± | 4.33|
| | |acc_norm|25.89|± | 4.16|
|hendrycksTest-management | 0|acc |24.27|± | 4.25|
| | |acc_norm|24.27|± | 4.25|
|hendrycksTest-marketing | 0|acc |27.78|± | 2.93|
| | |acc_norm|29.91|± | 3.00|
|hendrycksTest-medical_genetics | 0|acc |28.00|± | 4.51|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-miscellaneous | 0|acc |28.74|± | 1.62|
| | |acc_norm|27.97|± | 1.61|
|hendrycksTest-moral_disputes | 0|acc |25.72|± | 2.35|
| | |acc_norm|31.79|± | 2.51|
|hendrycksTest-moral_scenarios | 0|acc |28.27|± | 1.51|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |29.74|± | 2.62|
| | |acc_norm|36.27|± | 2.75|
|hendrycksTest-philosophy | 0|acc |24.12|± | 2.43|
| | |acc_norm|30.23|± | 2.61|
|hendrycksTest-prehistory | 0|acc |22.53|± | 2.32|
| | |acc_norm|21.30|± | 2.28|
|hendrycksTest-professional_accounting | 0|acc |20.92|± | 2.43|
| | |acc_norm|25.53|± | 2.60|
|hendrycksTest-professional_law | 0|acc |26.40|± | 1.13|
| | |acc_norm|28.23|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |23.16|± | 2.56|
| | |acc_norm|24.26|± | 2.60|
|hendrycksTest-professional_psychology | 0|acc |27.29|± | 1.80|
| | |acc_norm|26.31|± | 1.78|
|hendrycksTest-public_relations | 0|acc |32.73|± | 4.49|
| | |acc_norm|22.73|± | 4.01|
|hendrycksTest-security_studies | 0|acc |32.24|± | 2.99|
| | |acc_norm|25.71|± | 2.80|
|hendrycksTest-sociology | 0|acc |25.37|± | 3.08|
| | |acc_norm|27.86|± | 3.17|
|hendrycksTest-us_foreign_policy | 0|acc |31.00|± | 4.65|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-virology | 0|acc |36.75|± | 3.75|
| | |acc_norm|33.13|± | 3.66|
|hendrycksTest-world_religions | 0|acc |33.92|± | 3.63|
| | |acc_norm|45.61|± | 3.82|
|lambada_openai | 0|ppl | 5.12|± | 0.12|
| | |acc |63.59|± | 0.67|
|logiqa | 0|acc |21.04|± | 1.60|
| | |acc_norm|25.96|± | 1.72|
|piqa | 0|acc |73.88|± | 1.02|
| | |acc_norm|74.81|± | 1.01|
|sciq | 0|acc |85.80|± | 1.10|
| | |acc_norm|79.00|± | 1.29|
|winogrande | 0|acc |61.01|± | 1.37|
|wsc | 0|acc |63.46|± | 4.74|
{
"results": {
"hendrycksTest-high_school_chemistry": {
"acc": 0.19704433497536947,
"acc_stderr": 0.027986724666736212,
"acc_norm": 0.30049261083743845,
"acc_norm_stderr": 0.03225799476233485
},
"hendrycksTest-college_mathematics": {
"acc": 0.2,
"acc_stderr": 0.04020151261036844,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"hendrycksTest-marketing": {
"acc": 0.2777777777777778,
"acc_stderr": 0.029343114798094462,
"acc_norm": 0.29914529914529914,
"acc_norm_stderr": 0.02999695185834949
},
"hendrycksTest-moral_scenarios": {
"acc": 0.28268156424581004,
"acc_stderr": 0.015060381730018065,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-college_medicine": {
"acc": 0.21965317919075145,
"acc_stderr": 0.031568093627031744,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.03242414757483098
},
"hendrycksTest-sociology": {
"acc": 0.2537313432835821,
"acc_stderr": 0.030769444967296007,
"acc_norm": 0.27860696517412936,
"acc_norm_stderr": 0.031700561834973086
},
"hendrycksTest-security_studies": {
"acc": 0.3224489795918367,
"acc_stderr": 0.029923100563683913,
"acc_norm": 0.2571428571428571,
"acc_norm_stderr": 0.027979823538744546
},
"arc_easy": {
"acc": 0.6077441077441077,
"acc_stderr": 0.010018744689650043,
"acc_norm": 0.5429292929292929,
"acc_norm_stderr": 0.01022189756425603
},
"hendrycksTest-high_school_geography": {
"acc": 0.1919191919191919,
"acc_stderr": 0.02805779167298902,
"acc_norm": 0.2878787878787879,
"acc_norm_stderr": 0.03225883512300993
},
"hendrycksTest-prehistory": {
"acc": 0.22530864197530864,
"acc_stderr": 0.02324620264781975,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.022779719088733396
},
"hendrycksTest-virology": {
"acc": 0.3674698795180723,
"acc_stderr": 0.03753267402120575,
"acc_norm": 0.3313253012048193,
"acc_norm_stderr": 0.03664314777288085
},
"hendrycksTest-college_computer_science": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-college_biology": {
"acc": 0.2638888888888889,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080341
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2606060606060606,
"acc_stderr": 0.03427743175816525,
"acc_norm": 0.2787878787878788,
"acc_norm_stderr": 0.035014387062967806
},
"hendrycksTest-high_school_us_history": {
"acc": 0.29901960784313725,
"acc_stderr": 0.032133257173736156,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.030964517926923403
},
"hendrycksTest-nutrition": {
"acc": 0.2973856209150327,
"acc_stderr": 0.02617390850671858,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.02753007844711032
},
"hendrycksTest-business_ethics": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2680851063829787,
"acc_stderr": 0.028957342788342347,
"acc_norm": 0.225531914893617,
"acc_norm_stderr": 0.02732107841738754
},
"hendrycksTest-astronomy": {
"acc": 0.25,
"acc_stderr": 0.03523807393012047,
"acc_norm": 0.34868421052631576,
"acc_norm_stderr": 0.03878139888797609
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.25925925925925924,
"acc_stderr": 0.022569897074918417,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.023068188848261124
},
"hendrycksTest-medical_genetics": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"winogrande": {
"acc": 0.6101026045777427,
"acc_stderr": 0.013707547317008462
},
"hendrycksTest-high_school_world_history": {
"acc": 0.2489451476793249,
"acc_stderr": 0.028146970599422644,
"acc_norm": 0.25738396624472576,
"acc_norm_stderr": 0.0284588209914603
},
"hendrycksTest-human_aging": {
"acc": 0.32286995515695066,
"acc_stderr": 0.03138147637575499,
"acc_norm": 0.25112107623318386,
"acc_norm_stderr": 0.02910522083322461
},
"sciq": {
"acc": 0.858,
"acc_stderr": 0.011043457699378227,
"acc_norm": 0.79,
"acc_norm_stderr": 0.012886662332274536
},
"hendrycksTest-college_chemistry": {
"acc": 0.21,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"lambada_openai": {
"ppl": 5.119486263101474,
"ppl_stderr": 0.11989428693169638,
"acc": 0.6359402289928198,
"acc_stderr": 0.006703576472856834
},
"hendrycksTest-econometrics": {
"acc": 0.2719298245614035,
"acc_stderr": 0.041857744240220554,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.04049339297748142
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.26666666666666666,
"acc_stderr": 0.022421273612923703,
"acc_norm": 0.2743589743589744,
"acc_norm_stderr": 0.022622765767493225
},
"hendrycksTest-high_school_physics": {
"acc": 0.23178807947019867,
"acc_stderr": 0.03445406271987054,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"hendrycksTest-jurisprudence": {
"acc": 0.3148148148148148,
"acc_stderr": 0.04489931073591312,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04826217294139894
},
"hendrycksTest-machine_learning": {
"acc": 0.29464285714285715,
"acc_stderr": 0.043270409325787275,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.22592592592592592,
"acc_stderr": 0.025497532639609553,
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.027420019350945277
},
"hendrycksTest-high_school_psychology": {
"acc": 0.26788990825688075,
"acc_stderr": 0.018987462257978652,
"acc_norm": 0.24403669724770644,
"acc_norm_stderr": 0.01841528635141641
},
"hendrycksTest-professional_accounting": {
"acc": 0.20921985815602837,
"acc_stderr": 0.02426476943998848,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.02601199293090201
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.22264150943396227,
"acc_stderr": 0.025604233470899098,
"acc_norm": 0.2943396226415094,
"acc_norm_stderr": 0.028049186315695248
},
"hendrycksTest-philosophy": {
"acc": 0.24115755627009647,
"acc_stderr": 0.024296594034763426,
"acc_norm": 0.3022508038585209,
"acc_norm_stderr": 0.026082700695399655
},
"hendrycksTest-college_physics": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105654,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"hendrycksTest-moral_disputes": {
"acc": 0.25722543352601157,
"acc_stderr": 0.023532925431044283,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153172
},
"hendrycksTest-electrical_engineering": {
"acc": 0.33793103448275863,
"acc_stderr": 0.039417076320648906,
"acc_norm": 0.35172413793103446,
"acc_norm_stderr": 0.03979236637497411
},
"hendrycksTest-professional_medicine": {
"acc": 0.23161764705882354,
"acc_stderr": 0.025626533803777562,
"acc_norm": 0.2426470588235294,
"acc_norm_stderr": 0.026040662474201275
},
"hendrycksTest-miscellaneous": {
"acc": 0.28735632183908044,
"acc_stderr": 0.0161824107306827,
"acc_norm": 0.2796934865900383,
"acc_norm_stderr": 0.016050792148036536
},
"hendrycksTest-professional_law": {
"acc": 0.26401564537157757,
"acc_stderr": 0.01125843553772382,
"acc_norm": 0.28226857887874834,
"acc_norm_stderr": 0.011495852176241963
},
"hendrycksTest-high_school_statistics": {
"acc": 0.24537037037037038,
"acc_stderr": 0.02934666509437294,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03099866630456052
},
"hendrycksTest-international_law": {
"acc": 0.21487603305785125,
"acc_stderr": 0.03749492448709698,
"acc_norm": 0.4049586776859504,
"acc_norm_stderr": 0.044811377559424694
},
"logiqa": {
"acc": 0.21044546850998463,
"acc_stderr": 0.015988369488888765,
"acc_norm": 0.25960061443932414,
"acc_norm_stderr": 0.017196070008180023
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.19,
"acc_stderr": 0.03942772444036622,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"hendrycksTest-world_religions": {
"acc": 0.3391812865497076,
"acc_stderr": 0.036310534964889056,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.03820042586602966
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.24870466321243523,
"acc_stderr": 0.031195840877700293,
"acc_norm": 0.2538860103626943,
"acc_norm_stderr": 0.03141024780565318
},
"hendrycksTest-logical_fallacies": {
"acc": 0.2392638036809816,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.03487825168497892
},
"hendrycksTest-public_relations": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.22727272727272727,
"acc_norm_stderr": 0.04013964554072773
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"arc_challenge": {
"acc": 0.26791808873720135,
"acc_stderr": 0.012942030195136437,
"acc_norm": 0.3122866894197952,
"acc_norm_stderr": 0.013542598541688067
},
"wsc": {
"acc": 0.6346153846153846,
"acc_stderr": 0.047447333932779195
},
"hendrycksTest-anatomy": {
"acc": 0.24444444444444444,
"acc_stderr": 0.03712537833614866,
"acc_norm": 0.24444444444444444,
"acc_norm_stderr": 0.03712537833614866
},
"hendrycksTest-high_school_biology": {
"acc": 0.21935483870967742,
"acc_stderr": 0.023540799358723295,
"acc_norm": 0.26129032258064516,
"acc_norm_stderr": 0.024993053397764805
},
"hendrycksTest-computer_security": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"piqa": {
"acc": 0.7388465723612623,
"acc_stderr": 0.010248738649935581,
"acc_norm": 0.7480957562568009,
"acc_norm_stderr": 0.010128421335088685
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.20588235294117646,
"acc_stderr": 0.02626502460827589,
"acc_norm": 0.2689075630252101,
"acc_norm_stderr": 0.028801392193631273
},
"hendrycksTest-human_sexuality": {
"acc": 0.3816793893129771,
"acc_stderr": 0.042607351576445594,
"acc_norm": 0.2824427480916031,
"acc_norm_stderr": 0.03948406125768361
},
"hendrycksTest-management": {
"acc": 0.24271844660194175,
"acc_stderr": 0.04245022486384495,
"acc_norm": 0.24271844660194175,
"acc_norm_stderr": 0.042450224863844935
},
"hendrycksTest-formal_logic": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392871
},
"hendrycksTest-professional_psychology": {
"acc": 0.272875816993464,
"acc_stderr": 0.01802047414839358,
"acc_norm": 0.2630718954248366,
"acc_norm_stderr": 0.017812676542320657
},
"hendrycksTest-global_facts": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
}
},
"versions": {
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-security_studies": 0,
"arc_easy": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-nutrition": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-medical_genetics": 0,
"winogrande": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-human_aging": 0,
"sciq": 0,
"hendrycksTest-college_chemistry": 0,
"lambada_openai": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-international_law": 0,
"logiqa": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-us_foreign_policy": 0,
"arc_challenge": 0,
"wsc": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-computer_security": 0,
"piqa": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-management": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-global_facts": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-2.7b,use_accelerate=True,device_map_option=sequential",
"num_fewshot": 0,
"batch_size": 1,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
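For the accuracy metrics, the reported `acc_stderr` values are consistent with the sample standard error of per-example 0/1 scores. A quick arithmetic check against the `arc_challenge` entry above, assuming the stderr is computed that way and that the ARC-Challenge test split has 1,172 items (both assumptions, not recorded in this dump):

```python
import math

# Sanity-check sketch for the opt-2.7b arc_challenge numbers above, assuming
# acc_stderr is the sample standard error of per-example 0/1 scores and the
# test split has n = 1172 items (assumed; not recorded in this file).
acc = 0.26791808873720135          # "acc" from the arc_challenge block above
n = 1172                           # assumed number of test items
stderr = math.sqrt(acc * (1 - acc) / (n - 1))
print(f"{stderr:.6f}")             # ~0.012942, matching the reported acc_stderr
```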
# opt-30b
## opt-30b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |34.64|± | 1.39|
| | |acc_norm|37.97|± | 1.42|
|arc_easy | 0|acc |69.99|± | 0.94|
| | |acc_norm|65.36|± | 0.98|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|23.00|± | 4.23|
|hendrycksTest-anatomy | 0|acc |24.44|± | 3.71|
| | |acc_norm|20.74|± | 3.50|
|hendrycksTest-astronomy | 0|acc |30.92|± | 3.76|
| | |acc_norm|37.50|± | 3.94|
|hendrycksTest-business_ethics | 0|acc |34.00|± | 4.76|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-clinical_knowledge | 0|acc |25.66|± | 2.69|
| | |acc_norm|30.94|± | 2.85|
|hendrycksTest-college_biology | 0|acc |27.08|± | 3.72|
| | |acc_norm|28.47|± | 3.77|
|hendrycksTest-college_chemistry | 0|acc |25.00|± | 4.35|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-college_computer_science | 0|acc |31.00|± | 4.65|
| | |acc_norm|25.00|± | 4.35|
|hendrycksTest-college_mathematics | 0|acc |22.00|± | 4.16|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-college_medicine | 0|acc |24.28|± | 3.27|
| | |acc_norm|27.17|± | 3.39|
|hendrycksTest-college_physics | 0|acc |27.45|± | 4.44|
| | |acc_norm|26.47|± | 4.39|
|hendrycksTest-computer_security | 0|acc |25.00|± | 4.35|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-conceptual_physics | 0|acc |24.68|± | 2.82|
| | |acc_norm|20.00|± | 2.61|
|hendrycksTest-econometrics | 0|acc |26.32|± | 4.14|
| | |acc_norm|21.05|± | 3.84|
|hendrycksTest-electrical_engineering | 0|acc |31.72|± | 3.88|
| | |acc_norm|33.10|± | 3.92|
|hendrycksTest-elementary_mathematics | 0|acc |26.72|± | 2.28|
| | |acc_norm|27.25|± | 2.29|
|hendrycksTest-formal_logic | 0|acc |30.95|± | 4.13|
| | |acc_norm|26.19|± | 3.93|
|hendrycksTest-global_facts | 0|acc |23.00|± | 4.23|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-high_school_biology | 0|acc |28.06|± | 2.56|
| | |acc_norm|30.32|± | 2.61|
|hendrycksTest-high_school_chemistry | 0|acc |26.60|± | 3.11|
| | |acc_norm|31.03|± | 3.26|
|hendrycksTest-high_school_computer_science | 0|acc |24.00|± | 4.29|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-high_school_european_history | 0|acc |22.42|± | 3.26|
| | |acc_norm|26.06|± | 3.43|
|hendrycksTest-high_school_geography | 0|acc |26.26|± | 3.14|
| | |acc_norm|28.79|± | 3.23|
|hendrycksTest-high_school_government_and_politics| 0|acc |24.87|± | 3.12|
| | |acc_norm|24.87|± | 3.12|
|hendrycksTest-high_school_macroeconomics | 0|acc |28.46|± | 2.29|
| | |acc_norm|28.21|± | 2.28|
|hendrycksTest-high_school_mathematics | 0|acc |25.19|± | 2.65|
| | |acc_norm|30.37|± | 2.80|
|hendrycksTest-high_school_microeconomics | 0|acc |26.89|± | 2.88|
| | |acc_norm|34.45|± | 3.09|
|hendrycksTest-high_school_physics | 0|acc |25.17|± | 3.54|
| | |acc_norm|25.17|± | 3.54|
|hendrycksTest-high_school_psychology | 0|acc |24.40|± | 1.84|
| | |acc_norm|24.59|± | 1.85|
|hendrycksTest-high_school_statistics | 0|acc |34.26|± | 3.24|
| | |acc_norm|36.11|± | 3.28|
|hendrycksTest-high_school_us_history | 0|acc |28.43|± | 3.17|
| | |acc_norm|25.98|± | 3.08|
|hendrycksTest-high_school_world_history | 0|acc |26.16|± | 2.86|
| | |acc_norm|27.00|± | 2.89|
|hendrycksTest-human_aging | 0|acc |25.56|± | 2.93|
| | |acc_norm|22.87|± | 2.82|
|hendrycksTest-human_sexuality | 0|acc |37.40|± | 4.24|
| | |acc_norm|31.30|± | 4.07|
|hendrycksTest-international_law | 0|acc |25.62|± | 3.98|
| | |acc_norm|53.72|± | 4.55|
|hendrycksTest-jurisprudence | 0|acc |35.19|± | 4.62|
| | |acc_norm|43.52|± | 4.79|
|hendrycksTest-logical_fallacies | 0|acc |30.06|± | 3.60|
| | |acc_norm|34.36|± | 3.73|
|hendrycksTest-machine_learning | 0|acc |24.11|± | 4.06|
| | |acc_norm|23.21|± | 4.01|
|hendrycksTest-management | 0|acc |27.18|± | 4.41|
| | |acc_norm|35.92|± | 4.75|
|hendrycksTest-marketing | 0|acc |25.64|± | 2.86|
| | |acc_norm|30.77|± | 3.02|
|hendrycksTest-medical_genetics | 0|acc |24.00|± | 4.29|
| | |acc_norm|39.00|± | 4.90|
|hendrycksTest-miscellaneous | 0|acc |31.16|± | 1.66|
| | |acc_norm|28.10|± | 1.61|
|hendrycksTest-moral_disputes | 0|acc |28.32|± | 2.43|
| | |acc_norm|33.53|± | 2.54|
|hendrycksTest-moral_scenarios | 0|acc |24.69|± | 1.44|
| | |acc_norm|24.47|± | 1.44|
|hendrycksTest-nutrition | 0|acc |30.39|± | 2.63|
| | |acc_norm|40.52|± | 2.81|
|hendrycksTest-philosophy | 0|acc |29.26|± | 2.58|
| | |acc_norm|33.44|± | 2.68|
|hendrycksTest-prehistory | 0|acc |24.07|± | 2.38|
| | |acc_norm|17.90|± | 2.13|
|hendrycksTest-professional_accounting | 0|acc |21.63|± | 2.46|
| | |acc_norm|25.53|± | 2.60|
|hendrycksTest-professional_law | 0|acc |27.57|± | 1.14|
| | |acc_norm|28.42|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |27.57|± | 2.71|
| | |acc_norm|27.57|± | 2.71|
|hendrycksTest-professional_psychology | 0|acc |27.94|± | 1.82|
| | |acc_norm|27.29|± | 1.80|
|hendrycksTest-public_relations | 0|acc |28.18|± | 4.31|
| | |acc_norm|20.00|± | 3.83|
|hendrycksTest-security_studies | 0|acc |37.96|± | 3.11|
| | |acc_norm|34.29|± | 3.04|
|hendrycksTest-sociology | 0|acc |25.37|± | 3.08|
| | |acc_norm|22.39|± | 2.95|
|hendrycksTest-us_foreign_policy | 0|acc |42.00|± | 4.96|
| | |acc_norm|42.00|± | 4.96|
|hendrycksTest-virology | 0|acc |32.53|± | 3.65|
| | |acc_norm|27.11|± | 3.46|
|hendrycksTest-world_religions | 0|acc |32.16|± | 3.58|
| | |acc_norm|39.18|± | 3.74|
|lambada_openai | 0|ppl | 3.63|± | 0.07|
| | |acc |71.45|± | 0.63|
|logiqa | 0|acc |21.66|± | 1.62|
| | |acc_norm|28.42|± | 1.77|
|piqa | 0|acc |77.58|± | 0.97|
| | |acc_norm|78.13|± | 0.96|
|sciq | 0|acc |91.10|± | 0.90|
| | |acc_norm|88.20|± | 1.02|
|winogrande | 0|acc |68.19|± | 1.31|
|wsc | 0|acc |59.62|± | 4.83|
{
"results": {
"hendrycksTest-us_foreign_policy": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.2518518518518518,
"acc_stderr": 0.026466117538959912,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.028037929969114993
},
"hendrycksTest-human_aging": {
"acc": 0.2556053811659193,
"acc_stderr": 0.029275891003969923,
"acc_norm": 0.22869955156950672,
"acc_norm_stderr": 0.028188240046929193
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.2689075630252101,
"acc_stderr": 0.028801392193631276,
"acc_norm": 0.3445378151260504,
"acc_norm_stderr": 0.030868682604121622
},
"hendrycksTest-college_mathematics": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"hendrycksTest-college_computer_science": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"hendrycksTest-management": {
"acc": 0.27184466019417475,
"acc_stderr": 0.044052680241409216,
"acc_norm": 0.3592233009708738,
"acc_norm_stderr": 0.04750458399041694
},
"piqa": {
"acc": 0.7758433079434167,
"acc_stderr": 0.009729897956410044,
"acc_norm": 0.7812840043525572,
"acc_norm_stderr": 0.009644731932667563
},
"hendrycksTest-professional_law": {
"acc": 0.2757496740547588,
"acc_stderr": 0.011413813609161003,
"acc_norm": 0.2842242503259452,
"acc_norm_stderr": 0.011519880596516072
},
"hendrycksTest-public_relations": {
"acc": 0.2818181818181818,
"acc_stderr": 0.043091187099464585,
"acc_norm": 0.2,
"acc_norm_stderr": 0.038313051408846034
},
"hendrycksTest-sociology": {
"acc": 0.2537313432835821,
"acc_stderr": 0.030769444967296014,
"acc_norm": 0.22388059701492538,
"acc_norm_stderr": 0.029475250236017176
},
"hendrycksTest-anatomy": {
"acc": 0.24444444444444444,
"acc_stderr": 0.037125378336148665,
"acc_norm": 0.2074074074074074,
"acc_norm_stderr": 0.03502553170678316
},
"sciq": {
"acc": 0.911,
"acc_stderr": 0.009008893392651535,
"acc_norm": 0.882,
"acc_norm_stderr": 0.010206869264381791
},
"hendrycksTest-high_school_european_history": {
"acc": 0.22424242424242424,
"acc_stderr": 0.032568666616811015,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.03427743175816524
},
"logiqa": {
"acc": 0.21658986175115208,
"acc_stderr": 0.016156860583178303,
"acc_norm": 0.28417818740399386,
"acc_norm_stderr": 0.017690542680190765
},
"hendrycksTest-philosophy": {
"acc": 0.29260450160771706,
"acc_stderr": 0.025839898334877983,
"acc_norm": 0.33440514469453375,
"acc_norm_stderr": 0.026795422327893944
},
"hendrycksTest-high_school_geography": {
"acc": 0.26262626262626265,
"acc_stderr": 0.03135305009533084,
"acc_norm": 0.2878787878787879,
"acc_norm_stderr": 0.03225883512300992
},
"hendrycksTest-logical_fallacies": {
"acc": 0.3006134969325153,
"acc_stderr": 0.03602511318806771,
"acc_norm": 0.34355828220858897,
"acc_norm_stderr": 0.03731133519673893
},
"hendrycksTest-jurisprudence": {
"acc": 0.35185185185185186,
"acc_stderr": 0.04616631111801713,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.04792898170907062
},
"hendrycksTest-college_chemistry": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-conceptual_physics": {
"acc": 0.24680851063829787,
"acc_stderr": 0.02818544130123409,
"acc_norm": 0.2,
"acc_norm_stderr": 0.026148818018424502
},
"hendrycksTest-medical_genetics": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"lambada_openai": {
"ppl": 3.6290590138646768,
"ppl_stderr": 0.07410155740289663,
"acc": 0.7145352222006598,
"acc_stderr": 0.006292165813769917
},
"hendrycksTest-nutrition": {
"acc": 0.30392156862745096,
"acc_stderr": 0.026336613469046644,
"acc_norm": 0.40522875816993464,
"acc_norm_stderr": 0.028110928492809075
},
"hendrycksTest-business_ethics": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.24870466321243523,
"acc_stderr": 0.031195840877700286,
"acc_norm": 0.24870466321243523,
"acc_norm_stderr": 0.031195840877700286
},
"wsc": {
"acc": 0.5961538461538461,
"acc_stderr": 0.048346889526540184
},
"hendrycksTest-college_biology": {
"acc": 0.2708333333333333,
"acc_stderr": 0.03716177437566016,
"acc_norm": 0.2847222222222222,
"acc_norm_stderr": 0.037738099906869355
},
"hendrycksTest-high_school_physics": {
"acc": 0.25165562913907286,
"acc_stderr": 0.035433042343899844,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.035433042343899844
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.25660377358490566,
"acc_stderr": 0.02688064788905198,
"acc_norm": 0.30943396226415093,
"acc_norm_stderr": 0.028450154794118627
},
"hendrycksTest-astronomy": {
"acc": 0.3092105263157895,
"acc_stderr": 0.03761070869867479,
"acc_norm": 0.375,
"acc_norm_stderr": 0.039397364351956274
},
"hendrycksTest-high_school_psychology": {
"acc": 0.24403669724770644,
"acc_stderr": 0.018415286351416402,
"acc_norm": 0.24587155963302754,
"acc_norm_stderr": 0.01846194096870845
},
"hendrycksTest-moral_disputes": {
"acc": 0.2832369942196532,
"acc_stderr": 0.02425790170532337,
"acc_norm": 0.3352601156069364,
"acc_norm_stderr": 0.025416003773165555
},
"hendrycksTest-marketing": {
"acc": 0.2564102564102564,
"acc_stderr": 0.028605953702004264,
"acc_norm": 0.3076923076923077,
"acc_norm_stderr": 0.030236389942173116
},
"hendrycksTest-global_facts": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"arc_easy": {
"acc": 0.6999158249158249,
"acc_stderr": 0.009404000558513353,
"acc_norm": 0.6536195286195287,
"acc_norm_stderr": 0.00976354207569573
},
"hendrycksTest-electrical_engineering": {
"acc": 0.31724137931034485,
"acc_stderr": 0.03878352372138622,
"acc_norm": 0.3310344827586207,
"acc_norm_stderr": 0.039215453124671215
},
"hendrycksTest-formal_logic": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392869
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_us_history": {
"acc": 0.28431372549019607,
"acc_stderr": 0.03166009679399812,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.03077855467869326
},
"hendrycksTest-college_medicine": {
"acc": 0.24277456647398843,
"acc_stderr": 0.0326926380614177,
"acc_norm": 0.27167630057803466,
"acc_norm_stderr": 0.03391750322321659
},
"hendrycksTest-machine_learning": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952685,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755807
},
"hendrycksTest-professional_accounting": {
"acc": 0.21631205673758866,
"acc_stderr": 0.024561720560562796,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.026011992930902013
},
"hendrycksTest-professional_medicine": {
"acc": 0.2757352941176471,
"acc_stderr": 0.02714627193662517,
"acc_norm": 0.2757352941176471,
"acc_norm_stderr": 0.027146271936625166
},
"hendrycksTest-virology": {
"acc": 0.3253012048192771,
"acc_stderr": 0.036471685236832266,
"acc_norm": 0.2710843373493976,
"acc_norm_stderr": 0.034605799075530276
},
"hendrycksTest-world_religions": {
"acc": 0.3216374269005848,
"acc_stderr": 0.03582529442573122,
"acc_norm": 0.391812865497076,
"acc_norm_stderr": 0.037439798259263996
},
"hendrycksTest-computer_security": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.2660098522167488,
"acc_stderr": 0.03108982600293752,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03255086769970103
},
"hendrycksTest-econometrics": {
"acc": 0.2631578947368421,
"acc_stderr": 0.041424397194893624,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.038351539543994194
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2846153846153846,
"acc_stderr": 0.022878322799706297,
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.0228158130988966
},
"hendrycksTest-high_school_biology": {
"acc": 0.2806451612903226,
"acc_stderr": 0.025560604721022884,
"acc_norm": 0.3032258064516129,
"acc_norm_stderr": 0.026148685930671746
},
"hendrycksTest-prehistory": {
"acc": 0.24074074074074073,
"acc_stderr": 0.023788583551658544,
"acc_norm": 0.17901234567901234,
"acc_norm_stderr": 0.021330868762127045
},
"hendrycksTest-human_sexuality": {
"acc": 0.37404580152671757,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.31297709923664124,
"acc_norm_stderr": 0.04066962905677697
},
"hendrycksTest-professional_psychology": {
"acc": 0.27941176470588236,
"acc_stderr": 0.018152871051538802,
"acc_norm": 0.272875816993464,
"acc_norm_stderr": 0.018020474148393577
},
"hendrycksTest-international_law": {
"acc": 0.256198347107438,
"acc_stderr": 0.03984979653302871,
"acc_norm": 0.5371900826446281,
"acc_norm_stderr": 0.04551711196104218
},
"arc_challenge": {
"acc": 0.3464163822525597,
"acc_stderr": 0.013905011180063256,
"acc_norm": 0.3796928327645051,
"acc_norm_stderr": 0.014182119866974872
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.2671957671957672,
"acc_stderr": 0.02278967314577656,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.022930973071633345
},
"hendrycksTest-high_school_statistics": {
"acc": 0.3425925925925926,
"acc_stderr": 0.03236585252602158,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.032757734861009996
},
"hendrycksTest-miscellaneous": {
"acc": 0.3116219667943806,
"acc_stderr": 0.016562433867284176,
"acc_norm": 0.280970625798212,
"acc_norm_stderr": 0.016073127851221235
},
"hendrycksTest-high_school_world_history": {
"acc": 0.2616033755274262,
"acc_stderr": 0.028609516716994934,
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"hendrycksTest-moral_scenarios": {
"acc": 0.24692737430167597,
"acc_stderr": 0.014422292204808835,
"acc_norm": 0.2446927374301676,
"acc_norm_stderr": 0.014378169884098405
},
"winogrande": {
"acc": 0.6819258089976322,
"acc_stderr": 0.013089285079884685
},
"hendrycksTest-security_studies": {
"acc": 0.3795918367346939,
"acc_stderr": 0.031067211262872468,
"acc_norm": 0.34285714285714286,
"acc_norm_stderr": 0.03038726291954773
},
"hendrycksTest-college_physics": {
"acc": 0.27450980392156865,
"acc_stderr": 0.04440521906179326,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.0438986995680878
}
},
"versions": {
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-management": 0,
"piqa": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-anatomy": 0,
"sciq": 0,
"hendrycksTest-high_school_european_history": 0,
"logiqa": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-abstract_algebra": 0,
"lambada_openai": 0,
"hendrycksTest-nutrition": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"wsc": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-global_facts": 0,
"arc_easy": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-international_law": 0,
"arc_challenge": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-moral_scenarios": 0,
"winogrande": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-college_physics": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-30b,use_accelerate=True,device_map_option=sequential,max_memory_per_gpu=40GIB",
"num_fewshot": 0,
"batch_size": 1,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
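The `hendrycksTest-*` entries are per-subject MMLU scores; when a single MMLU figure is wanted they are typically macro-averaged over the 57 subjects. Below is a small sketch of that aggregation over a dump like the one above (unweighted mean; `results.json` is again a placeholder path).

```python
import json
from statistics import mean

# Sketch: unweighted macro-average over the hendrycksTest-* (MMLU) subjects in
# a result dump like the one above. "results.json" is a placeholder path.
with open("results.json") as f:
    dump = json.load(f)

mmlu = {t: m for t, m in dump["results"].items() if t.startswith("hendrycksTest-")}
print(f"{len(mmlu)} MMLU subjects")
print(f"macro-avg acc      = {mean(m['acc'] for m in mmlu.values()):.4f}")
print(f"macro-avg acc_norm = {mean(m['acc_norm'] for m in mmlu.values()):.4f}")
```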
# opt-350m
## opt-350m.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |20.73|± | 1.18|
| | |acc_norm|23.89|± | 1.25|
|arc_easy | 0|acc |44.02|± | 1.02|
| | |acc_norm|40.36|± | 1.01|
|hendrycksTest-abstract_algebra | 0|acc |22.00|± | 4.16|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-anatomy | 0|acc |22.22|± | 3.59|
| | |acc_norm|20.74|± | 3.50|
|hendrycksTest-astronomy | 0|acc |21.05|± | 3.32|
| | |acc_norm|33.55|± | 3.84|
|hendrycksTest-business_ethics | 0|acc |33.00|± | 4.73|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-clinical_knowledge | 0|acc |21.13|± | 2.51|
| | |acc_norm|30.57|± | 2.84|
|hendrycksTest-college_biology | 0|acc |27.78|± | 3.75|
| | |acc_norm|22.92|± | 3.51|
|hendrycksTest-college_chemistry | 0|acc |26.00|± | 4.41|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |28.00|± | 4.51|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-college_mathematics | 0|acc |22.00|± | 4.16|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-college_medicine | 0|acc |24.86|± | 3.30|
| | |acc_norm|23.12|± | 3.21|
|hendrycksTest-college_physics | 0|acc |19.61|± | 3.95|
| | |acc_norm|24.51|± | 4.28|
|hendrycksTest-computer_security | 0|acc |29.00|± | 4.56|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-conceptual_physics | 0|acc |27.66|± | 2.92|
| | |acc_norm|20.00|± | 2.61|
|hendrycksTest-econometrics | 0|acc |22.81|± | 3.95|
| | |acc_norm|26.32|± | 4.14|
|hendrycksTest-electrical_engineering | 0|acc |33.10|± | 3.92|
| | |acc_norm|31.03|± | 3.86|
|hendrycksTest-elementary_mathematics | 0|acc |23.81|± | 2.19|
| | |acc_norm|23.54|± | 2.19|
|hendrycksTest-formal_logic | 0|acc |32.54|± | 4.19|
| | |acc_norm|32.54|± | 4.19|
|hendrycksTest-global_facts | 0|acc |24.00|± | 4.29|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-high_school_biology | 0|acc |24.19|± | 2.44|
| | |acc_norm|27.74|± | 2.55|
|hendrycksTest-high_school_chemistry | 0|acc |19.70|± | 2.80|
| | |acc_norm|26.11|± | 3.09|
|hendrycksTest-high_school_computer_science | 0|acc |23.00|± | 4.23|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-high_school_european_history | 0|acc |24.85|± | 3.37|
| | |acc_norm|31.52|± | 3.63|
|hendrycksTest-high_school_geography | 0|acc |21.72|± | 2.94|
| | |acc_norm|27.78|± | 3.19|
|hendrycksTest-high_school_government_and_politics| 0|acc |23.83|± | 3.07|
| | |acc_norm|26.94|± | 3.20|
|hendrycksTest-high_school_macroeconomics | 0|acc |27.69|± | 2.27|
| | |acc_norm|27.95|± | 2.28|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|27.41|± | 2.72|
|hendrycksTest-high_school_microeconomics | 0|acc |19.33|± | 2.56|
| | |acc_norm|28.15|± | 2.92|
|hendrycksTest-high_school_physics | 0|acc |25.17|± | 3.54|
| | |acc_norm|24.50|± | 3.51|
|hendrycksTest-high_school_psychology | 0|acc |22.57|± | 1.79|
| | |acc_norm|23.30|± | 1.81|
|hendrycksTest-high_school_statistics | 0|acc |26.85|± | 3.02|
| | |acc_norm|28.70|± | 3.09|
|hendrycksTest-high_school_us_history | 0|acc |25.00|± | 3.04|
| | |acc_norm|28.43|± | 3.17|
|hendrycksTest-high_school_world_history | 0|acc |27.43|± | 2.90|
| | |acc_norm|29.96|± | 2.98|
|hendrycksTest-human_aging | 0|acc |35.87|± | 3.22|
| | |acc_norm|27.80|± | 3.01|
|hendrycksTest-human_sexuality | 0|acc |35.11|± | 4.19|
| | |acc_norm|32.82|± | 4.12|
|hendrycksTest-international_law | 0|acc |17.36|± | 3.46|
| | |acc_norm|44.63|± | 4.54|
|hendrycksTest-jurisprudence | 0|acc |25.00|± | 4.19|
| | |acc_norm|37.04|± | 4.67|
|hendrycksTest-logical_fallacies | 0|acc |19.02|± | 3.08|
| | |acc_norm|28.22|± | 3.54|
|hendrycksTest-machine_learning | 0|acc |29.46|± | 4.33|
| | |acc_norm|23.21|± | 4.01|
|hendrycksTest-management | 0|acc |15.53|± | 3.59|
| | |acc_norm|25.24|± | 4.30|
|hendrycksTest-marketing | 0|acc |27.78|± | 2.93|
| | |acc_norm|32.05|± | 3.06|
|hendrycksTest-medical_genetics | 0|acc |23.00|± | 4.23|
| | |acc_norm|39.00|± | 4.90|
|hendrycksTest-miscellaneous | 0|acc |28.10|± | 1.61|
| | |acc_norm|26.18|± | 1.57|
|hendrycksTest-moral_disputes | 0|acc |30.06|± | 2.47|
| | |acc_norm|31.79|± | 2.51|
|hendrycksTest-moral_scenarios | 0|acc |23.80|± | 1.42|
| | |acc_norm|26.82|± | 1.48|
|hendrycksTest-nutrition | 0|acc |28.43|± | 2.58|
| | |acc_norm|38.24|± | 2.78|
|hendrycksTest-philosophy | 0|acc |20.26|± | 2.28|
| | |acc_norm|30.87|± | 2.62|
|hendrycksTest-prehistory | 0|acc |22.84|± | 2.34|
| | |acc_norm|20.68|± | 2.25|
|hendrycksTest-professional_accounting | 0|acc |24.11|± | 2.55|
| | |acc_norm|23.40|± | 2.53|
|hendrycksTest-professional_law | 0|acc |25.62|± | 1.11|
| | |acc_norm|27.71|± | 1.14|
|hendrycksTest-professional_medicine | 0|acc |23.16|± | 2.56|
| | |acc_norm|25.74|± | 2.66|
|hendrycksTest-professional_psychology | 0|acc |24.51|± | 1.74|
| | |acc_norm|25.49|± | 1.76|
|hendrycksTest-public_relations | 0|acc |30.00|± | 4.39|
| | |acc_norm|24.55|± | 4.12|
|hendrycksTest-security_studies | 0|acc |36.73|± | 3.09|
| | |acc_norm|26.53|± | 2.83|
|hendrycksTest-sociology | 0|acc |31.34|± | 3.28|
| | |acc_norm|28.36|± | 3.19|
|hendrycksTest-us_foreign_policy | 0|acc |29.00|± | 4.56|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-virology | 0|acc |29.52|± | 3.55|
| | |acc_norm|30.72|± | 3.59|
|hendrycksTest-world_religions | 0|acc |26.32|± | 3.38|
| | |acc_norm|33.33|± | 3.62|
|lambada_openai | 0|ppl |16.40|± | 0.56|
| | |acc |45.16|± | 0.69|
|logiqa | 0|acc |21.04|± | 1.60|
| | |acc_norm|28.57|± | 1.77|
|piqa | 0|acc |64.36|± | 1.12|
| | |acc_norm|64.74|± | 1.11|
|sciq | 0|acc |74.80|± | 1.37|
| | |acc_norm|66.90|± | 1.49|
|winogrande | 0|acc |52.33|± | 1.40|
|wsc | 0|acc |36.54|± | 4.74|
{
"results": {
"hendrycksTest-college_medicine": {
"acc": 0.24855491329479767,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.23121387283236994,
"acc_norm_stderr": 0.03214737302029471
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.23834196891191708,
"acc_stderr": 0.030748905363909892,
"acc_norm": 0.2694300518134715,
"acc_norm_stderr": 0.03201867122877794
},
"hendrycksTest-high_school_biology": {
"acc": 0.24193548387096775,
"acc_stderr": 0.024362599693031086,
"acc_norm": 0.27741935483870966,
"acc_norm_stderr": 0.025470196835900055
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2765957446808511,
"acc_stderr": 0.029241883869628824,
"acc_norm": 0.2,
"acc_norm_stderr": 0.026148818018424506
},
"hendrycksTest-high_school_statistics": {
"acc": 0.26851851851851855,
"acc_stderr": 0.030225226160012404,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.03085199299325701
},
"hendrycksTest-professional_accounting": {
"acc": 0.24113475177304963,
"acc_stderr": 0.02551873104953776,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432407
},
"wsc": {
"acc": 0.36538461538461536,
"acc_stderr": 0.0474473339327792
},
"hendrycksTest-college_mathematics": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"hendrycksTest-professional_medicine": {
"acc": 0.23161764705882354,
"acc_stderr": 0.025626533803777562,
"acc_norm": 0.25735294117647056,
"acc_norm_stderr": 0.026556519470041513
},
"hendrycksTest-college_physics": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179964,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643945
},
"hendrycksTest-business_ethics": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"logiqa": {
"acc": 0.21044546850998463,
"acc_stderr": 0.015988369488888755,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.017719247798458293
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"hendrycksTest-human_aging": {
"acc": 0.35874439461883406,
"acc_stderr": 0.032190792004199956,
"acc_norm": 0.27802690582959644,
"acc_norm_stderr": 0.030069584874494043
},
"hendrycksTest-high_school_psychology": {
"acc": 0.22568807339449543,
"acc_stderr": 0.01792308766780305,
"acc_norm": 0.23302752293577983,
"acc_norm_stderr": 0.01812566918086148
},
"hendrycksTest-human_sexuality": {
"acc": 0.3511450381679389,
"acc_stderr": 0.04186445163013751,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.04118438565806298
},
"hendrycksTest-medical_genetics": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"hendrycksTest-high_school_world_history": {
"acc": 0.2742616033755274,
"acc_stderr": 0.029041333510598046,
"acc_norm": 0.29957805907172996,
"acc_norm_stderr": 0.029818024749753095
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.19327731092436976,
"acc_stderr": 0.02564947026588919,
"acc_norm": 0.2815126050420168,
"acc_norm_stderr": 0.029213549414372153
},
"hendrycksTest-management": {
"acc": 0.1553398058252427,
"acc_stderr": 0.03586594738573973,
"acc_norm": 0.2524271844660194,
"acc_norm_stderr": 0.04301250399690878
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.21851851851851853,
"acc_stderr": 0.025195752251823793,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"hendrycksTest-logical_fallacies": {
"acc": 0.1901840490797546,
"acc_stderr": 0.030833491146281245,
"acc_norm": 0.2822085889570552,
"acc_norm_stderr": 0.03536117886664743
},
"hendrycksTest-world_religions": {
"acc": 0.2631578947368421,
"acc_stderr": 0.03377310252209194,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03615507630310935
},
"hendrycksTest-abstract_algebra": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"arc_challenge": {
"acc": 0.20733788395904437,
"acc_stderr": 0.01184690578297137,
"acc_norm": 0.23890784982935154,
"acc_norm_stderr": 0.012461071376316617
},
"hendrycksTest-machine_learning": {
"acc": 0.29464285714285715,
"acc_stderr": 0.04327040932578728,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.040073418097558065
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.21132075471698114,
"acc_stderr": 0.025125766484827845,
"acc_norm": 0.30566037735849055,
"acc_norm_stderr": 0.028353298073322666
},
"hendrycksTest-professional_law": {
"acc": 0.2561929595827901,
"acc_stderr": 0.011149173153110582,
"acc_norm": 0.2770534550195567,
"acc_norm_stderr": 0.01143046244371968
},
"hendrycksTest-international_law": {
"acc": 0.17355371900826447,
"acc_stderr": 0.0345727283691767,
"acc_norm": 0.4462809917355372,
"acc_norm_stderr": 0.0453793517794788
},
"lambada_openai": {
"ppl": 16.39826111439643,
"ppl_stderr": 0.5572608146298462,
"acc": 0.45158160294973804,
"acc_stderr": 0.006933239470474417
},
"hendrycksTest-nutrition": {
"acc": 0.28431372549019607,
"acc_stderr": 0.025829163272757465,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.027826109307283683
},
"hendrycksTest-high_school_physics": {
"acc": 0.25165562913907286,
"acc_stderr": 0.035433042343899844,
"acc_norm": 0.24503311258278146,
"acc_norm_stderr": 0.03511807571804725
},
"hendrycksTest-anatomy": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.2074074074074074,
"acc_norm_stderr": 0.03502553170678316
},
"hendrycksTest-prehistory": {
"acc": 0.22839506172839505,
"acc_stderr": 0.023358211840626267,
"acc_norm": 0.20679012345679013,
"acc_norm_stderr": 0.022535006705942818
},
"hendrycksTest-public_relations": {
"acc": 0.3,
"acc_stderr": 0.04389311454644287,
"acc_norm": 0.24545454545454545,
"acc_norm_stderr": 0.041220665028782834
},
"hendrycksTest-virology": {
"acc": 0.29518072289156627,
"acc_stderr": 0.035509201856896294,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.035915667978246635
},
"hendrycksTest-moral_scenarios": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.2681564245810056,
"acc_norm_stderr": 0.014816119635317005
},
"arc_easy": {
"acc": 0.44023569023569026,
"acc_stderr": 0.01018622862451566,
"acc_norm": 0.4036195286195286,
"acc_norm_stderr": 0.010067368960348204
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.19704433497536947,
"acc_stderr": 0.027986724666736212,
"acc_norm": 0.26108374384236455,
"acc_norm_stderr": 0.030903796952114468
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.27692307692307694,
"acc_stderr": 0.022688042352424994,
"acc_norm": 0.2794871794871795,
"acc_norm_stderr": 0.022752388839776826
},
"sciq": {
"acc": 0.748,
"acc_stderr": 0.013736254390651141,
"acc_norm": 0.669,
"acc_norm_stderr": 0.014888272588203945
},
"piqa": {
"acc": 0.6436343852013058,
"acc_stderr": 0.01117410986586471,
"acc_norm": 0.6474428726877041,
"acc_norm_stderr": 0.011147074365010456
},
"hendrycksTest-high_school_european_history": {
"acc": 0.24848484848484848,
"acc_stderr": 0.03374402644139404,
"acc_norm": 0.3151515151515151,
"acc_norm_stderr": 0.0362773057502241
},
"hendrycksTest-computer_security": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-econometrics": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.04142439719489362
},
"hendrycksTest-high_school_geography": {
"acc": 0.21717171717171718,
"acc_stderr": 0.02937661648494563,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03191178226713548
},
"hendrycksTest-sociology": {
"acc": 0.31343283582089554,
"acc_stderr": 0.03280188205348641,
"acc_norm": 0.2835820895522388,
"acc_norm_stderr": 0.031871875379197966
},
"winogrande": {
"acc": 0.5232833464877664,
"acc_stderr": 0.01403724130957364
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.23809523809523808,
"acc_stderr": 0.02193587808118476,
"acc_norm": 0.23544973544973544,
"acc_norm_stderr": 0.02185150982203172
},
"hendrycksTest-college_chemistry": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"hendrycksTest-college_computer_science": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"hendrycksTest-formal_logic": {
"acc": 0.3253968253968254,
"acc_stderr": 0.041905964388711366,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.041905964388711366
},
"hendrycksTest-marketing": {
"acc": 0.2777777777777778,
"acc_stderr": 0.029343114798094476,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.030572811310299607
},
"hendrycksTest-high_school_us_history": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.031660096793998116
},
"hendrycksTest-moral_disputes": {
"acc": 0.30057803468208094,
"acc_stderr": 0.0246853168672578,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153183
},
"hendrycksTest-philosophy": {
"acc": 0.20257234726688103,
"acc_stderr": 0.022827317491059682,
"acc_norm": 0.3086816720257235,
"acc_norm_stderr": 0.026236965881153256
},
"hendrycksTest-astronomy": {
"acc": 0.21052631578947367,
"acc_stderr": 0.033176727875331574,
"acc_norm": 0.3355263157894737,
"acc_norm_stderr": 0.038424985593952694
},
"hendrycksTest-miscellaneous": {
"acc": 0.280970625798212,
"acc_stderr": 0.01607312785122124,
"acc_norm": 0.26181353767560667,
"acc_norm_stderr": 0.01572083867844527
},
"hendrycksTest-college_biology": {
"acc": 0.2777777777777778,
"acc_stderr": 0.037455547914624555,
"acc_norm": 0.22916666666666666,
"acc_norm_stderr": 0.035146974678623884
},
"hendrycksTest-electrical_engineering": {
"acc": 0.3310344827586207,
"acc_stderr": 0.039215453124671215,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378949
},
"hendrycksTest-global_facts": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909281
},
"hendrycksTest-security_studies": {
"acc": 0.3673469387755102,
"acc_stderr": 0.030862144921087558,
"acc_norm": 0.2653061224489796,
"acc_norm_stderr": 0.028263889943784603
},
"hendrycksTest-jurisprudence": {
"acc": 0.25,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024931
},
"hendrycksTest-professional_psychology": {
"acc": 0.24509803921568626,
"acc_stderr": 0.01740181671142766,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.017630827375148383
}
},
"versions": {
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-professional_accounting": 0,
"wsc": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-business_ethics": 0,
"logiqa": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-management": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-abstract_algebra": 0,
"arc_challenge": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-international_law": 0,
"lambada_openai": 0,
"hendrycksTest-nutrition": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-moral_scenarios": 0,
"arc_easy": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"sciq": 0,
"piqa": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-sociology": 0,
"winogrande": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-professional_psychology": 0
},
"config": {
"model": "gpt2",
"model_args": "pretrained=facebook/opt-350m",
"num_fewshot": 0,
"batch_size": 128,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# opt-6.7b
## opt-6.7b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |30.55|± | 1.35|
| | |acc_norm|34.73|± | 1.39|
|arc_easy | 0|acc |65.61|± | 0.97|
| | |acc_norm|60.10|± | 1.00|
|hendrycksTest-abstract_algebra | 0|acc |22.00|± | 4.16|
| | |acc_norm|21.00|± | 4.09|
|hendrycksTest-anatomy | 0|acc |22.22|± | 3.59|
| | |acc_norm|23.70|± | 3.67|
|hendrycksTest-astronomy | 0|acc |26.97|± | 3.61|
| | |acc_norm|32.24|± | 3.80|
|hendrycksTest-business_ethics | 0|acc |33.00|± | 4.73|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-clinical_knowledge | 0|acc |26.04|± | 2.70|
| | |acc_norm|29.81|± | 2.82|
|hendrycksTest-college_biology | 0|acc |29.17|± | 3.80|
| | |acc_norm|24.31|± | 3.59|
|hendrycksTest-college_chemistry | 0|acc |22.00|± | 4.16|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-college_computer_science | 0|acc |36.00|± | 4.82|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-college_mathematics | 0|acc |22.00|± | 4.16|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-college_medicine | 0|acc |20.81|± | 3.10|
| | |acc_norm|21.39|± | 3.13|
|hendrycksTest-college_physics | 0|acc |20.59|± | 4.02|
| | |acc_norm|23.53|± | 4.22|
|hendrycksTest-computer_security | 0|acc |24.00|± | 4.29|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-conceptual_physics | 0|acc |27.23|± | 2.91|
| | |acc_norm|21.70|± | 2.69|
|hendrycksTest-econometrics | 0|acc |25.44|± | 4.10|
| | |acc_norm|25.44|± | 4.10|
|hendrycksTest-electrical_engineering | 0|acc |29.66|± | 3.81|
| | |acc_norm|34.48|± | 3.96|
|hendrycksTest-elementary_mathematics | 0|acc |24.60|± | 2.22|
| | |acc_norm|25.13|± | 2.23|
|hendrycksTest-formal_logic | 0|acc |29.37|± | 4.07|
| | |acc_norm|24.60|± | 3.85|
|hendrycksTest-global_facts | 0|acc |18.00|± | 3.86|
| | |acc_norm|22.00|± | 4.16|
|hendrycksTest-high_school_biology | 0|acc |25.16|± | 2.47|
| | |acc_norm|28.71|± | 2.57|
|hendrycksTest-high_school_chemistry | 0|acc |16.75|± | 2.63|
| | |acc_norm|27.59|± | 3.14|
|hendrycksTest-high_school_computer_science | 0|acc |24.00|± | 4.29|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-high_school_european_history | 0|acc |32.12|± | 3.65|
| | |acc_norm|27.88|± | 3.50|
|hendrycksTest-high_school_geography | 0|acc |21.72|± | 2.94|
| | |acc_norm|27.27|± | 3.17|
|hendrycksTest-high_school_government_and_politics| 0|acc |24.87|± | 3.12|
| | |acc_norm|24.35|± | 3.10|
|hendrycksTest-high_school_macroeconomics | 0|acc |28.97|± | 2.30|
| | |acc_norm|27.95|± | 2.28|
|hendrycksTest-high_school_mathematics | 0|acc |24.07|± | 2.61|
| | |acc_norm|31.48|± | 2.83|
|hendrycksTest-high_school_microeconomics | 0|acc |26.89|± | 2.88|
| | |acc_norm|31.93|± | 3.03|
|hendrycksTest-high_school_physics | 0|acc |21.19|± | 3.34|
| | |acc_norm|22.52|± | 3.41|
|hendrycksTest-high_school_psychology | 0|acc |28.81|± | 1.94|
| | |acc_norm|24.95|± | 1.86|
|hendrycksTest-high_school_statistics | 0|acc |27.78|± | 3.05|
| | |acc_norm|32.41|± | 3.19|
|hendrycksTest-high_school_us_history | 0|acc |26.47|± | 3.10|
| | |acc_norm|25.98|± | 3.08|
|hendrycksTest-high_school_world_history | 0|acc |24.47|± | 2.80|
| | |acc_norm|30.38|± | 2.99|
|hendrycksTest-human_aging | 0|acc |30.49|± | 3.09|
| | |acc_norm|29.15|± | 3.05|
|hendrycksTest-human_sexuality | 0|acc |33.59|± | 4.14|
| | |acc_norm|29.77|± | 4.01|
|hendrycksTest-international_law | 0|acc |31.40|± | 4.24|
| | |acc_norm|46.28|± | 4.55|
|hendrycksTest-jurisprudence | 0|acc |25.93|± | 4.24|
| | |acc_norm|44.44|± | 4.80|
|hendrycksTest-logical_fallacies | 0|acc |20.25|± | 3.16|
| | |acc_norm|27.61|± | 3.51|
|hendrycksTest-machine_learning | 0|acc |20.54|± | 3.83|
| | |acc_norm|22.32|± | 3.95|
|hendrycksTest-management | 0|acc |29.13|± | 4.50|
| | |acc_norm|34.95|± | 4.72|
|hendrycksTest-marketing | 0|acc |28.21|± | 2.95|
| | |acc_norm|32.05|± | 3.06|
|hendrycksTest-medical_genetics | 0|acc |31.00|± | 4.65|
| | |acc_norm|35.00|± | 4.79|
|hendrycksTest-miscellaneous | 0|acc |31.03|± | 1.65|
| | |acc_norm|27.46|± | 1.60|
|hendrycksTest-moral_disputes | 0|acc |27.17|± | 2.39|
| | |acc_norm|31.50|± | 2.50|
|hendrycksTest-moral_scenarios | 0|acc |27.26|± | 1.49|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |30.72|± | 2.64|
| | |acc_norm|39.22|± | 2.80|
|hendrycksTest-philosophy | 0|acc |27.33|± | 2.53|
| | |acc_norm|31.83|± | 2.65|
|hendrycksTest-prehistory | 0|acc |25.00|± | 2.41|
| | |acc_norm|20.06|± | 2.23|
|hendrycksTest-professional_accounting | 0|acc |25.89|± | 2.61|
| | |acc_norm|25.89|± | 2.61|
|hendrycksTest-professional_law | 0|acc |26.14|± | 1.12|
| | |acc_norm|29.34|± | 1.16|
|hendrycksTest-professional_medicine | 0|acc |21.69|± | 2.50|
| | |acc_norm|24.26|± | 2.60|
|hendrycksTest-professional_psychology | 0|acc |25.16|± | 1.76|
| | |acc_norm|25.16|± | 1.76|
|hendrycksTest-public_relations | 0|acc |32.73|± | 4.49|
| | |acc_norm|18.18|± | 3.69|
|hendrycksTest-security_studies | 0|acc |42.04|± | 3.16|
| | |acc_norm|33.88|± | 3.03|
|hendrycksTest-sociology | 0|acc |28.36|± | 3.19|
| | |acc_norm|30.85|± | 3.27|
|hendrycksTest-us_foreign_policy | 0|acc |40.00|± | 4.92|
| | |acc_norm|35.00|± | 4.79|
|hendrycksTest-virology | 0|acc |33.73|± | 3.68|
| | |acc_norm|29.52|± | 3.55|
|hendrycksTest-world_religions | 0|acc |34.50|± | 3.65|
| | |acc_norm|36.26|± | 3.69|
|lambada_openai | 0|ppl | 4.25|± | 0.09|
| | |acc |67.71|± | 0.65|
|logiqa | 0|acc |23.50|± | 1.66|
| | |acc_norm|28.73|± | 1.77|
|piqa | 0|acc |76.28|± | 0.99|
| | |acc_norm|76.44|± | 0.99|
|sciq | 0|acc |90.10|± | 0.94|
| | |acc_norm|85.20|± | 1.12|
|winogrande | 0|acc |65.27|± | 1.34|
|wsc | 0|acc |42.31|± | 4.87|
{
"results": {
"hendrycksTest-nutrition": {
"acc": 0.30718954248366015,
"acc_stderr": 0.026415601914389002,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.02795604616542451
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.24870466321243523,
"acc_stderr": 0.0311958408777003,
"acc_norm": 0.24352331606217617,
"acc_norm_stderr": 0.030975436386845426
},
"hendrycksTest-professional_accounting": {
"acc": 0.25886524822695034,
"acc_stderr": 0.026129572527180848,
"acc_norm": 0.25886524822695034,
"acc_norm_stderr": 0.026129572527180848
},
"hendrycksTest-logical_fallacies": {
"acc": 0.20245398773006135,
"acc_stderr": 0.03157065078911902,
"acc_norm": 0.27607361963190186,
"acc_norm_stderr": 0.0351238528370505
},
"sciq": {
"acc": 0.901,
"acc_stderr": 0.009449248027662761,
"acc_norm": 0.852,
"acc_norm_stderr": 0.011234866364235247
},
"hendrycksTest-moral_scenarios": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249588,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-college_computer_science": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"hendrycksTest-public_relations": {
"acc": 0.32727272727272727,
"acc_stderr": 0.044942908662520896,
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.036942843353377997
},
"hendrycksTest-econometrics": {
"acc": 0.2543859649122807,
"acc_stderr": 0.04096985139843671,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436716
},
"hendrycksTest-world_religions": {
"acc": 0.34502923976608185,
"acc_stderr": 0.036459813773888065,
"acc_norm": 0.36257309941520466,
"acc_norm_stderr": 0.0368713061556206
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.24074074074074073,
"acc_stderr": 0.026067159222275788,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.028317533496066468
},
"hendrycksTest-human_sexuality": {
"acc": 0.33587786259541985,
"acc_stderr": 0.041423137719966634,
"acc_norm": 0.29770992366412213,
"acc_norm_stderr": 0.040103589424622034
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.16748768472906403,
"acc_stderr": 0.026273086047535397,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03144712581678242
},
"hendrycksTest-college_mathematics": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"hendrycksTest-abstract_algebra": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.21,
"acc_norm_stderr": 0.04093601807403326
},
"hendrycksTest-formal_logic": {
"acc": 0.29365079365079366,
"acc_stderr": 0.04073524322147127,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924315
},
"piqa": {
"acc": 0.7627856365614799,
"acc_stderr": 0.009924694933586367,
"acc_norm": 0.764417845484222,
"acc_norm_stderr": 0.009901067586473886
},
"arc_easy": {
"acc": 0.6561447811447811,
"acc_stderr": 0.009746660584852457,
"acc_norm": 0.601010101010101,
"acc_norm_stderr": 0.010048240683798742
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.28974358974358977,
"acc_stderr": 0.023000628243687964,
"acc_norm": 0.2794871794871795,
"acc_norm_stderr": 0.02275238883977683
},
"logiqa": {
"acc": 0.2350230414746544,
"acc_stderr": 0.016631166823890965,
"acc_norm": 0.2872503840245776,
"acc_norm_stderr": 0.017747701948846596
},
"hendrycksTest-high_school_physics": {
"acc": 0.2119205298013245,
"acc_stderr": 0.033367670865679766,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008936
},
"hendrycksTest-management": {
"acc": 0.2912621359223301,
"acc_stderr": 0.044986763205729224,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.047211885060971716
},
"hendrycksTest-professional_medicine": {
"acc": 0.21691176470588236,
"acc_stderr": 0.025035845227711274,
"acc_norm": 0.2426470588235294,
"acc_norm_stderr": 0.026040662474201264
},
"hendrycksTest-college_biology": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554858,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080341
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.2689075630252101,
"acc_stderr": 0.02880139219363128,
"acc_norm": 0.31932773109243695,
"acc_norm_stderr": 0.0302839955258844
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.26037735849056604,
"acc_stderr": 0.0270087660907081,
"acc_norm": 0.2981132075471698,
"acc_norm_stderr": 0.02815283794249386
},
"hendrycksTest-anatomy": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"hendrycksTest-virology": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.29518072289156627,
"acc_norm_stderr": 0.0355092018568963
},
"hendrycksTest-college_medicine": {
"acc": 0.20809248554913296,
"acc_stderr": 0.0309528902177499,
"acc_norm": 0.2138728323699422,
"acc_norm_stderr": 0.031265112061730424
},
"hendrycksTest-high_school_psychology": {
"acc": 0.28807339449541286,
"acc_stderr": 0.01941644589263602,
"acc_norm": 0.24954128440366974,
"acc_norm_stderr": 0.01855389762950162
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2777777777777778,
"acc_stderr": 0.0305467452649532,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.03191923445686185
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.24603174603174602,
"acc_stderr": 0.022182037202948368,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643895
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"hendrycksTest-machine_learning": {
"acc": 0.20535714285714285,
"acc_stderr": 0.038342410214190735,
"acc_norm": 0.22321428571428573,
"acc_norm_stderr": 0.039523019677025116
},
"hendrycksTest-marketing": {
"acc": 0.28205128205128205,
"acc_stderr": 0.02948036054954119,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.030572811310299607
},
"arc_challenge": {
"acc": 0.3054607508532423,
"acc_stderr": 0.0134600804780025,
"acc_norm": 0.34726962457337884,
"acc_norm_stderr": 0.01391303452962044
},
"hendrycksTest-college_chemistry": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"hendrycksTest-high_school_biology": {
"acc": 0.25161290322580643,
"acc_stderr": 0.024685979286239956,
"acc_norm": 0.2870967741935484,
"acc_norm_stderr": 0.025736542745594528
},
"hendrycksTest-philosophy": {
"acc": 0.2733118971061093,
"acc_stderr": 0.02531176597542612,
"acc_norm": 0.3183279742765273,
"acc_norm_stderr": 0.026457225067811025
},
"lambada_openai": {
"ppl": 4.252877363060981,
"ppl_stderr": 0.0927244083936228,
"acc": 0.6770813118571706,
"acc_stderr": 0.006514469814384408
},
"hendrycksTest-high_school_world_history": {
"acc": 0.24472573839662448,
"acc_stderr": 0.027985699387036416,
"acc_norm": 0.3037974683544304,
"acc_norm_stderr": 0.0299366963871386
},
"hendrycksTest-high_school_european_history": {
"acc": 0.3212121212121212,
"acc_stderr": 0.03646204963253812,
"acc_norm": 0.2787878787878788,
"acc_norm_stderr": 0.03501438706296781
},
"hendrycksTest-astronomy": {
"acc": 0.26973684210526316,
"acc_stderr": 0.03611780560284898,
"acc_norm": 0.3223684210526316,
"acc_norm_stderr": 0.03803510248351585
},
"hendrycksTest-sociology": {
"acc": 0.2835820895522388,
"acc_stderr": 0.03187187537919796,
"acc_norm": 0.30845771144278605,
"acc_norm_stderr": 0.03265819588512699
},
"hendrycksTest-human_aging": {
"acc": 0.30493273542600896,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.2914798206278027,
"acc_norm_stderr": 0.030500283176545902
},
"hendrycksTest-business_ethics": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"hendrycksTest-electrical_engineering": {
"acc": 0.296551724137931,
"acc_stderr": 0.03806142687309994,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03960933549451208
},
"hendrycksTest-moral_disputes": {
"acc": 0.27167630057803466,
"acc_stderr": 0.023948512905468355,
"acc_norm": 0.315028901734104,
"acc_norm_stderr": 0.025009313790069695
},
"hendrycksTest-prehistory": {
"acc": 0.25,
"acc_stderr": 0.02409347123262133,
"acc_norm": 0.2006172839506173,
"acc_norm_stderr": 0.022282313949774882
},
"hendrycksTest-professional_psychology": {
"acc": 0.25163398692810457,
"acc_stderr": 0.01755581809132227,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.01755581809132226
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2723404255319149,
"acc_stderr": 0.029101290698386708,
"acc_norm": 0.2170212765957447,
"acc_norm_stderr": 0.026947483121496238
},
"hendrycksTest-professional_law": {
"acc": 0.26140808344198174,
"acc_stderr": 0.01122252816977131,
"acc_norm": 0.29335071707953064,
"acc_norm_stderr": 0.011628520449582073
},
"hendrycksTest-computer_security": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"hendrycksTest-miscellaneous": {
"acc": 0.3103448275862069,
"acc_stderr": 0.016543785026048315,
"acc_norm": 0.27458492975734355,
"acc_norm_stderr": 0.01595982993308404
},
"hendrycksTest-global_facts": {
"acc": 0.18,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"hendrycksTest-high_school_us_history": {
"acc": 0.2647058823529412,
"acc_stderr": 0.030964517926923393,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.03077855467869326
},
"hendrycksTest-jurisprudence": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04803752235190193
},
"hendrycksTest-security_studies": {
"acc": 0.4204081632653061,
"acc_stderr": 0.03160106993449603,
"acc_norm": 0.33877551020408164,
"acc_norm_stderr": 0.030299506562154185
},
"hendrycksTest-medical_genetics": {
"acc": 0.31,
"acc_stderr": 0.04648231987117317,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"wsc": {
"acc": 0.4230769230769231,
"acc_stderr": 0.048679937479186836
},
"hendrycksTest-high_school_geography": {
"acc": 0.21717171717171718,
"acc_stderr": 0.029376616484945633,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.03173071239071724
},
"hendrycksTest-international_law": {
"acc": 0.3140495867768595,
"acc_stderr": 0.04236964753041017,
"acc_norm": 0.4628099173553719,
"acc_norm_stderr": 0.04551711196104218
},
"hendrycksTest-college_physics": {
"acc": 0.20588235294117646,
"acc_stderr": 0.040233822736177455,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.042207736591714534
},
"winogrande": {
"acc": 0.6527229676400947,
"acc_stderr": 0.013380909249751233
}
},
"versions": {
"hendrycksTest-nutrition": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-logical_fallacies": 0,
"sciq": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-formal_logic": 0,
"piqa": 0,
"arc_easy": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"logiqa": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-management": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-marketing": 0,
"arc_challenge": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-philosophy": 0,
"lambada_openai": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-medical_genetics": 0,
"wsc": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-college_physics": 0,
"winogrande": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-6.7b,use_accelerate=True,device_map_option=sequential",
"num_fewshot": 0,
"batch_size": 16,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
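The `config` block above records the exact settings used for the opt-6.7b run. As a point of reference, below is a minimal sketch of reproducing such a run programmatically; it assumes the harness exposes an `lm_eval.evaluator.simple_evaluate` entry point that accepts keyword arguments mirroring the recorded `config` keys, and the task list is truncated here for brevity.

```python
# Minimal sketch (assumption): re-running the opt-6.7b evaluation from the
# "config" block above, assuming lm-evaluation-harness is installed and that
# evaluator.simple_evaluate takes keyword arguments matching the config keys.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal",
    model_args="pretrained=facebook/opt-6.7b,use_accelerate=True,device_map_option=sequential",
    tasks=["arc_easy", "arc_challenge", "piqa", "sciq", "winogrande", "wsc"],  # subset of the tasks shown above
    num_fewshot=0,
    batch_size=16,
    device="cuda",
    no_cache=False,
    limit=None,
    bootstrap_iters=100000,
)

# The returned dictionary has the same shape as the JSON dumps on this page
# (results / versions / config), so it can be written out directly.
print(json.dumps(results, indent=2, default=str))
```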
# opt-66b
## opt-66b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |37.20|± | 1.41|
| | |acc_norm|40.10|± | 1.43|
|arc_easy | 0|acc |71.68|± | 0.92|
| | |acc_norm|67.30|± | 0.96|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-anatomy | 0|acc |27.41|± | 3.85|
| | |acc_norm|26.67|± | 3.82|
|hendrycksTest-astronomy | 0|acc |28.95|± | 3.69|
| | |acc_norm|40.13|± | 3.99|
|hendrycksTest-business_ethics | 0|acc |29.00|± | 4.56|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-clinical_knowledge | 0|acc |24.15|± | 2.63|
| | |acc_norm|27.55|± | 2.75|
|hendrycksTest-college_biology | 0|acc |24.31|± | 3.59|
| | |acc_norm|25.00|± | 3.62|
|hendrycksTest-college_chemistry | 0|acc |30.00|± | 4.61|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |23.00|± | 4.23|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_mathematics | 0|acc |23.00|± | 4.23|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-college_medicine | 0|acc |23.70|± | 3.24|
| | |acc_norm|24.86|± | 3.30|
|hendrycksTest-college_physics | 0|acc |28.43|± | 4.49|
| | |acc_norm|26.47|± | 4.39|
|hendrycksTest-computer_security | 0|acc |32.00|± | 4.69|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-conceptual_physics | 0|acc |25.53|± | 2.85|
| | |acc_norm|22.98|± | 2.75|
|hendrycksTest-econometrics | 0|acc |28.07|± | 4.23|
| | |acc_norm|20.18|± | 3.78|
|hendrycksTest-electrical_engineering | 0|acc |35.86|± | 4.00|
| | |acc_norm|38.62|± | 4.06|
|hendrycksTest-elementary_mathematics | 0|acc |26.46|± | 2.27|
| | |acc_norm|27.25|± | 2.29|
|hendrycksTest-formal_logic | 0|acc |30.16|± | 4.10|
| | |acc_norm|28.57|± | 4.04|
|hendrycksTest-global_facts | 0|acc |29.00|± | 4.56|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-high_school_biology | 0|acc |26.13|± | 2.50|
| | |acc_norm|31.94|± | 2.65|
|hendrycksTest-high_school_chemistry | 0|acc |24.14|± | 3.01|
| | |acc_norm|34.48|± | 3.34|
|hendrycksTest-high_school_computer_science | 0|acc |31.00|± | 4.65|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-high_school_european_history | 0|acc |29.09|± | 3.55|
| | |acc_norm|29.70|± | 3.57|
|hendrycksTest-high_school_geography | 0|acc |26.26|± | 3.14|
| | |acc_norm|31.82|± | 3.32|
|hendrycksTest-high_school_government_and_politics| 0|acc |26.42|± | 3.18|
| | |acc_norm|26.42|± | 3.18|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.49|± | 2.31|
| | |acc_norm|26.67|± | 2.24|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|32.22|± | 2.85|
|hendrycksTest-high_school_microeconomics | 0|acc |29.83|± | 2.97|
| | |acc_norm|36.13|± | 3.12|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|23.18|± | 3.45|
|hendrycksTest-high_school_psychology | 0|acc |28.44|± | 1.93|
| | |acc_norm|25.87|± | 1.88|
|hendrycksTest-high_school_statistics | 0|acc |29.17|± | 3.10|
| | |acc_norm|33.33|± | 3.21|
|hendrycksTest-high_school_us_history | 0|acc |27.45|± | 3.13|
| | |acc_norm|30.39|± | 3.23|
|hendrycksTest-high_school_world_history | 0|acc |30.80|± | 3.01|
| | |acc_norm|32.49|± | 3.05|
|hendrycksTest-human_aging | 0|acc |28.70|± | 3.04|
| | |acc_norm|22.42|± | 2.80|
|hendrycksTest-human_sexuality | 0|acc |36.64|± | 4.23|
| | |acc_norm|32.82|± | 4.12|
|hendrycksTest-international_law | 0|acc |25.62|± | 3.98|
| | |acc_norm|49.59|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |30.56|± | 4.45|
| | |acc_norm|42.59|± | 4.78|
|hendrycksTest-logical_fallacies | 0|acc |23.93|± | 3.35|
| | |acc_norm|28.83|± | 3.56|
|hendrycksTest-machine_learning | 0|acc |24.11|± | 4.06|
| | |acc_norm|23.21|± | 4.01|
|hendrycksTest-management | 0|acc |29.13|± | 4.50|
| | |acc_norm|33.98|± | 4.69|
|hendrycksTest-marketing | 0|acc |29.06|± | 2.97|
| | |acc_norm|29.06|± | 2.97|
|hendrycksTest-medical_genetics | 0|acc |34.00|± | 4.76|
| | |acc_norm|45.00|± | 5.00|
|hendrycksTest-miscellaneous | 0|acc |32.69|± | 1.68|
| | |acc_norm|29.37|± | 1.63|
|hendrycksTest-moral_disputes | 0|acc |32.08|± | 2.51|
| | |acc_norm|31.79|± | 2.51|
|hendrycksTest-moral_scenarios | 0|acc |22.12|± | 1.39|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |33.33|± | 2.70|
| | |acc_norm|39.54|± | 2.80|
|hendrycksTest-philosophy | 0|acc |25.08|± | 2.46|
| | |acc_norm|35.05|± | 2.71|
|hendrycksTest-prehistory | 0|acc |24.38|± | 2.39|
| | |acc_norm|21.30|± | 2.28|
|hendrycksTest-professional_accounting | 0|acc |20.21|± | 2.40|
| | |acc_norm|22.70|± | 2.50|
|hendrycksTest-professional_law | 0|acc |27.90|± | 1.15|
| | |acc_norm|29.53|± | 1.17|
|hendrycksTest-professional_medicine | 0|acc |26.47|± | 2.68|
| | |acc_norm|29.78|± | 2.78|
|hendrycksTest-professional_psychology | 0|acc |26.14|± | 1.78|
| | |acc_norm|28.59|± | 1.83|
|hendrycksTest-public_relations | 0|acc |34.55|± | 4.55|
| | |acc_norm|15.45|± | 3.46|
|hendrycksTest-security_studies | 0|acc |38.37|± | 3.11|
| | |acc_norm|32.65|± | 3.00|
|hendrycksTest-sociology | 0|acc |28.86|± | 3.20|
| | |acc_norm|27.36|± | 3.15|
|hendrycksTest-us_foreign_policy | 0|acc |37.00|± | 4.85|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-virology | 0|acc |32.53|± | 3.65|
| | |acc_norm|30.12|± | 3.57|
|hendrycksTest-world_religions | 0|acc |33.92|± | 3.63|
| | |acc_norm|37.43|± | 3.71|
|lambada_openai | 0|ppl | 3.29|± | 0.06|
| | |acc |73.90|± | 0.61|
|logiqa | 0|acc |22.73|± | 1.64|
| | |acc_norm|28.73|± | 1.77|
|piqa | 0|acc |78.78|± | 0.95|
| | |acc_norm|79.87|± | 0.94|
|sciq | 0|acc |92.60|± | 0.83|
| | |acc_norm|87.30|± | 1.05|
|winogrande | 0|acc |68.75|± | 1.30|
|wsc | 0|acc |54.81|± | 4.90|
{
"results": {
"hendrycksTest-college_mathematics": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_physics": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008937,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.03445406271987054
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2909090909090909,
"acc_stderr": 0.03546563019624337,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.03567969772268047
},
"arc_easy": {
"acc": 0.7167508417508418,
"acc_stderr": 0.009245632200075455,
"acc_norm": 0.672979797979798,
"acc_norm_stderr": 0.009626235849372198
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322004,
"acc_norm": 0.20175438596491227,
"acc_norm_stderr": 0.037752050135836386
},
"hendrycksTest-professional_law": {
"acc": 0.2790091264667536,
"acc_stderr": 0.011455208832803538,
"acc_norm": 0.2953063885267275,
"acc_norm_stderr": 0.011651061936208818
},
"hendrycksTest-human_aging": {
"acc": 0.28699551569506726,
"acc_stderr": 0.030360379710291954,
"acc_norm": 0.2242152466367713,
"acc_norm_stderr": 0.027991534258519527
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.26424870466321243,
"acc_stderr": 0.03182155050916647,
"acc_norm": 0.26424870466321243,
"acc_norm_stderr": 0.03182155050916647
},
"hendrycksTest-college_computer_science": {
"acc": 0.23,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.29831932773109243,
"acc_stderr": 0.029719142876342853,
"acc_norm": 0.36134453781512604,
"acc_norm_stderr": 0.03120469122515001
},
"arc_challenge": {
"acc": 0.3720136518771331,
"acc_stderr": 0.014124597881844461,
"acc_norm": 0.40102389078498296,
"acc_norm_stderr": 0.014322255790719864
},
"hendrycksTest-nutrition": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02699254433929723,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.027996723180631435
},
"sciq": {
"acc": 0.926,
"acc_stderr": 0.008282064512704159,
"acc_norm": 0.873,
"acc_norm_stderr": 0.01053479862085575
},
"hendrycksTest-jurisprudence": {
"acc": 0.3055555555555556,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.0478034362693679
},
"hendrycksTest-sociology": {
"acc": 0.2885572139303483,
"acc_stderr": 0.03203841040213321,
"acc_norm": 0.2736318407960199,
"acc_norm_stderr": 0.031524391865554016
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.24150943396226415,
"acc_stderr": 0.02634148037111837,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.02749566368372407
},
"hendrycksTest-international_law": {
"acc": 0.256198347107438,
"acc_stderr": 0.03984979653302871,
"acc_norm": 0.49586776859504134,
"acc_norm_stderr": 0.04564198767432754
},
"hendrycksTest-virology": {
"acc": 0.3253012048192771,
"acc_stderr": 0.036471685236832266,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.035716092300534796
},
"hendrycksTest-college_physics": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.043898699568087805
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03010833071801162,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280458
},
"hendrycksTest-moral_disputes": {
"acc": 0.3208092485549133,
"acc_stderr": 0.02513100023364791,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153183
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2916666666666667,
"acc_stderr": 0.030998666304560534,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802749
},
"winogrande": {
"acc": 0.6874506708760852,
"acc_stderr": 0.013027563620748837
},
"hendrycksTest-philosophy": {
"acc": 0.2508038585209003,
"acc_stderr": 0.024619771956697168,
"acc_norm": 0.3504823151125402,
"acc_norm_stderr": 0.027098652621301747
},
"wsc": {
"acc": 0.5480769230769231,
"acc_stderr": 0.049038186969314335
},
"hendrycksTest-astronomy": {
"acc": 0.2894736842105263,
"acc_stderr": 0.03690677986137283,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.03988903703336285
},
"hendrycksTest-computer_security": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_psychology": {
"acc": 0.28440366972477066,
"acc_stderr": 0.019342036587702588,
"acc_norm": 0.25871559633027524,
"acc_norm_stderr": 0.01877605231961962
},
"hendrycksTest-college_chemistry": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-management": {
"acc": 0.2912621359223301,
"acc_stderr": 0.04498676320572922,
"acc_norm": 0.33980582524271846,
"acc_norm_stderr": 0.046897659372781335
},
"hendrycksTest-miscellaneous": {
"acc": 0.3269476372924649,
"acc_stderr": 0.016774908180131463,
"acc_norm": 0.2937420178799489,
"acc_norm_stderr": 0.016287759388491675
},
"hendrycksTest-high_school_world_history": {
"acc": 0.3080168776371308,
"acc_stderr": 0.030052389335605695,
"acc_norm": 0.32489451476793246,
"acc_norm_stderr": 0.030486039389105293
},
"lambada_openai": {
"ppl": 3.2877565882479303,
"ppl_stderr": 0.06361523543774811,
"acc": 0.7389869978653212,
"acc_stderr": 0.006118733561625588
},
"hendrycksTest-electrical_engineering": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.04057324734419034
},
"hendrycksTest-high_school_us_history": {
"acc": 0.27450980392156865,
"acc_stderr": 0.03132179803083292,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.03228210387037892
},
"hendrycksTest-college_medicine": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483099,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"hendrycksTest-high_school_geography": {
"acc": 0.26262626262626265,
"acc_stderr": 0.03135305009533084,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.03318477333845331
},
"hendrycksTest-professional_medicine": {
"acc": 0.2647058823529412,
"acc_stderr": 0.026799562024887667,
"acc_norm": 0.2977941176470588,
"acc_norm_stderr": 0.02777829870154544
},
"hendrycksTest-machine_learning": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952686,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755807
},
"hendrycksTest-logical_fallacies": {
"acc": 0.2392638036809816,
"acc_stderr": 0.03351953879521271,
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"hendrycksTest-college_biology": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"hendrycksTest-professional_accounting": {
"acc": 0.20212765957446807,
"acc_stderr": 0.023956668237850226,
"acc_norm": 0.22695035460992907,
"acc_norm_stderr": 0.02498710636564297
},
"hendrycksTest-business_ethics": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"piqa": {
"acc": 0.7878128400435256,
"acc_stderr": 0.009539299828174051,
"acc_norm": 0.7986942328618063,
"acc_norm_stderr": 0.009355431098990426
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2948717948717949,
"acc_stderr": 0.023119362758232294,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923714
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"hendrycksTest-human_sexuality": {
"acc": 0.366412213740458,
"acc_stderr": 0.042258754519696386,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.04118438565806299
},
"hendrycksTest-high_school_biology": {
"acc": 0.26129032258064516,
"acc_stderr": 0.024993053397764826,
"acc_norm": 0.3193548387096774,
"acc_norm_stderr": 0.026522709674667768
},
"hendrycksTest-security_studies": {
"acc": 0.3836734693877551,
"acc_stderr": 0.03113088039623595,
"acc_norm": 0.32653061224489793,
"acc_norm_stderr": 0.030021056238440307
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.21851851851851853,
"acc_stderr": 0.025195752251823793,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.0284934650910286
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.26455026455026454,
"acc_stderr": 0.022717467897708628,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.02293097307163336
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2553191489361702,
"acc_stderr": 0.028504856470514196,
"acc_norm": 0.2297872340425532,
"acc_norm_stderr": 0.027501752944412417
},
"hendrycksTest-prehistory": {
"acc": 0.24382716049382716,
"acc_stderr": 0.023891879541959593,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.022779719088733396
},
"hendrycksTest-medical_genetics": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"hendrycksTest-anatomy": {
"acc": 0.2740740740740741,
"acc_stderr": 0.03853254836552003,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"hendrycksTest-professional_psychology": {
"acc": 0.26143790849673204,
"acc_stderr": 0.017776947157528037,
"acc_norm": 0.28594771241830064,
"acc_norm_stderr": 0.01828048507295467
},
"hendrycksTest-global_facts": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"hendrycksTest-moral_scenarios": {
"acc": 0.2212290502793296,
"acc_stderr": 0.013882164598887275,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-marketing": {
"acc": 0.2905982905982906,
"acc_stderr": 0.029745048572674057,
"acc_norm": 0.2905982905982906,
"acc_norm_stderr": 0.029745048572674054
},
"hendrycksTest-formal_logic": {
"acc": 0.30158730158730157,
"acc_stderr": 0.04104947269903394,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.040406101782088394
},
"hendrycksTest-public_relations": {
"acc": 0.34545454545454546,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.15454545454545454,
"acc_norm_stderr": 0.03462262571262667
},
"logiqa": {
"acc": 0.22734254992319508,
"acc_stderr": 0.016439067675117748,
"acc_norm": 0.2872503840245776,
"acc_norm_stderr": 0.017747701948846593
},
"hendrycksTest-world_religions": {
"acc": 0.3391812865497076,
"acc_stderr": 0.036310534964889056,
"acc_norm": 0.3742690058479532,
"acc_norm_stderr": 0.03711601185389481
}
},
"versions": {
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-high_school_european_history": 0,
"arc_easy": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-high_school_microeconomics": 0,
"arc_challenge": 0,
"hendrycksTest-nutrition": 0,
"sciq": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-high_school_statistics": 0,
"winogrande": 0,
"hendrycksTest-philosophy": 0,
"wsc": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-management": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-high_school_world_history": 0,
"lambada_openai": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-business_ethics": 0,
"piqa": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-public_relations": 0,
"logiqa": 0,
"hendrycksTest-world_religions": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-66b,use_accelerate=True,device_map_option=sequential,max_memory_per_gpu=40GIB",
"num_fewshot": 0,
"batch_size": 1,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
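Each `*.json` results file on this page is paired with a markdown table in the same pipe-delimited layout. The sketch below is only an illustration of that pairing (it is not the harness's own table writer): it reads a results file and prints the table, handling the common acc/acc_norm/ppl/em/f1 metrics; the filename `opt-66b.json` is just the example from this section.

```python
import json


def results_to_markdown(path):
    """Render a harness results JSON file into the pipe-delimited table
    layout used in the sections above. Illustrative only: everything
    except perplexity is scaled to a percentage, matching the tables."""
    with open(path) as f:
        data = json.load(f)

    lines = [
        "| Task |Version| Metric |Value| |Stderr|",
        "|------|------:|--------|----:|---|-----:|",
    ]
    for task in sorted(data["results"]):
        metrics = data["results"][task]
        version = data["versions"].get(task, "")
        first = True
        for name, value in metrics.items():
            if name.endswith("_stderr"):
                continue  # stderr is printed on the same row as its metric
            stderr = metrics.get(name + "_stderr")
            scale = 1 if name == "ppl" else 100  # only ppl is reported unscaled
            task_col = task if first else ""
            ver_col = version if first else ""
            if stderr is not None:
                row = f"|{task_col}|{ver_col}|{name}|{value * scale:.2f}|± |{stderr * scale:.2f}|"
            else:
                row = f"|{task_col}|{ver_col}|{name}|{value * scale:.2f}| | |"
            lines.append(row)
            first = False
    return "\n".join(lines)


if __name__ == "__main__":
    print(results_to_markdown("opt-66b.json"))
```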
# xglm-1.7B
## xglm-1.7B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |20.99|± | 1.19|
| | |acc_norm|24.32|± | 1.25|
|arc_easy | 0|acc |53.62|± | 1.02|
| | |acc_norm|47.90|± | 1.03|
|boolq | 1|acc |58.56|± | 0.86|
|copa | 0|acc |68.00|± | 4.69|
|hellaswag | 0|acc |36.18|± | 0.48|
| | |acc_norm|45.80|± | 0.50|
|mc_taco | 0|em |12.91| | |
| | |f1 |34.52| | |
|openbookqa | 0|acc |17.00|± | 1.68|
| | |acc_norm|29.80|± | 2.05|
|piqa | 0|acc |69.70|± | 1.07|
| | |acc_norm|70.35|± | 1.07|
|prost | 0|acc |22.69|± | 0.31|
| | |acc_norm|27.21|± | 0.33|
|swag | 0|acc |45.97|± | 0.35|
| | |acc_norm|62.19|± | 0.34|
|winogrande | 0|acc |54.93|± | 1.40|
|wsc273 | 0|acc |68.13|± | 2.83|
## xglm-1.7B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 0.99|± | 0.27|
## xglm-1.7B_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 0.67|± | 0.08|
| | |f1 | 3.44|± | 0.13|
|gsm8k | 0|acc | 0.83|± | 0.25|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.00|± | 0.00|
|math_prealgebra | 1|acc | 0.00|± | 0.00|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |22.91|± | 0.77|
| | |acc_norm|21.44|± | 0.75|
## xglm-1.7B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |57.55|± | 1.11|
|pawsx_en| 0|acc |52.65|± | 1.12|
|pawsx_es| 0|acc |53.80|± | 1.12|
|pawsx_fr| 0|acc |47.35|± | 1.12|
|pawsx_ja| 0|acc |46.10|± | 1.11|
|pawsx_ko| 0|acc |51.40|± | 1.12|
|pawsx_zh| 0|acc |48.10|± | 1.12|
## xglm-1.7B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 56.8|± | 2.22|
|xcopa_ht| 0|acc | 55.8|± | 2.22|
|xcopa_id| 0|acc | 64.6|± | 2.14|
|xcopa_it| 0|acc | 54.0|± | 2.23|
|xcopa_qu| 0|acc | 52.2|± | 2.24|
|xcopa_sw| 0|acc | 56.6|± | 2.22|
|xcopa_ta| 0|acc | 55.2|± | 2.23|
|xcopa_th| 0|acc | 58.2|± | 2.21|
|xcopa_tr| 0|acc | 53.4|± | 2.23|
|xcopa_vi| 0|acc | 63.0|± | 2.16|
|xcopa_zh| 0|acc | 58.0|± | 2.21|
## xglm-1.7B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.51|± | 0.67|
|xnli_bg| 0|acc |44.73|± | 0.70|
|xnli_de| 0|acc |45.33|± | 0.70|
|xnli_el| 0|acc |40.10|± | 0.69|
|xnli_en| 0|acc |49.68|± | 0.71|
|xnli_es| 0|acc |43.61|± | 0.70|
|xnli_fr| 0|acc |45.73|± | 0.70|
|xnli_hi| 0|acc |42.61|± | 0.70|
|xnli_ru| 0|acc |45.97|± | 0.70|
|xnli_sw| 0|acc |42.00|± | 0.70|
|xnli_th| 0|acc |41.70|± | 0.70|
|xnli_tr| 0|acc |42.95|± | 0.70|
|xnli_ur| 0|acc |39.50|± | 0.69|
|xnli_vi| 0|acc |45.03|± | 0.70|
|xnli_zh| 0|acc |33.77|± | 0.67|
## xglm-1.7B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |52.48|± | 1.29|
|xstory_cloze_en| 0|acc |64.33|± | 1.23|
|xstory_cloze_es| 0|acc |59.23|± | 1.26|
|xstory_cloze_eu| 0|acc |56.12|± | 1.28|
|xstory_cloze_hi| 0|acc |55.79|± | 1.28|
|xstory_cloze_id| 0|acc |57.97|± | 1.27|
|xstory_cloze_my| 0|acc |53.81|± | 1.28|
|xstory_cloze_ru| 0|acc |59.83|± | 1.26|
|xstory_cloze_sw| 0|acc |55.99|± | 1.28|
|xstory_cloze_te| 0|acc |58.04|± | 1.27|
|xstory_cloze_zh| 0|acc |56.19|± | 1.28|
## xglm-1.7B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |71.05|± | 0.94|
|xwinograd_fr| 0|acc |60.24|± | 5.40|
|xwinograd_jp| 0|acc |60.58|± | 1.58|
|xwinograd_pt| 0|acc |63.88|± | 2.97|
|xwinograd_ru| 0|acc |59.68|± | 2.77|
|xwinograd_zh| 0|acc |69.84|± | 2.05|
{
"results": {
"copa": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034
},
"piqa": {
"acc": 0.6969532100108814,
"acc_stderr": 0.010722648689531515,
"acc_norm": 0.7034820457018498,
"acc_norm_stderr": 0.010656078922661134
},
"prost": {
"acc": 0.22694278394534587,
"acc_stderr": 0.003060110855833208,
"acc_norm": 0.27209649871904357,
"acc_norm_stderr": 0.0032514084657504338
},
"arc_easy": {
"acc": 0.5361952861952862,
"acc_stderr": 0.01023286555034672,
"acc_norm": 0.47895622895622897,
"acc_norm_stderr": 0.01025069260202258
},
"hellaswag": {
"acc": 0.3617805218084047,
"acc_stderr": 0.004795337009118189,
"acc_norm": 0.45797649870543716,
"acc_norm_stderr": 0.004972126523031943
},
"mc_taco": {
"em": 0.12912912912912913,
"f1": 0.34519977153598014
},
"winogrande": {
"acc": 0.5493291239147593,
"acc_stderr": 0.013983928869040239
},
"wsc273": {
"acc": 0.6813186813186813,
"acc_stderr": 0.02825328818739863
},
"swag": {
"acc": 0.4596621013695891,
"acc_stderr": 0.0035235690445916223,
"acc_norm": 0.6219134259722083,
"acc_norm_stderr": 0.003428398656668824
},
"boolq": {
"acc": 0.5856269113149847,
"acc_stderr": 0.00861586377642113
},
"openbookqa": {
"acc": 0.17,
"acc_stderr": 0.016815633531393426,
"acc_norm": 0.298,
"acc_norm_stderr": 0.02047511809298897
},
"arc_challenge": {
"acc": 0.2098976109215017,
"acc_stderr": 0.011900548748047446,
"acc_norm": 0.2431740614334471,
"acc_norm_stderr": 0.012536554144587089
}
},
"versions": {
"copa": 0,
"piqa": 0,
"prost": 0,
"arc_easy": 0,
"hellaswag": 0,
"mc_taco": 0,
"winogrande": 0,
"wsc273": 0,
"swag": 0,
"boolq": 1,
"openbookqa": 0,
"arc_challenge": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}