Commit 8a707701 authored by Julen Etxaniz

add opt and mpt model results

parent 2ac318a9
{
"results": {
"hendrycksTest-nutrition": {
"acc": 0.30718954248366015,
"acc_stderr": 0.026415601914389002,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.02795604616542451
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.24870466321243523,
"acc_stderr": 0.0311958408777003,
"acc_norm": 0.24352331606217617,
"acc_norm_stderr": 0.030975436386845426
},
"hendrycksTest-professional_accounting": {
"acc": 0.25886524822695034,
"acc_stderr": 0.026129572527180848,
"acc_norm": 0.25886524822695034,
"acc_norm_stderr": 0.026129572527180848
},
"hendrycksTest-logical_fallacies": {
"acc": 0.20245398773006135,
"acc_stderr": 0.03157065078911902,
"acc_norm": 0.27607361963190186,
"acc_norm_stderr": 0.0351238528370505
},
"sciq": {
"acc": 0.901,
"acc_stderr": 0.009449248027662761,
"acc_norm": 0.852,
"acc_norm_stderr": 0.011234866364235247
},
"hendrycksTest-moral_scenarios": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249588,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-college_computer_science": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"hendrycksTest-public_relations": {
"acc": 0.32727272727272727,
"acc_stderr": 0.044942908662520896,
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.036942843353377997
},
"hendrycksTest-econometrics": {
"acc": 0.2543859649122807,
"acc_stderr": 0.04096985139843671,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436716
},
"hendrycksTest-world_religions": {
"acc": 0.34502923976608185,
"acc_stderr": 0.036459813773888065,
"acc_norm": 0.36257309941520466,
"acc_norm_stderr": 0.0368713061556206
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.24074074074074073,
"acc_stderr": 0.026067159222275788,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.028317533496066468
},
"hendrycksTest-human_sexuality": {
"acc": 0.33587786259541985,
"acc_stderr": 0.041423137719966634,
"acc_norm": 0.29770992366412213,
"acc_norm_stderr": 0.040103589424622034
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.16748768472906403,
"acc_stderr": 0.026273086047535397,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03144712581678242
},
"hendrycksTest-college_mathematics": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"hendrycksTest-abstract_algebra": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.21,
"acc_norm_stderr": 0.04093601807403326
},
"hendrycksTest-formal_logic": {
"acc": 0.29365079365079366,
"acc_stderr": 0.04073524322147127,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924315
},
"piqa": {
"acc": 0.7627856365614799,
"acc_stderr": 0.009924694933586367,
"acc_norm": 0.764417845484222,
"acc_norm_stderr": 0.009901067586473886
},
"arc_easy": {
"acc": 0.6561447811447811,
"acc_stderr": 0.009746660584852457,
"acc_norm": 0.601010101010101,
"acc_norm_stderr": 0.010048240683798742
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.28974358974358977,
"acc_stderr": 0.023000628243687964,
"acc_norm": 0.2794871794871795,
"acc_norm_stderr": 0.02275238883977683
},
"logiqa": {
"acc": 0.2350230414746544,
"acc_stderr": 0.016631166823890965,
"acc_norm": 0.2872503840245776,
"acc_norm_stderr": 0.017747701948846596
},
"hendrycksTest-high_school_physics": {
"acc": 0.2119205298013245,
"acc_stderr": 0.033367670865679766,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008936
},
"hendrycksTest-management": {
"acc": 0.2912621359223301,
"acc_stderr": 0.044986763205729224,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.047211885060971716
},
"hendrycksTest-professional_medicine": {
"acc": 0.21691176470588236,
"acc_stderr": 0.025035845227711274,
"acc_norm": 0.2426470588235294,
"acc_norm_stderr": 0.026040662474201264
},
"hendrycksTest-college_biology": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554858,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080341
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.2689075630252101,
"acc_stderr": 0.02880139219363128,
"acc_norm": 0.31932773109243695,
"acc_norm_stderr": 0.0302839955258844
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.26037735849056604,
"acc_stderr": 0.0270087660907081,
"acc_norm": 0.2981132075471698,
"acc_norm_stderr": 0.02815283794249386
},
"hendrycksTest-anatomy": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"hendrycksTest-virology": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.29518072289156627,
"acc_norm_stderr": 0.0355092018568963
},
"hendrycksTest-college_medicine": {
"acc": 0.20809248554913296,
"acc_stderr": 0.0309528902177499,
"acc_norm": 0.2138728323699422,
"acc_norm_stderr": 0.031265112061730424
},
"hendrycksTest-high_school_psychology": {
"acc": 0.28807339449541286,
"acc_stderr": 0.01941644589263602,
"acc_norm": 0.24954128440366974,
"acc_norm_stderr": 0.01855389762950162
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2777777777777778,
"acc_stderr": 0.0305467452649532,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.03191923445686185
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.24603174603174602,
"acc_stderr": 0.022182037202948368,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643895
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"hendrycksTest-machine_learning": {
"acc": 0.20535714285714285,
"acc_stderr": 0.038342410214190735,
"acc_norm": 0.22321428571428573,
"acc_norm_stderr": 0.039523019677025116
},
"hendrycksTest-marketing": {
"acc": 0.28205128205128205,
"acc_stderr": 0.02948036054954119,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.030572811310299607
},
"arc_challenge": {
"acc": 0.3054607508532423,
"acc_stderr": 0.0134600804780025,
"acc_norm": 0.34726962457337884,
"acc_norm_stderr": 0.01391303452962044
},
"hendrycksTest-college_chemistry": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"hendrycksTest-high_school_biology": {
"acc": 0.25161290322580643,
"acc_stderr": 0.024685979286239956,
"acc_norm": 0.2870967741935484,
"acc_norm_stderr": 0.025736542745594528
},
"hendrycksTest-philosophy": {
"acc": 0.2733118971061093,
"acc_stderr": 0.02531176597542612,
"acc_norm": 0.3183279742765273,
"acc_norm_stderr": 0.026457225067811025
},
"lambada_openai": {
"ppl": 4.252877363060981,
"ppl_stderr": 0.0927244083936228,
"acc": 0.6770813118571706,
"acc_stderr": 0.006514469814384408
},
"hendrycksTest-high_school_world_history": {
"acc": 0.24472573839662448,
"acc_stderr": 0.027985699387036416,
"acc_norm": 0.3037974683544304,
"acc_norm_stderr": 0.0299366963871386
},
"hendrycksTest-high_school_european_history": {
"acc": 0.3212121212121212,
"acc_stderr": 0.03646204963253812,
"acc_norm": 0.2787878787878788,
"acc_norm_stderr": 0.03501438706296781
},
"hendrycksTest-astronomy": {
"acc": 0.26973684210526316,
"acc_stderr": 0.03611780560284898,
"acc_norm": 0.3223684210526316,
"acc_norm_stderr": 0.03803510248351585
},
"hendrycksTest-sociology": {
"acc": 0.2835820895522388,
"acc_stderr": 0.03187187537919796,
"acc_norm": 0.30845771144278605,
"acc_norm_stderr": 0.03265819588512699
},
"hendrycksTest-human_aging": {
"acc": 0.30493273542600896,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.2914798206278027,
"acc_norm_stderr": 0.030500283176545902
},
"hendrycksTest-business_ethics": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"hendrycksTest-electrical_engineering": {
"acc": 0.296551724137931,
"acc_stderr": 0.03806142687309994,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03960933549451208
},
"hendrycksTest-moral_disputes": {
"acc": 0.27167630057803466,
"acc_stderr": 0.023948512905468355,
"acc_norm": 0.315028901734104,
"acc_norm_stderr": 0.025009313790069695
},
"hendrycksTest-prehistory": {
"acc": 0.25,
"acc_stderr": 0.02409347123262133,
"acc_norm": 0.2006172839506173,
"acc_norm_stderr": 0.022282313949774882
},
"hendrycksTest-professional_psychology": {
"acc": 0.25163398692810457,
"acc_stderr": 0.01755581809132227,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.01755581809132226
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2723404255319149,
"acc_stderr": 0.029101290698386708,
"acc_norm": 0.2170212765957447,
"acc_norm_stderr": 0.026947483121496238
},
"hendrycksTest-professional_law": {
"acc": 0.26140808344198174,
"acc_stderr": 0.01122252816977131,
"acc_norm": 0.29335071707953064,
"acc_norm_stderr": 0.011628520449582073
},
"hendrycksTest-computer_security": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"hendrycksTest-miscellaneous": {
"acc": 0.3103448275862069,
"acc_stderr": 0.016543785026048315,
"acc_norm": 0.27458492975734355,
"acc_norm_stderr": 0.01595982993308404
},
"hendrycksTest-global_facts": {
"acc": 0.18,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"hendrycksTest-high_school_us_history": {
"acc": 0.2647058823529412,
"acc_stderr": 0.030964517926923393,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.03077855467869326
},
"hendrycksTest-jurisprudence": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04803752235190193
},
"hendrycksTest-security_studies": {
"acc": 0.4204081632653061,
"acc_stderr": 0.03160106993449603,
"acc_norm": 0.33877551020408164,
"acc_norm_stderr": 0.030299506562154185
},
"hendrycksTest-medical_genetics": {
"acc": 0.31,
"acc_stderr": 0.04648231987117317,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"wsc": {
"acc": 0.4230769230769231,
"acc_stderr": 0.048679937479186836
},
"hendrycksTest-high_school_geography": {
"acc": 0.21717171717171718,
"acc_stderr": 0.029376616484945633,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.03173071239071724
},
"hendrycksTest-international_law": {
"acc": 0.3140495867768595,
"acc_stderr": 0.04236964753041017,
"acc_norm": 0.4628099173553719,
"acc_norm_stderr": 0.04551711196104218
},
"hendrycksTest-college_physics": {
"acc": 0.20588235294117646,
"acc_stderr": 0.040233822736177455,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.042207736591714534
},
"winogrande": {
"acc": 0.6527229676400947,
"acc_stderr": 0.013380909249751233
}
},
"versions": {
"hendrycksTest-nutrition": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-logical_fallacies": 0,
"sciq": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-formal_logic": 0,
"piqa": 0,
"arc_easy": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"logiqa": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-management": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-marketing": 0,
"arc_challenge": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-philosophy": 0,
"lambada_openai": 0,
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-medical_genetics": 0,
"wsc": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-college_physics": 0,
"winogrande": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-6.7b,use_accelerate=True,device_map_option=sequential",
"num_fewshot": 0,
"batch_size": 16,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
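
The `config` block above identifies this first JSON as a zero-shot run of `facebook/opt-6.7b` through the EleutherAI lm-evaluation-harness. As a minimal sketch of how a run with this configuration could be reproduced (the Python API call and the abbreviated task list are assumptions; only the `model_args`, `num_fewshot`, `batch_size`, and other options are copied from the config above), it would look roughly like:

```python
# Sketch: reproduce the opt-6.7b run above via the lm-evaluation-harness
# Python API. model_args and options mirror the "config" block; the task
# list is abbreviated (the full run also covers the hendrycksTest-* tasks,
# logiqa, lambada_openai, wsc, ...). Exact harness version is an assumption.
import json
from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal",
    model_args="pretrained=facebook/opt-6.7b,use_accelerate=True,device_map_option=sequential",
    tasks=["arc_challenge", "arc_easy", "piqa", "sciq", "winogrande"],
    num_fewshot=0,
    batch_size=16,
    device="cuda",
    no_cache=False,
    limit=None,
    bootstrap_iters=100000,
)

# Write the results dict in the same shape as the JSON files in this commit.
with open("opt-6.7b.json", "w") as f:
    json.dump(results, f, indent=4)
```

The opt-66b run further below uses the same options except for `model_args` (`pretrained=facebook/opt-66b,...,max_memory_per_gpu=40GIB`) and `batch_size=1`.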
# opt-66b
## opt-66b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |37.20|± | 1.41|
| | |acc_norm|40.10|± | 1.43|
|arc_easy | 0|acc |71.68|± | 0.92|
| | |acc_norm|67.30|± | 0.96|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-anatomy | 0|acc |27.41|± | 3.85|
| | |acc_norm|26.67|± | 3.82|
|hendrycksTest-astronomy | 0|acc |28.95|± | 3.69|
| | |acc_norm|40.13|± | 3.99|
|hendrycksTest-business_ethics | 0|acc |29.00|± | 4.56|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-clinical_knowledge | 0|acc |24.15|± | 2.63|
| | |acc_norm|27.55|± | 2.75|
|hendrycksTest-college_biology | 0|acc |24.31|± | 3.59|
| | |acc_norm|25.00|± | 3.62|
|hendrycksTest-college_chemistry | 0|acc |30.00|± | 4.61|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |23.00|± | 4.23|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_mathematics | 0|acc |23.00|± | 4.23|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-college_medicine | 0|acc |23.70|± | 3.24|
| | |acc_norm|24.86|± | 3.30|
|hendrycksTest-college_physics | 0|acc |28.43|± | 4.49|
| | |acc_norm|26.47|± | 4.39|
|hendrycksTest-computer_security | 0|acc |32.00|± | 4.69|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-conceptual_physics | 0|acc |25.53|± | 2.85|
| | |acc_norm|22.98|± | 2.75|
|hendrycksTest-econometrics | 0|acc |28.07|± | 4.23|
| | |acc_norm|20.18|± | 3.78|
|hendrycksTest-electrical_engineering | 0|acc |35.86|± | 4.00|
| | |acc_norm|38.62|± | 4.06|
|hendrycksTest-elementary_mathematics | 0|acc |26.46|± | 2.27|
| | |acc_norm|27.25|± | 2.29|
|hendrycksTest-formal_logic | 0|acc |30.16|± | 4.10|
| | |acc_norm|28.57|± | 4.04|
|hendrycksTest-global_facts | 0|acc |29.00|± | 4.56|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-high_school_biology | 0|acc |26.13|± | 2.50|
| | |acc_norm|31.94|± | 2.65|
|hendrycksTest-high_school_chemistry | 0|acc |24.14|± | 3.01|
| | |acc_norm|34.48|± | 3.34|
|hendrycksTest-high_school_computer_science | 0|acc |31.00|± | 4.65|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-high_school_european_history | 0|acc |29.09|± | 3.55|
| | |acc_norm|29.70|± | 3.57|
|hendrycksTest-high_school_geography | 0|acc |26.26|± | 3.14|
| | |acc_norm|31.82|± | 3.32|
|hendrycksTest-high_school_government_and_politics| 0|acc |26.42|± | 3.18|
| | |acc_norm|26.42|± | 3.18|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.49|± | 2.31|
| | |acc_norm|26.67|± | 2.24|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|32.22|± | 2.85|
|hendrycksTest-high_school_microeconomics | 0|acc |29.83|± | 2.97|
| | |acc_norm|36.13|± | 3.12|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|23.18|± | 3.45|
|hendrycksTest-high_school_psychology | 0|acc |28.44|± | 1.93|
| | |acc_norm|25.87|± | 1.88|
|hendrycksTest-high_school_statistics | 0|acc |29.17|± | 3.10|
| | |acc_norm|33.33|± | 3.21|
|hendrycksTest-high_school_us_history | 0|acc |27.45|± | 3.13|
| | |acc_norm|30.39|± | 3.23|
|hendrycksTest-high_school_world_history | 0|acc |30.80|± | 3.01|
| | |acc_norm|32.49|± | 3.05|
|hendrycksTest-human_aging | 0|acc |28.70|± | 3.04|
| | |acc_norm|22.42|± | 2.80|
|hendrycksTest-human_sexuality | 0|acc |36.64|± | 4.23|
| | |acc_norm|32.82|± | 4.12|
|hendrycksTest-international_law | 0|acc |25.62|± | 3.98|
| | |acc_norm|49.59|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |30.56|± | 4.45|
| | |acc_norm|42.59|± | 4.78|
|hendrycksTest-logical_fallacies | 0|acc |23.93|± | 3.35|
| | |acc_norm|28.83|± | 3.56|
|hendrycksTest-machine_learning | 0|acc |24.11|± | 4.06|
| | |acc_norm|23.21|± | 4.01|
|hendrycksTest-management | 0|acc |29.13|± | 4.50|
| | |acc_norm|33.98|± | 4.69|
|hendrycksTest-marketing | 0|acc |29.06|± | 2.97|
| | |acc_norm|29.06|± | 2.97|
|hendrycksTest-medical_genetics | 0|acc |34.00|± | 4.76|
| | |acc_norm|45.00|± | 5.00|
|hendrycksTest-miscellaneous | 0|acc |32.69|± | 1.68|
| | |acc_norm|29.37|± | 1.63|
|hendrycksTest-moral_disputes | 0|acc |32.08|± | 2.51|
| | |acc_norm|31.79|± | 2.51|
|hendrycksTest-moral_scenarios | 0|acc |22.12|± | 1.39|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |33.33|± | 2.70|
| | |acc_norm|39.54|± | 2.80|
|hendrycksTest-philosophy | 0|acc |25.08|± | 2.46|
| | |acc_norm|35.05|± | 2.71|
|hendrycksTest-prehistory | 0|acc |24.38|± | 2.39|
| | |acc_norm|21.30|± | 2.28|
|hendrycksTest-professional_accounting | 0|acc |20.21|± | 2.40|
| | |acc_norm|22.70|± | 2.50|
|hendrycksTest-professional_law | 0|acc |27.90|± | 1.15|
| | |acc_norm|29.53|± | 1.17|
|hendrycksTest-professional_medicine | 0|acc |26.47|± | 2.68|
| | |acc_norm|29.78|± | 2.78|
|hendrycksTest-professional_psychology | 0|acc |26.14|± | 1.78|
| | |acc_norm|28.59|± | 1.83|
|hendrycksTest-public_relations | 0|acc |34.55|± | 4.55|
| | |acc_norm|15.45|± | 3.46|
|hendrycksTest-security_studies | 0|acc |38.37|± | 3.11|
| | |acc_norm|32.65|± | 3.00|
|hendrycksTest-sociology | 0|acc |28.86|± | 3.20|
| | |acc_norm|27.36|± | 3.15|
|hendrycksTest-us_foreign_policy | 0|acc |37.00|± | 4.85|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-virology | 0|acc |32.53|± | 3.65|
| | |acc_norm|30.12|± | 3.57|
|hendrycksTest-world_religions | 0|acc |33.92|± | 3.63|
| | |acc_norm|37.43|± | 3.71|
|lambada_openai | 0|ppl | 3.29|± | 0.06|
| | |acc |73.90|± | 0.61|
|logiqa | 0|acc |22.73|± | 1.64|
| | |acc_norm|28.73|± | 1.77|
|piqa | 0|acc |78.78|± | 0.95|
| | |acc_norm|79.87|± | 0.94|
|sciq | 0|acc |92.60|± | 0.83|
| | |acc_norm|87.30|± | 1.05|
|winogrande | 0|acc |68.75|± | 1.30|
|wsc | 0|acc |54.81|± | 4.90|
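
The table above is the rendered form of the opt-66b.json results that follow. As an illustrative sketch only (the `json_to_markdown` helper, the percentage scaling of accuracy metrics, and the file name are assumptions, not part of this commit), a table in this format can be regenerated from one of these JSON files roughly as follows:

```python
# Hypothetical helper: rebuild a "Task | Version | Metric | Value | ± | Stderr"
# markdown table (like the one above) from an lm-evaluation-harness results JSON.
import json

def json_to_markdown(path: str) -> str:
    with open(path) as f:
        data = json.load(f)
    lines = [
        "| Task |Version| Metric |Value| |Stderr|",
        "|------|------:|--------|----:|---|-----:|",
    ]
    for task in sorted(data["results"]):
        version = data["versions"].get(task, "")
        first = True
        for metric, value in data["results"][task].items():
            if metric.endswith("_stderr"):
                continue
            stderr = data["results"][task].get(metric + "_stderr", 0.0)
            # Accuracies are shown as percentages; perplexity is left unscaled.
            scale = 1 if metric == "ppl" else 100
            lines.append(
                "|{}|{}|{}|{:.2f}|± |{:.2f}|".format(
                    task if first else "",
                    version if first else "",
                    metric,
                    value * scale,
                    stderr * scale,
                )
            )
            first = False
    return "\n".join(lines)

print(json_to_markdown("opt-66b.json"))
```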
{
"results": {
"hendrycksTest-college_mathematics": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_physics": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008937,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.03445406271987054
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2909090909090909,
"acc_stderr": 0.03546563019624337,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.03567969772268047
},
"arc_easy": {
"acc": 0.7167508417508418,
"acc_stderr": 0.009245632200075455,
"acc_norm": 0.672979797979798,
"acc_norm_stderr": 0.009626235849372198
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322004,
"acc_norm": 0.20175438596491227,
"acc_norm_stderr": 0.037752050135836386
},
"hendrycksTest-professional_law": {
"acc": 0.2790091264667536,
"acc_stderr": 0.011455208832803538,
"acc_norm": 0.2953063885267275,
"acc_norm_stderr": 0.011651061936208818
},
"hendrycksTest-human_aging": {
"acc": 0.28699551569506726,
"acc_stderr": 0.030360379710291954,
"acc_norm": 0.2242152466367713,
"acc_norm_stderr": 0.027991534258519527
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.26424870466321243,
"acc_stderr": 0.03182155050916647,
"acc_norm": 0.26424870466321243,
"acc_norm_stderr": 0.03182155050916647
},
"hendrycksTest-college_computer_science": {
"acc": 0.23,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.29831932773109243,
"acc_stderr": 0.029719142876342853,
"acc_norm": 0.36134453781512604,
"acc_norm_stderr": 0.03120469122515001
},
"arc_challenge": {
"acc": 0.3720136518771331,
"acc_stderr": 0.014124597881844461,
"acc_norm": 0.40102389078498296,
"acc_norm_stderr": 0.014322255790719864
},
"hendrycksTest-nutrition": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02699254433929723,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.027996723180631435
},
"sciq": {
"acc": 0.926,
"acc_stderr": 0.008282064512704159,
"acc_norm": 0.873,
"acc_norm_stderr": 0.01053479862085575
},
"hendrycksTest-jurisprudence": {
"acc": 0.3055555555555556,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.0478034362693679
},
"hendrycksTest-sociology": {
"acc": 0.2885572139303483,
"acc_stderr": 0.03203841040213321,
"acc_norm": 0.2736318407960199,
"acc_norm_stderr": 0.031524391865554016
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.24150943396226415,
"acc_stderr": 0.02634148037111837,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.02749566368372407
},
"hendrycksTest-international_law": {
"acc": 0.256198347107438,
"acc_stderr": 0.03984979653302871,
"acc_norm": 0.49586776859504134,
"acc_norm_stderr": 0.04564198767432754
},
"hendrycksTest-virology": {
"acc": 0.3253012048192771,
"acc_stderr": 0.036471685236832266,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.035716092300534796
},
"hendrycksTest-college_physics": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.043898699568087805
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03010833071801162,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280458
},
"hendrycksTest-moral_disputes": {
"acc": 0.3208092485549133,
"acc_stderr": 0.02513100023364791,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153183
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2916666666666667,
"acc_stderr": 0.030998666304560534,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802749
},
"winogrande": {
"acc": 0.6874506708760852,
"acc_stderr": 0.013027563620748837
},
"hendrycksTest-philosophy": {
"acc": 0.2508038585209003,
"acc_stderr": 0.024619771956697168,
"acc_norm": 0.3504823151125402,
"acc_norm_stderr": 0.027098652621301747
},
"wsc": {
"acc": 0.5480769230769231,
"acc_stderr": 0.049038186969314335
},
"hendrycksTest-astronomy": {
"acc": 0.2894736842105263,
"acc_stderr": 0.03690677986137283,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.03988903703336285
},
"hendrycksTest-computer_security": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_psychology": {
"acc": 0.28440366972477066,
"acc_stderr": 0.019342036587702588,
"acc_norm": 0.25871559633027524,
"acc_norm_stderr": 0.01877605231961962
},
"hendrycksTest-college_chemistry": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-management": {
"acc": 0.2912621359223301,
"acc_stderr": 0.04498676320572922,
"acc_norm": 0.33980582524271846,
"acc_norm_stderr": 0.046897659372781335
},
"hendrycksTest-miscellaneous": {
"acc": 0.3269476372924649,
"acc_stderr": 0.016774908180131463,
"acc_norm": 0.2937420178799489,
"acc_norm_stderr": 0.016287759388491675
},
"hendrycksTest-high_school_world_history": {
"acc": 0.3080168776371308,
"acc_stderr": 0.030052389335605695,
"acc_norm": 0.32489451476793246,
"acc_norm_stderr": 0.030486039389105293
},
"lambada_openai": {
"ppl": 3.2877565882479303,
"ppl_stderr": 0.06361523543774811,
"acc": 0.7389869978653212,
"acc_stderr": 0.006118733561625588
},
"hendrycksTest-electrical_engineering": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.04057324734419034
},
"hendrycksTest-high_school_us_history": {
"acc": 0.27450980392156865,
"acc_stderr": 0.03132179803083292,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.03228210387037892
},
"hendrycksTest-college_medicine": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483099,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"hendrycksTest-high_school_geography": {
"acc": 0.26262626262626265,
"acc_stderr": 0.03135305009533084,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.03318477333845331
},
"hendrycksTest-professional_medicine": {
"acc": 0.2647058823529412,
"acc_stderr": 0.026799562024887667,
"acc_norm": 0.2977941176470588,
"acc_norm_stderr": 0.02777829870154544
},
"hendrycksTest-machine_learning": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952686,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755807
},
"hendrycksTest-logical_fallacies": {
"acc": 0.2392638036809816,
"acc_stderr": 0.03351953879521271,
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"hendrycksTest-college_biology": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"hendrycksTest-professional_accounting": {
"acc": 0.20212765957446807,
"acc_stderr": 0.023956668237850226,
"acc_norm": 0.22695035460992907,
"acc_norm_stderr": 0.02498710636564297
},
"hendrycksTest-business_ethics": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"piqa": {
"acc": 0.7878128400435256,
"acc_stderr": 0.009539299828174051,
"acc_norm": 0.7986942328618063,
"acc_norm_stderr": 0.009355431098990426
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2948717948717949,
"acc_stderr": 0.023119362758232294,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923714
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"hendrycksTest-human_sexuality": {
"acc": 0.366412213740458,
"acc_stderr": 0.042258754519696386,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.04118438565806299
},
"hendrycksTest-high_school_biology": {
"acc": 0.26129032258064516,
"acc_stderr": 0.024993053397764826,
"acc_norm": 0.3193548387096774,
"acc_norm_stderr": 0.026522709674667768
},
"hendrycksTest-security_studies": {
"acc": 0.3836734693877551,
"acc_stderr": 0.03113088039623595,
"acc_norm": 0.32653061224489793,
"acc_norm_stderr": 0.030021056238440307
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.21851851851851853,
"acc_stderr": 0.025195752251823793,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.0284934650910286
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.26455026455026454,
"acc_stderr": 0.022717467897708628,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.02293097307163336
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2553191489361702,
"acc_stderr": 0.028504856470514196,
"acc_norm": 0.2297872340425532,
"acc_norm_stderr": 0.027501752944412417
},
"hendrycksTest-prehistory": {
"acc": 0.24382716049382716,
"acc_stderr": 0.023891879541959593,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.022779719088733396
},
"hendrycksTest-medical_genetics": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"hendrycksTest-anatomy": {
"acc": 0.2740740740740741,
"acc_stderr": 0.03853254836552003,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"hendrycksTest-professional_psychology": {
"acc": 0.26143790849673204,
"acc_stderr": 0.017776947157528037,
"acc_norm": 0.28594771241830064,
"acc_norm_stderr": 0.01828048507295467
},
"hendrycksTest-global_facts": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"hendrycksTest-moral_scenarios": {
"acc": 0.2212290502793296,
"acc_stderr": 0.013882164598887275,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-marketing": {
"acc": 0.2905982905982906,
"acc_stderr": 0.029745048572674057,
"acc_norm": 0.2905982905982906,
"acc_norm_stderr": 0.029745048572674054
},
"hendrycksTest-formal_logic": {
"acc": 0.30158730158730157,
"acc_stderr": 0.04104947269903394,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.040406101782088394
},
"hendrycksTest-public_relations": {
"acc": 0.34545454545454546,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.15454545454545454,
"acc_norm_stderr": 0.03462262571262667
},
"logiqa": {
"acc": 0.22734254992319508,
"acc_stderr": 0.016439067675117748,
"acc_norm": 0.2872503840245776,
"acc_norm_stderr": 0.017747701948846593
},
"hendrycksTest-world_religions": {
"acc": 0.3391812865497076,
"acc_stderr": 0.036310534964889056,
"acc_norm": 0.3742690058479532,
"acc_norm_stderr": 0.03711601185389481
}
},
"versions": {
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-high_school_european_history": 0,
"arc_easy": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-high_school_microeconomics": 0,
"arc_challenge": 0,
"hendrycksTest-nutrition": 0,
"sciq": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-high_school_statistics": 0,
"winogrande": 0,
"hendrycksTest-philosophy": 0,
"wsc": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-management": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-high_school_world_history": 0,
"lambada_openai": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-business_ethics": 0,
"piqa": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-public_relations": 0,
"logiqa": 0,
"hendrycksTest-world_religions": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-66b,use_accelerate=True,device_map_option=sequential,max_memory_per_gpu=40GIB",
"num_fewshot": 0,
"batch_size": 1,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}