gaoqiong / lm-evaluation-harness / Commits / 4fbbd60f

Commit 4fbbd60f (unverified), authored Aug 01, 2023 by Hailey Schoelkopf, committed by GitHub on Aug 01, 2023.
Parents: 5e59782e, 1b9833dd

Merge pull request #718 from EleutherAI/remove-results-folder

Remove Results folder temporarily
Showing 20 changed files, with 0 additions and 2779 deletions (+0 / -2779):
| File | Additions | Deletions |
|------|----------:|----------:|
| results/opt/opt-350m/opt-350m.json | 0 | 468 |
| results/opt/opt-6.7b/README.md | 0 | 133 |
| results/opt/opt-6.7b/opt-6.7b.json | 0 | 468 |
| results/opt/opt-66b/README.md | 0 | 133 |
| results/opt/opt-66b/opt-66b.json | 0 | 468 |
| results/xglm/xglm-1.7B/README.md | 0 | 116 |
| results/xglm/xglm-1.7B/xglm-1.7B_common_sense_reasoning_0-shot.json | 0 | 91 |
| results/xglm/xglm-1.7B/xglm-1.7B_gsm8k_8-shot.json | 0 | 22 |
| results/xglm/xglm-1.7B/xglm-1.7B_mathematical_reasoning_few_shot_5-shot.json | 0 | 71 |
| results/xglm/xglm-1.7B/xglm-1.7B_pawsx_0-shot.json | 0 | 52 |
| results/xglm/xglm-1.7B/xglm-1.7B_xcopa_0-shot.json | 0 | 72 |
| results/xglm/xglm-1.7B/xglm-1.7B_xnli_0-shot.json | 0 | 92 |
| results/xglm/xglm-1.7B/xglm-1.7B_xstory_cloze_0-shot.json | 0 | 72 |
| results/xglm/xglm-1.7B/xglm-1.7B_xwinograd_0-shot.json | 0 | 47 |
| results/xglm/xglm-2.9B/README.md | 0 | 95 |
| results/xglm/xglm-2.9B/xglm-2.9B_common_sense_reasoning_0-shot.json | 0 | 91 |
| results/xglm/xglm-2.9B/xglm-2.9B_pawsx_0-shot.json | 0 | 52 |
| results/xglm/xglm-2.9B/xglm-2.9B_xcopa_0-shot.json | 0 | 72 |
| results/xglm/xglm-2.9B/xglm-2.9B_xnli_0-shot.json | 0 | 92 |
| results/xglm/xglm-2.9B/xglm-2.9B_xstory_cloze_0-shot.json | 0 | 72 |
results/opt/opt-350m/opt-350m.json (deleted, file mode 100644 → 0)
{
  "results": {
    "hendrycksTest-college_medicine": {"acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.03214737302029471},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.23834196891191708, "acc_stderr": 0.030748905363909892, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877794},
    "hendrycksTest-high_school_biology": {"acc": 0.24193548387096775, "acc_stderr": 0.024362599693031086, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055},
    "hendrycksTest-conceptual_physics": {"acc": 0.2765957446808511, "acc_stderr": 0.029241883869628824, "acc_norm": 0.2, "acc_norm_stderr": 0.026148818018424506},
    "hendrycksTest-high_school_statistics": {"acc": 0.26851851851851855, "acc_stderr": 0.030225226160012404, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.03085199299325701},
    "hendrycksTest-professional_accounting": {"acc": 0.24113475177304963, "acc_stderr": 0.02551873104953776, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432407},
    "wsc": {"acc": 0.36538461538461536, "acc_stderr": 0.0474473339327792},
    "hendrycksTest-college_mathematics": {"acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "hendrycksTest-high_school_computer_science": {"acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808},
    "hendrycksTest-professional_medicine": {"acc": 0.23161764705882354, "acc_stderr": 0.025626533803777562, "acc_norm": 0.25735294117647056, "acc_norm_stderr": 0.026556519470041513},
    "hendrycksTest-college_physics": {"acc": 0.19607843137254902, "acc_stderr": 0.03950581861179964, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643945},
    "hendrycksTest-business_ethics": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "logiqa": {"acc": 0.21044546850998463, "acc_stderr": 0.015988369488888755, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.017719247798458293},
    "hendrycksTest-us_foreign_policy": {"acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683},
    "hendrycksTest-human_aging": {"acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.030069584874494043},
    "hendrycksTest-high_school_psychology": {"acc": 0.22568807339449543, "acc_stderr": 0.01792308766780305, "acc_norm": 0.23302752293577983, "acc_norm_stderr": 0.01812566918086148},
    "hendrycksTest-human_sexuality": {"acc": 0.3511450381679389, "acc_stderr": 0.04186445163013751, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.04118438565806298},
    "hendrycksTest-medical_genetics": {"acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974},
    "hendrycksTest-high_school_world_history": {"acc": 0.2742616033755274, "acc_stderr": 0.029041333510598046, "acc_norm": 0.29957805907172996, "acc_norm_stderr": 0.029818024749753095},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.19327731092436976, "acc_stderr": 0.02564947026588919, "acc_norm": 0.2815126050420168, "acc_norm_stderr": 0.029213549414372153},
    "hendrycksTest-management": {"acc": 0.1553398058252427, "acc_stderr": 0.03586594738573973, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690878},
    "hendrycksTest-high_school_mathematics": {"acc": 0.21851851851851853, "acc_stderr": 0.025195752251823793, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626},
    "hendrycksTest-logical_fallacies": {"acc": 0.1901840490797546, "acc_stderr": 0.030833491146281245, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664743},
    "hendrycksTest-world_religions": {"acc": 0.2631578947368421, "acc_stderr": 0.03377310252209194, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03615507630310935},
    "hendrycksTest-abstract_algebra": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816},
    "arc_challenge": {"acc": 0.20733788395904437, "acc_stderr": 0.01184690578297137, "acc_norm": 0.23890784982935154, "acc_norm_stderr": 0.012461071376316617},
    "hendrycksTest-machine_learning": {"acc": 0.29464285714285715, "acc_stderr": 0.04327040932578728, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.040073418097558065},
    "hendrycksTest-clinical_knowledge": {"acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.30566037735849055, "acc_norm_stderr": 0.028353298073322666},
    "hendrycksTest-professional_law": {"acc": 0.2561929595827901, "acc_stderr": 0.011149173153110582, "acc_norm": 0.2770534550195567, "acc_norm_stderr": 0.01143046244371968},
    "hendrycksTest-international_law": {"acc": 0.17355371900826447, "acc_stderr": 0.0345727283691767, "acc_norm": 0.4462809917355372, "acc_norm_stderr": 0.0453793517794788},
    "lambada_openai": {"ppl": 16.39826111439643, "ppl_stderr": 0.5572608146298462, "acc": 0.45158160294973804, "acc_stderr": 0.006933239470474417},
    "hendrycksTest-nutrition": {"acc": 0.28431372549019607, "acc_stderr": 0.025829163272757465, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.027826109307283683},
    "hendrycksTest-high_school_physics": {"acc": 0.25165562913907286, "acc_stderr": 0.035433042343899844, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804725},
    "hendrycksTest-anatomy": {"acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.03502553170678316},
    "hendrycksTest-prehistory": {"acc": 0.22839506172839505, "acc_stderr": 0.023358211840626267, "acc_norm": 0.20679012345679013, "acc_norm_stderr": 0.022535006705942818},
    "hendrycksTest-public_relations": {"acc": 0.3, "acc_stderr": 0.04389311454644287, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.041220665028782834},
    "hendrycksTest-virology": {"acc": 0.29518072289156627, "acc_stderr": 0.035509201856896294, "acc_norm": 0.3072289156626506, "acc_norm_stderr": 0.035915667978246635},
    "hendrycksTest-moral_scenarios": {"acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.2681564245810056, "acc_norm_stderr": 0.014816119635317005},
    "arc_easy": {"acc": 0.44023569023569026, "acc_stderr": 0.01018622862451566, "acc_norm": 0.4036195286195286, "acc_norm_stderr": 0.010067368960348204},
    "hendrycksTest-high_school_chemistry": {"acc": 0.19704433497536947, "acc_stderr": 0.027986724666736212, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114468},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.27692307692307694, "acc_stderr": 0.022688042352424994, "acc_norm": 0.2794871794871795, "acc_norm_stderr": 0.022752388839776826},
    "sciq": {"acc": 0.748, "acc_stderr": 0.013736254390651141, "acc_norm": 0.669, "acc_norm_stderr": 0.014888272588203945},
    "piqa": {"acc": 0.6436343852013058, "acc_stderr": 0.01117410986586471, "acc_norm": 0.6474428726877041, "acc_norm_stderr": 0.011147074365010456},
    "hendrycksTest-high_school_european_history": {"acc": 0.24848484848484848, "acc_stderr": 0.03374402644139404, "acc_norm": 0.3151515151515151, "acc_norm_stderr": 0.0362773057502241},
    "hendrycksTest-computer_security": {"acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
    "hendrycksTest-econometrics": {"acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362},
    "hendrycksTest-high_school_geography": {"acc": 0.21717171717171718, "acc_stderr": 0.02937661648494563, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713548},
    "hendrycksTest-sociology": {"acc": 0.31343283582089554, "acc_stderr": 0.03280188205348641, "acc_norm": 0.2835820895522388, "acc_norm_stderr": 0.031871875379197966},
    "winogrande": {"acc": 0.5232833464877664, "acc_stderr": 0.01403724130957364},
    "hendrycksTest-elementary_mathematics": {"acc": 0.23809523809523808, "acc_stderr": 0.02193587808118476, "acc_norm": 0.23544973544973544, "acc_norm_stderr": 0.02185150982203172},
    "hendrycksTest-college_chemistry": {"acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
    "hendrycksTest-college_computer_science": {"acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284},
    "hendrycksTest-formal_logic": {"acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366},
    "hendrycksTest-marketing": {"acc": 0.2777777777777778, "acc_stderr": 0.029343114798094476, "acc_norm": 0.32051282051282054, "acc_norm_stderr": 0.030572811310299607},
    "hendrycksTest-high_school_us_history": {"acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.031660096793998116},
    "hendrycksTest-moral_disputes": {"acc": 0.30057803468208094, "acc_stderr": 0.0246853168672578, "acc_norm": 0.3179190751445087, "acc_norm_stderr": 0.025070713719153183},
    "hendrycksTest-philosophy": {"acc": 0.20257234726688103, "acc_stderr": 0.022827317491059682, "acc_norm": 0.3086816720257235, "acc_norm_stderr": 0.026236965881153256},
    "hendrycksTest-astronomy": {"acc": 0.21052631578947367, "acc_stderr": 0.033176727875331574, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694},
    "hendrycksTest-miscellaneous": {"acc": 0.280970625798212, "acc_stderr": 0.01607312785122124, "acc_norm": 0.26181353767560667, "acc_norm_stderr": 0.01572083867844527},
    "hendrycksTest-college_biology": {"acc": 0.2777777777777778, "acc_stderr": 0.037455547914624555, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.035146974678623884},
    "hendrycksTest-electrical_engineering": {"acc": 0.3310344827586207, "acc_stderr": 0.039215453124671215, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.03855289616378949},
    "hendrycksTest-global_facts": {"acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909281},
    "hendrycksTest-security_studies": {"acc": 0.3673469387755102, "acc_stderr": 0.030862144921087558, "acc_norm": 0.2653061224489796, "acc_norm_stderr": 0.028263889943784603},
    "hendrycksTest-jurisprudence": {"acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04668408033024931},
    "hendrycksTest-professional_psychology": {"acc": 0.24509803921568626, "acc_stderr": 0.01740181671142766, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.017630827375148383}
  },
  "versions": {
    "hendrycksTest-college_medicine": 0, "hendrycksTest-high_school_government_and_politics": 0, "hendrycksTest-high_school_biology": 0, "hendrycksTest-conceptual_physics": 0,
    "hendrycksTest-high_school_statistics": 0, "hendrycksTest-professional_accounting": 0, "wsc": 0, "hendrycksTest-college_mathematics": 0,
    "hendrycksTest-high_school_computer_science": 0, "hendrycksTest-professional_medicine": 0, "hendrycksTest-college_physics": 0, "hendrycksTest-business_ethics": 0,
    "logiqa": 0, "hendrycksTest-us_foreign_policy": 0, "hendrycksTest-human_aging": 0, "hendrycksTest-high_school_psychology": 0,
    "hendrycksTest-human_sexuality": 0, "hendrycksTest-medical_genetics": 0, "hendrycksTest-high_school_world_history": 0, "hendrycksTest-high_school_microeconomics": 0,
    "hendrycksTest-management": 0, "hendrycksTest-high_school_mathematics": 0, "hendrycksTest-logical_fallacies": 0, "hendrycksTest-world_religions": 0,
    "hendrycksTest-abstract_algebra": 0, "arc_challenge": 0, "hendrycksTest-machine_learning": 0, "hendrycksTest-clinical_knowledge": 0,
    "hendrycksTest-professional_law": 0, "hendrycksTest-international_law": 0, "lambada_openai": 0, "hendrycksTest-nutrition": 0,
    "hendrycksTest-high_school_physics": 0, "hendrycksTest-anatomy": 0, "hendrycksTest-prehistory": 0, "hendrycksTest-public_relations": 0,
    "hendrycksTest-virology": 0, "hendrycksTest-moral_scenarios": 0, "arc_easy": 0, "hendrycksTest-high_school_chemistry": 0,
    "hendrycksTest-high_school_macroeconomics": 0, "sciq": 0, "piqa": 0, "hendrycksTest-high_school_european_history": 0,
    "hendrycksTest-computer_security": 0, "hendrycksTest-econometrics": 0, "hendrycksTest-high_school_geography": 0, "hendrycksTest-sociology": 0,
    "winogrande": 0, "hendrycksTest-elementary_mathematics": 0, "hendrycksTest-college_chemistry": 0, "hendrycksTest-college_computer_science": 0,
    "hendrycksTest-formal_logic": 0, "hendrycksTest-marketing": 0, "hendrycksTest-high_school_us_history": 0, "hendrycksTest-moral_disputes": 0,
    "hendrycksTest-philosophy": 0, "hendrycksTest-astronomy": 0, "hendrycksTest-miscellaneous": 0, "hendrycksTest-college_biology": 0,
    "hendrycksTest-electrical_engineering": 0, "hendrycksTest-global_facts": 0, "hendrycksTest-security_studies": 0, "hendrycksTest-jurisprudence": 0,
    "hendrycksTest-professional_psychology": 0
  },
  "config": {
    "model": "gpt2",
    "model_args": "pretrained=facebook/opt-350m",
    "num_fewshot": 0,
    "batch_size": 128,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
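For context, the "config" block in the file above mirrors the arguments of the harness's evaluator entry point at the time, so the run it records could be reproduced roughly as follows. This is a minimal sketch, assuming the `lm_eval.evaluator.simple_evaluate` API of that era; the task list is abbreviated to a few of the tasks present in the results rather than the full set that was evaluated.

```python
# Hedged sketch, not the exact command used to produce the deleted file: it
# assumes lm_eval.evaluator.simple_evaluate accepts the argument names that
# the "config" block above records, and it lists only a handful of the tasks.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="gpt2",                                 # HF causal-LM adapter name used at the time
    model_args="pretrained=facebook/opt-350m",
    tasks=["arc_easy", "arc_challenge", "piqa", "sciq", "winogrande", "lambada_openai"],
    num_fewshot=0,
    batch_size=128,
    device="cuda",
    no_cache=False,
    limit=None,
    bootstrap_iters=100000,
    description_dict={},
)

# The returned dict follows the same "results" / "versions" / "config" layout
# as the JSON shown above.
with open("opt-350m.json", "w") as f:
    json.dump(results, f, indent=2)
```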
results/opt/opt-6.7b/README.md (deleted, file mode 100644 → 0)
# opt-6.7b
## opt-6.7b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |30.55|± | 1.35|
| | |acc_norm|34.73|± | 1.39|
|arc_easy | 0|acc |65.61|± | 0.97|
| | |acc_norm|60.10|± | 1.00|
|hendrycksTest-abstract_algebra | 0|acc |22.00|± | 4.16|
| | |acc_norm|21.00|± | 4.09|
|hendrycksTest-anatomy | 0|acc |22.22|± | 3.59|
| | |acc_norm|23.70|± | 3.67|
|hendrycksTest-astronomy | 0|acc |26.97|± | 3.61|
| | |acc_norm|32.24|± | 3.80|
|hendrycksTest-business_ethics | 0|acc |33.00|± | 4.73|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-clinical_knowledge | 0|acc |26.04|± | 2.70|
| | |acc_norm|29.81|± | 2.82|
|hendrycksTest-college_biology | 0|acc |29.17|± | 3.80|
| | |acc_norm|24.31|± | 3.59|
|hendrycksTest-college_chemistry | 0|acc |22.00|± | 4.16|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-college_computer_science | 0|acc |36.00|± | 4.82|
| | |acc_norm|31.00|± | 4.65|
|hendrycksTest-college_mathematics | 0|acc |22.00|± | 4.16|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-college_medicine | 0|acc |20.81|± | 3.10|
| | |acc_norm|21.39|± | 3.13|
|hendrycksTest-college_physics | 0|acc |20.59|± | 4.02|
| | |acc_norm|23.53|± | 4.22|
|hendrycksTest-computer_security | 0|acc |24.00|± | 4.29|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-conceptual_physics | 0|acc |27.23|± | 2.91|
| | |acc_norm|21.70|± | 2.69|
|hendrycksTest-econometrics | 0|acc |25.44|± | 4.10|
| | |acc_norm|25.44|± | 4.10|
|hendrycksTest-electrical_engineering | 0|acc |29.66|± | 3.81|
| | |acc_norm|34.48|± | 3.96|
|hendrycksTest-elementary_mathematics | 0|acc |24.60|± | 2.22|
| | |acc_norm|25.13|± | 2.23|
|hendrycksTest-formal_logic | 0|acc |29.37|± | 4.07|
| | |acc_norm|24.60|± | 3.85|
|hendrycksTest-global_facts | 0|acc |18.00|± | 3.86|
| | |acc_norm|22.00|± | 4.16|
|hendrycksTest-high_school_biology | 0|acc |25.16|± | 2.47|
| | |acc_norm|28.71|± | 2.57|
|hendrycksTest-high_school_chemistry | 0|acc |16.75|± | 2.63|
| | |acc_norm|27.59|± | 3.14|
|hendrycksTest-high_school_computer_science | 0|acc |24.00|± | 4.29|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-high_school_european_history | 0|acc |32.12|± | 3.65|
| | |acc_norm|27.88|± | 3.50|
|hendrycksTest-high_school_geography | 0|acc |21.72|± | 2.94|
| | |acc_norm|27.27|± | 3.17|
|hendrycksTest-high_school_government_and_politics| 0|acc |24.87|± | 3.12|
| | |acc_norm|24.35|± | 3.10|
|hendrycksTest-high_school_macroeconomics | 0|acc |28.97|± | 2.30|
| | |acc_norm|27.95|± | 2.28|
|hendrycksTest-high_school_mathematics | 0|acc |24.07|± | 2.61|
| | |acc_norm|31.48|± | 2.83|
|hendrycksTest-high_school_microeconomics | 0|acc |26.89|± | 2.88|
| | |acc_norm|31.93|± | 3.03|
|hendrycksTest-high_school_physics | 0|acc |21.19|± | 3.34|
| | |acc_norm|22.52|± | 3.41|
|hendrycksTest-high_school_psychology | 0|acc |28.81|± | 1.94|
| | |acc_norm|24.95|± | 1.86|
|hendrycksTest-high_school_statistics | 0|acc |27.78|± | 3.05|
| | |acc_norm|32.41|± | 3.19|
|hendrycksTest-high_school_us_history | 0|acc |26.47|± | 3.10|
| | |acc_norm|25.98|± | 3.08|
|hendrycksTest-high_school_world_history | 0|acc |24.47|± | 2.80|
| | |acc_norm|30.38|± | 2.99|
|hendrycksTest-human_aging | 0|acc |30.49|± | 3.09|
| | |acc_norm|29.15|± | 3.05|
|hendrycksTest-human_sexuality | 0|acc |33.59|± | 4.14|
| | |acc_norm|29.77|± | 4.01|
|hendrycksTest-international_law | 0|acc |31.40|± | 4.24|
| | |acc_norm|46.28|± | 4.55|
|hendrycksTest-jurisprudence | 0|acc |25.93|± | 4.24|
| | |acc_norm|44.44|± | 4.80|
|hendrycksTest-logical_fallacies | 0|acc |20.25|± | 3.16|
| | |acc_norm|27.61|± | 3.51|
|hendrycksTest-machine_learning | 0|acc |20.54|± | 3.83|
| | |acc_norm|22.32|± | 3.95|
|hendrycksTest-management | 0|acc |29.13|± | 4.50|
| | |acc_norm|34.95|± | 4.72|
|hendrycksTest-marketing | 0|acc |28.21|± | 2.95|
| | |acc_norm|32.05|± | 3.06|
|hendrycksTest-medical_genetics | 0|acc |31.00|± | 4.65|
| | |acc_norm|35.00|± | 4.79|
|hendrycksTest-miscellaneous | 0|acc |31.03|± | 1.65|
| | |acc_norm|27.46|± | 1.60|
|hendrycksTest-moral_disputes | 0|acc |27.17|± | 2.39|
| | |acc_norm|31.50|± | 2.50|
|hendrycksTest-moral_scenarios | 0|acc |27.26|± | 1.49|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |30.72|± | 2.64|
| | |acc_norm|39.22|± | 2.80|
|hendrycksTest-philosophy | 0|acc |27.33|± | 2.53|
| | |acc_norm|31.83|± | 2.65|
|hendrycksTest-prehistory | 0|acc |25.00|± | 2.41|
| | |acc_norm|20.06|± | 2.23|
|hendrycksTest-professional_accounting | 0|acc |25.89|± | 2.61|
| | |acc_norm|25.89|± | 2.61|
|hendrycksTest-professional_law | 0|acc |26.14|± | 1.12|
| | |acc_norm|29.34|± | 1.16|
|hendrycksTest-professional_medicine | 0|acc |21.69|± | 2.50|
| | |acc_norm|24.26|± | 2.60|
|hendrycksTest-professional_psychology | 0|acc |25.16|± | 1.76|
| | |acc_norm|25.16|± | 1.76|
|hendrycksTest-public_relations | 0|acc |32.73|± | 4.49|
| | |acc_norm|18.18|± | 3.69|
|hendrycksTest-security_studies | 0|acc |42.04|± | 3.16|
| | |acc_norm|33.88|± | 3.03|
|hendrycksTest-sociology | 0|acc |28.36|± | 3.19|
| | |acc_norm|30.85|± | 3.27|
|hendrycksTest-us_foreign_policy | 0|acc |40.00|± | 4.92|
| | |acc_norm|35.00|± | 4.79|
|hendrycksTest-virology | 0|acc |33.73|± | 3.68|
| | |acc_norm|29.52|± | 3.55|
|hendrycksTest-world_religions | 0|acc |34.50|± | 3.65|
| | |acc_norm|36.26|± | 3.69|
|lambada_openai | 0|ppl | 4.25|± | 0.09|
| | |acc |67.71|± | 0.65|
|logiqa | 0|acc |23.50|± | 1.66|
| | |acc_norm|28.73|± | 1.77|
|piqa | 0|acc |76.28|± | 0.99|
| | |acc_norm|76.44|± | 0.99|
|sciq | 0|acc |90.10|± | 0.94|
| | |acc_norm|85.20|± | 1.12|
|winogrande | 0|acc |65.27|± | 1.34|
|wsc | 0|acc |42.31|± | 4.87|
results/opt/opt-6.7b/opt-6.7b.json (deleted, file mode 100644 → 0)
{
  "results": {
    "hendrycksTest-nutrition": {"acc": 0.30718954248366015, "acc_stderr": 0.026415601914389002, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.02795604616542451},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.24870466321243523, "acc_stderr": 0.0311958408777003, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845426},
    "hendrycksTest-professional_accounting": {"acc": 0.25886524822695034, "acc_stderr": 0.026129572527180848, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180848},
    "hendrycksTest-logical_fallacies": {"acc": 0.20245398773006135, "acc_stderr": 0.03157065078911902, "acc_norm": 0.27607361963190186, "acc_norm_stderr": 0.0351238528370505},
    "sciq": {"acc": 0.901, "acc_stderr": 0.009449248027662761, "acc_norm": 0.852, "acc_norm_stderr": 0.011234866364235247},
    "hendrycksTest-moral_scenarios": {"acc": 0.27262569832402234, "acc_stderr": 0.014893391735249588, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588},
    "hendrycksTest-college_computer_science": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "hendrycksTest-public_relations": {"acc": 0.32727272727272727, "acc_stderr": 0.044942908662520896, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.036942843353377997},
    "hendrycksTest-econometrics": {"acc": 0.2543859649122807, "acc_stderr": 0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716},
    "hendrycksTest-world_religions": {"acc": 0.34502923976608185, "acc_stderr": 0.036459813773888065, "acc_norm": 0.36257309941520466, "acc_norm_stderr": 0.0368713061556206},
    "hendrycksTest-high_school_mathematics": {"acc": 0.24074074074074073, "acc_stderr": 0.026067159222275788, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066468},
    "hendrycksTest-human_sexuality": {"acc": 0.33587786259541985, "acc_stderr": 0.041423137719966634, "acc_norm": 0.29770992366412213, "acc_norm_stderr": 0.040103589424622034},
    "hendrycksTest-high_school_chemistry": {"acc": 0.16748768472906403, "acc_stderr": 0.026273086047535397, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.03144712581678242},
    "hendrycksTest-college_mathematics": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
    "hendrycksTest-abstract_algebra": {"acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326},
    "hendrycksTest-formal_logic": {"acc": 0.29365079365079366, "acc_stderr": 0.04073524322147127, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.03852273364924315},
    "piqa": {"acc": 0.7627856365614799, "acc_stderr": 0.009924694933586367, "acc_norm": 0.764417845484222, "acc_norm_stderr": 0.009901067586473886},
    "arc_easy": {"acc": 0.6561447811447811, "acc_stderr": 0.009746660584852457, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.010048240683798742},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.28974358974358977, "acc_stderr": 0.023000628243687964, "acc_norm": 0.2794871794871795, "acc_norm_stderr": 0.02275238883977683},
    "logiqa": {"acc": 0.2350230414746544, "acc_stderr": 0.016631166823890965, "acc_norm": 0.2872503840245776, "acc_norm_stderr": 0.017747701948846596},
    "hendrycksTest-high_school_physics": {"acc": 0.2119205298013245, "acc_stderr": 0.033367670865679766, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936},
    "hendrycksTest-management": {"acc": 0.2912621359223301, "acc_stderr": 0.044986763205729224, "acc_norm": 0.34951456310679613, "acc_norm_stderr": 0.047211885060971716},
    "hendrycksTest-professional_medicine": {"acc": 0.21691176470588236, "acc_stderr": 0.025035845227711274, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.026040662474201264},
    "hendrycksTest-college_biology": {"acc": 0.2916666666666667, "acc_stderr": 0.03800968060554858, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080341},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.2689075630252101, "acc_stderr": 0.02880139219363128, "acc_norm": 0.31932773109243695, "acc_norm_stderr": 0.0302839955258844},
    "hendrycksTest-clinical_knowledge": {"acc": 0.26037735849056604, "acc_stderr": 0.0270087660907081, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.02815283794249386},
    "hendrycksTest-anatomy": {"acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506},
    "hendrycksTest-virology": {"acc": 0.3373493975903614, "acc_stderr": 0.03680783690727581, "acc_norm": 0.29518072289156627, "acc_norm_stderr": 0.0355092018568963},
    "hendrycksTest-college_medicine": {"acc": 0.20809248554913296, "acc_stderr": 0.0309528902177499, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.031265112061730424},
    "hendrycksTest-high_school_psychology": {"acc": 0.28807339449541286, "acc_stderr": 0.01941644589263602, "acc_norm": 0.24954128440366974, "acc_norm_stderr": 0.01855389762950162},
    "hendrycksTest-high_school_statistics": {"acc": 0.2777777777777778, "acc_stderr": 0.0305467452649532, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.03191923445686185},
    "hendrycksTest-elementary_mathematics": {"acc": 0.24603174603174602, "acc_stderr": 0.022182037202948368, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643895},
    "hendrycksTest-us_foreign_policy": {"acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102},
    "hendrycksTest-machine_learning": {"acc": 0.20535714285714285, "acc_stderr": 0.038342410214190735, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116},
    "hendrycksTest-marketing": {"acc": 0.28205128205128205, "acc_stderr": 0.02948036054954119, "acc_norm": 0.32051282051282054, "acc_norm_stderr": 0.030572811310299607},
    "arc_challenge": {"acc": 0.3054607508532423, "acc_stderr": 0.0134600804780025, "acc_norm": 0.34726962457337884, "acc_norm_stderr": 0.01391303452962044},
    "hendrycksTest-college_chemistry": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
    "hendrycksTest-high_school_biology": {"acc": 0.25161290322580643, "acc_stderr": 0.024685979286239956, "acc_norm": 0.2870967741935484, "acc_norm_stderr": 0.025736542745594528},
    "hendrycksTest-philosophy": {"acc": 0.2733118971061093, "acc_stderr": 0.02531176597542612, "acc_norm": 0.3183279742765273, "acc_norm_stderr": 0.026457225067811025},
    "lambada_openai": {"ppl": 4.252877363060981, "ppl_stderr": 0.0927244083936228, "acc": 0.6770813118571706, "acc_stderr": 0.006514469814384408},
    "hendrycksTest-high_school_world_history": {"acc": 0.24472573839662448, "acc_stderr": 0.027985699387036416, "acc_norm": 0.3037974683544304, "acc_norm_stderr": 0.0299366963871386},
    "hendrycksTest-high_school_european_history": {"acc": 0.3212121212121212, "acc_stderr": 0.03646204963253812, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781},
    "hendrycksTest-astronomy": {"acc": 0.26973684210526316, "acc_stderr": 0.03611780560284898, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351585},
    "hendrycksTest-sociology": {"acc": 0.2835820895522388, "acc_stderr": 0.03187187537919796, "acc_norm": 0.30845771144278605, "acc_norm_stderr": 0.03265819588512699},
    "hendrycksTest-human_aging": {"acc": 0.30493273542600896, "acc_stderr": 0.030898610882477515, "acc_norm": 0.2914798206278027, "acc_norm_stderr": 0.030500283176545902},
    "hendrycksTest-business_ethics": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283},
    "hendrycksTest-electrical_engineering": {"acc": 0.296551724137931, "acc_stderr": 0.03806142687309994, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451208},
    "hendrycksTest-moral_disputes": {"acc": 0.27167630057803466, "acc_stderr": 0.023948512905468355, "acc_norm": 0.315028901734104, "acc_norm_stderr": 0.025009313790069695},
    "hendrycksTest-prehistory": {"acc": 0.25, "acc_stderr": 0.02409347123262133, "acc_norm": 0.2006172839506173, "acc_norm_stderr": 0.022282313949774882},
    "hendrycksTest-professional_psychology": {"acc": 0.25163398692810457, "acc_stderr": 0.01755581809132227, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.01755581809132226},
    "hendrycksTest-conceptual_physics": {"acc": 0.2723404255319149, "acc_stderr": 0.029101290698386708, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496238},
    "hendrycksTest-professional_law": {"acc": 0.26140808344198174, "acc_stderr": 0.01122252816977131, "acc_norm": 0.29335071707953064, "acc_norm_stderr": 0.011628520449582073},
    "hendrycksTest-computer_security": {"acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
    "hendrycksTest-miscellaneous": {"acc": 0.3103448275862069, "acc_stderr": 0.016543785026048315, "acc_norm": 0.27458492975734355, "acc_norm_stderr": 0.01595982993308404},
    "hendrycksTest-global_facts": {"acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695},
    "hendrycksTest-high_school_computer_science": {"acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
    "hendrycksTest-high_school_us_history": {"acc": 0.2647058823529412, "acc_stderr": 0.030964517926923393, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.03077855467869326},
    "hendrycksTest-jurisprudence": {"acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04803752235190193},
    "hendrycksTest-security_studies": {"acc": 0.4204081632653061, "acc_stderr": 0.03160106993449603, "acc_norm": 0.33877551020408164, "acc_norm_stderr": 0.030299506562154185},
    "hendrycksTest-medical_genetics": {"acc": 0.31, "acc_stderr": 0.04648231987117317, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019},
    "wsc": {"acc": 0.4230769230769231, "acc_stderr": 0.048679937479186836},
    "hendrycksTest-high_school_geography": {"acc": 0.21717171717171718, "acc_stderr": 0.029376616484945633, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03173071239071724},
    "hendrycksTest-international_law": {"acc": 0.3140495867768595, "acc_stderr": 0.04236964753041017, "acc_norm": 0.4628099173553719, "acc_norm_stderr": 0.04551711196104218},
    "hendrycksTest-college_physics": {"acc": 0.20588235294117646, "acc_stderr": 0.040233822736177455, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.042207736591714534},
    "winogrande": {"acc": 0.6527229676400947, "acc_stderr": 0.013380909249751233}
  },
  "versions": {
    "hendrycksTest-nutrition": 0, "hendrycksTest-high_school_government_and_politics": 0, "hendrycksTest-professional_accounting": 0, "hendrycksTest-logical_fallacies": 0,
    "sciq": 0, "hendrycksTest-moral_scenarios": 0, "hendrycksTest-college_computer_science": 0, "hendrycksTest-public_relations": 0,
    "hendrycksTest-econometrics": 0, "hendrycksTest-world_religions": 0, "hendrycksTest-high_school_mathematics": 0, "hendrycksTest-human_sexuality": 0,
    "hendrycksTest-high_school_chemistry": 0, "hendrycksTest-college_mathematics": 0, "hendrycksTest-abstract_algebra": 0, "hendrycksTest-formal_logic": 0,
    "piqa": 0, "arc_easy": 0, "hendrycksTest-high_school_macroeconomics": 0, "logiqa": 0,
    "hendrycksTest-high_school_physics": 0, "hendrycksTest-management": 0, "hendrycksTest-professional_medicine": 0, "hendrycksTest-college_biology": 0,
    "hendrycksTest-high_school_microeconomics": 0, "hendrycksTest-clinical_knowledge": 0, "hendrycksTest-anatomy": 0, "hendrycksTest-virology": 0,
    "hendrycksTest-college_medicine": 0, "hendrycksTest-high_school_psychology": 0, "hendrycksTest-high_school_statistics": 0, "hendrycksTest-elementary_mathematics": 0,
    "hendrycksTest-us_foreign_policy": 0, "hendrycksTest-machine_learning": 0, "hendrycksTest-marketing": 0, "arc_challenge": 0,
    "hendrycksTest-college_chemistry": 0, "hendrycksTest-high_school_biology": 0, "hendrycksTest-philosophy": 0, "lambada_openai": 0,
    "hendrycksTest-high_school_world_history": 0, "hendrycksTest-high_school_european_history": 0, "hendrycksTest-astronomy": 0, "hendrycksTest-sociology": 0,
    "hendrycksTest-human_aging": 0, "hendrycksTest-business_ethics": 0, "hendrycksTest-electrical_engineering": 0, "hendrycksTest-moral_disputes": 0,
    "hendrycksTest-prehistory": 0, "hendrycksTest-professional_psychology": 0, "hendrycksTest-conceptual_physics": 0, "hendrycksTest-professional_law": 0,
    "hendrycksTest-computer_security": 0, "hendrycksTest-miscellaneous": 0, "hendrycksTest-global_facts": 0, "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-high_school_us_history": 0, "hendrycksTest-jurisprudence": 0, "hendrycksTest-security_studies": 0, "hendrycksTest-medical_genetics": 0,
    "wsc": 0, "hendrycksTest-high_school_geography": 0, "hendrycksTest-international_law": 0, "hendrycksTest-college_physics": 0,
    "winogrande": 0
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=facebook/opt-6.7b,use_accelerate=True,device_map_option=sequential",
    "num_fewshot": 0,
    "batch_size": 16,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
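The README.md files in this folder report the same numbers as the JSON files, converted to percentages with two decimals and paired with a ± stderr column. The script that produced those tables is not part of this diff; the following is a minimal sketch of the conversion the tables imply (alignment padding omitted, perplexity left unscaled).

```python
# Sketch only, not the repository's actual README generator: renders a result
# JSON like the one above in the markdown table format used by the README.md
# files in this folder.
import json
import sys


def to_markdown(path: str) -> str:
    with open(path) as f:
        data = json.load(f)
    lines = [
        "| Task |Version| Metric |Value| |Stderr|",
        "|------|------:|--------|----:|---|-----:|",
    ]
    for task in sorted(data["results"]):             # README rows are sorted by task name
        metrics = data["results"][task]
        version = data["versions"][task]
        first = True
        for name, value in metrics.items():
            if name.endswith("_stderr"):
                continue                              # stderr values fill their own column
            scale = 1.0 if name == "ppl" else 100.0   # perplexity is reported unscaled
            stderr = metrics.get(name + "_stderr")
            pm_cell = "± " if stderr is not None else ""
            stderr_cell = f"{stderr * scale:.2f}" if stderr is not None else ""
            lines.append(
                "|{}|{}|{}|{:.2f}|{}|{}|".format(
                    task if first else "",
                    version if first else "",
                    name,
                    value * scale,
                    pm_cell,
                    stderr_cell,
                )
            )
            first = False
    return "\n".join(lines)


if __name__ == "__main__":
    print(to_markdown(sys.argv[1]))
```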
results/opt/opt-66b/README.md (deleted, file mode 100644 → 0)
# opt-66b
## opt-66b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |37.20|± | 1.41|
| | |acc_norm|40.10|± | 1.43|
|arc_easy | 0|acc |71.68|± | 0.92|
| | |acc_norm|67.30|± | 0.96|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|24.00|± | 4.29|
|hendrycksTest-anatomy | 0|acc |27.41|± | 3.85|
| | |acc_norm|26.67|± | 3.82|
|hendrycksTest-astronomy | 0|acc |28.95|± | 3.69|
| | |acc_norm|40.13|± | 3.99|
|hendrycksTest-business_ethics | 0|acc |29.00|± | 4.56|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-clinical_knowledge | 0|acc |24.15|± | 2.63|
| | |acc_norm|27.55|± | 2.75|
|hendrycksTest-college_biology | 0|acc |24.31|± | 3.59|
| | |acc_norm|25.00|± | 3.62|
|hendrycksTest-college_chemistry | 0|acc |30.00|± | 4.61|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |23.00|± | 4.23|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_mathematics | 0|acc |23.00|± | 4.23|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-college_medicine | 0|acc |23.70|± | 3.24|
| | |acc_norm|24.86|± | 3.30|
|hendrycksTest-college_physics | 0|acc |28.43|± | 4.49|
| | |acc_norm|26.47|± | 4.39|
|hendrycksTest-computer_security | 0|acc |32.00|± | 4.69|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-conceptual_physics | 0|acc |25.53|± | 2.85|
| | |acc_norm|22.98|± | 2.75|
|hendrycksTest-econometrics | 0|acc |28.07|± | 4.23|
| | |acc_norm|20.18|± | 3.78|
|hendrycksTest-electrical_engineering | 0|acc |35.86|± | 4.00|
| | |acc_norm|38.62|± | 4.06|
|hendrycksTest-elementary_mathematics | 0|acc |26.46|± | 2.27|
| | |acc_norm|27.25|± | 2.29|
|hendrycksTest-formal_logic | 0|acc |30.16|± | 4.10|
| | |acc_norm|28.57|± | 4.04|
|hendrycksTest-global_facts | 0|acc |29.00|± | 4.56|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-high_school_biology | 0|acc |26.13|± | 2.50|
| | |acc_norm|31.94|± | 2.65|
|hendrycksTest-high_school_chemistry | 0|acc |24.14|± | 3.01|
| | |acc_norm|34.48|± | 3.34|
|hendrycksTest-high_school_computer_science | 0|acc |31.00|± | 4.65|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-high_school_european_history | 0|acc |29.09|± | 3.55|
| | |acc_norm|29.70|± | 3.57|
|hendrycksTest-high_school_geography | 0|acc |26.26|± | 3.14|
| | |acc_norm|31.82|± | 3.32|
|hendrycksTest-high_school_government_and_politics| 0|acc |26.42|± | 3.18|
| | |acc_norm|26.42|± | 3.18|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.49|± | 2.31|
| | |acc_norm|26.67|± | 2.24|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|32.22|± | 2.85|
|hendrycksTest-high_school_microeconomics | 0|acc |29.83|± | 2.97|
| | |acc_norm|36.13|± | 3.12|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|23.18|± | 3.45|
|hendrycksTest-high_school_psychology | 0|acc |28.44|± | 1.93|
| | |acc_norm|25.87|± | 1.88|
|hendrycksTest-high_school_statistics | 0|acc |29.17|± | 3.10|
| | |acc_norm|33.33|± | 3.21|
|hendrycksTest-high_school_us_history | 0|acc |27.45|± | 3.13|
| | |acc_norm|30.39|± | 3.23|
|hendrycksTest-high_school_world_history | 0|acc |30.80|± | 3.01|
| | |acc_norm|32.49|± | 3.05|
|hendrycksTest-human_aging | 0|acc |28.70|± | 3.04|
| | |acc_norm|22.42|± | 2.80|
|hendrycksTest-human_sexuality | 0|acc |36.64|± | 4.23|
| | |acc_norm|32.82|± | 4.12|
|hendrycksTest-international_law | 0|acc |25.62|± | 3.98|
| | |acc_norm|49.59|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |30.56|± | 4.45|
| | |acc_norm|42.59|± | 4.78|
|hendrycksTest-logical_fallacies | 0|acc |23.93|± | 3.35|
| | |acc_norm|28.83|± | 3.56|
|hendrycksTest-machine_learning | 0|acc |24.11|± | 4.06|
| | |acc_norm|23.21|± | 4.01|
|hendrycksTest-management | 0|acc |29.13|± | 4.50|
| | |acc_norm|33.98|± | 4.69|
|hendrycksTest-marketing | 0|acc |29.06|± | 2.97|
| | |acc_norm|29.06|± | 2.97|
|hendrycksTest-medical_genetics | 0|acc |34.00|± | 4.76|
| | |acc_norm|45.00|± | 5.00|
|hendrycksTest-miscellaneous | 0|acc |32.69|± | 1.68|
| | |acc_norm|29.37|± | 1.63|
|hendrycksTest-moral_disputes | 0|acc |32.08|± | 2.51|
| | |acc_norm|31.79|± | 2.51|
|hendrycksTest-moral_scenarios | 0|acc |22.12|± | 1.39|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |33.33|± | 2.70|
| | |acc_norm|39.54|± | 2.80|
|hendrycksTest-philosophy | 0|acc |25.08|± | 2.46|
| | |acc_norm|35.05|± | 2.71|
|hendrycksTest-prehistory | 0|acc |24.38|± | 2.39|
| | |acc_norm|21.30|± | 2.28|
|hendrycksTest-professional_accounting | 0|acc |20.21|± | 2.40|
| | |acc_norm|22.70|± | 2.50|
|hendrycksTest-professional_law | 0|acc |27.90|± | 1.15|
| | |acc_norm|29.53|± | 1.17|
|hendrycksTest-professional_medicine | 0|acc |26.47|± | 2.68|
| | |acc_norm|29.78|± | 2.78|
|hendrycksTest-professional_psychology | 0|acc |26.14|± | 1.78|
| | |acc_norm|28.59|± | 1.83|
|hendrycksTest-public_relations | 0|acc |34.55|± | 4.55|
| | |acc_norm|15.45|± | 3.46|
|hendrycksTest-security_studies | 0|acc |38.37|± | 3.11|
| | |acc_norm|32.65|± | 3.00|
|hendrycksTest-sociology | 0|acc |28.86|± | 3.20|
| | |acc_norm|27.36|± | 3.15|
|hendrycksTest-us_foreign_policy | 0|acc |37.00|± | 4.85|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-virology | 0|acc |32.53|± | 3.65|
| | |acc_norm|30.12|± | 3.57|
|hendrycksTest-world_religions | 0|acc |33.92|± | 3.63|
| | |acc_norm|37.43|± | 3.71|
|lambada_openai | 0|ppl | 3.29|± | 0.06|
| | |acc |73.90|± | 0.61|
|logiqa | 0|acc |22.73|± | 1.64|
| | |acc_norm|28.73|± | 1.77|
|piqa | 0|acc |78.78|± | 0.95|
| | |acc_norm|79.87|± | 0.94|
|sciq | 0|acc |92.60|± | 0.83|
| | |acc_norm|87.30|± | 1.05|
|winogrande | 0|acc |68.75|± | 1.30|
|wsc | 0|acc |54.81|± | 4.90|
results/opt/opt-66b/opt-66b.json (deleted, file mode 100644 → 0)
{
  "results": {
    "hendrycksTest-college_mathematics": {"acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684},
    "hendrycksTest-high_school_physics": {"acc": 0.2251655629139073, "acc_stderr": 0.03410435282008937, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.03445406271987054},
    "hendrycksTest-high_school_european_history": {"acc": 0.2909090909090909, "acc_stderr": 0.03546563019624337, "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.03567969772268047},
    "arc_easy": {"acc": 0.7167508417508418, "acc_stderr": 0.009245632200075455, "acc_norm": 0.672979797979798, "acc_norm_stderr": 0.009626235849372198},
    "hendrycksTest-econometrics": {"acc": 0.2807017543859649, "acc_stderr": 0.042270544512322004, "acc_norm": 0.20175438596491227, "acc_norm_stderr": 0.037752050135836386},
    "hendrycksTest-professional_law": {"acc": 0.2790091264667536, "acc_stderr": 0.011455208832803538, "acc_norm": 0.2953063885267275, "acc_norm_stderr": 0.011651061936208818},
    "hendrycksTest-human_aging": {"acc": 0.28699551569506726, "acc_stderr": 0.030360379710291954, "acc_norm": 0.2242152466367713, "acc_norm_stderr": 0.027991534258519527},
    "hendrycksTest-high_school_computer_science": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
    "hendrycksTest-abstract_algebra": {"acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.26424870466321243, "acc_stderr": 0.03182155050916647, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.03182155050916647},
    "hendrycksTest-college_computer_science": {"acc": 0.23, "acc_stderr": 0.04229525846816507, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.29831932773109243, "acc_stderr": 0.029719142876342853, "acc_norm": 0.36134453781512604, "acc_norm_stderr": 0.03120469122515001},
    "arc_challenge": {"acc": 0.3720136518771331, "acc_stderr": 0.014124597881844461, "acc_norm": 0.40102389078498296, "acc_norm_stderr": 0.014322255790719864},
    "hendrycksTest-nutrition": {"acc": 0.3333333333333333, "acc_stderr": 0.02699254433929723, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.027996723180631435},
    "sciq": {"acc": 0.926, "acc_stderr": 0.008282064512704159, "acc_norm": 0.873, "acc_norm_stderr": 0.01053479862085575},
    "hendrycksTest-jurisprudence": {"acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.0478034362693679},
    "hendrycksTest-sociology": {"acc": 0.2885572139303483, "acc_stderr": 0.03203841040213321, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.031524391865554016},
    "hendrycksTest-clinical_knowledge": {"acc": 0.24150943396226415, "acc_stderr": 0.02634148037111837, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.02749566368372407},
    "hendrycksTest-international_law": {"acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.49586776859504134, "acc_norm_stderr": 0.04564198767432754},
    "hendrycksTest-virology": {"acc": 0.3253012048192771, "acc_stderr": 0.036471685236832266, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.035716092300534796},
    "hendrycksTest-college_physics": {"acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.043898699568087805},
    "hendrycksTest-high_school_chemistry": {"acc": 0.2413793103448276, "acc_stderr": 0.03010833071801162, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03344283744280458},
    "hendrycksTest-moral_disputes": {"acc": 0.3208092485549133, "acc_stderr": 0.02513100023364791, "acc_norm": 0.3179190751445087, "acc_norm_stderr": 0.025070713719153183},
    "hendrycksTest-high_school_statistics": {"acc": 0.2916666666666667, "acc_stderr": 0.030998666304560534, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03214952147802749},
    "winogrande": {"acc": 0.6874506708760852, "acc_stderr": 0.013027563620748837},
    "hendrycksTest-philosophy": {"acc": 0.2508038585209003, "acc_stderr": 0.024619771956697168, "acc_norm": 0.3504823151125402, "acc_norm_stderr": 0.027098652621301747},
    "wsc": {"acc": 0.5480769230769231, "acc_stderr": 0.049038186969314335},
    "hendrycksTest-astronomy": {"acc": 0.2894736842105263, "acc_stderr": 0.03690677986137283, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336285},
    "hendrycksTest-computer_security": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684},
    "hendrycksTest-high_school_psychology": {"acc": 0.28440366972477066, "acc_stderr": 0.019342036587702588, "acc_norm": 0.25871559633027524, "acc_norm_stderr": 0.01877605231961962},
    "hendrycksTest-college_chemistry": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236},
    "hendrycksTest-management": {"acc": 0.2912621359223301, "acc_stderr": 0.04498676320572922, "acc_norm": 0.33980582524271846, "acc_norm_stderr": 0.046897659372781335},
    "hendrycksTest-miscellaneous": {"acc": 0.3269476372924649, "acc_stderr": 0.016774908180131463, "acc_norm": 0.2937420178799489, "acc_norm_stderr": 0.016287759388491675},
    "hendrycksTest-high_school_world_history": {"acc": 0.3080168776371308, "acc_stderr": 0.030052389335605695, "acc_norm": 0.32489451476793246, "acc_norm_stderr": 0.030486039389105293},
    "lambada_openai": {"ppl": 3.2877565882479303, "ppl_stderr": 0.06361523543774811, "acc": 0.7389869978653212, "acc_stderr": 0.006118733561625588},
    "hendrycksTest-electrical_engineering": {"acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.38620689655172413, "acc_norm_stderr": 0.04057324734419034},
    "hendrycksTest-high_school_us_history": {"acc": 0.27450980392156865, "acc_stderr": 0.03132179803083292, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.03228210387037892},
    "hendrycksTest-college_medicine": {"acc": 0.23699421965317918, "acc_stderr": 0.03242414757483099, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318},
    "hendrycksTest-high_school_geography": {"acc": 0.26262626262626265, "acc_stderr": 0.03135305009533084, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.03318477333845331},
    "hendrycksTest-professional_medicine": {"acc": 0.2647058823529412, "acc_stderr": 0.026799562024887667, "acc_norm": 0.2977941176470588, "acc_norm_stderr": 0.02777829870154544},
    "hendrycksTest-machine_learning": {"acc": 0.24107142857142858, "acc_stderr": 0.04059867246952686, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755807},
    "hendrycksTest-logical_fallacies": {"acc": 0.2392638036809816, "acc_stderr": 0.03351953879521271, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.035590395316173425},
    "hendrycksTest-college_biology": {"acc": 0.24305555555555555, "acc_stderr": 0.03586879280080341, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507},
    "hendrycksTest-professional_accounting": {"acc": 0.20212765957446807, "acc_stderr": 0.023956668237850226, "acc_norm": 0.22695035460992907, "acc_norm_stderr": 0.02498710636564297},
    "hendrycksTest-business_ethics": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
    "piqa": {"acc": 0.7878128400435256, "acc_stderr": 0.009539299828174051, "acc_norm": 0.7986942328618063, "acc_norm_stderr": 0.009355431098990426},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.2948717948717949, "acc_stderr": 0.023119362758232294, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.022421273612923714},
    "hendrycksTest-us_foreign_policy": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
    "hendrycksTest-human_sexuality": {"acc": 0.366412213740458, "acc_stderr": 0.042258754519696386, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.04118438565806299},
    "hendrycksTest-high_school_biology": {"acc": 0.26129032258064516, "acc_stderr": 0.024993053397764826, "acc_norm": 0.3193548387096774, "acc_norm_stderr": 0.026522709674667768},
    "hendrycksTest-security_studies": {"acc": 0.3836734693877551, "acc_stderr": 0.03113088039623595, "acc_norm": 0.32653061224489793, "acc_norm_stderr": 0.030021056238440307},
    "hendrycksTest-high_school_mathematics": {"acc": 0.21851851851851853, "acc_stderr": 0.025195752251823793, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.0284934650910286},
    "hendrycksTest-elementary_mathematics": {"acc": 0.26455026455026454, "acc_stderr": 0.022717467897708628, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.02293097307163336},
    "hendrycksTest-conceptual_physics": {"acc": 0.2553191489361702, "acc_stderr": 0.028504856470514196, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.027501752944412417},
    "hendrycksTest-prehistory": {"acc": 0.24382716049382716, "acc_stderr": 0.023891879541959593, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.022779719088733396},
    "hendrycksTest-medical_genetics": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
    "hendrycksTest-anatomy": {"acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.038201699145179055},
    "hendrycksTest-professional_psychology": {"acc": 0.26143790849673204, "acc_stderr": 0.017776947157528037, "acc_norm": 0.28594771241830064, "acc_norm_stderr": 0.01828048507295467},
    "hendrycksTest-global_facts": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078},
    "hendrycksTest-moral_scenarios": {"acc": 0.2212290502793296, "acc_stderr": 0.013882164598887275, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588},
    "hendrycksTest-marketing": {"acc": 0.2905982905982906, "acc_stderr": 0.029745048572674057, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.029745048572674054},
    "hendrycksTest-formal_logic": {"acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.040406101782088394},
    "hendrycksTest-public_relations": {"acc": 0.34545454545454546, "acc_stderr": 0.04554619617541054, "acc_norm": 0.15454545454545454, "acc_norm_stderr": 0.03462262571262667},
    "logiqa": {"acc": 0.22734254992319508, "acc_stderr": 0.016439067675117748, "acc_norm": 0.2872503840245776, "acc_norm_stderr": 0.017747701948846593},
    "hendrycksTest-world_religions": {"acc": 0.3391812865497076, "acc_stderr": 0.036310534964889056, "acc_norm": 0.3742690058479532, "acc_norm_stderr": 0.03711601185389481}
  },
  "versions": {
    "hendrycksTest-college_mathematics": 0, "hendrycksTest-high_school_physics": 0, "hendrycksTest-high_school_european_history": 0, "arc_easy": 0,
    "hendrycksTest-econometrics": 0, "hendrycksTest-professional_law": 0, "hendrycksTest-human_aging": 0, "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-abstract_algebra": 0, "hendrycksTest-high_school_government_and_politics": 0, "hendrycksTest-college_computer_science": 0, "hendrycksTest-high_school_microeconomics": 0,
    "arc_challenge": 0, "hendrycksTest-nutrition": 0, "sciq": 0, "hendrycksTest-jurisprudence": 0,
    "hendrycksTest-sociology": 0, "hendrycksTest-clinical_knowledge": 0, "hendrycksTest-international_law": 0, "hendrycksTest-virology": 0,
    "hendrycksTest-college_physics": 0, "hendrycksTest-high_school_chemistry": 0, "hendrycksTest-moral_disputes": 0, "hendrycksTest-high_school_statistics": 0,
    "winogrande": 0, "hendrycksTest-philosophy": 0, "wsc": 0, "hendrycksTest-astronomy": 0,
    "hendrycksTest-computer_security": 0, "hendrycksTest-high_school_psychology": 0, "hendrycksTest-college_chemistry": 0, "hendrycksTest-management": 0,
    "hendrycksTest-miscellaneous": 0, "hendrycksTest-high_school_world_history": 0, "lambada_openai": 0, "hendrycksTest-electrical_engineering": 0,
    "hendrycksTest-high_school_us_history": 0, "hendrycksTest-college_medicine": 0, "hendrycksTest-high_school_geography": 0, "hendrycksTest-professional_medicine": 0,
    "hendrycksTest-machine_learning": 0, "hendrycksTest-logical_fallacies": 0, "hendrycksTest-college_biology": 0, "hendrycksTest-professional_accounting": 0,
    "hendrycksTest-business_ethics": 0, "piqa": 0, "hendrycksTest-high_school_macroeconomics": 0, "hendrycksTest-us_foreign_policy": 0,
    "hendrycksTest-human_sexuality": 0, "hendrycksTest-high_school_biology": 0, "hendrycksTest-security_studies": 0, "hendrycksTest-high_school_mathematics": 0,
    "hendrycksTest-elementary_mathematics": 0, "hendrycksTest-conceptual_physics": 0, "hendrycksTest-prehistory": 0, "hendrycksTest-medical_genetics": 0,
    "hendrycksTest-anatomy": 0, "hendrycksTest-professional_psychology": 0, "hendrycksTest-global_facts": 0, "hendrycksTest-moral_scenarios": 0,
    "hendrycksTest-marketing": 0, "hendrycksTest-formal_logic": 0, "hendrycksTest-public_relations": 0, "logiqa": 0,
    "hendrycksTest-world_religions": 0
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=facebook/opt-66b,use_accelerate=True,device_map_option=sequential,max_memory_per_gpu=40GIB",
    "num_fewshot": 0,
    "batch_size": 1,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
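Since the three OPT result files above share the same schema, a few lines of standard-library Python are enough to line their scores up by model size. The file paths below are the ones listed in this diff; nothing else is assumed about the harness itself.

```python
# Reads the three OPT result files removed in this commit and prints one
# benchmark's accuracy per model size.  Pure standard library; it relies only
# on the "results" layout visible in the JSON above.
import json

FILES = {
    "opt-350m": "results/opt/opt-350m/opt-350m.json",
    "opt-6.7b": "results/opt/opt-6.7b/opt-6.7b.json",
    "opt-66b": "results/opt/opt-66b/opt-66b.json",
}


def accuracy(path: str, task: str) -> float:
    with open(path) as f:
        return json.load(f)["results"][task]["acc"]


for task in ("arc_easy", "arc_challenge", "piqa", "sciq", "winogrande"):
    row = "  ".join(f"{name} {accuracy(path, task) * 100:5.2f}" for name, path in FILES.items())
    print(f"{task:14s} {row}")
```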
results/xglm/xglm-1.7B/README.md (deleted, file mode 100644 → 0)
# xglm-1.7B
## xglm-1.7B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |20.99|± | 1.19|
| | |acc_norm|24.32|± | 1.25|
|arc_easy | 0|acc |53.62|± | 1.02|
| | |acc_norm|47.90|± | 1.03|
|boolq | 1|acc |58.56|± | 0.86|
|copa | 0|acc |68.00|± | 4.69|
|hellaswag | 0|acc |36.18|± | 0.48|
| | |acc_norm|45.80|± | 0.50|
|mc_taco | 0|em |12.91| | |
| | |f1 |34.52| | |
|openbookqa | 0|acc |17.00|± | 1.68|
| | |acc_norm|29.80|± | 2.05|
|piqa | 0|acc |69.70|± | 1.07|
| | |acc_norm|70.35|± | 1.07|
|prost | 0|acc |22.69|± | 0.31|
| | |acc_norm|27.21|± | 0.33|
|swag | 0|acc |45.97|± | 0.35|
| | |acc_norm|62.19|± | 0.34|
|winogrande | 0|acc |54.93|± | 1.40|
|wsc273 | 0|acc |68.13|± | 2.83|
## xglm-1.7B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 0.99|± | 0.27|
## xglm-1.7B_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 0.67|± | 0.08|
| | |f1 | 3.44|± | 0.13|
|gsm8k | 0|acc | 0.83|± | 0.25|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.00|± | 0.00|
|math_prealgebra | 1|acc | 0.00|± | 0.00|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |22.91|± | 0.77|
| | |acc_norm|21.44|± | 0.75|
## xglm-1.7B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |57.55|± | 1.11|
|pawsx_en| 0|acc |52.65|± | 1.12|
|pawsx_es| 0|acc |53.80|± | 1.12|
|pawsx_fr| 0|acc |47.35|± | 1.12|
|pawsx_ja| 0|acc |46.10|± | 1.11|
|pawsx_ko| 0|acc |51.40|± | 1.12|
|pawsx_zh| 0|acc |48.10|± | 1.12|
## xglm-1.7B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 56.8|± | 2.22|
|xcopa_ht| 0|acc | 55.8|± | 2.22|
|xcopa_id| 0|acc | 64.6|± | 2.14|
|xcopa_it| 0|acc | 54.0|± | 2.23|
|xcopa_qu| 0|acc | 52.2|± | 2.24|
|xcopa_sw| 0|acc | 56.6|± | 2.22|
|xcopa_ta| 0|acc | 55.2|± | 2.23|
|xcopa_th| 0|acc | 58.2|± | 2.21|
|xcopa_tr| 0|acc | 53.4|± | 2.23|
|xcopa_vi| 0|acc | 63.0|± | 2.16|
|xcopa_zh| 0|acc | 58.0|± | 2.21|
## xglm-1.7B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.51|± | 0.67|
|xnli_bg| 0|acc |44.73|± | 0.70|
|xnli_de| 0|acc |45.33|± | 0.70|
|xnli_el| 0|acc |40.10|± | 0.69|
|xnli_en| 0|acc |49.68|± | 0.71|
|xnli_es| 0|acc |43.61|± | 0.70|
|xnli_fr| 0|acc |45.73|± | 0.70|
|xnli_hi| 0|acc |42.61|± | 0.70|
|xnli_ru| 0|acc |45.97|± | 0.70|
|xnli_sw| 0|acc |42.00|± | 0.70|
|xnli_th| 0|acc |41.70|± | 0.70|
|xnli_tr| 0|acc |42.95|± | 0.70|
|xnli_ur| 0|acc |39.50|± | 0.69|
|xnli_vi| 0|acc |45.03|± | 0.70|
|xnli_zh| 0|acc |33.77|± | 0.67|
## xglm-1.7B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |52.48|± | 1.29|
|xstory_cloze_en| 0|acc |64.33|± | 1.23|
|xstory_cloze_es| 0|acc |59.23|± | 1.26|
|xstory_cloze_eu| 0|acc |56.12|± | 1.28|
|xstory_cloze_hi| 0|acc |55.79|± | 1.28|
|xstory_cloze_id| 0|acc |57.97|± | 1.27|
|xstory_cloze_my| 0|acc |53.81|± | 1.28|
|xstory_cloze_ru| 0|acc |59.83|± | 1.26|
|xstory_cloze_sw| 0|acc |55.99|± | 1.28|
|xstory_cloze_te| 0|acc |58.04|± | 1.27|
|xstory_cloze_zh| 0|acc |56.19|± | 1.28|
## xglm-1.7B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |71.05|± | 0.94|
|xwinograd_fr| 0|acc |60.24|± | 5.40|
|xwinograd_jp| 0|acc |60.58|± | 1.58|
|xwinograd_pt| 0|acc |63.88|± | 2.97|
|xwinograd_ru| 0|acc |59.68|± | 2.77|
|xwinograd_zh| 0|acc |69.84|± | 2.05|
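The README tables above are just the per-task metrics from the matching JSON files below, scaled to percentages (value × 100, ± stderr × 100). A minimal sketch of that conversion, with an illustrative input path (any of the per-run JSON files would work):

```python
# Minimal sketch: render one results JSON as a README-style markdown table.
import json

with open("results/xglm/xglm-1.7B/xglm-1.7B_xcopa_0-shot.json") as f:
    data = json.load(f)

print("| Task |Version|Metric|Value| |Stderr|")
print("|------|------:|------|----:|---|-----:|")
for task in sorted(data["results"]):
    version = data["versions"][task]
    for metric, value in data["results"][task].items():
        if metric.endswith("_stderr"):
            continue  # stderr values are printed next to their metric instead
        stderr = data["results"][task].get(metric + "_stderr")
        if stderr is not None:
            print(f"|{task}| {version}|{metric}|{value * 100:.2f}|± |{stderr * 100:.2f}|")
        else:
            print(f"|{task}| {version}|{metric}|{value * 100:.2f}| | |")
```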
results/xglm/xglm-1.7B/xglm-1.7B_common_sense_reasoning_0-shot.json deleted 100644 → 0
{
  "results": {
    "copa": {"acc": 0.68, "acc_stderr": 0.046882617226215034},
    "piqa": {"acc": 0.6969532100108814, "acc_stderr": 0.010722648689531515, "acc_norm": 0.7034820457018498, "acc_norm_stderr": 0.010656078922661134},
    "prost": {"acc": 0.22694278394534587, "acc_stderr": 0.003060110855833208, "acc_norm": 0.27209649871904357, "acc_norm_stderr": 0.0032514084657504338},
    "arc_easy": {"acc": 0.5361952861952862, "acc_stderr": 0.01023286555034672, "acc_norm": 0.47895622895622897, "acc_norm_stderr": 0.01025069260202258},
    "hellaswag": {"acc": 0.3617805218084047, "acc_stderr": 0.004795337009118189, "acc_norm": 0.45797649870543716, "acc_norm_stderr": 0.004972126523031943},
    "mc_taco": {"em": 0.12912912912912913, "f1": 0.34519977153598014},
    "winogrande": {"acc": 0.5493291239147593, "acc_stderr": 0.013983928869040239},
    "wsc273": {"acc": 0.6813186813186813, "acc_stderr": 0.02825328818739863},
    "swag": {"acc": 0.4596621013695891, "acc_stderr": 0.0035235690445916223, "acc_norm": 0.6219134259722083, "acc_norm_stderr": 0.003428398656668824},
    "boolq": {"acc": 0.5856269113149847, "acc_stderr": 0.00861586377642113},
    "openbookqa": {"acc": 0.17, "acc_stderr": 0.016815633531393426, "acc_norm": 0.298, "acc_norm_stderr": 0.02047511809298897},
    "arc_challenge": {"acc": 0.2098976109215017, "acc_stderr": 0.011900548748047446, "acc_norm": 0.2431740614334471, "acc_norm_stderr": 0.012536554144587089}
  },
  "versions": {
    "copa": 0, "piqa": 0, "prost": 0, "arc_easy": 0, "hellaswag": 0, "mc_taco": 0,
    "winogrande": 0, "wsc273": 0, "swag": 0, "boolq": 1, "openbookqa": 0, "arc_challenge": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_gsm8k_8-shot.json deleted 100644 → 0
{
  "results": {
    "gsm8k": {"acc": 0.009855951478392721, "acc_stderr": 0.00272107657704166}
  },
  "versions": {
    "gsm8k": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 8, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_mathematical_reasoning_few_shot_5-shot.json deleted 100644 → 0
{
  "results": {
    "math_prealgebra": {"acc": 0.0, "acc_stderr": 0.0},
    "gsm8k": {"acc": 0.008339651250947688, "acc_stderr": 0.002504942226860518},
    "math_num_theory": {"acc": 0.0, "acc_stderr": 0.0},
    "math_precalc": {"acc": 0.0, "acc_stderr": 0.0},
    "math_algebra": {"acc": 0.0, "acc_stderr": 0.0},
    "math_geometry": {"acc": 0.0, "acc_stderr": 0.0},
    "drop": {"em": 0.006711409395973154, "em_stderr": 0.0008361500895152447, "f1": 0.03435402684563763, "f1_stderr": 0.0012720691502648663},
    "math_intermediate_algebra": {"acc": 0.0, "acc_stderr": 0.0},
    "math_counting_and_prob": {"acc": 0.0, "acc_stderr": 0.0},
    "mathqa": {"acc": 0.22914572864321608, "acc_stderr": 0.007693830518376538, "acc_norm": 0.21440536013400335, "acc_norm_stderr": 0.0075130739863118485}
  },
  "versions": {
    "math_prealgebra": 1, "gsm8k": 0, "math_num_theory": 1, "math_precalc": 1, "mathqa": 0,
    "math_algebra": 1, "math_geometry": 1, "drop": 1, "math_intermediate_algebra": 1, "math_counting_and_prob": 1
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 5, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_pawsx_0-shot.json deleted 100644 → 0
{
  "results": {
    "pawsx_fr": {"acc": 0.4735, "acc_stderr": 0.011167418260963935},
    "pawsx_de": {"acc": 0.5755, "acc_stderr": 0.011054907529701135},
    "pawsx_ko": {"acc": 0.514, "acc_stderr": 0.011178751372184865},
    "pawsx_ja": {"acc": 0.461, "acc_stderr": 0.011149065020234333},
    "pawsx_en": {"acc": 0.5265, "acc_stderr": 0.011167418260963933},
    "pawsx_es": {"acc": 0.538, "acc_stderr": 0.011150792352341666},
    "pawsx_zh": {"acc": 0.481, "acc_stderr": 0.011175058879956061}
  },
  "versions": {
    "pawsx_fr": 0, "pawsx_de": 0, "pawsx_ko": 0, "pawsx_ja": 0, "pawsx_en": 0, "pawsx_es": 0, "pawsx_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_xcopa_0-shot.json deleted 100644 → 0
{
  "results": {
    "xcopa_id": {"acc": 0.646, "acc_stderr": 0.02140758204791645},
    "xcopa_ta": {"acc": 0.552, "acc_stderr": 0.02226169729227013},
    "xcopa_ht": {"acc": 0.558, "acc_stderr": 0.022231970696321122},
    "xcopa_it": {"acc": 0.54, "acc_stderr": 0.022311333245289663},
    "xcopa_tr": {"acc": 0.534, "acc_stderr": 0.02233126442325838},
    "xcopa_th": {"acc": 0.582, "acc_stderr": 0.022080014812228137},
    "xcopa_qu": {"acc": 0.522, "acc_stderr": 0.022361396739207878},
    "xcopa_zh": {"acc": 0.58, "acc_stderr": 0.02209471322976178},
    "xcopa_vi": {"acc": 0.63, "acc_stderr": 0.02161328916516579},
    "xcopa_et": {"acc": 0.568, "acc_stderr": 0.022175109265613165},
    "xcopa_sw": {"acc": 0.566, "acc_stderr": 0.022187215803029008}
  },
  "versions": {
    "xcopa_id": 0, "xcopa_ta": 0, "xcopa_ht": 0, "xcopa_it": 0, "xcopa_tr": 0, "xcopa_th": 0,
    "xcopa_qu": 0, "xcopa_zh": 0, "xcopa_vi": 0, "xcopa_et": 0, "xcopa_sw": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_xnli_0-shot.json deleted 100644 → 0
{
  "results": {
    "xnli_bg": {"acc": 0.4473053892215569, "acc_stderr": 0.0070253693946827935},
    "xnli_hi": {"acc": 0.42614770459081835, "acc_stderr": 0.006987223294820979},
    "xnli_el": {"acc": 0.40099800399201596, "acc_stderr": 0.006924839696959944},
    "xnli_en": {"acc": 0.49680638722554893, "acc_stderr": 0.00706456831954508},
    "xnli_ar": {"acc": 0.33512974051896205, "acc_stderr": 0.006669594382503631},
    "xnli_ru": {"acc": 0.4596806387225549, "acc_stderr": 0.00704170545485625},
    "xnli_fr": {"acc": 0.45728542914171655, "acc_stderr": 0.007038885597058048},
    "xnli_tr": {"acc": 0.4295409181636727, "acc_stderr": 0.006994215414803208},
    "xnli_th": {"acc": 0.4169660678642715, "acc_stderr": 0.006966614137458995},
    "xnli_vi": {"acc": 0.4502994011976048, "acc_stderr": 0.007029723996054755},
    "xnli_de": {"acc": 0.4532934131736527, "acc_stderr": 0.0070338214783393326},
    "xnli_sw": {"acc": 0.41996007984031936, "acc_stderr": 0.006973606391328806},
    "xnli_zh": {"acc": 0.3377245508982036, "acc_stderr": 0.006682287063203171},
    "xnli_es": {"acc": 0.436127744510978, "acc_stderr": 0.007006832004922492},
    "xnli_ur": {"acc": 0.39500998003992016, "acc_stderr": 0.0069072094196003676}
  },
  "versions": {
    "xnli_bg": 0, "xnli_hi": 0, "xnli_el": 0, "xnli_en": 0, "xnli_ar": 0, "xnli_ru": 0, "xnli_fr": 0, "xnli_tr": 0,
    "xnli_th": 0, "xnli_vi": 0, "xnli_de": 0, "xnli_sw": 0, "xnli_zh": 0, "xnli_es": 0, "xnli_ur": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_xstory_cloze_0-shot.json deleted 100644 → 0
{
  "results": {
    "xstory_cloze_hi": {"acc": 0.5579086697551291, "acc_stderr": 0.012780536370279769},
    "xstory_cloze_sw": {"acc": 0.5598941098610192, "acc_stderr": 0.012774475160716338},
    "xstory_cloze_zh": {"acc": 0.5618795499669094, "acc_stderr": 0.012768206616277759},
    "xstory_cloze_my": {"acc": 0.5380542686962276, "acc_stderr": 0.0128298047203217},
    "xstory_cloze_eu": {"acc": 0.5612177365982793, "acc_stderr": 0.012770319186938004},
    "xstory_cloze_id": {"acc": 0.5797485109199206, "acc_stderr": 0.012702405649149104},
    "xstory_cloze_te": {"acc": 0.5804103242885507, "acc_stderr": 0.012699642268200749},
    "xstory_cloze_en": {"acc": 0.6432825943084051, "acc_stderr": 0.01232748767711036},
    "xstory_cloze_es": {"acc": 0.5923229649238915, "acc_stderr": 0.012645876488040282},
    "xstory_cloze_ar": {"acc": 0.5248180013236268, "acc_stderr": 0.012851264962354841},
    "xstory_cloze_ru": {"acc": 0.5982792852415619, "acc_stderr": 0.012616114526927917}
  },
  "versions": {
    "xstory_cloze_hi": 0, "xstory_cloze_sw": 0, "xstory_cloze_zh": 0, "xstory_cloze_my": 0, "xstory_cloze_eu": 0, "xstory_cloze_id": 0,
    "xstory_cloze_te": 0, "xstory_cloze_en": 0, "xstory_cloze_es": 0, "xstory_cloze_ar": 0, "xstory_cloze_ru": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-1.7B/xglm-1.7B_xwinograd_0-shot.json deleted 100644 → 0
{
  "results": {
    "xwinograd_pt": {"acc": 0.6387832699619772, "acc_stderr": 0.029676320268041578},
    "xwinograd_zh": {"acc": 0.6984126984126984, "acc_stderr": 0.020463437846223773},
    "xwinograd_jp": {"acc": 0.6058394160583942, "acc_stderr": 0.015788199459722305},
    "xwinograd_fr": {"acc": 0.6024096385542169, "acc_stderr": 0.05404517824786813},
    "xwinograd_en": {"acc": 0.7105376344086022, "acc_stderr": 0.009407441676993788},
    "xwinograd_ru": {"acc": 0.5968253968253968, "acc_stderr": 0.02768250629102932}
  },
  "versions": {
    "xwinograd_pt": 0, "xwinograd_zh": 0, "xwinograd_jp": 0, "xwinograd_fr": 0, "xwinograd_en": 0, "xwinograd_ru": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-2.9B/README.md deleted 100644 → 0
# xglm-2.9B
## xglm-2.9B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |23.46|± | 1.24|
| | |acc_norm|27.39|± | 1.30|
|arc_easy | 0|acc |56.65|± | 1.02|
| | |acc_norm|53.37|± | 1.02|
|boolq | 1|acc |61.44|± | 0.85|
|copa | 0|acc |74.00|± | 4.41|
|hellaswag | 0|acc |40.92|± | 0.49|
| | |acc_norm|53.70|± | 0.50|
|mc_taco | 0|em |11.94| | |
| | |f1 |47.80| | |
|openbookqa | 0|acc |21.60|± | 1.84|
| | |acc_norm|33.20|± | 2.11|
|piqa | 0|acc |71.27|± | 1.06|
| | |acc_norm|73.01|± | 1.04|
|prost | 0|acc |21.92|± | 0.30|
| | |acc_norm|26.64|± | 0.32|
|swag | 0|acc |48.49|± | 0.35|
| | |acc_norm|65.78|± | 0.34|
|winogrande | 0|acc |54.62|± | 1.40|
|wsc273 | 0|acc |71.06|± | 2.75|
## xglm-2.9B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |50.65|± | 1.12|
|pawsx_en| 0|acc |54.75|± | 1.11|
|pawsx_es| 0|acc |53.15|± | 1.12|
|pawsx_fr| 0|acc |49.70|± | 1.12|
|pawsx_ja| 0|acc |50.95|± | 1.12|
|pawsx_ko| 0|acc |46.75|± | 1.12|
|pawsx_zh| 0|acc |53.70|± | 1.12|
## xglm-2.9B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 58.2|± | 2.21|
|xcopa_ht| 0|acc | 55.8|± | 2.22|
|xcopa_id| 0|acc | 66.8|± | 2.11|
|xcopa_it| 0|acc | 60.2|± | 2.19|
|xcopa_qu| 0|acc | 50.2|± | 2.24|
|xcopa_sw| 0|acc | 58.8|± | 2.20|
|xcopa_ta| 0|acc | 54.2|± | 2.23|
|xcopa_th| 0|acc | 57.0|± | 2.22|
|xcopa_tr| 0|acc | 56.6|± | 2.22|
|xcopa_vi| 0|acc | 65.2|± | 2.13|
|xcopa_zh| 0|acc | 60.0|± | 2.19|
## xglm-2.9B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.65|± | 0.67|
|xnli_bg| 0|acc |45.97|± | 0.70|
|xnli_de| 0|acc |48.32|± | 0.71|
|xnli_el| 0|acc |41.40|± | 0.70|
|xnli_en| 0|acc |51.08|± | 0.71|
|xnli_es| 0|acc |46.67|± | 0.70|
|xnli_fr| 0|acc |45.03|± | 0.70|
|xnli_hi| 0|acc |44.03|± | 0.70|
|xnli_ru| 0|acc |45.29|± | 0.70|
|xnli_sw| 0|acc |44.43|± | 0.70|
|xnli_th| 0|acc |41.98|± | 0.70|
|xnli_tr| 0|acc |44.97|± | 0.70|
|xnli_ur| 0|acc |40.10|± | 0.69|
|xnli_vi| 0|acc |45.99|± | 0.70|
|xnli_zh| 0|acc |34.81|± | 0.67|
## xglm-2.9B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |53.87|± | 1.28|
|xstory_cloze_en| 0|acc |67.31|± | 1.21|
|xstory_cloze_es| 0|acc |60.95|± | 1.26|
|xstory_cloze_eu| 0|acc |56.32|± | 1.28|
|xstory_cloze_hi| 0|acc |57.51|± | 1.27|
|xstory_cloze_id| 0|acc |61.35|± | 1.25|
|xstory_cloze_my| 0|acc |55.20|± | 1.28|
|xstory_cloze_ru| 0|acc |62.21|± | 1.25|
|xstory_cloze_sw| 0|acc |56.72|± | 1.28|
|xstory_cloze_te| 0|acc |60.03|± | 1.26|
|xstory_cloze_zh| 0|acc |57.64|± | 1.27|
## xglm-2.9B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |75.61|± | 0.89|
|xwinograd_fr| 0|acc |59.04|± | 5.43|
|xwinograd_jp| 0|acc |64.65|± | 1.54|
|xwinograd_pt| 0|acc |66.16|± | 2.92|
|xwinograd_ru| 0|acc |62.86|± | 2.73|
|xwinograd_zh| 0|acc |71.63|± | 2.01|
results/xglm/xglm-2.9B/xglm-2.9B_common_sense_reasoning_0-shot.json deleted 100644 → 0
{
  "results": {
    "piqa": {"acc": 0.7127312295973884, "acc_stderr": 0.010557291761528637, "acc_norm": 0.7301414581066377, "acc_norm_stderr": 0.010356595421852193},
    "openbookqa": {"acc": 0.216, "acc_stderr": 0.01842190906141194, "acc_norm": 0.332, "acc_norm_stderr": 0.021081766571222856},
    "arc_challenge": {"acc": 0.23464163822525597, "acc_stderr": 0.01238387356076868, "acc_norm": 0.2738907849829352, "acc_norm_stderr": 0.013032004972989505},
    "arc_easy": {"acc": 0.5664983164983165, "acc_stderr": 0.010168640625454107, "acc_norm": 0.5336700336700336, "acc_norm_stderr": 0.010236494647406476},
    "boolq": {"acc": 0.6143730886850153, "acc_stderr": 0.008513189460768057},
    "wsc273": {"acc": 0.7106227106227107, "acc_stderr": 0.027495860234525278},
    "copa": {"acc": 0.74, "acc_stderr": 0.04408440022768077},
    "hellaswag": {"acc": 0.4091814379605656, "acc_stderr": 0.004906779523192668, "acc_norm": 0.5370444134634534, "acc_norm_stderr": 0.004976067726432559},
    "prost": {"acc": 0.21920367207514946, "acc_stderr": 0.003022497462586152, "acc_norm": 0.2664389410760034, "acc_norm_stderr": 0.0032299078734217036},
    "swag": {"acc": 0.4849045286414076, "acc_stderr": 0.0035334805738792946, "acc_norm": 0.6578026592022393, "acc_norm_stderr": 0.0033544154500719224},
    "mc_taco": {"em": 0.11936936936936937, "f1": 0.4779548809969738},
    "winogrande": {"acc": 0.5461720599842147, "acc_stderr": 0.013992441563707058}
  },
  "versions": {
    "piqa": 0, "openbookqa": 0, "arc_challenge": 0, "arc_easy": 0, "boolq": 1, "wsc273": 0,
    "copa": 0, "hellaswag": 0, "prost": 0, "swag": 0, "mc_taco": 0, "winogrande": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-2.9B/xglm-2.9B_pawsx_0-shot.json deleted 100644 → 0
{
  "results": {
    "pawsx_zh": {"acc": 0.537, "acc_stderr": 0.011152474561478182},
    "pawsx_de": {"acc": 0.5065, "acc_stderr": 0.0111821910061423},
    "pawsx_en": {"acc": 0.5475, "acc_stderr": 0.011132557743886095},
    "pawsx_es": {"acc": 0.5315, "acc_stderr": 0.011160921022883278},
    "pawsx_fr": {"acc": 0.497, "acc_stderr": 0.011182934722804556},
    "pawsx_ja": {"acc": 0.5095, "acc_stderr": 0.011181117282805214},
    "pawsx_ko": {"acc": 0.4675, "acc_stderr": 0.011159486640120933}
  },
  "versions": {
    "pawsx_zh": 0, "pawsx_de": 0, "pawsx_en": 0, "pawsx_es": 0, "pawsx_fr": 0, "pawsx_ja": 0, "pawsx_ko": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-2.9B/xglm-2.9B_xcopa_0-shot.json deleted 100644 → 0
{
  "results": {
    "xcopa_zh": {"acc": 0.6, "acc_stderr": 0.021930844120728505},
    "xcopa_ht": {"acc": 0.558, "acc_stderr": 0.02223197069632112},
    "xcopa_id": {"acc": 0.668, "acc_stderr": 0.021081766571222856},
    "xcopa_tr": {"acc": 0.566, "acc_stderr": 0.02218721580302901},
    "xcopa_it": {"acc": 0.602, "acc_stderr": 0.021912377885779967},
    "xcopa_qu": {"acc": 0.502, "acc_stderr": 0.02238289498648353},
    "xcopa_sw": {"acc": 0.588, "acc_stderr": 0.022033677993740865},
    "xcopa_th": {"acc": 0.57, "acc_stderr": 0.02216263442665284},
    "xcopa_vi": {"acc": 0.652, "acc_stderr": 0.021323728632807494},
    "xcopa_ta": {"acc": 0.542, "acc_stderr": 0.022303966774269938},
    "xcopa_et": {"acc": 0.582, "acc_stderr": 0.022080014812228137}
  },
  "versions": {
    "xcopa_zh": 0, "xcopa_ht": 0, "xcopa_id": 0, "xcopa_tr": 0, "xcopa_it": 0, "xcopa_qu": 0,
    "xcopa_sw": 0, "xcopa_th": 0, "xcopa_vi": 0, "xcopa_ta": 0, "xcopa_et": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-2.9B/xglm-2.9B_xnli_0-shot.json deleted 100644 → 0
{
  "results": {
    "xnli_ar": {"acc": 0.33652694610778444, "acc_stderr": 0.006676456919028458},
    "xnli_bg": {"acc": 0.4596806387225549, "acc_stderr": 0.007041705454856254},
    "xnli_de": {"acc": 0.48323353293413174, "acc_stderr": 0.007060739327060854},
    "xnli_el": {"acc": 0.41397205588822356, "acc_stderr": 0.00695935771309272},
    "xnli_en": {"acc": 0.5107784431137724, "acc_stderr": 0.007063070754956929},
    "xnli_es": {"acc": 0.4666666666666667, "acc_stderr": 0.0070489955857553875},
    "xnli_fr": {"acc": 0.4502994011976048, "acc_stderr": 0.00702972399605476},
    "xnli_hi": {"acc": 0.4403193612774451, "acc_stderr": 0.007014206007644934},
    "xnli_ru": {"acc": 0.4528942115768463, "acc_stderr": 0.007033289986695003},
    "xnli_sw": {"acc": 0.444311377245509, "acc_stderr": 0.007020757195791273},
    "xnli_th": {"acc": 0.41976047904191616, "acc_stderr": 0.006973148443615152},
    "xnli_tr": {"acc": 0.4497005988023952, "acc_stderr": 0.007028873660193274},
    "xnli_ur": {"acc": 0.40099800399201596, "acc_stderr": 0.006924839696959946},
    "xnli_vi": {"acc": 0.4598802395209581, "acc_stderr": 0.00704193305036814},
    "xnli_zh": {"acc": 0.34810379241516964, "acc_stderr": 0.006730821739872395}
  },
  "versions": {
    "xnli_ar": 0, "xnli_bg": 0, "xnli_de": 0, "xnli_el": 0, "xnli_en": 0, "xnli_es": 0, "xnli_fr": 0, "xnli_hi": 0,
    "xnli_ru": 0, "xnli_sw": 0, "xnli_th": 0, "xnli_tr": 0, "xnli_ur": 0, "xnli_vi": 0, "xnli_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}
results/xglm/xglm-2.9B/xglm-2.9B_xstory_cloze_0-shot.json deleted 100644 → 0
{
  "results": {
    "xstory_cloze_es": {"acc": 0.6095301125082727, "acc_stderr": 0.012554600076548371},
    "xstory_cloze_sw": {"acc": 0.5671740569159497, "acc_stderr": 0.01275047450298583},
    "xstory_cloze_en": {"acc": 0.6730641958967571, "acc_stderr": 0.012071771683911351},
    "xstory_cloze_zh": {"acc": 0.5764394440767704, "acc_stderr": 0.01271587138288145},
    "xstory_cloze_id": {"acc": 0.613500992720053, "acc_stderr": 0.012531219943771486},
    "xstory_cloze_eu": {"acc": 0.5632031767041694, "acc_stderr": 0.01276391225017363},
    "xstory_cloze_te": {"acc": 0.600264725347452, "acc_stderr": 0.01260576407762715},
    "xstory_cloze_ru": {"acc": 0.6221045665122436, "acc_stderr": 0.012477542072994664},
    "xstory_cloze_hi": {"acc": 0.5751158173395102, "acc_stderr": 0.01272109407352333},
    "xstory_cloze_ar": {"acc": 0.5387160820648577, "acc_stderr": 0.01282849335327155},
    "xstory_cloze_my": {"acc": 0.5519523494374586, "acc_stderr": 0.012797478885304733}
  },
  "versions": {
    "xstory_cloze_es": 0, "xstory_cloze_sw": 0, "xstory_cloze_en": 0, "xstory_cloze_zh": 0, "xstory_cloze_id": 0, "xstory_cloze_eu": 0,
    "xstory_cloze_te": 0, "xstory_cloze_ru": 0, "xstory_cloze_hi": 0, "xstory_cloze_ar": 0, "xstory_cloze_my": 0
  },
  "config": {
    "model": "hf-causal-experimental", "model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
    "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null,
    "bootstrap_iters": 100000, "description_dict": {}
  }
}