gaoqiong / lm-evaluation-harness · Commits

Commit 4de8a74e (unverified)
Authored May 21, 2023 by Stella Biderman; committed by GitHub on May 21, 2023

Merge branch 'master' into json-task

Parents: 3226ed64, bda68845
Changes: 191

Showing 11 changed files with 1097 additions and 0 deletions (+1097, -0)
results/mpt/mpt-7b/mpt-7b_lambada_0-shot.json                 +80   -0
results/mpt/mpt-7b/mpt-7b_mmlu_5-shot.json                    +416  -0
results/mpt/mpt-7b/mpt-7b_pawsx_0-shot.json                   +52   -0
results/mpt/mpt-7b/mpt-7b_reading_comprehension_0-shot.json   +36   -0
results/mpt/mpt-7b/mpt-7b_superglue_0-shot.json               +55   -0
results/mpt/mpt-7b/mpt-7b_unscramble_0-shot.json              +42   -0
results/mpt/mpt-7b/mpt-7b_xcopa_0-shot.json                   +72   -0
results/mpt/mpt-7b/mpt-7b_xnli_0-shot.json                    +92   -0
results/mpt/mpt-7b/mpt-7b_xstory_cloze_0-shot.json            +72   -0
results/mpt/mpt-7b/mpt-7b_xwinograd_0-shot.json               +47   -0
results/opt/opt-1.3b/README.md                                +133  -0
results/mpt/mpt-7b/mpt-7b_lambada_0-shot.json (new file, mode 100644)

{
  "results": {
    "lambada_openai": {"ppl": 3.8685393479651173, "ppl_stderr": 0.08079962928213236, "acc": 0.6834853483407723, "acc_stderr": 0.006479978824925181},
    "lambada_openai_cloze": {"ppl": 26.557641771948866, "ppl_stderr": 0.7040059319391023, "acc": 0.3964680768484378, "acc_stderr": 0.006815007030417622},
    "lambada_openai_mt_de": {"ppl": 70.11746381055114, "ppl_stderr": 4.044349022196945, "acc": 0.33766737822627596, "acc_stderr": 0.006588623616680426},
    "lambada_openai_mt_en": {"ppl": 3.8685393479651173, "ppl_stderr": 0.08079962928213236, "acc": 0.6834853483407723, "acc_stderr": 0.006479978824925181},
    "lambada_openai_mt_es": {"ppl": 67.22873557270539, "ppl_stderr": 3.6853291413042055, "acc": 0.3694934989326606, "acc_stderr": 0.006724504027913079},
    "lambada_openai_mt_fr": {"ppl": 42.93424455960906, "ppl_stderr": 2.3689294613751506, "acc": 0.4302348146710654, "acc_stderr": 0.006897835015074962},
    "lambada_openai_mt_it": {"ppl": 65.76277239773485, "ppl_stderr": 3.8678074045404967, "acc": 0.392004657481079, "acc_stderr": 0.006801548708056975},
    "lambada_standard": {"ppl": 4.922814417244969, "ppl_stderr": 0.10844800336124351, "acc": 0.619056860081506, "acc_stderr": 0.006765617200231529},
    "lambada_standard_cloze": {"ppl": 109.09886744922697, "ppl_stderr": 3.0351063735026287, "acc": 0.1674752571317679, "acc_stderr": 0.005202187177767864}
  },
  "versions": {
    "lambada_openai": 0, "lambada_openai_cloze": 0, "lambada_openai_mt_de": 0,
    "lambada_openai_mt_en": 0, "lambada_openai_mt_es": 0, "lambada_openai_mt_fr": 0,
    "lambada_openai_mt_it": 0, "lambada_standard": 0, "lambada_standard_cloze": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
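The "config" block in each of these files records the evaluator arguments that produced it. As a hedged illustration only (assuming the `lm_eval.evaluator.simple_evaluate` entry point of the harness at this point in its history, whose parameter names mirror the "config" keys above; the task list and output path are illustrative, and the exact signature should be checked against your checkout), a file like the one above could be regenerated along these lines:

```python
# Sketch only: regenerate a results JSON like mpt-7b_lambada_0-shot.json.
# Assumes the lm-evaluation-harness API of this era (lm_eval.evaluator.simple_evaluate);
# the keyword names mirror the "config" block recorded in the file above.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    tasks=["lambada_openai", "lambada_standard"],  # illustrative subset
    num_fewshot=0,
    batch_size="auto",
    device="cuda",
    no_cache=True,
    limit=None,
    bootstrap_iters=100000,
    description_dict={},
)

# The returned dict carries "results", "versions", and "config" keys,
# matching the layout of the files added in this commit.
with open("mpt-7b_lambada_0-shot.json", "w") as f:
    json.dump(results, f, indent=2)
```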
results/mpt/mpt-7b/mpt-7b_mmlu_5-shot.json (new file, mode 100644)

{
  "results": {
    "hendrycksTest-abstract_algebra": {"acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "hendrycksTest-anatomy": {"acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595853},
    "hendrycksTest-astronomy": {"acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.04017901275981748},
    "hendrycksTest-business_ethics": {"acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795},
    "hendrycksTest-clinical_knowledge": {"acc": 0.3320754716981132, "acc_stderr": 0.028985455652334388, "acc_norm": 0.37735849056603776, "acc_norm_stderr": 0.029832808114796005},
    "hendrycksTest-college_biology": {"acc": 0.3819444444444444, "acc_stderr": 0.040629907841466674, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424},
    "hendrycksTest-college_chemistry": {"acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "hendrycksTest-college_computer_science": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
    "hendrycksTest-college_mathematics": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
    "hendrycksTest-college_medicine": {"acc": 0.36416184971098264, "acc_stderr": 0.03669072477416907, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.036291466701596636},
    "hendrycksTest-college_physics": {"acc": 0.30392156862745096, "acc_stderr": 0.045766654032077615, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943},
    "hendrycksTest-computer_security": {"acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "hendrycksTest-conceptual_physics": {"acc": 0.3276595744680851, "acc_stderr": 0.030683020843231015, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02850485647051418},
    "hendrycksTest-econometrics": {"acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365},
    "hendrycksTest-electrical_engineering": {"acc": 0.36551724137931035, "acc_stderr": 0.040131241954243856, "acc_norm": 0.33793103448275863, "acc_norm_stderr": 0.03941707632064889},
    "hendrycksTest-elementary_mathematics": {"acc": 0.29894179894179895, "acc_stderr": 0.023577604791655802, "acc_norm": 0.28835978835978837, "acc_norm_stderr": 0.023330654054535892},
    "hendrycksTest-formal_logic": {"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.040406101782088394},
    "hendrycksTest-global_facts": {"acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
    "hendrycksTest-high_school_biology": {"acc": 0.36451612903225805, "acc_stderr": 0.02737987122994325, "acc_norm": 0.3903225806451613, "acc_norm_stderr": 0.027751256636969583},
    "hendrycksTest-high_school_chemistry": {"acc": 0.21182266009852216, "acc_stderr": 0.02874898368994106, "acc_norm": 0.21674876847290642, "acc_norm_stderr": 0.028990331252516235},
    "hendrycksTest-high_school_computer_science": {"acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "hendrycksTest-high_school_european_history": {"acc": 0.38181818181818183, "acc_stderr": 0.03793713171165635, "acc_norm": 0.37575757575757573, "acc_norm_stderr": 0.03781887353205983},
    "hendrycksTest-high_school_geography": {"acc": 0.3838383838383838, "acc_stderr": 0.03464881675016339, "acc_norm": 0.40404040404040403, "acc_norm_stderr": 0.03496130972056128},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.41450777202072536, "acc_stderr": 0.03555300319557673, "acc_norm": 0.41450777202072536, "acc_norm_stderr": 0.03555300319557672},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.3487179487179487, "acc_stderr": 0.024162780284017717, "acc_norm": 0.29743589743589743, "acc_norm_stderr": 0.02317740813146594},
    "hendrycksTest-high_school_mathematics": {"acc": 0.29259259259259257, "acc_stderr": 0.027738969632176088, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.02803792996911499},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.33613445378151263, "acc_stderr": 0.030684737115135353, "acc_norm": 0.3697478991596639, "acc_norm_stderr": 0.03135709599613591},
    "hendrycksTest-high_school_physics": {"acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743},
    "hendrycksTest-high_school_psychology": {"acc": 0.46972477064220186, "acc_stderr": 0.021397988604936965, "acc_norm": 0.44587155963302755, "acc_norm_stderr": 0.02131133500970858},
    "hendrycksTest-high_school_statistics": {"acc": 0.3287037037037037, "acc_stderr": 0.03203614084670058, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.03191923445686185},
    "hendrycksTest-high_school_us_history": {"acc": 0.3431372549019608, "acc_stderr": 0.03332139944668085, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.032566854844603886},
    "hendrycksTest-high_school_world_history": {"acc": 0.29535864978902954, "acc_stderr": 0.029696338713422876, "acc_norm": 0.2869198312236287, "acc_norm_stderr": 0.02944377302259469},
    "hendrycksTest-human_aging": {"acc": 0.336322869955157, "acc_stderr": 0.031708824268455, "acc_norm": 0.3273542600896861, "acc_norm_stderr": 0.031493846709941306},
    "hendrycksTest-human_sexuality": {"acc": 0.2748091603053435, "acc_stderr": 0.03915345408847836, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.041184385658062976},
    "hendrycksTest-international_law": {"acc": 0.371900826446281, "acc_stderr": 0.04412015806624504, "acc_norm": 0.49586776859504134, "acc_norm_stderr": 0.045641987674327526},
    "hendrycksTest-jurisprudence": {"acc": 0.3425925925925926, "acc_stderr": 0.045879047413018105, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.04732332615978814},
    "hendrycksTest-logical_fallacies": {"acc": 0.3803680981595092, "acc_stderr": 0.038142698932618374, "acc_norm": 0.36809815950920244, "acc_norm_stderr": 0.03789213935838395},
    "hendrycksTest-machine_learning": {"acc": 0.26785714285714285, "acc_stderr": 0.04203277291467763, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952686},
    "hendrycksTest-management": {"acc": 0.42718446601941745, "acc_stderr": 0.04897957737781169, "acc_norm": 0.39805825242718446, "acc_norm_stderr": 0.0484674825397724},
    "hendrycksTest-marketing": {"acc": 0.5512820512820513, "acc_stderr": 0.032583346493868806, "acc_norm": 0.5512820512820513, "acc_norm_stderr": 0.032583346493868806},
    "hendrycksTest-medical_genetics": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633},
    "hendrycksTest-miscellaneous": {"acc": 0.5555555555555556, "acc_stderr": 0.017769250583533246, "acc_norm": 0.5568326947637292, "acc_norm_stderr": 0.01776408503534841},
    "hendrycksTest-moral_disputes": {"acc": 0.3208092485549133, "acc_stderr": 0.025131000233647904, "acc_norm": 0.30057803468208094, "acc_norm_stderr": 0.024685316867257796},
    "hendrycksTest-moral_scenarios": {"acc": 0.26033519553072626, "acc_stderr": 0.014676252009319483, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249614},
    "hendrycksTest-nutrition": {"acc": 0.3431372549019608, "acc_stderr": 0.027184498909941616, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.02807415894760066},
    "hendrycksTest-philosophy": {"acc": 0.3762057877813505, "acc_stderr": 0.027513925683549427, "acc_norm": 0.36977491961414793, "acc_norm_stderr": 0.027417996705630998},
    "hendrycksTest-prehistory": {"acc": 0.33641975308641975, "acc_stderr": 0.02628973494595293, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.025630824975621344},
    "hendrycksTest-professional_accounting": {"acc": 0.3049645390070922, "acc_stderr": 0.027464708442022135, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178},
    "hendrycksTest-professional_law": {"acc": 0.25945241199478486, "acc_stderr": 0.011195262076350299, "acc_norm": 0.2842242503259452, "acc_norm_stderr": 0.011519880596516074},
    "hendrycksTest-professional_medicine": {"acc": 0.29411764705882354, "acc_stderr": 0.027678468642144703, "acc_norm": 0.3161764705882353, "acc_norm_stderr": 0.02824568739146291},
    "hendrycksTest-professional_psychology": {"acc": 0.315359477124183, "acc_stderr": 0.018798086284886883, "acc_norm": 0.3022875816993464, "acc_norm_stderr": 0.01857923271111388},
    "hendrycksTest-public_relations": {"acc": 0.41818181818181815, "acc_stderr": 0.04724577405731571, "acc_norm": 0.42727272727272725, "acc_norm_stderr": 0.04738198703545483},
    "hendrycksTest-security_studies": {"acc": 0.2816326530612245, "acc_stderr": 0.028795185574291282, "acc_norm": 0.24081632653061225, "acc_norm_stderr": 0.027372942201788163},
    "hendrycksTest-sociology": {"acc": 0.34328358208955223, "acc_stderr": 0.03357379665433431, "acc_norm": 0.3681592039800995, "acc_norm_stderr": 0.034104105654953025},
    "hendrycksTest-us_foreign_policy": {"acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
    "hendrycksTest-virology": {"acc": 0.3253012048192771, "acc_stderr": 0.03647168523683229, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227},
    "hendrycksTest-world_religions": {"acc": 0.543859649122807, "acc_stderr": 0.03820042586602966, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.03786720706234214}
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 0, "hendrycksTest-anatomy": 0, "hendrycksTest-astronomy": 0,
    "hendrycksTest-business_ethics": 0, "hendrycksTest-clinical_knowledge": 0, "hendrycksTest-college_biology": 0,
    "hendrycksTest-college_chemistry": 0, "hendrycksTest-college_computer_science": 0, "hendrycksTest-college_mathematics": 0,
    "hendrycksTest-college_medicine": 0, "hendrycksTest-college_physics": 0, "hendrycksTest-computer_security": 0,
    "hendrycksTest-conceptual_physics": 0, "hendrycksTest-econometrics": 0, "hendrycksTest-electrical_engineering": 0,
    "hendrycksTest-elementary_mathematics": 0, "hendrycksTest-formal_logic": 0, "hendrycksTest-global_facts": 0,
    "hendrycksTest-high_school_biology": 0, "hendrycksTest-high_school_chemistry": 0, "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-high_school_european_history": 0, "hendrycksTest-high_school_geography": 0, "hendrycksTest-high_school_government_and_politics": 0,
    "hendrycksTest-high_school_macroeconomics": 0, "hendrycksTest-high_school_mathematics": 0, "hendrycksTest-high_school_microeconomics": 0,
    "hendrycksTest-high_school_physics": 0, "hendrycksTest-high_school_psychology": 0, "hendrycksTest-high_school_statistics": 0,
    "hendrycksTest-high_school_us_history": 0, "hendrycksTest-high_school_world_history": 0, "hendrycksTest-human_aging": 0,
    "hendrycksTest-human_sexuality": 0, "hendrycksTest-international_law": 0, "hendrycksTest-jurisprudence": 0,
    "hendrycksTest-logical_fallacies": 0, "hendrycksTest-machine_learning": 0, "hendrycksTest-management": 0,
    "hendrycksTest-marketing": 0, "hendrycksTest-medical_genetics": 0, "hendrycksTest-miscellaneous": 0,
    "hendrycksTest-moral_disputes": 0, "hendrycksTest-moral_scenarios": 0, "hendrycksTest-nutrition": 0,
    "hendrycksTest-philosophy": 0, "hendrycksTest-prehistory": 0, "hendrycksTest-professional_accounting": 0,
    "hendrycksTest-professional_law": 0, "hendrycksTest-professional_medicine": 0, "hendrycksTest-professional_psychology": 0,
    "hendrycksTest-public_relations": 0, "hendrycksTest-security_studies": 0, "hendrycksTest-sociology": 0,
    "hendrycksTest-us_foreign_policy": 0, "hendrycksTest-virology": 0, "hendrycksTest-world_religions": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 5,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_pawsx_0-shot.json (new file, mode 100644)

{
  "results": {
    "pawsx_de": {"acc": 0.614, "acc_stderr": 0.010888584877766427},
    "pawsx_en": {"acc": 0.7035, "acc_stderr": 0.010214991337441791},
    "pawsx_es": {"acc": 0.6495, "acc_stderr": 0.01067154233969731},
    "pawsx_fr": {"acc": 0.6285, "acc_stderr": 0.01080751017293364},
    "pawsx_ja": {"acc": 0.493, "acc_stderr": 0.011182040020027768},
    "pawsx_ko": {"acc": 0.5365, "acc_stderr": 0.011153298751334327},
    "pawsx_zh": {"acc": 0.5625, "acc_stderr": 0.011095423796079503}
  },
  "versions": {
    "pawsx_de": 0, "pawsx_en": 0, "pawsx_es": 0, "pawsx_fr": 0,
    "pawsx_ja": 0, "pawsx_ko": 0, "pawsx_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_reading_comprehension_0-shot.json (new file, mode 100644)

{
  "results": {
    "coqa": {"f1": 0.7650867255895625, "f1_stderr": 0.01481717694356494, "em": 0.6301666666666667, "em_stderr": 0.018680205213012713},
    "drop": {"em": 0.03429110738255033, "em_stderr": 0.0018636035184959787, "f1": 0.1338569630872482, "f1_stderr": 0.0025165760673094154},
    "race": {"acc": 0.3866028708133971, "acc_stderr": 0.01507138477304713}
  },
  "versions": {
    "coqa": 1, "drop": 1, "race": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_superglue_0-shot.json (new file, mode 100644)

{
  "results": {
    "boolq": {"acc": 0.7382262996941896, "acc_stderr": 0.007688653730439849},
    "cb": {"acc": 0.4107142857142857, "acc_stderr": 0.0663363415035954, "f1": 0.2126984126984127},
    "copa": {"acc": 0.84, "acc_stderr": 0.036845294917747115},
    "multirc": {"acc": 0.008394543546694649, "acc_stderr": 0.002956987733350658},
    "record": {"f1": 0.900992857142857, "f1_stderr": 0.0029401923751195343, "em": 0.893, "em_stderr": 0.0030912870467751773},
    "wic": {"acc": 0.4843260188087774, "acc_stderr": 0.01980098495534785},
    "wsc": {"acc": 0.6346153846153846, "acc_stderr": 0.0474473339327792}
  },
  "versions": {
    "boolq": 1, "cb": 1, "copa": 0, "multirc": 1, "record": 0, "wic": 0, "wsc": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
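Each `*_stderr` field in these files is the standard error on the metric it accompanies (the harness bootstraps some of them, with `bootstrap_iters` in the config controlling the resampling), so a quick way to read the numbers is as an approximate 95% interval of metric ± 1.96 × stderr. A minimal sketch, using the boolq figures from the SuperGLUE file above:

```python
# Rough 95% confidence interval from a metric and its standard error,
# using the boolq numbers from mpt-7b_superglue_0-shot.json above.
def ci95(value: float, stderr: float) -> tuple[float, float]:
    """Normal-approximation 95% interval: value +/- 1.96 * stderr."""
    half_width = 1.96 * stderr
    return value - half_width, value + half_width

lo, hi = ci95(0.7382262996941896, 0.007688653730439849)
print(f"boolq acc ~ 73.8% (95% CI {lo:.3f}-{hi:.3f})")  # roughly 0.723-0.753
```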
results/mpt/mpt-7b/mpt-7b_unscramble_0-shot.json (new file, mode 100644)

{
  "results": {
    "anagrams1": {"acc": 0.0, "acc_stderr": 0.0},
    "anagrams2": {"acc": 0.0001, "acc_stderr": 0.0001000000000000119},
    "cycle_letters": {"acc": 0.0, "acc_stderr": 0.0},
    "random_insertion": {"acc": 0.0004, "acc_stderr": 0.00019996999474889126},
    "reversed_words": {"acc": 0.0, "acc_stderr": 0.0}
  },
  "versions": {
    "anagrams1": 0, "anagrams2": 0, "cycle_letters": 0, "random_insertion": 0, "reversed_words": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_xcopa_0-shot.json (new file, mode 100644)

{
  "results": {
    "xcopa_et": {"acc": 0.474, "acc_stderr": 0.02235279165091416},
    "xcopa_ht": {"acc": 0.498, "acc_stderr": 0.02238289498648353},
    "xcopa_id": {"acc": 0.568, "acc_stderr": 0.02217510926561316},
    "xcopa_it": {"acc": 0.594, "acc_stderr": 0.02198396209008634},
    "xcopa_qu": {"acc": 0.484, "acc_stderr": 0.0223716109825804},
    "xcopa_sw": {"acc": 0.516, "acc_stderr": 0.0223716109825804},
    "xcopa_ta": {"acc": 0.54, "acc_stderr": 0.022311333245289663},
    "xcopa_th": {"acc": 0.542, "acc_stderr": 0.02230396677426995},
    "xcopa_tr": {"acc": 0.516, "acc_stderr": 0.022371610982580396},
    "xcopa_vi": {"acc": 0.536, "acc_stderr": 0.022324981738385246},
    "xcopa_zh": {"acc": 0.632, "acc_stderr": 0.02158898256835354}
  },
  "versions": {
    "xcopa_et": 0, "xcopa_ht": 0, "xcopa_id": 0, "xcopa_it": 0,
    "xcopa_qu": 0, "xcopa_sw": 0, "xcopa_ta": 0, "xcopa_th": 0,
    "xcopa_tr": 0, "xcopa_vi": 0, "xcopa_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_xnli_0-shot.json (new file, mode 100644)

{
  "results": {
    "xnli_ar": {"acc": 0.3331337325349301, "acc_stderr": 0.006659676975732312},
    "xnli_bg": {"acc": 0.36826347305389223, "acc_stderr": 0.00681509322031267},
    "xnli_de": {"acc": 0.46447105788423154, "acc_stderr": 0.007046854204317796},
    "xnli_el": {"acc": 0.36187624750499003, "acc_stderr": 0.006789799946197319},
    "xnli_en": {"acc": 0.543313373253493, "acc_stderr": 0.007038155029004149},
    "xnli_es": {"acc": 0.4564870259481038, "acc_stderr": 0.007037909229199955},
    "xnli_fr": {"acc": 0.4880239520958084, "acc_stderr": 0.007062685615595022},
    "xnli_hi": {"acc": 0.3473053892215569, "acc_stderr": 0.006727214239733754},
    "xnli_ru": {"acc": 0.444311377245509, "acc_stderr": 0.007020757195791274},
    "xnli_sw": {"acc": 0.3341317365269461, "acc_stderr": 0.006664652441694265},
    "xnli_th": {"acc": 0.36127744510978044, "acc_stderr": 0.006787362347422093},
    "xnli_tr": {"acc": 0.37684630738522956, "acc_stderr": 0.006847061089041557},
    "xnli_ur": {"acc": 0.33632734530938124, "acc_stderr": 0.006675480563072364},
    "xnli_vi": {"acc": 0.37325349301397204, "acc_stderr": 0.0068339592620100505},
    "xnli_zh": {"acc": 0.3534930139720559, "acc_stderr": 0.006754629196407293}
  },
  "versions": {
    "xnli_ar": 0, "xnli_bg": 0, "xnli_de": 0, "xnli_el": 0, "xnli_en": 0,
    "xnli_es": 0, "xnli_fr": 0, "xnli_hi": 0, "xnli_ru": 0, "xnli_sw": 0,
    "xnli_th": 0, "xnli_tr": 0, "xnli_ur": 0, "xnli_vi": 0, "xnli_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_xstory_cloze_0-shot.json (new file, mode 100644)

{
  "results": {
    "xstory_cloze_ar": {"acc": 0.48510919920582396, "acc_stderr": 0.012861417842074004},
    "xstory_cloze_en": {"acc": 0.7789543348775645, "acc_stderr": 0.010678457581809239},
    "xstory_cloze_es": {"acc": 0.6604897418927862, "acc_stderr": 0.01218627614665943},
    "xstory_cloze_eu": {"acc": 0.5109199205823958, "acc_stderr": 0.012864056278255038},
    "xstory_cloze_hi": {"acc": 0.5168762409000662, "acc_stderr": 0.012859793919977606},
    "xstory_cloze_id": {"acc": 0.5519523494374586, "acc_stderr": 0.012797478885304742},
    "xstory_cloze_my": {"acc": 0.48378557246856385, "acc_stderr": 0.01286035780505586},
    "xstory_cloze_ru": {"acc": 0.5724685638649901, "acc_stderr": 0.012731259626982528},
    "xstory_cloze_sw": {"acc": 0.4990072799470549, "acc_stderr": 0.012867099955422935},
    "xstory_cloze_te": {"acc": 0.5294506949040371, "acc_stderr": 0.012844785490017004},
    "xstory_cloze_zh": {"acc": 0.5956320317670417, "acc_stderr": 0.012629580396570932}
  },
  "versions": {
    "xstory_cloze_ar": 0, "xstory_cloze_en": 0, "xstory_cloze_es": 0, "xstory_cloze_eu": 0,
    "xstory_cloze_hi": 0, "xstory_cloze_id": 0, "xstory_cloze_my": 0, "xstory_cloze_ru": 0,
    "xstory_cloze_sw": 0, "xstory_cloze_te": 0, "xstory_cloze_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_xwinograd_0-shot.json (new file, mode 100644)

{
  "results": {
    "xwinograd_en": {"acc": 0.8666666666666667, "acc_stderr": 0.0070514325016347275},
    "xwinograd_fr": {"acc": 0.6626506024096386, "acc_stderr": 0.05221260262032129},
    "xwinograd_jp": {"acc": 0.602711157455683, "acc_stderr": 0.015809751560314552},
    "xwinograd_pt": {"acc": 0.6692015209125475, "acc_stderr": 0.02906762615931534},
    "xwinograd_ru": {"acc": 0.6952380952380952, "acc_stderr": 0.025976599352305375},
    "xwinograd_zh": {"acc": 0.7162698412698413, "acc_stderr": 0.02010051064884106}
  },
  "versions": {
    "xwinograd_en": 0, "xwinograd_fr": 0, "xwinograd_jp": 0,
    "xwinograd_pt": 0, "xwinograd_ru": 0, "xwinograd_zh": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True,dtype=bfloat16",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/opt/opt-1.3b/README.md (new file, mode 100644)
# opt-1.3b
## opt-1.3b.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|arc_challenge | 0|acc |23.12|± | 1.23|
| | |acc_norm|29.44|± | 1.33|
|arc_easy | 0|acc |57.03|± | 1.02|
| | |acc_norm|50.93|± | 1.03|
|hendrycksTest-abstract_algebra | 0|acc |26.00|± | 4.41|
| | |acc_norm|25.00|± | 4.35|
|hendrycksTest-anatomy | 0|acc |22.96|± | 3.63|
| | |acc_norm|21.48|± | 3.55|
|hendrycksTest-astronomy | 0|acc |23.68|± | 3.46|
| | |acc_norm|34.21|± | 3.86|
|hendrycksTest-business_ethics | 0|acc |34.00|± | 4.76|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-clinical_knowledge | 0|acc |19.25|± | 2.43|
| | |acc_norm|25.28|± | 2.67|
|hendrycksTest-college_biology | 0|acc |26.39|± | 3.69|
| | |acc_norm|27.78|± | 3.75|
|hendrycksTest-college_chemistry | 0|acc |28.00|± | 4.51|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-college_computer_science | 0|acc |33.00|± | 4.73|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_mathematics | 0|acc |19.00|± | 3.94|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_medicine | 0|acc |17.92|± | 2.92|
| | |acc_norm|23.70|± | 3.24|
|hendrycksTest-college_physics | 0|acc |27.45|± | 4.44|
| | |acc_norm|30.39|± | 4.58|
|hendrycksTest-computer_security | 0|acc |29.00|± | 4.56|
| | |acc_norm|38.00|± | 4.88|
|hendrycksTest-conceptual_physics | 0|acc |21.70|± | 2.69|
| | |acc_norm|20.43|± | 2.64|
|hendrycksTest-econometrics | 0|acc |24.56|± | 4.05|
| | |acc_norm|23.68|± | 4.00|
|hendrycksTest-electrical_engineering | 0|acc |25.52|± | 3.63|
| | |acc_norm|28.97|± | 3.78|
|hendrycksTest-elementary_mathematics | 0|acc |19.58|± | 2.04|
| | |acc_norm|24.87|± | 2.23|
|hendrycksTest-formal_logic | 0|acc |29.37|± | 4.07|
| | |acc_norm|26.98|± | 3.97|
|hendrycksTest-global_facts | 0|acc |16.00|± | 3.68|
| | |acc_norm|18.00|± | 3.86|
|hendrycksTest-high_school_biology | 0|acc |20.97|± | 2.32|
| | |acc_norm|26.77|± | 2.52|
|hendrycksTest-high_school_chemistry | 0|acc |24.63|± | 3.03|
| | |acc_norm|30.54|± | 3.24|
|hendrycksTest-high_school_computer_science | 0|acc |27.00|± | 4.46|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-high_school_european_history | 0|acc |24.24|± | 3.35|
| | |acc_norm|27.27|± | 3.48|
|hendrycksTest-high_school_geography | 0|acc |22.22|± | 2.96|
| | |acc_norm|28.28|± | 3.21|
|hendrycksTest-high_school_government_and_politics| 0|acc |20.73|± | 2.93|
| | |acc_norm|23.83|± | 3.07|
|hendrycksTest-high_school_macroeconomics | 0|acc |29.23|± | 2.31|
| | |acc_norm|29.23|± | 2.31|
|hendrycksTest-high_school_mathematics | 0|acc |21.85|± | 2.52|
| | |acc_norm|28.89|± | 2.76|
|hendrycksTest-high_school_microeconomics | 0|acc |21.43|± | 2.67|
| | |acc_norm|30.25|± | 2.98|
|hendrycksTest-high_school_physics | 0|acc |22.52|± | 3.41|
| | |acc_norm|25.17|± | 3.54|
|hendrycksTest-high_school_psychology | 0|acc |22.57|± | 1.79|
| | |acc_norm|24.22|± | 1.84|
|hendrycksTest-high_school_statistics | 0|acc |25.46|± | 2.97|
| | |acc_norm|27.78|± | 3.05|
|hendrycksTest-high_school_us_history | 0|acc |25.00|± | 3.04|
| | |acc_norm|25.49|± | 3.06|
|hendrycksTest-high_school_world_history | 0|acc |26.58|± | 2.88|
| | |acc_norm|27.85|± | 2.92|
|hendrycksTest-human_aging | 0|acc |35.43|± | 3.21|
| | |acc_norm|29.15|± | 3.05|
|hendrycksTest-human_sexuality | 0|acc |40.46|± | 4.30|
| | |acc_norm|31.30|± | 4.07|
|hendrycksTest-international_law | 0|acc |17.36|± | 3.46|
| | |acc_norm|47.93|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |28.70|± | 4.37|
| | |acc_norm|39.81|± | 4.73|
|hendrycksTest-logical_fallacies | 0|acc |19.02|± | 3.08|
| | |acc_norm|28.83|± | 3.56|
|hendrycksTest-machine_learning | 0|acc |27.68|± | 4.25|
| | |acc_norm|27.68|± | 4.25|
|hendrycksTest-management | 0|acc |20.39|± | 3.99|
| | |acc_norm|27.18|± | 4.41|
|hendrycksTest-marketing | 0|acc |28.21|± | 2.95|
| | |acc_norm|33.76|± | 3.10|
|hendrycksTest-medical_genetics | 0|acc |27.00|± | 4.46|
| | |acc_norm|36.00|± | 4.82|
|hendrycksTest-miscellaneous | 0|acc |28.35|± | 1.61|
| | |acc_norm|28.74|± | 1.62|
|hendrycksTest-moral_disputes | 0|acc |27.17|± | 2.39|
| | |acc_norm|30.35|± | 2.48|
|hendrycksTest-moral_scenarios | 0|acc |23.80|± | 1.42|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |29.41|± | 2.61|
| | |acc_norm|39.54|± | 2.80|
|hendrycksTest-philosophy | 0|acc |23.79|± | 2.42|
| | |acc_norm|30.87|± | 2.62|
|hendrycksTest-prehistory | 0|acc |24.07|± | 2.38|
| | |acc_norm|21.60|± | 2.29|
|hendrycksTest-professional_accounting | 0|acc |25.89|± | 2.61|
| | |acc_norm|26.24|± | 2.62|
|hendrycksTest-professional_law | 0|acc |26.01|± | 1.12|
| | |acc_norm|28.03|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |24.63|± | 2.62|
| | |acc_norm|22.43|± | 2.53|
|hendrycksTest-professional_psychology | 0|acc |23.69|± | 1.72|
| | |acc_norm|25.49|± | 1.76|
|hendrycksTest-public_relations | 0|acc |25.45|± | 4.17|
| | |acc_norm|19.09|± | 3.76|
|hendrycksTest-security_studies | 0|acc |32.24|± | 2.99|
| | |acc_norm|26.53|± | 2.83|
|hendrycksTest-sociology | 0|acc |33.83|± | 3.35|
| | |acc_norm|34.33|± | 3.36|
|hendrycksTest-us_foreign_policy | 0|acc |32.00|± | 4.69|
| | |acc_norm|27.00|± | 4.46|
|hendrycksTest-virology | 0|acc |34.34|± | 3.70|
| | |acc_norm|30.12|± | 3.57|
|hendrycksTest-world_religions | 0|acc |34.50|± | 3.65|
| | |acc_norm|33.92|± | 3.63|
|lambada_openai | 0|ppl | 6.64|± | 0.17|
| | |acc |57.93|± | 0.69|
|logiqa | 0|acc |22.27|± | 1.63|
| | |acc_norm|27.19|± | 1.75|
|piqa | 0|acc |71.71|± | 1.05|
| | |acc_norm|72.47|± | 1.04|
|sciq | 0|acc |84.50|± | 1.15|
| | |acc_norm|76.50|± | 1.34|
|winogrande | 0|acc |59.75|± | 1.38|
|wsc | 0|acc |38.46|± | 4.79|
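The README tables in this results directory follow the same `Task | Version | Metric | Value | | Stderr` layout that the harness prints after a run. As a minimal sketch (standard library only; the input path is illustrative, and this only approximates the harness's own table writer), one of the JSON files above can be rendered into that layout like so:

```python
# Sketch: render a results JSON (like the files added in this commit) as a
# markdown table in the "Task | Version | Metric | Value | | Stderr" layout
# used by results/opt/opt-1.3b/README.md. The path below is illustrative.
import json

with open("results/mpt/mpt-7b/mpt-7b_superglue_0-shot.json") as f:
    data = json.load(f)

lines = ["| Task |Version| Metric |Value| |Stderr|",
         "|------|------:|--------|----:|---|-----:|"]
for task, metrics in data["results"].items():
    version = data["versions"][task]
    first = True
    for name, value in metrics.items():
        if name.endswith("_stderr"):
            continue  # stderr values appear beside their metric, not as rows
        stderr = metrics.get(name + "_stderr")
        scale = 1.0 if name.startswith("ppl") else 100.0  # the README keeps ppl unscaled
        lines.append("|{}|{:>7}|{:8}|{:5.2f}|{}|{:>6}|".format(
            task if first else "",
            version if first else "",
            name,
            value * scale,
            "±" if stderr is not None else "",
            "{:.2f}".format(stderr * scale) if stderr is not None else ""))
        first = False

print("\n".join(lines))
```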