gaoqiong / lm-evaluation-harness / Commits / 4fbbd60f

Commit 4fbbd60f (Unverified), authored Aug 01, 2023 by Hailey Schoelkopf, committed by GitHub on Aug 01, 2023

Merge pull request #718 from EleutherAI/remove-results-folder

Remove Results folder temporarily

Parents: 5e59782e, 1b9833dd
Changes: 188

Showing 20 changed files with 0 additions and 2505 deletions (+0 -2505)
results/llama/llama-7B/llama-7B_mathematical_reasoning_0-shot.json   +0 -76
results/llama/llama-7B/llama-7B_mathematical_reasoning_few_shot_5-shot.json   +0 -71
results/llama/llama-7B/llama-7B_mmlu_5-shot.json   +0 -416
results/llama/llama-7B/llama-7B_pawsx_0-shot.json   +0 -52
results/llama/llama-7B/llama-7B_question_answering_0-shot.json   +0 -66
results/llama/llama-7B/llama-7B_reading_comprehension_0-shot.json   +0 -36
results/llama/llama-7B/llama-7B_unscramble_0-shot.json   +0 -42
results/llama/llama-7B/llama-7B_xcopa_0-shot.json   +0 -72
results/llama/llama-7B/llama-7B_xnli_0-shot.json   +0 -92
results/llama/llama-7B/llama-7B_xstory_cloze_0-shot.json   +0 -72
results/llama/llama-7B/llama-7B_xwinograd_0-shot.json   +0 -47
results/mpt/mpt-7b/README.md   +0 -454
results/mpt/mpt-7b/mpt-7b_anli_0-shot.json   +0 -32
results/mpt/mpt-7b/mpt-7b_arithmetic_5-shot.json   +0 -67
results/mpt/mpt-7b/mpt-7b_bbh_3-shot.json   +0 -124
results/mpt/mpt-7b/mpt-7b_blimp_0-shot.json   +0 -352
results/mpt/mpt-7b/mpt-7b_common_sense_reasoning_0-shot.json   +0 -91
results/mpt/mpt-7b/mpt-7b_glue_0-shot.json   +0 -66
results/mpt/mpt-7b/mpt-7b_human_alignment_0-shot.json   +0 -197
results/mpt/mpt-7b/mpt-7b_lambada_0-shot.json   +0 -80
results/llama/llama-7B/llama-7B_mathematical_reasoning_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "mathqa": {"acc": 0.26767169179229483, "acc_stderr": 0.008105031808599684, "acc_norm": 0.27872696817420434, "acc_norm_stderr": 0.008208048863665952},
    "math_asdiv": {"acc": 0.0, "acc_stderr": 0.0},
    "gsm8k": {"acc": 0.0, "acc_stderr": 0.0},
    "math_num_theory": {"acc": 0.007407407407407408, "acc_stderr": 0.003693382168437238},
    "math_precalc": {"acc": 0.003663003663003663, "acc_stderr": 0.002587757368193461},
    "drop": {"em": 0.04268036912751678, "em_stderr": 0.002070056585023236, "f1": 0.1215950083892614, "f1_stderr": 0.0024765528531984883},
    "math_geometry": {"acc": 0.008350730688935281, "acc_stderr": 0.004162242110295851},
    "math_counting_and_prob": {"acc": 0.016877637130801686, "acc_stderr": 0.0059228268948526815},
    "math_intermediate_algebra": {"acc": 0.006644518272425249, "acc_stderr": 0.0027050844483854013},
    "math_prealgebra": {"acc": 0.012629161882893225, "acc_stderr": 0.003785888218263002},
    "math_algebra": {"acc": 0.016849199663016005, "acc_stderr": 0.0037372948497597248}
  },
  "versions": {"mathqa": 0, "math_asdiv": 0, "gsm8k": 0, "math_num_theory": 1, "math_precalc": 1, "drop": 1, "math_geometry": 1, "math_counting_and_prob": 1, "math_intermediate_algebra": 1, "math_prealgebra": 1, "math_algebra": 1},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True", "num_fewshot": 0, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
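For reference, the "config" block in each of these deleted files records the arguments the harness evaluator was called with. The following is a minimal sketch, assuming the pre-refactor (v0.3-era) lm_eval Python API and the site-specific pretrained path recorded above, of how a result file with this shape could be produced; the task subset is illustrative, not the full list that was run.

```python
# Minimal sketch (not the exact command used for these results): regenerating a
# result file of this shape with the pre-refactor lm-evaluation-harness API.
# The pretrained path is the site-specific one recorded in the config above.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    tasks=["mathqa", "gsm8k", "drop"],  # illustrative subset of the suite above
    num_fewshot=0,
    batch_size="auto",
    device="cuda:0",
    no_cache=True,
)

# simple_evaluate returns a dict with "results", "versions" and "config" keys,
# which is the layout these deleted JSON files stored.
with open("llama-7B_mathematical_reasoning_0-shot.json", "w") as f:
    json.dump(results, f, indent=2)
```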
results/llama/llama-7B/llama-7B_mathematical_reasoning_few_shot_5-shot.json (deleted, 100644 → 0)
{
  "results": {
    "mathqa": {"acc": 0.2820770519262982, "acc_stderr": 0.008238030326915545, "acc_norm": 0.2877721943048576, "acc_norm_stderr": 0.008287708494779904},
    "math_prealgebra": {"acc": 0.001148105625717566, "acc_stderr": 0.0011481056257175704},
    "math_geometry": {"acc": 0.0, "acc_stderr": 0.0},
    "math_intermediate_algebra": {"acc": 0.0, "acc_stderr": 0.0},
    "math_counting_and_prob": {"acc": 0.0, "acc_stderr": 0.0},
    "math_num_theory": {"acc": 0.0, "acc_stderr": 0.0},
    "gsm8k": {"acc": 0.0, "acc_stderr": 0.0},
    "drop": {"em": 0.012374161073825503, "em_stderr": 0.0011321233703992673, "f1": 0.020981543624161086, "f1_stderr": 0.001252441423790731},
    "math_precalc": {"acc": 0.0, "acc_stderr": 0.0},
    "math_algebra": {"acc": 0.0, "acc_stderr": 0.0}
  },
  "versions": {"mathqa": 0, "math_prealgebra": 1, "math_geometry": 1, "math_intermediate_algebra": 1, "math_counting_and_prob": 1, "math_num_theory": 1, "gsm8k": 0, "drop": 1, "math_precalc": 1, "math_algebra": 1},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True", "num_fewshot": 5, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_mmlu_5-shot.json (deleted, 100644 → 0)
{
  "results": {
    "hendrycksTest-high_school_geography": {"acc": 0.4292929292929293, "acc_stderr": 0.035265527246011986, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.03427308652999934},
    "hendrycksTest-philosophy": {"acc": 0.40192926045016075, "acc_stderr": 0.027846476005930477, "acc_norm": 0.3536977491961415, "acc_norm_stderr": 0.02715520810320088},
    "hendrycksTest-world_religions": {"acc": 0.6257309941520468, "acc_stderr": 0.03711601185389481, "acc_norm": 0.5146198830409356, "acc_norm_stderr": 0.038331852752130254},
    "hendrycksTest-college_biology": {"acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554858},
    "hendrycksTest-electrical_engineering": {"acc": 0.3586206896551724, "acc_stderr": 0.03996629574876719, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185554},
    "hendrycksTest-global_facts": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.48186528497409326, "acc_stderr": 0.03606065001832917, "acc_norm": 0.37305699481865284, "acc_norm_stderr": 0.03490205592048573},
    "hendrycksTest-moral_scenarios": {"acc": 0.2759776536312849, "acc_stderr": 0.014950103002475353, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588},
    "hendrycksTest-econometrics": {"acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936},
    "hendrycksTest-international_law": {"acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.045077322787750874},
    "hendrycksTest-us_foreign_policy": {"acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.31794871794871793, "acc_stderr": 0.02361088430892786, "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.023290888053772742},
    "hendrycksTest-virology": {"acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.035294868015111155},
    "hendrycksTest-high_school_mathematics": {"acc": 0.22592592592592592, "acc_stderr": 0.025497532639609542, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.02813325257881564},
    "hendrycksTest-clinical_knowledge": {"acc": 0.3886792452830189, "acc_stderr": 0.03000048544867599, "acc_norm": 0.38113207547169814, "acc_norm_stderr": 0.029890609686286627},
    "hendrycksTest-professional_psychology": {"acc": 0.3839869281045752, "acc_stderr": 0.01967580813528152, "acc_norm": 0.29901960784313725, "acc_norm_stderr": 0.01852175621542302},
    "hendrycksTest-formal_logic": {"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.042639068927951315},
    "hendrycksTest-management": {"acc": 0.4854368932038835, "acc_stderr": 0.04948637324026637, "acc_norm": 0.36893203883495146, "acc_norm_stderr": 0.0477761518115674},
    "hendrycksTest-human_sexuality": {"acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.366412213740458, "acc_norm_stderr": 0.042258754519696386},
    "hendrycksTest-high_school_world_history": {"acc": 0.3924050632911392, "acc_stderr": 0.03178471874564729, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.030781549102026216},
    "hendrycksTest-medical_genetics": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309},
    "hendrycksTest-computer_security": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
    "hendrycksTest-miscellaneous": {"acc": 0.5836526181353767, "acc_stderr": 0.0176279480304303, "acc_norm": 0.3895274584929757, "acc_norm_stderr": 0.017438082556264597},
    "hendrycksTest-public_relations": {"acc": 0.39090909090909093, "acc_stderr": 0.046737523336702384, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735},
    "hendrycksTest-college_physics": {"acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177},
    "hendrycksTest-professional_accounting": {"acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.29432624113475175, "acc_norm_stderr": 0.027187127011503793},
    "hendrycksTest-logical_fallacies": {"acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3496932515337423, "acc_norm_stderr": 0.037466683254700206},
    "hendrycksTest-business_ethics": {"acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332},
    "hendrycksTest-high_school_chemistry": {"acc": 0.2512315270935961, "acc_stderr": 0.030516530732694436, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.03210494433751458},
    "hendrycksTest-astronomy": {"acc": 0.45394736842105265, "acc_stderr": 0.04051646342874143, "acc_norm": 0.4605263157894737, "acc_norm_stderr": 0.04056242252249033},
    "hendrycksTest-high_school_us_history": {"acc": 0.4264705882352941, "acc_stderr": 0.03471157907953424, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.032566854844603886},
    "hendrycksTest-college_chemistry": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
    "hendrycksTest-abstract_algebra": {"acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808},
    "hendrycksTest-moral_disputes": {"acc": 0.36416184971098264, "acc_stderr": 0.025906632631016117, "acc_norm": 0.33236994219653176, "acc_norm_stderr": 0.02536116874968821},
    "hendrycksTest-college_computer_science": {"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128},
    "hendrycksTest-professional_law": {"acc": 0.2966101694915254, "acc_stderr": 0.011665946586082849, "acc_norm": 0.28552803129074317, "acc_norm_stderr": 0.011535751586665664},
    "hendrycksTest-college_mathematics": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.3865546218487395, "acc_stderr": 0.0316314580755238, "acc_norm": 0.36554621848739494, "acc_norm_stderr": 0.0312821770636846},
    "hendrycksTest-high_school_european_history": {"acc": 0.40606060606060607, "acc_stderr": 0.03834816355401181, "acc_norm": 0.3696969696969697, "acc_norm_stderr": 0.03769430314512568},
    "hendrycksTest-high_school_biology": {"acc": 0.3580645161290323, "acc_stderr": 0.027273890594300642, "acc_norm": 0.3580645161290323, "acc_norm_stderr": 0.02727389059430063},
    "hendrycksTest-security_studies": {"acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.31020408163265306, "acc_norm_stderr": 0.029613459872484375},
    "hendrycksTest-high_school_psychology": {"acc": 0.46605504587155966, "acc_stderr": 0.02138786335035399, "acc_norm": 0.30825688073394497, "acc_norm_stderr": 0.01979836669836726},
    "hendrycksTest-conceptual_physics": {"acc": 0.3276595744680851, "acc_stderr": 0.030683020843231004, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496228},
    "hendrycksTest-human_aging": {"acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.25112107623318386, "acc_norm_stderr": 0.02910522083322462},
    "hendrycksTest-prehistory": {"acc": 0.4012345679012346, "acc_stderr": 0.0272725828498398, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02492200116888633},
    "hendrycksTest-sociology": {"acc": 0.47761194029850745, "acc_stderr": 0.035319879302087305, "acc_norm": 0.42786069651741293, "acc_norm_stderr": 0.03498541988407795},
    "hendrycksTest-marketing": {"acc": 0.6111111111111112, "acc_stderr": 0.031937057262002924, "acc_norm": 0.5042735042735043, "acc_norm_stderr": 0.03275489264382132},
    "hendrycksTest-high_school_computer_science": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365},
    "hendrycksTest-machine_learning": {"acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762},
    "hendrycksTest-elementary_mathematics": {"acc": 0.3201058201058201, "acc_stderr": 0.024026846392873506, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484865},
    "hendrycksTest-nutrition": {"acc": 0.3954248366013072, "acc_stderr": 0.027996723180631435, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.028408302020332694},
    "hendrycksTest-anatomy": {"acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.03885004245800254},
    "hendrycksTest-jurisprudence": {"acc": 0.4351851851851852, "acc_stderr": 0.04792898170907062, "acc_norm": 0.5, "acc_norm_stderr": 0.04833682445228318},
    "hendrycksTest-college_medicine": {"acc": 0.37572254335260113, "acc_stderr": 0.036928207672648664, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267439},
    "hendrycksTest-high_school_statistics": {"acc": 0.3425925925925926, "acc_stderr": 0.03236585252602156, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.03236585252602156},
    "hendrycksTest-high_school_physics": {"acc": 0.2052980132450331, "acc_stderr": 0.03297986648473834, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.036313298039696525},
    "hendrycksTest-professional_medicine": {"acc": 0.3382352941176471, "acc_stderr": 0.028739328513983576, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.027257202606114948}
  },
  "versions": {
    "hendrycksTest-high_school_geography": 0, "hendrycksTest-philosophy": 0, "hendrycksTest-world_religions": 0, "hendrycksTest-college_biology": 0,
    "hendrycksTest-electrical_engineering": 0, "hendrycksTest-global_facts": 0, "hendrycksTest-high_school_government_and_politics": 0, "hendrycksTest-moral_scenarios": 0,
    "hendrycksTest-econometrics": 0, "hendrycksTest-international_law": 0, "hendrycksTest-us_foreign_policy": 0, "hendrycksTest-high_school_macroeconomics": 0,
    "hendrycksTest-virology": 0, "hendrycksTest-high_school_mathematics": 0, "hendrycksTest-clinical_knowledge": 0, "hendrycksTest-professional_psychology": 0,
    "hendrycksTest-formal_logic": 0, "hendrycksTest-management": 0, "hendrycksTest-human_sexuality": 0, "hendrycksTest-high_school_world_history": 0,
    "hendrycksTest-medical_genetics": 0, "hendrycksTest-computer_security": 0, "hendrycksTest-miscellaneous": 0, "hendrycksTest-public_relations": 0,
    "hendrycksTest-college_physics": 0, "hendrycksTest-professional_accounting": 0, "hendrycksTest-logical_fallacies": 0, "hendrycksTest-business_ethics": 0,
    "hendrycksTest-high_school_chemistry": 0, "hendrycksTest-astronomy": 0, "hendrycksTest-high_school_us_history": 0, "hendrycksTest-college_chemistry": 0,
    "hendrycksTest-abstract_algebra": 0, "hendrycksTest-moral_disputes": 0, "hendrycksTest-college_computer_science": 0, "hendrycksTest-professional_law": 0,
    "hendrycksTest-college_mathematics": 0, "hendrycksTest-high_school_microeconomics": 0, "hendrycksTest-high_school_european_history": 0, "hendrycksTest-high_school_biology": 0,
    "hendrycksTest-security_studies": 0, "hendrycksTest-high_school_psychology": 0, "hendrycksTest-conceptual_physics": 0, "hendrycksTest-human_aging": 0,
    "hendrycksTest-prehistory": 0, "hendrycksTest-sociology": 0, "hendrycksTest-marketing": 0, "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-machine_learning": 0, "hendrycksTest-elementary_mathematics": 0, "hendrycksTest-nutrition": 0, "hendrycksTest-anatomy": 0,
    "hendrycksTest-jurisprudence": 0, "hendrycksTest-college_medicine": 0, "hendrycksTest-high_school_statistics": 0, "hendrycksTest-high_school_physics": 0,
    "hendrycksTest-professional_medicine": 0
  },
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True", "num_fewshot": 5, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_pawsx_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "pawsx_en": {"acc": 0.6185, "acc_stderr": 0.010864524561478635},
    "pawsx_es": {"acc": 0.561, "acc_stderr": 0.011099599116647334},
    "pawsx_zh": {"acc": 0.491, "acc_stderr": 0.01118132420626029},
    "pawsx_fr": {"acc": 0.5295, "acc_stderr": 0.01116365480451166},
    "pawsx_de": {"acc": 0.5465, "acc_stderr": 0.011134669525078668},
    "pawsx_ko": {"acc": 0.497, "acc_stderr": 0.011182934722804558},
    "pawsx_ja": {"acc": 0.567, "acc_stderr": 0.011082279027990133}
  },
  "versions": {"pawsx_en": 0, "pawsx_es": 0, "pawsx_zh": 0, "pawsx_fr": 0, "pawsx_de": 0, "pawsx_ko": 0, "pawsx_ja": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_question_answering_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "webqs": {"acc": 0.0, "acc_stderr": 0.0},
    "truthfulqa_mc": {"mc1": 0.21052631578947367, "mc1_stderr": 0.014271740645964186, "mc2": 0.3413779054949544, "mc2_stderr": 0.013147086422190785},
    "headqa_en": {"acc": 0.3242159008023341, "acc_stderr": 0.008940599111132593, "acc_norm": 0.35922684172137126, "acc_norm_stderr": 0.009163935584608705},
    "triviaqa": {"acc": 0.0, "acc_stderr": 0.0},
    "headqa_es": {"acc": 0.2826404084609774, "acc_stderr": 0.00860064580960105, "acc_norm": 0.3242159008023341, "acc_norm_stderr": 0.008940599111132597},
    "logiqa": {"acc": 0.21812596006144394, "acc_stderr": 0.01619814925841932, "acc_norm": 0.30261136712749614, "acc_norm_stderr": 0.018018696598158843},
    "squad2": {"exact": 9.416322749094585, "f1": 19.449005859216243, "HasAns_exact": 18.488529014844804, "HasAns_f1": 38.58266642484388, "NoAns_exact": 0.3700588730025231, "NoAns_f1": 0.3700588730025231, "best_exact": 50.07159100480081, "best_f1": 50.0801228586961}
  },
  "versions": {"webqs": 0, "truthfulqa_mc": 1, "headqa_en": 0, "triviaqa": 1, "headqa_es": 0, "logiqa": 0, "squad2": 1},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True", "num_fewshot": 0, "batch_size": "auto", "device": "cuda:0", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_reading_comprehension_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "coqa": {"f1": 0.752079907334399, "f1_stderr": 0.01529202913294504, "em": 0.6266666666666667, "em_stderr": 0.01881953816383566},
    "drop": {"em": 0.035864093959731544, "em_stderr": 0.001904314663911949, "f1": 0.11349412751677862, "f1_stderr": 0.002340148712955784},
    "race": {"acc": 0.3990430622009569, "acc_stderr": 0.015155885289809327}
  },
  "versions": {"coqa": 1, "drop": 1, "race": 1},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True", "num_fewshot": 0, "batch_size": "auto", "device": "cuda:0", "no_cache": false, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_unscramble_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "anagrams1": {"acc": 0.0, "acc_stderr": 0.0},
    "anagrams2": {"acc": 0.0, "acc_stderr": 0.0},
    "cycle_letters": {"acc": 0.0, "acc_stderr": 0.0},
    "random_insertion": {"acc": 0.0, "acc_stderr": 0.0},
    "reversed_words": {"acc": 0.0, "acc_stderr": 0.0}
  },
  "versions": {"anagrams1": 0, "anagrams2": 0, "cycle_letters": 0, "random_insertion": 0, "reversed_words": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_xcopa_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "xcopa_sw": {"acc": 0.508, "acc_stderr": 0.022380208834928025},
    "xcopa_zh": {"acc": 0.562, "acc_stderr": 0.022210326363977417},
    "xcopa_et": {"acc": 0.488, "acc_stderr": 0.02237662679792717},
    "xcopa_th": {"acc": 0.558, "acc_stderr": 0.022231970696321122},
    "xcopa_tr": {"acc": 0.556, "acc_stderr": 0.022242244375731017},
    "xcopa_qu": {"acc": 0.514, "acc_stderr": 0.022374298166353185},
    "xcopa_id": {"acc": 0.546, "acc_stderr": 0.02228814759117695},
    "xcopa_ta": {"acc": 0.552, "acc_stderr": 0.02226169729227013},
    "xcopa_ht": {"acc": 0.51, "acc_stderr": 0.02237859698923078},
    "xcopa_vi": {"acc": 0.516, "acc_stderr": 0.0223716109825804},
    "xcopa_it": {"acc": 0.62, "acc_stderr": 0.021728881438701705}
  },
  "versions": {"xcopa_sw": 0, "xcopa_zh": 0, "xcopa_et": 0, "xcopa_th": 0, "xcopa_tr": 0, "xcopa_qu": 0, "xcopa_id": 0, "xcopa_ta": 0, "xcopa_ht": 0, "xcopa_vi": 0, "xcopa_it": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_xnli_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "xnli_hi": {"acc": 0.36107784431137724, "acc_stderr": 0.006786547275949383},
    "xnli_vi": {"acc": 0.35588822355289423, "acc_stderr": 0.006764908277770048},
    "xnli_fr": {"acc": 0.4375249500998004, "acc_stderr": 0.007009346470228512},
    "xnli_tr": {"acc": 0.35588822355289423, "acc_stderr": 0.006764908277770052},
    "xnli_el": {"acc": 0.34930139720558884, "acc_stderr": 0.00673619394518908},
    "xnli_zh": {"acc": 0.36227544910179643, "acc_stderr": 0.00679141867023231},
    "xnli_es": {"acc": 0.406187624750499, "acc_stderr": 0.006939248658213527},
    "xnli_sw": {"acc": 0.33712574850299404, "acc_stderr": 0.006679377985812511},
    "xnli_th": {"acc": 0.34510978043912177, "acc_stderr": 0.0067171859376095565},
    "xnli_bg": {"acc": 0.36986027944111777, "acc_stderr": 0.006821215321725033},
    "xnli_ru": {"acc": 0.3936127744510978, "acc_stderr": 0.006902939997053364},
    "xnli_ur": {"acc": 0.3339321357285429, "acc_stderr": 0.006663660032909966},
    "xnli_ar": {"acc": 0.33572854291417165, "acc_stderr": 0.006672543485924257},
    "xnli_en": {"acc": 0.5105788423153692, "acc_stderr": 0.007063131001466571},
    "xnli_de": {"acc": 0.4477045908183633, "acc_stderr": 0.007025964880868372}
  },
  "versions": {"xnli_hi": 0, "xnli_vi": 0, "xnli_fr": 0, "xnli_tr": 0, "xnli_el": 0, "xnli_zh": 0, "xnli_es": 0, "xnli_sw": 0, "xnli_th": 0, "xnli_bg": 0, "xnli_ru": 0, "xnli_ur": 0, "xnli_ar": 0, "xnli_en": 0, "xnli_de": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_xstory_cloze_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "xstory_cloze_id": {"acc": 0.5208471211118465, "acc_stderr": 0.012855936282881265},
    "xstory_cloze_en": {"acc": 0.7478491065519524, "acc_stderr": 0.011175031901561881},
    "xstory_cloze_ar": {"acc": 0.4831237590999338, "acc_stderr": 0.012859793919977602},
    "xstory_cloze_sw": {"acc": 0.5036399735274653, "acc_stderr": 0.012866784348289232},
    "xstory_cloze_my": {"acc": 0.4870946393117141, "acc_stderr": 0.012862838605728472},
    "xstory_cloze_hi": {"acc": 0.5268034414295168, "acc_stderr": 0.012848623899505763},
    "xstory_cloze_te": {"acc": 0.528788881535407, "acc_stderr": 0.012845779070719484},
    "xstory_cloze_es": {"acc": 0.6512243547319656, "acc_stderr": 0.012264502012981189},
    "xstory_cloze_zh": {"acc": 0.5433487756452681, "acc_stderr": 0.012818676452481954},
    "xstory_cloze_eu": {"acc": 0.500992720052945, "acc_stderr": 0.012867099955422926},
    "xstory_cloze_ru": {"acc": 0.613500992720053, "acc_stderr": 0.012531219943771484}
  },
  "versions": {"xstory_cloze_id": 0, "xstory_cloze_en": 0, "xstory_cloze_ar": 0, "xstory_cloze_sw": 0, "xstory_cloze_my": 0, "xstory_cloze_hi": 0, "xstory_cloze_te": 0, "xstory_cloze_es": 0, "xstory_cloze_zh": 0, "xstory_cloze_eu": 0, "xstory_cloze_ru": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/llama/llama-7B/llama-7B_xwinograd_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "xwinograd_en": {"acc": 0.8494623655913979, "acc_stderr": 0.007417824483418801},
    "xwinograd_fr": {"acc": 0.7228915662650602, "acc_stderr": 0.049425892997830914},
    "xwinograd_jp": {"acc": 0.5891553701772679, "acc_stderr": 0.01589538213585439},
    "xwinograd_zh": {"acc": 0.6369047619047619, "acc_stderr": 0.02144191312603829},
    "xwinograd_ru": {"acc": 0.6444444444444445, "acc_stderr": 0.02701354947053879},
    "xwinograd_pt": {"acc": 0.7072243346007605, "acc_stderr": 0.02811223926963042}
  },
  "versions": {"xwinograd_en": 0, "xwinograd_fr": 0, "xwinograd_jp": 0, "xwinograd_zh": 0, "xwinograd_ru": 0, "xwinograd_pt": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/mpt/mpt-7b/README.md (deleted, 100644 → 0)
# mpt-7b
## mpt-7b_anli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|anli_r1| 0|acc | 33.2|± | 1.49|
|anli_r2| 0|acc | 33.6|± | 1.49|
|anli_r3| 0|acc | 34.5|± | 1.37|
## mpt-7b_arithmetic_5-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------------|------:|------|----:|---|-----:|
|arithmetic_1dc| 0|acc | 8.10|± | 0.61|
|arithmetic_2da| 0|acc |91.80|± | 0.61|
|arithmetic_2dm| 0|acc |25.60|± | 0.98|
|arithmetic_2ds| 0|acc |78.75|± | 0.91|
|arithmetic_3da| 0|acc |29.15|± | 1.02|
|arithmetic_3ds| 0|acc |42.80|± | 1.11|
|arithmetic_4da| 0|acc | 2.60|± | 0.36|
|arithmetic_4ds| 0|acc | 2.60|± | 0.36|
|arithmetic_5da| 0|acc | 0.45|± | 0.15|
|arithmetic_5ds| 0|acc | 0.20|± | 0.10|
## mpt-7b_bbh_3-shot.json
| Task |Version| Metric |Value| |Stderr|
|------------------------------------------------|------:|---------------------|----:|---|-----:|
|bigbench_causal_judgement | 0|multiple_choice_grade|56.32|± | 3.61|
|bigbench_date_understanding | 0|multiple_choice_grade|58.27|± | 2.57|
|bigbench_disambiguation_qa | 0|multiple_choice_grade|36.43|± | 3.00|
|bigbench_dyck_languages | 0|multiple_choice_grade|12.30|± | 1.04|
|bigbench_formal_fallacies_syllogisms_negation | 0|multiple_choice_grade|49.92|± | 0.42|
|bigbench_geometric_shapes | 0|multiple_choice_grade|20.33|± | 2.13|
| | |exact_str_match |12.26|± | 1.73|
|bigbench_hyperbaton | 0|multiple_choice_grade|49.36|± | 0.22|
|bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|24.00|± | 1.91|
|bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|16.57|± | 1.41|
|bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|38.67|± | 2.82|
|bigbench_movie_recommendation | 0|multiple_choice_grade|43.80|± | 2.22|
|bigbench_navigate | 0|multiple_choice_grade|48.60|± | 1.58|
|bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|29.85|± | 1.02|
|bigbench_ruin_names | 0|multiple_choice_grade|29.69|± | 2.16|
|bigbench_salient_translation_error_detection | 0|multiple_choice_grade|17.94|± | 1.22|
|bigbench_snarks | 0|multiple_choice_grade|53.04|± | 3.72|
|bigbench_sports_understanding | 0|multiple_choice_grade|49.49|± | 1.59|
|bigbench_temporal_sequences | 0|multiple_choice_grade|29.60|± | 1.44|
|bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|19.44|± | 1.12|
|bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|13.43|± | 0.82|
|bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|38.67|± | 2.82|
## mpt-7b_blimp_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------------------------------------------------|------:|------|----:|---|-----:|
|blimp_adjunct_island | 0|acc | 87.8|± | 1.04|
|blimp_anaphor_gender_agreement | 0|acc | 99.5|± | 0.22|
|blimp_anaphor_number_agreement | 0|acc | 99.5|± | 0.22|
|blimp_animate_subject_passive | 0|acc | 77.5|± | 1.32|
|blimp_animate_subject_trans | 0|acc | 88.4|± | 1.01|
|blimp_causative | 0|acc | 74.7|± | 1.38|
|blimp_complex_NP_island | 0|acc | 52.8|± | 1.58|
|blimp_coordinate_structure_constraint_complex_left_branch| 0|acc | 77.9|± | 1.31|
|blimp_coordinate_structure_constraint_object_extraction | 0|acc | 84.8|± | 1.14|
|blimp_determiner_noun_agreement_1 | 0|acc | 99.2|± | 0.28|
|blimp_determiner_noun_agreement_2 | 0|acc | 97.4|± | 0.50|
|blimp_determiner_noun_agreement_irregular_1 | 0|acc | 93.6|± | 0.77|
|blimp_determiner_noun_agreement_irregular_2 | 0|acc | 92.8|± | 0.82|
|blimp_determiner_noun_agreement_with_adj_2 | 0|acc | 93.5|± | 0.78|
|blimp_determiner_noun_agreement_with_adj_irregular_1 | 0|acc | 88.3|± | 1.02|
|blimp_determiner_noun_agreement_with_adj_irregular_2 | 0|acc | 91.9|± | 0.86|
|blimp_determiner_noun_agreement_with_adjective_1 | 0|acc | 97.2|± | 0.52|
|blimp_distractor_agreement_relational_noun | 0|acc | 88.9|± | 0.99|
|blimp_distractor_agreement_relative_clause | 0|acc | 74.3|± | 1.38|
|blimp_drop_argument | 0|acc | 78.7|± | 1.30|
|blimp_ellipsis_n_bar_1 | 0|acc | 79.0|± | 1.29|
|blimp_ellipsis_n_bar_2 | 0|acc | 92.8|± | 0.82|
|blimp_existential_there_object_raising | 0|acc | 83.4|± | 1.18|
|blimp_existential_there_quantifiers_1 | 0|acc | 98.8|± | 0.34|
|blimp_existential_there_quantifiers_2 | 0|acc | 27.0|± | 1.40|
|blimp_existential_there_subject_raising | 0|acc | 88.8|± | 1.00|
|blimp_expletive_it_object_raising | 0|acc | 80.0|± | 1.27|
|blimp_inchoative | 0|acc | 67.3|± | 1.48|
|blimp_intransitive | 0|acc | 83.2|± | 1.18|
|blimp_irregular_past_participle_adjectives | 0|acc | 97.2|± | 0.52|
|blimp_irregular_past_participle_verbs | 0|acc | 88.5|± | 1.01|
|blimp_irregular_plural_subject_verb_agreement_1 | 0|acc | 92.0|± | 0.86|
|blimp_irregular_plural_subject_verb_agreement_2 | 0|acc | 90.8|± | 0.91|
|blimp_left_branch_island_echo_question | 0|acc | 43.2|± | 1.57|
|blimp_left_branch_island_simple_question | 0|acc | 89.7|± | 0.96|
|blimp_matrix_question_npi_licensor_present | 0|acc | 70.5|± | 1.44|
|blimp_npi_present_1 | 0|acc | 57.9|± | 1.56|
|blimp_npi_present_2 | 0|acc | 68.8|± | 1.47|
|blimp_only_npi_licensor_present | 0|acc | 91.6|± | 0.88|
|blimp_only_npi_scope | 0|acc | 73.2|± | 1.40|
|blimp_passive_1 | 0|acc | 88.7|± | 1.00|
|blimp_passive_2 | 0|acc | 89.5|± | 0.97|
|blimp_principle_A_c_command | 0|acc | 75.0|± | 1.37|
|blimp_principle_A_case_1 | 0|acc |100.0|± | 0.00|
|blimp_principle_A_case_2 | 0|acc | 94.0|± | 0.75|
|blimp_principle_A_domain_1 | 0|acc | 99.7|± | 0.17|
|blimp_principle_A_domain_2 | 0|acc | 82.8|± | 1.19|
|blimp_principle_A_domain_3 | 0|acc | 76.1|± | 1.35|
|blimp_principle_A_reconstruction | 0|acc | 41.0|± | 1.56|
|blimp_regular_plural_subject_verb_agreement_1 | 0|acc | 97.1|± | 0.53|
|blimp_regular_plural_subject_verb_agreement_2 | 0|acc | 90.7|± | 0.92|
|blimp_sentential_negation_npi_licensor_present | 0|acc | 98.9|± | 0.33|
|blimp_sentential_negation_npi_scope | 0|acc | 73.3|± | 1.40|
|blimp_sentential_subject_island | 0|acc | 39.9|± | 1.55|
|blimp_superlative_quantifiers_1 | 0|acc | 82.2|± | 1.21|
|blimp_superlative_quantifiers_2 | 0|acc | 89.7|± | 0.96|
|blimp_tough_vs_raising_1 | 0|acc | 69.0|± | 1.46|
|blimp_tough_vs_raising_2 | 0|acc | 82.9|± | 1.19|
|blimp_transitive | 0|acc | 87.2|± | 1.06|
|blimp_wh_island | 0|acc | 81.3|± | 1.23|
|blimp_wh_questions_object_gap | 0|acc | 76.3|± | 1.35|
|blimp_wh_questions_subject_gap | 0|acc | 89.0|± | 0.99|
|blimp_wh_questions_subject_gap_long_distance | 0|acc | 89.3|± | 0.98|
|blimp_wh_vs_that_no_gap | 0|acc | 94.6|± | 0.72|
|blimp_wh_vs_that_no_gap_long_distance | 0|acc | 95.1|± | 0.68|
|blimp_wh_vs_that_with_gap | 0|acc | 32.1|± | 1.48|
|blimp_wh_vs_that_with_gap_long_distance | 0|acc | 29.2|± | 1.44|
## mpt-7b_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |40.61|± | 1.44|
| | |acc_norm|41.81|± | 1.44|
|arc_easy | 0|acc |74.87|± | 0.89|
| | |acc_norm|70.29|± | 0.94|
|boolq | 1|acc |73.52|± | 0.77|
|copa | 0|acc |85.00|± | 3.59|
|hellaswag | 0|acc |57.24|± | 0.49|
| | |acc_norm|76.12|± | 0.43|
|mc_taco | 0|em |13.51| | |
| | |f1 |45.48| | |
|openbookqa | 0|acc |32.00|± | 2.09|
| | |acc_norm|42.60|± | 2.21|
|piqa | 0|acc |79.16|± | 0.95|
| | |acc_norm|80.41|± | 0.93|
|prost | 0|acc |25.73|± | 0.32|
| | |acc_norm|30.12|± | 0.34|
|swag | 0|acc |56.17|± | 0.35|
| | |acc_norm|75.80|± | 0.30|
|winogrande | 0|acc |68.67|± | 1.30|
|wsc273 | 0|acc |85.71|± | 2.12|
## mpt-7b_glue_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|cola | 0|mcc |-4.41|± | 3.12|
|mnli | 0|acc |37.83|± | 0.49|
|mnli_mismatched| 0|acc |37.49|± | 0.49|
|mrpc | 0|acc |62.99|± | 2.39|
| | |f1 |75.61|± | 1.93|
|qnli | 0|acc |51.35|± | 0.68|
|qqp | 0|acc |50.36|± | 0.25|
| | |f1 |54.14|± | 0.29|
|rte | 0|acc |63.90|± | 2.89|
|sst | 0|acc |76.83|± | 1.43|
|wnli | 1|acc |47.89|± | 5.97|
## mpt-7b_human_alignment_0-shot.json
| Task |Version| Metric |Value | |Stderr|
|---------------------------------------|------:|---------------------|-----:|---|-----:|
|crows_pairs_english_age | 0|likelihood_difference|415.11|± | 38.32|
| | |pct_stereotype | 73.63|± | 4.64|
|crows_pairs_english_autre | 0|likelihood_difference|505.68|± |177.03|
| | |pct_stereotype | 72.73|± | 14.08|
|crows_pairs_english_disability | 0|likelihood_difference|601.92|± | 63.31|
| | |pct_stereotype | 76.92|± | 5.27|
|crows_pairs_english_gender | 0|likelihood_difference|268.24|± | 17.01|
| | |pct_stereotype | 63.75|± | 2.69|
|crows_pairs_english_nationality | 0|likelihood_difference|349.83|± | 21.51|
| | |pct_stereotype | 61.57|± | 3.32|
|crows_pairs_english_physical_appearance| 0|likelihood_difference|373.78|± | 33.85|
| | |pct_stereotype | 72.22|± | 5.32|
|crows_pairs_english_race_color | 0|likelihood_difference|336.20|± | 14.10|
| | |pct_stereotype | 57.28|± | 2.20|
|crows_pairs_english_religion | 0|likelihood_difference|366.44|± | 33.86|
| | |pct_stereotype | 72.97|± | 4.23|
|crows_pairs_english_sexual_orientation | 0|likelihood_difference|463.04|± | 45.75|
| | |pct_stereotype | 82.80|± | 3.93|
|crows_pairs_english_socioeconomic | 0|likelihood_difference|406.51|± | 23.52|
| | |pct_stereotype | 67.89|± | 3.40|
|crows_pairs_french_age | 0|likelihood_difference|360.97|± | 36.15|
| | |pct_stereotype | 42.22|± | 5.24|
|crows_pairs_french_autre | 0|likelihood_difference|269.23|± | 92.30|
| | |pct_stereotype | 61.54|± | 14.04|
|crows_pairs_french_disability | 0|likelihood_difference|495.83|± | 42.69|
| | |pct_stereotype | 63.64|± | 5.97|
|crows_pairs_french_gender | 0|likelihood_difference|321.38|± | 17.59|
| | |pct_stereotype | 51.09|± | 2.79|
|crows_pairs_french_nationality | 0|likelihood_difference|388.34|± | 21.84|
| | |pct_stereotype | 34.39|± | 2.99|
|crows_pairs_french_physical_appearance | 0|likelihood_difference|322.74|± | 43.29|
| | |pct_stereotype | 59.72|± | 5.82|
|crows_pairs_french_race_color | 0|likelihood_difference|316.14|± | 16.56|
| | |pct_stereotype | 43.70|± | 2.32|
|crows_pairs_french_religion | 0|likelihood_difference|356.74|± | 33.68|
| | |pct_stereotype | 62.61|± | 4.53|
|crows_pairs_french_sexual_orientation | 0|likelihood_difference|479.12|± | 40.10|
| | |pct_stereotype | 78.02|± | 4.36|
|crows_pairs_french_socioeconomic | 0|likelihood_difference|399.39|± | 26.31|
| | |pct_stereotype | 65.82|± | 3.40|
|ethics_cm | 0|acc | 54.59|± | 0.80|
|ethics_deontology | 0|acc | 50.25|± | 0.83|
| | |em | 0.44| | |
|ethics_justice | 0|acc | 51.96|± | 0.96|
| | |em | 1.18| | |
|ethics_utilitarianism | 0|acc | 57.49|± | 0.71|
|ethics_utilitarianism_original | 0|acc | 99.56|± | 0.10|
|ethics_virtue | 0|acc | 80.40|± | 0.56|
| | |em | 12.56| | |
|toxigen | 0|acc | 43.19|± | 1.62|
| | |acc_norm | 43.19|± | 1.62|
## mpt-7b_lambada_0-shot.json
| Task |Version|Metric|Value | |Stderr|
|----------------------|------:|------|-----:|---|-----:|
|lambada_openai | 0|ppl | 3.87|± | 0.08|
| | |acc | 68.35|± | 0.65|
|lambada_openai_cloze | 0|ppl | 26.56|± | 0.70|
| | |acc | 39.65|± | 0.68|
|lambada_openai_mt_de | 0|ppl | 70.12|± | 4.04|
| | |acc | 33.77|± | 0.66|
|lambada_openai_mt_en | 0|ppl | 3.87|± | 0.08|
| | |acc | 68.35|± | 0.65|
|lambada_openai_mt_es | 0|ppl | 67.23|± | 3.69|
| | |acc | 36.95|± | 0.67|
|lambada_openai_mt_fr | 0|ppl | 42.93|± | 2.37|
| | |acc | 43.02|± | 0.69|
|lambada_openai_mt_it | 0|ppl | 65.76|± | 3.87|
| | |acc | 39.20|± | 0.68|
|lambada_standard | 0|ppl | 4.92|± | 0.11|
| | |acc | 61.91|± | 0.68|
|lambada_standard_cloze| 0|ppl |109.10|± | 3.04|
| | |acc | 16.75|± | 0.52|
## mpt-7b_mmlu_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|hendrycksTest-abstract_algebra | 0|acc |18.00|± | 3.86|
| | |acc_norm|21.00|± | 4.09|
|hendrycksTest-anatomy | 0|acc |38.52|± | 4.20|
| | |acc_norm|37.78|± | 4.19|
|hendrycksTest-astronomy | 0|acc |39.47|± | 3.98|
| | |acc_norm|42.11|± | 4.02|
|hendrycksTest-business_ethics | 0|acc |49.00|± | 5.02|
| | |acc_norm|48.00|± | 5.02|
|hendrycksTest-clinical_knowledge | 0|acc |33.21|± | 2.90|
| | |acc_norm|37.74|± | 2.98|
|hendrycksTest-college_biology | 0|acc |38.19|± | 4.06|
| | |acc_norm|35.42|± | 4.00|
|hendrycksTest-college_chemistry | 0|acc |39.00|± | 4.90|
| | |acc_norm|41.00|± | 4.94|
|hendrycksTest-college_computer_science | 0|acc |34.00|± | 4.76|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-college_mathematics | 0|acc |27.00|± | 4.46|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-college_medicine | 0|acc |36.42|± | 3.67|
| | |acc_norm|34.68|± | 3.63|
|hendrycksTest-college_physics | 0|acc |30.39|± | 4.58|
| | |acc_norm|33.33|± | 4.69|
|hendrycksTest-computer_security | 0|acc |41.00|± | 4.94|
| | |acc_norm|41.00|± | 4.94|
|hendrycksTest-conceptual_physics | 0|acc |32.77|± | 3.07|
| | |acc_norm|25.53|± | 2.85|
|hendrycksTest-econometrics | 0|acc |27.19|± | 4.19|
| | |acc_norm|23.68|± | 4.00|
|hendrycksTest-electrical_engineering | 0|acc |36.55|± | 4.01|
| | |acc_norm|33.79|± | 3.94|
|hendrycksTest-elementary_mathematics | 0|acc |29.89|± | 2.36|
| | |acc_norm|28.84|± | 2.33|
|hendrycksTest-formal_logic | 0|acc |30.95|± | 4.13|
| | |acc_norm|28.57|± | 4.04|
|hendrycksTest-global_facts | 0|acc |35.00|± | 4.79|
| | |acc_norm|33.00|± | 4.73|
|hendrycksTest-high_school_biology | 0|acc |36.45|± | 2.74|
| | |acc_norm|39.03|± | 2.78|
|hendrycksTest-high_school_chemistry | 0|acc |21.18|± | 2.87|
| | |acc_norm|21.67|± | 2.90|
|hendrycksTest-high_school_computer_science | 0|acc |43.00|± | 4.98|
| | |acc_norm|41.00|± | 4.94|
|hendrycksTest-high_school_european_history | 0|acc |38.18|± | 3.79|
| | |acc_norm|37.58|± | 3.78|
|hendrycksTest-high_school_geography | 0|acc |38.38|± | 3.46|
| | |acc_norm|40.40|± | 3.50|
|hendrycksTest-high_school_government_and_politics| 0|acc |41.45|± | 3.56|
| | |acc_norm|41.45|± | 3.56|
|hendrycksTest-high_school_macroeconomics | 0|acc |34.87|± | 2.42|
| | |acc_norm|29.74|± | 2.32|
|hendrycksTest-high_school_mathematics | 0|acc |29.26|± | 2.77|
| | |acc_norm|30.37|± | 2.80|
|hendrycksTest-high_school_microeconomics | 0|acc |33.61|± | 3.07|
| | |acc_norm|36.97|± | 3.14|
|hendrycksTest-high_school_physics | 0|acc |27.81|± | 3.66|
| | |acc_norm|27.81|± | 3.66|
|hendrycksTest-high_school_psychology | 0|acc |46.97|± | 2.14|
| | |acc_norm|44.59|± | 2.13|
|hendrycksTest-high_school_statistics | 0|acc |32.87|± | 3.20|
| | |acc_norm|32.41|± | 3.19|
|hendrycksTest-high_school_us_history | 0|acc |34.31|± | 3.33|
| | |acc_norm|31.37|± | 3.26|
|hendrycksTest-high_school_world_history | 0|acc |29.54|± | 2.97|
| | |acc_norm|28.69|± | 2.94|
|hendrycksTest-human_aging | 0|acc |33.63|± | 3.17|
| | |acc_norm|32.74|± | 3.15|
|hendrycksTest-human_sexuality | 0|acc |27.48|± | 3.92|
| | |acc_norm|32.82|± | 4.12|
|hendrycksTest-international_law | 0|acc |37.19|± | 4.41|
| | |acc_norm|49.59|± | 4.56|
|hendrycksTest-jurisprudence | 0|acc |34.26|± | 4.59|
| | |acc_norm|39.81|± | 4.73|
|hendrycksTest-logical_fallacies | 0|acc |38.04|± | 3.81|
| | |acc_norm|36.81|± | 3.79|
|hendrycksTest-machine_learning | 0|acc |26.79|± | 4.20|
| | |acc_norm|24.11|± | 4.06|
|hendrycksTest-management | 0|acc |42.72|± | 4.90|
| | |acc_norm|39.81|± | 4.85|
|hendrycksTest-marketing | 0|acc |55.13|± | 3.26|
| | |acc_norm|55.13|± | 3.26|
|hendrycksTest-medical_genetics | 0|acc |39.00|± | 4.90|
| | |acc_norm|38.00|± | 4.88|
|hendrycksTest-miscellaneous | 0|acc |55.56|± | 1.78|
| | |acc_norm|55.68|± | 1.78|
|hendrycksTest-moral_disputes | 0|acc |32.08|± | 2.51|
| | |acc_norm|30.06|± | 2.47|
|hendrycksTest-moral_scenarios | 0|acc |26.03|± | 1.47|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |34.31|± | 2.72|
| | |acc_norm|40.20|± | 2.81|
|hendrycksTest-philosophy | 0|acc |37.62|± | 2.75|
| | |acc_norm|36.98|± | 2.74|
|hendrycksTest-prehistory | 0|acc |33.64|± | 2.63|
| | |acc_norm|30.56|± | 2.56|
|hendrycksTest-professional_accounting | 0|acc |30.50|± | 2.75|
| | |acc_norm|29.08|± | 2.71|
|hendrycksTest-professional_law | 0|acc |25.95|± | 1.12|
| | |acc_norm|28.42|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |29.41|± | 2.77|
| | |acc_norm|31.62|± | 2.82|
|hendrycksTest-professional_psychology | 0|acc |31.54|± | 1.88|
| | |acc_norm|30.23|± | 1.86|
|hendrycksTest-public_relations | 0|acc |41.82|± | 4.72|
| | |acc_norm|42.73|± | 4.74|
|hendrycksTest-security_studies | 0|acc |28.16|± | 2.88|
| | |acc_norm|24.08|± | 2.74|
|hendrycksTest-sociology | 0|acc |34.33|± | 3.36|
| | |acc_norm|36.82|± | 3.41|
|hendrycksTest-us_foreign_policy | 0|acc |38.00|± | 4.88|
| | |acc_norm|39.00|± | 4.90|
|hendrycksTest-virology | 0|acc |32.53|± | 3.65|
| | |acc_norm|32.53|± | 3.65|
|hendrycksTest-world_religions | 0|acc |54.39|± | 3.82|
| | |acc_norm|57.89|± | 3.79|
## mpt-7b_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |61.40|± | 1.09|
|pawsx_en| 0|acc |70.35|± | 1.02|
|pawsx_es| 0|acc |64.95|± | 1.07|
|pawsx_fr| 0|acc |62.85|± | 1.08|
|pawsx_ja| 0|acc |49.30|± | 1.12|
|pawsx_ko| 0|acc |53.65|± | 1.12|
|pawsx_zh| 0|acc |56.25|± | 1.11|
## mpt-7b_reading_comprehension_0-shot.json
|Task|Version|Metric|Value| |Stderr|
|----|------:|------|----:|---|-----:|
|coqa| 1|f1 |76.51|± | 1.48|
| | |em |63.02|± | 1.87|
|drop| 1|em | 3.43|± | 0.19|
| | |f1 |13.39|± | 0.25|
|race| 1|acc |38.66|± | 1.51|
## mpt-7b_superglue_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|boolq | 1|acc |73.82|± | 0.77|
|cb | 1|acc |41.07|± | 6.63|
| | |f1 |21.27| | |
|copa | 0|acc |84.00|± | 3.68|
|multirc| 1|acc | 0.84|± | 0.30|
|record | 0|f1 |90.10|± | 0.29|
| | |em |89.30|± | 0.31|
|wic | 0|acc |48.43|± | 1.98|
|wsc | 0|acc |63.46|± | 4.74|
## mpt-7b_unscramble_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|----------------|------:|------|----:|---|-----:|
|anagrams1 | 0|acc | 0.00|± | 0.00|
|anagrams2 | 0|acc | 0.01|± | 0.01|
|cycle_letters | 0|acc | 0.00|± | 0.00|
|random_insertion| 0|acc | 0.04|± | 0.02|
|reversed_words | 0|acc | 0.00|± | 0.00|
## mpt-7b_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 47.4|± | 2.24|
|xcopa_ht| 0|acc | 49.8|± | 2.24|
|xcopa_id| 0|acc | 56.8|± | 2.22|
|xcopa_it| 0|acc | 59.4|± | 2.20|
|xcopa_qu| 0|acc | 48.4|± | 2.24|
|xcopa_sw| 0|acc | 51.6|± | 2.24|
|xcopa_ta| 0|acc | 54.0|± | 2.23|
|xcopa_th| 0|acc | 54.2|± | 2.23|
|xcopa_tr| 0|acc | 51.6|± | 2.24|
|xcopa_vi| 0|acc | 53.6|± | 2.23|
|xcopa_zh| 0|acc | 63.2|± | 2.16|
## mpt-7b_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.31|± | 0.67|
|xnli_bg| 0|acc |36.83|± | 0.68|
|xnli_de| 0|acc |46.45|± | 0.70|
|xnli_el| 0|acc |36.19|± | 0.68|
|xnli_en| 0|acc |54.33|± | 0.70|
|xnli_es| 0|acc |45.65|± | 0.70|
|xnli_fr| 0|acc |48.80|± | 0.71|
|xnli_hi| 0|acc |34.73|± | 0.67|
|xnli_ru| 0|acc |44.43|± | 0.70|
|xnli_sw| 0|acc |33.41|± | 0.67|
|xnli_th| 0|acc |36.13|± | 0.68|
|xnli_tr| 0|acc |37.68|± | 0.68|
|xnli_ur| 0|acc |33.63|± | 0.67|
|xnli_vi| 0|acc |37.33|± | 0.68|
|xnli_zh| 0|acc |35.35|± | 0.68|
## mpt-7b_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |48.51|± | 1.29|
|xstory_cloze_en| 0|acc |77.90|± | 1.07|
|xstory_cloze_es| 0|acc |66.05|± | 1.22|
|xstory_cloze_eu| 0|acc |51.09|± | 1.29|
|xstory_cloze_hi| 0|acc |51.69|± | 1.29|
|xstory_cloze_id| 0|acc |55.20|± | 1.28|
|xstory_cloze_my| 0|acc |48.38|± | 1.29|
|xstory_cloze_ru| 0|acc |57.25|± | 1.27|
|xstory_cloze_sw| 0|acc |49.90|± | 1.29|
|xstory_cloze_te| 0|acc |52.95|± | 1.28|
|xstory_cloze_zh| 0|acc |59.56|± | 1.26|
## mpt-7b_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |86.67|± | 0.71|
|xwinograd_fr| 0|acc |66.27|± | 5.22|
|xwinograd_jp| 0|acc |60.27|± | 1.58|
|xwinograd_pt| 0|acc |66.92|± | 2.91|
|xwinograd_ru| 0|acc |69.52|± | 2.60|
|xwinograd_zh| 0|acc |71.63|± | 2.01|
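The tables in this README follow the layout the harness itself prints. A minimal sketch, assuming the same v0.3-era lm_eval API (where evaluator.make_table formats the Task/Version/Metric/Value/Stderr columns), of rendering such a table from one of the per-suite JSON files deleted in this commit:

```python
# Minimal sketch, assuming the v0.3-era lm_eval API: render a markdown table of the
# kind shown in this README from one of the per-suite result JSON files.
import json

from lm_eval import evaluator

with open("results/mpt/mpt-7b/mpt-7b_anli_0-shot.json") as f:
    results = json.load(f)  # dict with "results", "versions", "config"

# make_table expects the full dict (it reads both "results" and "versions").
print(evaluator.make_table(results))
```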
results/mpt/mpt-7b/mpt-7b_anli_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "anli_r1": {"acc": 0.332, "acc_stderr": 0.014899597242811485},
    "anli_r2": {"acc": 0.336, "acc_stderr": 0.014944140233795027},
    "anli_r3": {"acc": 0.345, "acc_stderr": 0.013728421539454881}
  },
  "versions": {"anli_r1": 0, "anli_r2": 0, "anli_r3": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True", "num_fewshot": 0, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/mpt/mpt-7b/mpt-7b_arithmetic_5-shot.json (deleted, 100644 → 0)
{
  "results": {
    "arithmetic_1dc": {"acc": 0.081, "acc_stderr": 0.006102304405675846},
    "arithmetic_2da": {"acc": 0.918, "acc_stderr": 0.006136515983374211},
    "arithmetic_2dm": {"acc": 0.256, "acc_stderr": 0.009761129023832868},
    "arithmetic_2ds": {"acc": 0.7875, "acc_stderr": 0.009149520854445372},
    "arithmetic_3da": {"acc": 0.2915, "acc_stderr": 0.010164424861564223},
    "arithmetic_3ds": {"acc": 0.428, "acc_stderr": 0.011066581884995264},
    "arithmetic_4da": {"acc": 0.026, "acc_stderr": 0.0035592603398856676},
    "arithmetic_4ds": {"acc": 0.026, "acc_stderr": 0.003559260339885677},
    "arithmetic_5da": {"acc": 0.0045, "acc_stderr": 0.0014969954902233202},
    "arithmetic_5ds": {"acc": 0.002, "acc_stderr": 0.0009992493430694993}
  },
  "versions": {"arithmetic_1dc": 0, "arithmetic_2da": 0, "arithmetic_2dm": 0, "arithmetic_2ds": 0, "arithmetic_3da": 0, "arithmetic_3ds": 0, "arithmetic_4da": 0, "arithmetic_4ds": 0, "arithmetic_5da": 0, "arithmetic_5ds": 0},
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True", "num_fewshot": 5, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/mpt/mpt-7b/mpt-7b_bbh_3-shot.json (deleted, 100644 → 0)
{
  "results": {
    "bigbench_causal_judgement": {"multiple_choice_grade": 0.5631578947368421, "multiple_choice_grade_stderr": 0.036078330444807245},
    "bigbench_date_understanding": {"multiple_choice_grade": 0.5826558265582655, "multiple_choice_grade_stderr": 0.025705692903559226},
    "bigbench_disambiguation_qa": {"multiple_choice_grade": 0.3643410852713178, "multiple_choice_grade_stderr": 0.03001923241206336},
    "bigbench_dyck_languages": {"multiple_choice_grade": 0.123, "multiple_choice_grade_stderr": 0.010391293421849874},
    "bigbench_formal_fallacies_syllogisms_negation": {"multiple_choice_grade": 0.49915492957746477, "multiple_choice_grade_stderr": 0.0041960485493055645},
    "bigbench_geometric_shapes": {"multiple_choice_grade": 0.20334261838440112, "multiple_choice_grade_stderr": 0.021272007856536258, "exact_str_match": 0.12256267409470752, "exact_str_match_stderr": 0.017331879192703025},
    "bigbench_hyperbaton": {"multiple_choice_grade": 0.4936, "multiple_choice_grade_stderr": 0.002235907150490653},
    "bigbench_logical_deduction_five_objects": {"multiple_choice_grade": 0.24, "multiple_choice_grade_stderr": 0.01911886665375975},
    "bigbench_logical_deduction_seven_objects": {"multiple_choice_grade": 0.1657142857142857, "multiple_choice_grade_stderr": 0.014063673984033173},
    "bigbench_logical_deduction_three_objects": {"multiple_choice_grade": 0.38666666666666666, "multiple_choice_grade_stderr": 0.028163138908196852},
    "bigbench_movie_recommendation": {"multiple_choice_grade": 0.438, "multiple_choice_grade_stderr": 0.022210326363977417},
    "bigbench_navigate": {"multiple_choice_grade": 0.486, "multiple_choice_grade_stderr": 0.01581309754773099},
    "bigbench_reasoning_about_colored_objects": {"multiple_choice_grade": 0.2985, "multiple_choice_grade_stderr": 0.010234805842091589},
    "bigbench_ruin_names": {"multiple_choice_grade": 0.296875, "multiple_choice_grade_stderr": 0.021609729061250887},
    "bigbench_salient_translation_error_detection": {"multiple_choice_grade": 0.17935871743486975, "multiple_choice_grade_stderr": 0.012150393578288319},
    "bigbench_snarks": {"multiple_choice_grade": 0.5303867403314917, "multiple_choice_grade_stderr": 0.03719891321680327},
    "bigbench_sports_understanding": {"multiple_choice_grade": 0.4949290060851927, "multiple_choice_grade_stderr": 0.015930505328489487},
    "bigbench_temporal_sequences": {"multiple_choice_grade": 0.296, "multiple_choice_grade_stderr": 0.014442734941575016},
    "bigbench_tracking_shuffled_objects_five_objects": {"multiple_choice_grade": 0.1944, "multiple_choice_grade_stderr": 0.011197643581460408},
    "bigbench_tracking_shuffled_objects_seven_objects": {"multiple_choice_grade": 0.13428571428571429, "multiple_choice_grade_stderr": 0.008152809490408933},
    "bigbench_tracking_shuffled_objects_three_objects": {"multiple_choice_grade": 0.38666666666666666, "multiple_choice_grade_stderr": 0.028163138908196852}
  },
  "versions": {
    "bigbench_causal_judgement": 0, "bigbench_date_understanding": 0, "bigbench_disambiguation_qa": 0,
    "bigbench_dyck_languages": 0, "bigbench_formal_fallacies_syllogisms_negation": 0, "bigbench_geometric_shapes": 0,
    "bigbench_hyperbaton": 0, "bigbench_logical_deduction_five_objects": 0, "bigbench_logical_deduction_seven_objects": 0,
    "bigbench_logical_deduction_three_objects": 0, "bigbench_movie_recommendation": 0, "bigbench_navigate": 0,
    "bigbench_reasoning_about_colored_objects": 0, "bigbench_ruin_names": 0, "bigbench_salient_translation_error_detection": 0,
    "bigbench_snarks": 0, "bigbench_sports_understanding": 0, "bigbench_temporal_sequences": 0,
    "bigbench_tracking_shuffled_objects_five_objects": 0, "bigbench_tracking_shuffled_objects_seven_objects": 0, "bigbench_tracking_shuffled_objects_three_objects": 0
  },
  "config": {"model": "hf-causal-experimental", "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True", "num_fewshot": 3, "batch_size": "auto", "device": "cuda", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {}}
}
results/mpt/mpt-7b/mpt-7b_blimp_0-shot.json
deleted
100644 → 0
View file @
5e59782e
{
  "results": {
    "blimp_adjunct_island": {"acc": 0.878, "acc_stderr": 0.010354864712936724},
    "blimp_anaphor_gender_agreement": {"acc": 0.995, "acc_stderr": 0.002231586874844882},
    "blimp_anaphor_number_agreement": {"acc": 0.995, "acc_stderr": 0.0022315868748448817},
    "blimp_animate_subject_passive": {"acc": 0.775, "acc_stderr": 0.013211720158614753},
    "blimp_animate_subject_trans": {"acc": 0.884, "acc_stderr": 0.010131468138756997},
    "blimp_causative": {"acc": 0.747, "acc_stderr": 0.01375427861358708},
    "blimp_complex_NP_island": {"acc": 0.528, "acc_stderr": 0.01579447578951147},
    "blimp_coordinate_structure_constraint_complex_left_branch": {"acc": 0.779, "acc_stderr": 0.013127502859696239},
    "blimp_coordinate_structure_constraint_object_extraction": {"acc": 0.848, "acc_stderr": 0.011358918303475293},
    "blimp_determiner_noun_agreement_1": {"acc": 0.992, "acc_stderr": 0.002818500300504507},
    "blimp_determiner_noun_agreement_2": {"acc": 0.974, "acc_stderr": 0.0050348137353182195},
    "blimp_determiner_noun_agreement_irregular_1": {"acc": 0.936, "acc_stderr": 0.007743640226919308},
    "blimp_determiner_noun_agreement_irregular_2": {"acc": 0.928, "acc_stderr": 0.008178195576218681},
    "blimp_determiner_noun_agreement_with_adj_2": {"acc": 0.935, "acc_stderr": 0.007799733061832037},
    "blimp_determiner_noun_agreement_with_adj_irregular_1": {"acc": 0.883, "acc_stderr": 0.010169287802713327},
    "blimp_determiner_noun_agreement_with_adj_irregular_2": {"acc": 0.919, "acc_stderr": 0.008632121032139973},
    "blimp_determiner_noun_agreement_with_adjective_1": {"acc": 0.972, "acc_stderr": 0.005219506034410047},
    "blimp_distractor_agreement_relational_noun": {"acc": 0.889, "acc_stderr": 0.009938701010583726},
    "blimp_distractor_agreement_relative_clause": {"acc": 0.743, "acc_stderr": 0.013825416526895042},
    "blimp_drop_argument": {"acc": 0.787, "acc_stderr": 0.012953717566737234},
    "blimp_ellipsis_n_bar_1": {"acc": 0.79, "acc_stderr": 0.012886662332274522},
    "blimp_ellipsis_n_bar_2": {"acc": 0.928, "acc_stderr": 0.008178195576218681},
    "blimp_existential_there_object_raising": {"acc": 0.834, "acc_stderr": 0.01177211037081219},
    "blimp_existential_there_quantifiers_1": {"acc": 0.988, "acc_stderr": 0.003444977194099841},
    "blimp_existential_there_quantifiers_2": {"acc": 0.27, "acc_stderr": 0.014046255632633908},
    "blimp_existential_there_subject_raising": {"acc": 0.888, "acc_stderr": 0.00997775303139724},
    "blimp_expletive_it_object_raising": {"acc": 0.8, "acc_stderr": 0.012655439943366646},
    "blimp_inchoative": {"acc": 0.673, "acc_stderr": 0.01484221315341125},
    "blimp_intransitive": {"acc": 0.832, "acc_stderr": 0.01182860583145426},
    "blimp_irregular_past_participle_adjectives": {"acc": 0.972, "acc_stderr": 0.005219506034410053},
    "blimp_irregular_past_participle_verbs": {"acc": 0.885, "acc_stderr": 0.010093407594904614},
    "blimp_irregular_plural_subject_verb_agreement_1": {"acc": 0.92, "acc_stderr": 0.008583336977753651},
    "blimp_irregular_plural_subject_verb_agreement_2": {"acc": 0.908, "acc_stderr": 0.009144376393151112},
    "blimp_left_branch_island_echo_question": {"acc": 0.432, "acc_stderr": 0.01567232023733621},
    "blimp_left_branch_island_simple_question": {"acc": 0.897, "acc_stderr": 0.009616833339695798},
    "blimp_matrix_question_npi_licensor_present": {"acc": 0.705, "acc_stderr": 0.014428554438445514},
    "blimp_npi_present_1": {"acc": 0.579, "acc_stderr": 0.015620595475301318},
    "blimp_npi_present_2": {"acc": 0.688, "acc_stderr": 0.01465847437050901},
    "blimp_only_npi_licensor_present": {"acc": 0.916, "acc_stderr": 0.008776162089491096},
    "blimp_only_npi_scope": {"acc": 0.732, "acc_stderr": 0.014013292702729491},
    "blimp_passive_1": {"acc": 0.887, "acc_stderr": 0.010016552866696863},
    "blimp_passive_2": {"acc": 0.895, "acc_stderr": 0.009698921026024947},
    "blimp_principle_A_c_command": {"acc": 0.75, "acc_stderr": 0.013699915608779773},
    "blimp_principle_A_case_1": {"acc": 1.0, "acc_stderr": 0.0},
    "blimp_principle_A_case_2": {"acc": 0.94, "acc_stderr": 0.007513751157474928},
    "blimp_principle_A_domain_1": {"acc": 0.997, "acc_stderr": 0.0017303161543469417},
    "blimp_principle_A_domain_2": {"acc": 0.828, "acc_stderr": 0.011939788882495321},
    "blimp_principle_A_domain_3": {"acc": 0.761, "acc_stderr": 0.013493000446937591},
    "blimp_principle_A_reconstruction": {"acc": 0.41, "acc_stderr": 0.015560917136921646},
    "blimp_regular_plural_subject_verb_agreement_1": {"acc": 0.971, "acc_stderr": 0.005309160685756986},
    "blimp_regular_plural_subject_verb_agreement_2": {"acc": 0.907, "acc_stderr": 0.009188875634996659},
    "blimp_sentential_negation_npi_licensor_present": {"acc": 0.989, "acc_stderr": 0.0032999833166078166},
    "blimp_sentential_negation_npi_scope": {"acc": 0.733, "acc_stderr": 0.013996674851796264},
    "blimp_sentential_subject_island": {"acc": 0.399, "acc_stderr": 0.015493193313162906},
    "blimp_superlative_quantifiers_1": {"acc": 0.822, "acc_stderr": 0.012102167676183596},
    "blimp_superlative_quantifiers_2": {"acc": 0.897, "acc_stderr": 0.009616833339695796},
    "blimp_tough_vs_raising_1": {"acc": 0.69, "acc_stderr": 0.014632638658632898},
    "blimp_tough_vs_raising_2": {"acc": 0.829, "acc_stderr": 0.011912216456264606},
    "blimp_transitive": {"acc": 0.872, "acc_stderr": 0.010570133761108658},
    "blimp_wh_island": {"acc": 0.813, "acc_stderr": 0.012336254828074126},
    "blimp_wh_questions_object_gap": {"acc": 0.763, "acc_stderr": 0.01345407046257794},
    "blimp_wh_questions_subject_gap": {"acc": 0.89, "acc_stderr": 0.009899393819724435},
    "blimp_wh_questions_subject_gap_long_distance": {"acc": 0.893, "acc_stderr": 0.009779910359847169},
    "blimp_wh_vs_that_no_gap": {"acc": 0.946, "acc_stderr": 0.007150883521295428},
    "blimp_wh_vs_that_no_gap_long_distance": {"acc": 0.951, "acc_stderr": 0.006829761756140921},
    "blimp_wh_vs_that_with_gap": {"acc": 0.321, "acc_stderr": 0.01477082181793466},
    "blimp_wh_vs_that_with_gap_long_distance": {"acc": 0.292, "acc_stderr": 0.014385511563477347}
  },
  "versions": {
    "blimp_adjunct_island": 0,
    "blimp_anaphor_gender_agreement": 0,
    "blimp_anaphor_number_agreement": 0,
    "blimp_animate_subject_passive": 0,
    "blimp_animate_subject_trans": 0,
    "blimp_causative": 0,
    "blimp_complex_NP_island": 0,
    "blimp_coordinate_structure_constraint_complex_left_branch": 0,
    "blimp_coordinate_structure_constraint_object_extraction": 0,
    "blimp_determiner_noun_agreement_1": 0,
    "blimp_determiner_noun_agreement_2": 0,
    "blimp_determiner_noun_agreement_irregular_1": 0,
    "blimp_determiner_noun_agreement_irregular_2": 0,
    "blimp_determiner_noun_agreement_with_adj_2": 0,
    "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
    "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
    "blimp_determiner_noun_agreement_with_adjective_1": 0,
    "blimp_distractor_agreement_relational_noun": 0,
    "blimp_distractor_agreement_relative_clause": 0,
    "blimp_drop_argument": 0,
    "blimp_ellipsis_n_bar_1": 0,
    "blimp_ellipsis_n_bar_2": 0,
    "blimp_existential_there_object_raising": 0,
    "blimp_existential_there_quantifiers_1": 0,
    "blimp_existential_there_quantifiers_2": 0,
    "blimp_existential_there_subject_raising": 0,
    "blimp_expletive_it_object_raising": 0,
    "blimp_inchoative": 0,
    "blimp_intransitive": 0,
    "blimp_irregular_past_participle_adjectives": 0,
    "blimp_irregular_past_participle_verbs": 0,
    "blimp_irregular_plural_subject_verb_agreement_1": 0,
    "blimp_irregular_plural_subject_verb_agreement_2": 0,
    "blimp_left_branch_island_echo_question": 0,
    "blimp_left_branch_island_simple_question": 0,
    "blimp_matrix_question_npi_licensor_present": 0,
    "blimp_npi_present_1": 0,
    "blimp_npi_present_2": 0,
    "blimp_only_npi_licensor_present": 0,
    "blimp_only_npi_scope": 0,
    "blimp_passive_1": 0,
    "blimp_passive_2": 0,
    "blimp_principle_A_c_command": 0,
    "blimp_principle_A_case_1": 0,
    "blimp_principle_A_case_2": 0,
    "blimp_principle_A_domain_1": 0,
    "blimp_principle_A_domain_2": 0,
    "blimp_principle_A_domain_3": 0,
    "blimp_principle_A_reconstruction": 0,
    "blimp_regular_plural_subject_verb_agreement_1": 0,
    "blimp_regular_plural_subject_verb_agreement_2": 0,
    "blimp_sentential_negation_npi_licensor_present": 0,
    "blimp_sentential_negation_npi_scope": 0,
    "blimp_sentential_subject_island": 0,
    "blimp_superlative_quantifiers_1": 0,
    "blimp_superlative_quantifiers_2": 0,
    "blimp_tough_vs_raising_1": 0,
    "blimp_tough_vs_raising_2": 0,
    "blimp_transitive": 0,
    "blimp_wh_island": 0,
    "blimp_wh_questions_object_gap": 0,
    "blimp_wh_questions_subject_gap": 0,
    "blimp_wh_questions_subject_gap_long_distance": 0,
    "blimp_wh_vs_that_no_gap": 0,
    "blimp_wh_vs_that_no_gap_long_distance": 0,
    "blimp_wh_vs_that_with_gap": 0,
    "blimp_wh_vs_that_with_gap_long_distance": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
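Each BLiMP sub-task above is scored independently, so a headline BLiMP number has to be computed from the file itself. A minimal sketch using only the standard library (the file path is illustrative, matching the deleted file's name):

```python
# Minimal sketch: unweighted macro-average accuracy over the 67 BLiMP sub-tasks
# in a results file shaped like the one above. The file path is illustrative.
import json

with open("mpt-7b_blimp_0-shot.json") as f:
    data = json.load(f)

accs = [metrics["acc"] for metrics in data["results"].values()]
print(f"BLiMP macro-average acc over {len(accs)} sub-tasks: {sum(accs) / len(accs):.4f}")
```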
results/mpt/mpt-7b/mpt-7b_common_sense_reasoning_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "arc_challenge": {"acc": 0.4061433447098976, "acc_stderr": 0.014351656690097862, "acc_norm": 0.4180887372013652, "acc_norm_stderr": 0.01441398839699608},
    "arc_easy": {"acc": 0.7487373737373737, "acc_stderr": 0.008900141191221648, "acc_norm": 0.7028619528619529, "acc_norm_stderr": 0.009377397867796849},
    "boolq": {"acc": 0.7351681957186544, "acc_stderr": 0.007717399182659714},
    "copa": {"acc": 0.85, "acc_stderr": 0.03588702812826373},
    "hellaswag": {"acc": 0.5723959370643298, "acc_stderr": 0.004937199759947679, "acc_norm": 0.761202947619996, "acc_norm_stderr": 0.004254771367531344},
    "mc_taco": {"em": 0.13513513513513514, "f1": 0.45480193909643063},
    "openbookqa": {"acc": 0.32, "acc_stderr": 0.020882340488761805, "acc_norm": 0.426, "acc_norm_stderr": 0.022136577335085637},
    "piqa": {"acc": 0.7916213275299239, "acc_stderr": 0.009476125383049457, "acc_norm": 0.8041349292709467, "acc_norm_stderr": 0.009259518041395765},
    "prost": {"acc": 0.25731212638770284, "acc_stderr": 0.0031937906462958443, "acc_norm": 0.30118488471391974, "acc_norm_stderr": 0.003351744324251047},
    "swag": {"acc": 0.5616814955513346, "acc_stderr": 0.0035080896485241934, "acc_norm": 0.7579726082175348, "acc_norm_stderr": 0.003028236139561354},
    "winogrande": {"acc": 0.6866614048934491, "acc_stderr": 0.013036512096747986},
    "wsc273": {"acc": 0.8571428571428571, "acc_stderr": 0.021217447349500165}
  },
  "versions": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 1,
    "copa": 0,
    "hellaswag": 0,
    "mc_taco": 0,
    "openbookqa": 0,
    "piqa": 0,
    "prost": 0,
    "swag": 0,
    "winogrande": 0,
    "wsc273": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_glue_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "cola": {"mcc": -0.044109532013985, "mcc_stderr": 0.031207420387242075},
    "mnli": {"acc": 0.3782985226693836, "acc_stderr": 0.0048953668826959605},
    "mnli_mismatched": {"acc": 0.37489829129373475, "acc_stderr": 0.004882398521925481},
    "mrpc": {"acc": 0.6299019607843137, "acc_stderr": 0.023933029030729185, "f1": 0.7560581583198709, "f1_stderr": 0.019295620717095126},
    "qnli": {"acc": 0.513454146073586, "acc_stderr": 0.006762960839582666},
    "qqp": {"acc": 0.5035864457086322, "acc_stderr": 0.002486636660034357, "f1": 0.5414038936111873, "f1_stderr": 0.0028706161186533327},
    "rte": {"acc": 0.6389891696750902, "acc_stderr": 0.028910281676964168},
    "sst": {"acc": 0.768348623853211, "acc_stderr": 0.014295110726150017},
    "wnli": {"acc": 0.4788732394366197, "acc_stderr": 0.05970805879899505}
  },
  "versions": {
    "cola": 0,
    "mnli": 0,
    "mnli_mismatched": 0,
    "mrpc": 0,
    "qnli": 0,
    "qqp": 0,
    "rte": 0,
    "sst": 0,
    "wnli": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
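Reading these metrics straight out of raw JSON is awkward when comparing runs, so a small helper can flatten any of these result files into a Markdown table. The sketch below uses only the standard library; the file name is illustrative and the same `{"results": {task: {metric: value}}}` shape is assumed for every file in this commit.

```python
# Minimal sketch: flatten a harness results file (e.g. the GLUE 0-shot run above)
# into a Markdown table. The file path is illustrative; stderr columns are
# skipped to keep the table short, since they remain in the JSON.
import json

with open("mpt-7b_glue_0-shot.json") as f:
    data = json.load(f)

rows = ["| Task | Metric | Value |", "|------|--------|-------|"]
for task, metrics in sorted(data["results"].items()):
    for metric, value in metrics.items():
        if metric.endswith("_stderr"):
            continue
        rows.append(f"| {task} | {metric} | {value:.4f} |")

print("\n".join(rows))
```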
results/mpt/mpt-7b/mpt-7b_human_alignment_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "crows_pairs_english_age": {"likelihood_difference": 4.1510989010989015, "likelihood_difference_stderr": 0.38322042294913006, "pct_stereotype": 0.7362637362637363, "pct_stereotype_stderr": 0.04644942852497395},
    "crows_pairs_english_autre": {"likelihood_difference": 5.056818181818182, "likelihood_difference_stderr": 1.7702853227564594, "pct_stereotype": 0.7272727272727273, "pct_stereotype_stderr": 0.14083575804390605},
    "crows_pairs_english_disability": {"likelihood_difference": 6.019230769230769, "likelihood_difference_stderr": 0.6330801651940536, "pct_stereotype": 0.7692307692307693, "pct_stereotype_stderr": 0.05266563052934291},
    "crows_pairs_english_gender": {"likelihood_difference": 2.682421875, "likelihood_difference_stderr": 0.17011984636779479, "pct_stereotype": 0.6375, "pct_stereotype_stderr": 0.02691527109619775},
    "crows_pairs_english_nationality": {"likelihood_difference": 3.498263888888889, "likelihood_difference_stderr": 0.21512426671182808, "pct_stereotype": 0.6157407407407407, "pct_stereotype_stderr": 0.03317354514310742},
    "crows_pairs_english_physical_appearance": {"likelihood_difference": 3.7378472222222223, "likelihood_difference_stderr": 0.3384578916047944, "pct_stereotype": 0.7222222222222222, "pct_stereotype_stderr": 0.053156331218399945},
    "crows_pairs_english_race_color": {"likelihood_difference": 3.3619586614173227, "likelihood_difference_stderr": 0.14103384370541044, "pct_stereotype": 0.5728346456692913, "pct_stereotype_stderr": 0.021968918082519016},
    "crows_pairs_english_religion": {"likelihood_difference": 3.6644144144144146, "likelihood_difference_stderr": 0.33863382204528913, "pct_stereotype": 0.7297297297297297, "pct_stereotype_stderr": 0.04234321361084538},
    "crows_pairs_english_sexual_orientation": {"likelihood_difference": 4.630376344086022, "likelihood_difference_stderr": 0.4574502494208299, "pct_stereotype": 0.8279569892473119, "pct_stereotype_stderr": 0.03934852812061865},
    "crows_pairs_english_socioeconomic": {"likelihood_difference": 4.065131578947368, "likelihood_difference_stderr": 0.23523235884669547, "pct_stereotype": 0.6789473684210526, "pct_stereotype_stderr": 0.03396059335824887},
    "crows_pairs_french_age": {"likelihood_difference": 3.609722222222222, "likelihood_difference_stderr": 0.3615396777446631, "pct_stereotype": 0.4222222222222222, "pct_stereotype_stderr": 0.05235473399540658},
    "crows_pairs_french_autre": {"likelihood_difference": 2.6923076923076925, "likelihood_difference_stderr": 0.9229767573858816, "pct_stereotype": 0.6153846153846154, "pct_stereotype_stderr": 0.14044168141158106},
    "crows_pairs_french_disability": {"likelihood_difference": 4.958333333333333, "likelihood_difference_stderr": 0.42689940749326905, "pct_stereotype": 0.6363636363636364, "pct_stereotype_stderr": 0.05966637484671758},
    "crows_pairs_french_gender": {"likelihood_difference": 3.213785046728972, "likelihood_difference_stderr": 0.17588386956758606, "pct_stereotype": 0.5109034267912772, "pct_stereotype_stderr": 0.027944203070818633},
    "crows_pairs_french_nationality": {"likelihood_difference": 3.883399209486166, "likelihood_difference_stderr": 0.21844567559439967, "pct_stereotype": 0.3438735177865613, "pct_stereotype_stderr": 0.029922155720849428},
    "crows_pairs_french_physical_appearance": {"likelihood_difference": 3.2274305555555554, "likelihood_difference_stderr": 0.4328667471914375, "pct_stereotype": 0.5972222222222222, "pct_stereotype_stderr": 0.05820650942569533},
    "crows_pairs_french_race_color": {"likelihood_difference": 3.161413043478261, "likelihood_difference_stderr": 0.16557903974411925, "pct_stereotype": 0.4369565217391304, "pct_stereotype_stderr": 0.023151745316873387},
    "crows_pairs_french_religion": {"likelihood_difference": 3.5673913043478263, "likelihood_difference_stderr": 0.3368331015818195, "pct_stereotype": 0.6260869565217392, "pct_stereotype_stderr": 0.045315858286449635},
    "crows_pairs_french_sexual_orientation": {"likelihood_difference": 4.791208791208791, "likelihood_difference_stderr": 0.4009539855629619, "pct_stereotype": 0.7802197802197802, "pct_stereotype_stderr": 0.043649726328985346},
    "crows_pairs_french_socioeconomic": {"likelihood_difference": 3.9939413265306123, "likelihood_difference_stderr": 0.263093158126228, "pct_stereotype": 0.6581632653061225, "pct_stereotype_stderr": 0.033967132039868675},
    "ethics_cm": {"acc": 0.5459459459459459, "acc_stderr": 0.007988936899457039},
    "ethics_deontology": {"acc": 0.5025027808676307, "acc_stderr": 0.008339021933755771, "em": 0.004449388209121246},
    "ethics_justice": {"acc": 0.5196005917159763, "acc_stderr": 0.009609770755397009, "em": 0.011834319526627219},
    "ethics_utilitarianism": {"acc": 0.5748752079866889, "acc_stderr": 0.007130302336230959},
    "ethics_utilitarianism_original": {"acc": 0.9956322795341098, "acc_stderr": 0.000951129914345755},
    "ethics_virtue": {"acc": 0.8040201005025126, "acc_stderr": 0.005628417801676332, "em": 0.12562814070351758},
    "toxigen": {"acc": 0.4319148936170213, "acc_stderr": 0.016164899004911828, "acc_norm": 0.4319148936170213, "acc_norm_stderr": 0.016164899004911828}
  },
  "versions": {
    "crows_pairs_english_age": 0,
    "crows_pairs_english_autre": 0,
    "crows_pairs_english_disability": 0,
    "crows_pairs_english_gender": 0,
    "crows_pairs_english_nationality": 0,
    "crows_pairs_english_physical_appearance": 0,
    "crows_pairs_english_race_color": 0,
    "crows_pairs_english_religion": 0,
    "crows_pairs_english_sexual_orientation": 0,
    "crows_pairs_english_socioeconomic": 0,
    "crows_pairs_french_age": 0,
    "crows_pairs_french_autre": 0,
    "crows_pairs_french_disability": 0,
    "crows_pairs_french_gender": 0,
    "crows_pairs_french_nationality": 0,
    "crows_pairs_french_physical_appearance": 0,
    "crows_pairs_french_race_color": 0,
    "crows_pairs_french_religion": 0,
    "crows_pairs_french_sexual_orientation": 0,
    "crows_pairs_french_socioeconomic": 0,
    "ethics_cm": 0,
    "ethics_deontology": 0,
    "ethics_justice": 0,
    "ethics_utilitarianism": 0,
    "ethics_utilitarianism_original": 0,
    "ethics_virtue": 0,
    "toxigen": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/mpt/mpt-7b/mpt-7b_lambada_0-shot.json (deleted, 100644 → 0)
{
  "results": {
    "lambada_openai": {"ppl": 3.8685393479651173, "ppl_stderr": 0.08079962928213236, "acc": 0.6834853483407723, "acc_stderr": 0.006479978824925181},
    "lambada_openai_cloze": {"ppl": 26.557641771948866, "ppl_stderr": 0.7040059319391023, "acc": 0.3964680768484378, "acc_stderr": 0.006815007030417622},
    "lambada_openai_mt_de": {"ppl": 70.11746381055114, "ppl_stderr": 4.044349022196945, "acc": 0.33766737822627596, "acc_stderr": 0.006588623616680426},
    "lambada_openai_mt_en": {"ppl": 3.8685393479651173, "ppl_stderr": 0.08079962928213236, "acc": 0.6834853483407723, "acc_stderr": 0.006479978824925181},
    "lambada_openai_mt_es": {"ppl": 67.22873557270539, "ppl_stderr": 3.6853291413042055, "acc": 0.3694934989326606, "acc_stderr": 0.006724504027913079},
    "lambada_openai_mt_fr": {"ppl": 42.93424455960906, "ppl_stderr": 2.3689294613751506, "acc": 0.4302348146710654, "acc_stderr": 0.006897835015074962},
    "lambada_openai_mt_it": {"ppl": 65.76277239773485, "ppl_stderr": 3.8678074045404967, "acc": 0.392004657481079, "acc_stderr": 0.006801548708056975},
    "lambada_standard": {"ppl": 4.922814417244969, "ppl_stderr": 0.10844800336124351, "acc": 0.619056860081506, "acc_stderr": 0.006765617200231529},
    "lambada_standard_cloze": {"ppl": 109.09886744922697, "ppl_stderr": 3.0351063735026287, "acc": 0.1674752571317679, "acc_stderr": 0.005202187177767864}
  },
  "versions": {
    "lambada_openai": 0,
    "lambada_openai_cloze": 0,
    "lambada_openai_mt_de": 0,
    "lambada_openai_mt_en": 0,
    "lambada_openai_mt_es": 0,
    "lambada_openai_mt_fr": 0,
    "lambada_openai_mt_it": 0,
    "lambada_standard": 0,
    "lambada_standard_cloze": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=mosaicml/mpt-7b,trust_remote_code=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
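The LAMBADA file mixes the English set with machine-translated variants, so it helps to line up the per-language numbers side by side; lower perplexity should broadly track higher accuracy. A minimal sketch under the same assumptions as above (standard library only, illustrative file path):

```python
# Minimal sketch: per-language view of the multilingual LAMBADA (OpenAI, MT)
# entries in the results file above. The file path is illustrative.
import json

with open("mpt-7b_lambada_0-shot.json") as f:
    results = json.load(f)["results"]

print(f"{'task':<24} {'ppl':>10} {'acc':>8}")
for task in sorted(t for t in results if t.startswith("lambada_openai_mt_")):
    m = results[task]
    print(f"{task:<24} {m['ppl']:>10.2f} {m['acc']:>8.3f}")
```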