gaoqiong / lm-evaluation-harness
Commit 4a0b0d6e, authored Jun 16, 2023 by lintangsutawika
Merge branch 'gakada-big-refactor-merge' into big-refactor
Parents: 6ae376e3, c490f165
Changes: 230 files changed in this commit; showing 20 changed files with 2548 additions and 0 deletions (+2548, -0).
results/llama/llama-30B/llama-30B_pawsx_0-shot.json (+52, -0)
results/llama/llama-30B/llama-30B_xcopa_0-shot.json (+72, -0)
results/llama/llama-30B/llama-30B_xnli_0-shot.json (+92, -0)
results/llama/llama-30B/llama-30B_xstory_cloze_0-shot.json (+72, -0)
results/llama/llama-30B/llama-30B_xwinograd_0-shot.json (+47, -0)
results/llama/llama-7B/README.md (+501, -0)
results/llama/llama-7B/llama-7B_anli_0-shot.json (+32, -0)
results/llama/llama-7B/llama-7B_arithmetic_5-shot.json (+67, -0)
results/llama/llama-7B/llama-7B_bbh_3-shot.json (+124, -0)
results/llama/llama-7B/llama-7B_blimp_0-shot.json (+352, -0)
results/llama/llama-7B/llama-7B_common_sense_reasoning_0-shot.json (+91, -0)
results/llama/llama-7B/llama-7B_glue_0-shot.json (+66, -0)
results/llama/llama-7B/llama-7B_gsm8k_8-shot.json (+22, -0)
results/llama/llama-7B/llama-7B_human_alignment_0-shot.json (+197, -0)
results/llama/llama-7B/llama-7B_lambada_0-shot.json (+80, -0)
results/llama/llama-7B/llama-7B_mathematical_reasoning_0-shot.json (+76, -0)
results/llama/llama-7B/llama-7B_mathematical_reasoning_few_shot_5-shot.json (+71, -0)
results/llama/llama-7B/llama-7B_mmlu_5-shot.json (+416, -0)
results/llama/llama-7B/llama-7B_pawsx_0-shot.json (+52, -0)
results/llama/llama-7B/llama-7B_question_answering_0-shot.json (+66, -0)
results/llama/llama-30B/llama-30B_pawsx_0-shot.json (new file, mode 100644)
{
  "results": {
    "pawsx_fr": {"acc": 0.5285, "acc_stderr": 0.01116495423642881},
    "pawsx_de": {"acc": 0.582, "acc_stderr": 0.011031720148042082},
    "pawsx_en": {"acc": 0.5875, "acc_stderr": 0.011010562712487564},
    "pawsx_ja": {"acc": 0.4675, "acc_stderr": 0.011159486640120933},
    "pawsx_ko": {"acc": 0.457, "acc_stderr": 0.011141704034140798},
    "pawsx_zh": {"acc": 0.459, "acc_stderr": 0.011145474902641256},
    "pawsx_es": {"acc": 0.558, "acc_stderr": 0.011107641056719623}
  },
  "versions": {"pawsx_fr": 0, "pawsx_de": 0, "pawsx_en": 0, "pawsx_ja": 0, "pawsx_ko": 0, "pawsx_zh": 0, "pawsx_es": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
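Note: each result file records, in its `config` block, the harness settings used for the run. Below is a minimal reproduction sketch for the file above, assuming the pre-refactor `lm_eval.evaluator.simple_evaluate` API whose keyword arguments these config fields mirror; the exact signature may differ on the big-refactor branch, and the `pretrained=` path is the original authors' local checkpoint location.

```python
# Sketch only: argument names are copied from the "config" block above;
# the simple_evaluate signature may differ between harness versions.
import json

from lm_eval import evaluator  # assumes the pre-refactor lm-evaluation-harness API

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    tasks=["pawsx_fr", "pawsx_de", "pawsx_en", "pawsx_ja", "pawsx_ko", "pawsx_zh", "pawsx_es"],
    num_fewshot=0,
    batch_size="auto",
    device="cuda",
    no_cache=True,
    limit=None,
    bootstrap_iters=100000,
)

# Write the returned dict ("results", "versions", "config") in the same layout
# as the committed JSON files.
with open("results/llama/llama-30B/llama-30B_pawsx_0-shot.json", "w") as f:
    json.dump(results, f, indent=2)
```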
results/llama/llama-30B/llama-30B_xcopa_0-shot.json (new file, mode 100644)
{
  "results": {
    "xcopa_vi": {"acc": 0.524, "acc_stderr": 0.0223572738810164},
    "xcopa_id": {"acc": 0.606, "acc_stderr": 0.021874299301689253},
    "xcopa_it": {"acc": 0.714, "acc_stderr": 0.020229346329177524},
    "xcopa_ht": {"acc": 0.518, "acc_stderr": 0.02236856511738799},
    "xcopa_zh": {"acc": 0.622, "acc_stderr": 0.021706550824518184},
    "xcopa_sw": {"acc": 0.524, "acc_stderr": 0.0223572738810164},
    "xcopa_et": {"acc": 0.472, "acc_stderr": 0.02234794983266809},
    "xcopa_qu": {"acc": 0.494, "acc_stderr": 0.022381462412439324},
    "xcopa_th": {"acc": 0.546, "acc_stderr": 0.02228814759117695},
    "xcopa_tr": {"acc": 0.522, "acc_stderr": 0.022361396739207888},
    "xcopa_ta": {"acc": 0.532, "acc_stderr": 0.022337186479044292}
  },
  "versions": {"xcopa_vi": 0, "xcopa_id": 0, "xcopa_it": 0, "xcopa_ht": 0, "xcopa_zh": 0, "xcopa_sw": 0, "xcopa_et": 0, "xcopa_qu": 0, "xcopa_th": 0, "xcopa_tr": 0, "xcopa_ta": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-30B/llama-30B_xnli_0-shot.json (new file, mode 100644)
{
  "results": {
    "xnli_ar": {"acc": 0.3449101796407186, "acc_stderr": 0.006716266425755},
    "xnli_bg": {"acc": 0.3852295409181637, "acc_stderr": 0.006876077627982856},
    "xnli_de": {"acc": 0.43872255489021955, "acc_stderr": 0.007011456767132425},
    "xnli_el": {"acc": 0.34910179640718564, "acc_stderr": 0.00673530182747736},
    "xnli_en": {"acc": 0.4818363273453094, "acc_stderr": 0.007060049324579861},
    "xnli_es": {"acc": 0.4023952095808383, "acc_stderr": 0.006928798318208028},
    "xnli_fr": {"acc": 0.4295409181636727, "acc_stderr": 0.006994215414803201},
    "xnli_hi": {"acc": 0.3646706586826347, "acc_stderr": 0.0068010246867294885},
    "xnli_ru": {"acc": 0.3812375249500998, "acc_stderr": 0.0068625305186053856},
    "xnli_sw": {"acc": 0.3409181636726547, "acc_stderr": 0.006697600297167045},
    "xnli_th": {"acc": 0.3397205588822355, "acc_stderr": 0.0066918975980483925},
    "xnli_tr": {"acc": 0.3652694610778443, "acc_stderr": 0.00680339776716209},
    "xnli_ur": {"acc": 0.34311377245508984, "acc_stderr": 0.006707931789556032},
    "xnli_vi": {"acc": 0.356686626746507, "acc_stderr": 0.0067682935643592285},
    "xnli_zh": {"acc": 0.33512974051896205, "acc_stderr": 0.006669594382503632}
  },
  "versions": {"xnli_ar": 0, "xnli_bg": 0, "xnli_de": 0, "xnli_el": 0, "xnli_en": 0, "xnli_es": 0, "xnli_fr": 0, "xnli_hi": 0, "xnli_ru": 0, "xnli_sw": 0, "xnli_th": 0, "xnli_tr": 0, "xnli_ur": 0, "xnli_vi": 0, "xnli_zh": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-30B/llama-30B_xstory_cloze_0-shot.json (new file, mode 100644)
{
  "results": {
    "xstory_cloze_en": {"acc": 0.7816015883520847, "acc_stderr": 0.010632343054700505},
    "xstory_cloze_te": {"acc": 0.5320979483785573, "acc_stderr": 0.012840584503982028},
    "xstory_cloze_zh": {"acc": 0.585704831237591, "acc_stderr": 0.012676689821720669},
    "xstory_cloze_id": {"acc": 0.5923229649238915, "acc_stderr": 0.01264587648804028},
    "xstory_cloze_ar": {"acc": 0.5089344804765056, "acc_stderr": 0.012865070917320802},
    "xstory_cloze_my": {"acc": 0.4877564526803441, "acc_stderr": 0.012863267059205548},
    "xstory_cloze_hi": {"acc": 0.5665122435473197, "acc_stderr": 0.012752771973917616},
    "xstory_cloze_eu": {"acc": 0.513567174056916, "acc_stderr": 0.012862387586650079},
    "xstory_cloze_sw": {"acc": 0.5062872270019855, "acc_stderr": 0.01286610802121821},
    "xstory_cloze_es": {"acc": 0.7081403044341495, "acc_stderr": 0.011699256037649366},
    "xstory_cloze_ru": {"acc": 0.6671078755790867, "acc_stderr": 0.012127221798743731}
  },
  "versions": {"xstory_cloze_en": 0, "xstory_cloze_te": 0, "xstory_cloze_zh": 0, "xstory_cloze_id": 0, "xstory_cloze_ar": 0, "xstory_cloze_my": 0, "xstory_cloze_hi": 0, "xstory_cloze_eu": 0, "xstory_cloze_sw": 0, "xstory_cloze_es": 0, "xstory_cloze_ru": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-30B/llama-30B_xwinograd_0-shot.json (new file, mode 100644)
{
  "results": {
    "xwinograd_en": {"acc": 0.873978494623656, "acc_stderr": 0.006884218449880497},
    "xwinograd_ru": {"acc": 0.6698412698412698, "acc_stderr": 0.026538875646287704},
    "xwinograd_jp": {"acc": 0.6736183524504692, "acc_stderr": 0.015149108150588548},
    "xwinograd_pt": {"acc": 0.7680608365019012, "acc_stderr": 0.02607559386030469},
    "xwinograd_zh": {"acc": 0.7123015873015873, "acc_stderr": 0.02018443961183448},
    "xwinograd_fr": {"acc": 0.7349397590361446, "acc_stderr": 0.04874064133109369}
  },
  "versions": {"xwinograd_en": 0, "xwinograd_ru": 0, "xwinograd_jp": 0, "xwinograd_pt": 0, "xwinograd_zh": 0, "xwinograd_fr": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/README.md (new file, mode 100644)
# llama-7B
## llama-7B_anli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|anli_r1| 0|acc |34.80|± | 1.51|
|anli_r2| 0|acc |33.70|± | 1.50|
|anli_r3| 0|acc |36.58|± | 1.39|
## llama-7B_arithmetic_5-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------------|------:|------|----:|---|-----:|
|arithmetic_1dc| 0|acc | 0|± | 0|
|arithmetic_2da| 0|acc | 0|± | 0|
|arithmetic_2dm| 0|acc | 0|± | 0|
|arithmetic_2ds| 0|acc | 0|± | 0|
|arithmetic_3da| 0|acc | 0|± | 0|
|arithmetic_3ds| 0|acc | 0|± | 0|
|arithmetic_4da| 0|acc | 0|± | 0|
|arithmetic_4ds| 0|acc | 0|± | 0|
|arithmetic_5da| 0|acc | 0|± | 0|
|arithmetic_5ds| 0|acc | 0|± | 0|
## llama-7B_bbh_3-shot.json
| Task |Version| Metric |Value| |Stderr|
|------------------------------------------------|------:|---------------------|----:|---|-----:|
|bigbench_causal_judgement | 0|multiple_choice_grade|48.42|± | 3.64|
|bigbench_date_understanding | 0|multiple_choice_grade|62.06|± | 2.53|
|bigbench_disambiguation_qa | 0|multiple_choice_grade|35.27|± | 2.98|
|bigbench_dyck_languages | 0|multiple_choice_grade|15.40|± | 1.14|
|bigbench_formal_fallacies_syllogisms_negation | 0|multiple_choice_grade|51.35|± | 0.42|
|bigbench_geometric_shapes | 0|multiple_choice_grade|17.83|± | 2.02|
| | |exact_str_match | 0.00|± | 0.00|
|bigbench_hyperbaton | 0|multiple_choice_grade|49.51|± | 0.22|
|bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|29.00|± | 2.03|
|bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|24.57|± | 1.63|
|bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|39.33|± | 2.83|
|bigbench_movie_recommendation | 0|multiple_choice_grade|40.40|± | 2.20|
|bigbench_navigate | 0|multiple_choice_grade|49.50|± | 1.58|
|bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|34.60|± | 1.06|
|bigbench_ruin_names | 0|multiple_choice_grade|29.91|± | 2.17|
|bigbench_salient_translation_error_detection | 0|multiple_choice_grade|16.53|± | 1.18|
|bigbench_snarks | 0|multiple_choice_grade|50.83|± | 3.73|
|bigbench_sports_understanding | 0|multiple_choice_grade|50.00|± | 1.59|
|bigbench_temporal_sequences | 0|multiple_choice_grade|27.20|± | 1.41|
|bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|18.24|± | 1.09|
|bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|13.71|± | 0.82|
|bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|39.33|± | 2.83|
## llama-7B_blimp_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------------------------------------------------|------:|------|----:|---|-----:|
|blimp_adjunct_island | 0|acc | 53.9|± | 1.58|
|blimp_anaphor_gender_agreement | 0|acc | 44.8|± | 1.57|
|blimp_anaphor_number_agreement | 0|acc | 65.9|± | 1.50|
|blimp_animate_subject_passive | 0|acc | 62.6|± | 1.53|
|blimp_animate_subject_trans | 0|acc | 76.1|± | 1.35|
|blimp_causative | 0|acc | 50.8|± | 1.58|
|blimp_complex_NP_island | 0|acc | 41.6|± | 1.56|
|blimp_coordinate_structure_constraint_complex_left_branch| 0|acc | 68.2|± | 1.47|
|blimp_coordinate_structure_constraint_object_extraction | 0|acc | 62.9|± | 1.53|
|blimp_determiner_noun_agreement_1 | 0|acc | 63.6|± | 1.52|
|blimp_determiner_noun_agreement_2 | 0|acc | 59.8|± | 1.55|
|blimp_determiner_noun_agreement_irregular_1 | 0|acc | 57.2|± | 1.57|
|blimp_determiner_noun_agreement_irregular_2 | 0|acc | 60.2|± | 1.55|
|blimp_determiner_noun_agreement_with_adj_2 | 0|acc | 54.0|± | 1.58|
|blimp_determiner_noun_agreement_with_adj_irregular_1 | 0|acc | 56.3|± | 1.57|
|blimp_determiner_noun_agreement_with_adj_irregular_2 | 0|acc | 59.1|± | 1.56|
|blimp_determiner_noun_agreement_with_adjective_1 | 0|acc | 57.7|± | 1.56|
|blimp_distractor_agreement_relational_noun | 0|acc | 44.1|± | 1.57|
|blimp_distractor_agreement_relative_clause | 0|acc | 31.4|± | 1.47|
|blimp_drop_argument | 0|acc | 70.1|± | 1.45|
|blimp_ellipsis_n_bar_1 | 0|acc | 66.8|± | 1.49|
|blimp_ellipsis_n_bar_2 | 0|acc | 79.4|± | 1.28|
|blimp_existential_there_object_raising | 0|acc | 78.8|± | 1.29|
|blimp_existential_there_quantifiers_1 | 0|acc | 68.3|± | 1.47|
|blimp_existential_there_quantifiers_2 | 0|acc | 67.4|± | 1.48|
|blimp_existential_there_subject_raising | 0|acc | 69.6|± | 1.46|
|blimp_expletive_it_object_raising | 0|acc | 65.9|± | 1.50|
|blimp_inchoative | 0|acc | 42.0|± | 1.56|
|blimp_intransitive | 0|acc | 59.2|± | 1.55|
|blimp_irregular_past_participle_adjectives | 0|acc | 42.9|± | 1.57|
|blimp_irregular_past_participle_verbs | 0|acc | 72.5|± | 1.41|
|blimp_irregular_plural_subject_verb_agreement_1 | 0|acc | 65.3|± | 1.51|
|blimp_irregular_plural_subject_verb_agreement_2 | 0|acc | 70.0|± | 1.45|
|blimp_left_branch_island_echo_question | 0|acc | 83.5|± | 1.17|
|blimp_left_branch_island_simple_question | 0|acc | 74.0|± | 1.39|
|blimp_matrix_question_npi_licensor_present | 0|acc | 11.7|± | 1.02|
|blimp_npi_present_1 | 0|acc | 53.4|± | 1.58|
|blimp_npi_present_2 | 0|acc | 53.0|± | 1.58|
|blimp_only_npi_licensor_present | 0|acc | 81.4|± | 1.23|
|blimp_only_npi_scope | 0|acc | 26.6|± | 1.40|
|blimp_passive_1 | 0|acc | 70.2|± | 1.45|
|blimp_passive_2 | 0|acc | 70.3|± | 1.45|
|blimp_principle_A_c_command | 0|acc | 39.0|± | 1.54|
|blimp_principle_A_case_1 | 0|acc | 98.5|± | 0.38|
|blimp_principle_A_case_2 | 0|acc | 55.4|± | 1.57|
|blimp_principle_A_domain_1 | 0|acc | 96.2|± | 0.60|
|blimp_principle_A_domain_2 | 0|acc | 64.6|± | 1.51|
|blimp_principle_A_domain_3 | 0|acc | 50.1|± | 1.58|
|blimp_principle_A_reconstruction | 0|acc | 67.3|± | 1.48|
|blimp_regular_plural_subject_verb_agreement_1 | 0|acc | 64.5|± | 1.51|
|blimp_regular_plural_subject_verb_agreement_2 | 0|acc | 70.5|± | 1.44|
|blimp_sentential_negation_npi_licensor_present | 0|acc | 94.0|± | 0.75|
|blimp_sentential_negation_npi_scope | 0|acc | 58.8|± | 1.56|
|blimp_sentential_subject_island | 0|acc | 60.6|± | 1.55|
|blimp_superlative_quantifiers_1 | 0|acc | 61.2|± | 1.54|
|blimp_superlative_quantifiers_2 | 0|acc | 56.1|± | 1.57|
|blimp_tough_vs_raising_1 | 0|acc | 29.8|± | 1.45|
|blimp_tough_vs_raising_2 | 0|acc | 76.8|± | 1.34|
|blimp_transitive | 0|acc | 69.8|± | 1.45|
|blimp_wh_island | 0|acc | 27.5|± | 1.41|
|blimp_wh_questions_object_gap | 0|acc | 67.0|± | 1.49|
|blimp_wh_questions_subject_gap | 0|acc | 72.0|± | 1.42|
|blimp_wh_questions_subject_gap_long_distance | 0|acc | 74.6|± | 1.38|
|blimp_wh_vs_that_no_gap | 0|acc | 84.8|± | 1.14|
|blimp_wh_vs_that_no_gap_long_distance | 0|acc | 81.2|± | 1.24|
|blimp_wh_vs_that_with_gap | 0|acc | 23.9|± | 1.35|
|blimp_wh_vs_that_with_gap_long_distance | 0|acc | 22.7|± | 1.33|
## llama-7B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |38.23|± | 1.42|
| | |acc_norm|41.38|± | 1.44|
|arc_easy | 0|acc |67.38|± | 0.96|
| | |acc_norm|52.48|± | 1.02|
|boolq | 1|acc |73.06|± | 0.78|
|copa | 0|acc |84.00|± | 3.68|
|hellaswag | 0|acc |56.39|± | 0.49|
| | |acc_norm|72.98|± | 0.44|
|mc_taco | 0|em |11.26| | |
| | |f1 |48.27| | |
|openbookqa | 0|acc |28.20|± | 2.01|
| | |acc_norm|42.40|± | 2.21|
|piqa | 0|acc |78.18|± | 0.96|
| | |acc_norm|77.42|± | 0.98|
|prost | 0|acc |25.69|± | 0.32|
| | |acc_norm|28.03|± | 0.33|
|swag | 0|acc |55.47|± | 0.35|
| | |acc_norm|66.87|± | 0.33|
|winogrande | 0|acc |66.93|± | 1.32|
|wsc273 | 0|acc |80.95|± | 2.38|
## llama-7B_glue_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|cola | 0|mcc | 0.00|± | 0.00|
|mnli | 0|acc |34.40|± | 0.48|
|mnli_mismatched| 0|acc |35.72|± | 0.48|
|mrpc | 0|acc |68.38|± | 2.30|
| | |f1 |81.22|± | 1.62|
|qnli | 0|acc |49.57|± | 0.68|
|qqp | 0|acc |36.84|± | 0.24|
| | |f1 |53.81|± | 0.26|
|rte | 0|acc |53.07|± | 3.00|
|sst | 0|acc |52.98|± | 1.69|
|wnli | 1|acc |56.34|± | 5.93|
## llama-7B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 8.04|± | 0.75|
## llama-7B_human_alignment_0-shot.json
| Task |Version| Metric | Value | |Stderr|
|---------------------------------------|------:|---------------------|------:|---|-----:|
|crows_pairs_english_age | 0|likelihood_difference| 594.23|± | 79.03|
| | |pct_stereotype | 51.65|± | 5.27|
|crows_pairs_english_autre | 0|likelihood_difference|1101.14|± |589.08|
| | |pct_stereotype | 45.45|± | 15.75|
|crows_pairs_english_disability | 0|likelihood_difference| 966.97|± |113.86|
| | |pct_stereotype | 66.15|± | 5.91|
|crows_pairs_english_gender | 0|likelihood_difference| 791.74|± | 55.02|
| | |pct_stereotype | 53.12|± | 2.79|
|crows_pairs_english_nationality | 0|likelihood_difference| 676.26|± | 58.69|
| | |pct_stereotype | 53.70|± | 3.40|
|crows_pairs_english_physical_appearance| 0|likelihood_difference| 451.26|± | 69.32|
| | |pct_stereotype | 50.00|± | 5.93|
|crows_pairs_english_race_color | 0|likelihood_difference| 624.65|± | 32.39|
| | |pct_stereotype | 46.65|± | 2.22|
|crows_pairs_english_religion | 0|likelihood_difference| 721.96|± | 75.92|
| | |pct_stereotype | 66.67|± | 4.49|
|crows_pairs_english_sexual_orientation | 0|likelihood_difference| 830.48|± | 84.28|
| | |pct_stereotype | 62.37|± | 5.05|
|crows_pairs_english_socioeconomic | 0|likelihood_difference| 640.16|± | 54.20|
| | |pct_stereotype | 56.84|± | 3.60|
|crows_pairs_french_age | 0|likelihood_difference|1193.96|± |153.77|
| | |pct_stereotype | 35.56|± | 5.07|
|crows_pairs_french_autre | 0|likelihood_difference| 751.20|± |209.58|
| | |pct_stereotype | 61.54|± | 14.04|
|crows_pairs_french_disability | 0|likelihood_difference|1014.77|± |139.07|
| | |pct_stereotype | 42.42|± | 6.13|
|crows_pairs_french_gender | 0|likelihood_difference|1179.90|± | 87.14|
| | |pct_stereotype | 52.02|± | 2.79|
|crows_pairs_french_nationality | 0|likelihood_difference|1041.65|± | 90.66|
| | |pct_stereotype | 40.71|± | 3.09|
|crows_pairs_french_physical_appearance | 0|likelihood_difference| 704.51|± | 94.84|
| | |pct_stereotype | 55.56|± | 5.90|
|crows_pairs_french_race_color | 0|likelihood_difference|1204.89|± | 73.32|
| | |pct_stereotype | 43.26|± | 2.31|
|crows_pairs_french_religion | 0|likelihood_difference| 958.53|± | 87.50|
| | |pct_stereotype | 43.48|± | 4.64|
|crows_pairs_french_sexual_orientation | 0|likelihood_difference| 760.58|± | 79.39|
| | |pct_stereotype | 67.03|± | 4.96|
|crows_pairs_french_socioeconomic | 0|likelihood_difference| 980.84|± |101.51|
| | |pct_stereotype | 52.04|± | 3.58|
|ethics_cm | 0|acc | 56.91|± | 0.79|
|ethics_deontology | 0|acc | 50.58|± | 0.83|
| | |em | 0.22| | |
|ethics_justice | 0|acc | 49.96|± | 0.96|
| | |em | 0.15| | |
|ethics_utilitarianism | 0|acc | 49.81|± | 0.72|
|ethics_utilitarianism_original | 0|acc | 95.86|± | 0.29|
|ethics_virtue | 0|acc | 20.98|± | 0.58|
| | |em | 0.00| | |
|toxigen | 0|acc | 43.09|± | 1.62|
| | |acc_norm | 43.19|± | 1.62|
## llama-7B_lambada_0-shot.json
| Task |Version|Metric| Value | | Stderr |
|----------------------|------:|------|---------:|---|--------:|
|lambada_openai | 0|ppl |2817465.09|± |138319.09|
| | |acc | 0.00|± | 0.00|
|lambada_openai_cloze | 0|ppl | 255777.71|± | 11345.77|
| | |acc | 0.04|± | 0.03|
|lambada_openai_mt_de | 0|ppl |1805613.68|± | 97892.79|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_en | 0|ppl |2817465.09|± |138319.09|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_es | 0|ppl |3818890.45|± |197999.05|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_fr | 0|ppl |2111186.12|± |111724.43|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_it | 0|ppl |3653680.57|± |197082.99|
| | |acc | 0.00|± | 0.00|
|lambada_standard | 0|ppl |2460346.86|± | 81216.57|
| | |acc | 0.00|± | 0.00|
|lambada_standard_cloze| 0|ppl |6710057.24|± |169833.91|
| | |acc | 0.00|± | 0.00|
## llama-7B_mathematical_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 4.27|± | 0.21|
| | |f1 |12.16|± | 0.25|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 1.68|± | 0.37|
|math_asdiv | 0|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 1.69|± | 0.59|
|math_geometry | 1|acc | 0.84|± | 0.42|
|math_intermediate_algebra| 1|acc | 0.66|± | 0.27|
|math_num_theory | 1|acc | 0.74|± | 0.37|
|math_prealgebra | 1|acc | 1.26|± | 0.38|
|math_precalc | 1|acc | 0.37|± | 0.26|
|mathqa | 0|acc |26.77|± | 0.81|
| | |acc_norm|27.87|± | 0.82|
## llama-7B_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 1.24|± | 0.11|
| | |f1 | 2.10|± | 0.13|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.00|± | 0.00|
|math_prealgebra | 1|acc | 0.11|± | 0.11|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |28.21|± | 0.82|
| | |acc_norm|28.78|± | 0.83|
## llama-7B_mmlu_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-anatomy | 0|acc |38.52|± | 4.20|
| | |acc_norm|28.15|± | 3.89|
|hendrycksTest-astronomy | 0|acc |45.39|± | 4.05|
| | |acc_norm|46.05|± | 4.06|
|hendrycksTest-business_ethics | 0|acc |53.00|± | 5.02|
| | |acc_norm|46.00|± | 5.01|
|hendrycksTest-clinical_knowledge | 0|acc |38.87|± | 3.00|
| | |acc_norm|38.11|± | 2.99|
|hendrycksTest-college_biology | 0|acc |31.94|± | 3.90|
| | |acc_norm|29.17|± | 3.80|
|hendrycksTest-college_chemistry | 0|acc |33.00|± | 4.73|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_computer_science | 0|acc |33.00|± | 4.73|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_mathematics | 0|acc |32.00|± | 4.69|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-college_medicine | 0|acc |37.57|± | 3.69|
| | |acc_norm|30.64|± | 3.51|
|hendrycksTest-college_physics | 0|acc |23.53|± | 4.22|
| | |acc_norm|32.35|± | 4.66|
|hendrycksTest-computer_security | 0|acc |37.00|± | 4.85|
| | |acc_norm|44.00|± | 4.99|
|hendrycksTest-conceptual_physics | 0|acc |32.77|± | 3.07|
| | |acc_norm|21.70|± | 2.69|
|hendrycksTest-econometrics | 0|acc |28.95|± | 4.27|
| | |acc_norm|26.32|± | 4.14|
|hendrycksTest-electrical_engineering | 0|acc |35.86|± | 4.00|
| | |acc_norm|32.41|± | 3.90|
|hendrycksTest-elementary_mathematics | 0|acc |32.01|± | 2.40|
| | |acc_norm|29.10|± | 2.34|
|hendrycksTest-formal_logic | 0|acc |30.95|± | 4.13|
| | |acc_norm|34.92|± | 4.26|
|hendrycksTest-global_facts | 0|acc |32.00|± | 4.69|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-high_school_biology | 0|acc |35.81|± | 2.73|
| | |acc_norm|35.81|± | 2.73|
|hendrycksTest-high_school_chemistry | 0|acc |25.12|± | 3.05|
| | |acc_norm|29.56|± | 3.21|
|hendrycksTest-high_school_computer_science | 0|acc |41.00|± | 4.94|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-high_school_european_history | 0|acc |40.61|± | 3.83|
| | |acc_norm|36.97|± | 3.77|
|hendrycksTest-high_school_geography | 0|acc |42.93|± | 3.53|
| | |acc_norm|36.36|± | 3.43|
|hendrycksTest-high_school_government_and_politics| 0|acc |48.19|± | 3.61|
| | |acc_norm|37.31|± | 3.49|
|hendrycksTest-high_school_macroeconomics | 0|acc |31.79|± | 2.36|
| | |acc_norm|30.26|± | 2.33|
|hendrycksTest-high_school_mathematics | 0|acc |22.59|± | 2.55|
| | |acc_norm|30.74|± | 2.81|
|hendrycksTest-high_school_microeconomics | 0|acc |38.66|± | 3.16|
| | |acc_norm|36.55|± | 3.13|
|hendrycksTest-high_school_physics | 0|acc |20.53|± | 3.30|
| | |acc_norm|27.15|± | 3.63|
|hendrycksTest-high_school_psychology | 0|acc |46.61|± | 2.14|
| | |acc_norm|30.83|± | 1.98|
|hendrycksTest-high_school_statistics | 0|acc |34.26|± | 3.24|
| | |acc_norm|34.26|± | 3.24|
|hendrycksTest-high_school_us_history | 0|acc |42.65|± | 3.47|
| | |acc_norm|31.37|± | 3.26|
|hendrycksTest-high_school_world_history | 0|acc |39.24|± | 3.18|
| | |acc_norm|33.76|± | 3.08|
|hendrycksTest-human_aging | 0|acc |37.22|± | 3.24|
| | |acc_norm|25.11|± | 2.91|
|hendrycksTest-human_sexuality | 0|acc |51.15|± | 4.38|
| | |acc_norm|36.64|± | 4.23|
|hendrycksTest-international_law | 0|acc |38.84|± | 4.45|
| | |acc_norm|57.85|± | 4.51|
|hendrycksTest-jurisprudence | 0|acc |43.52|± | 4.79|
| | |acc_norm|50.00|± | 4.83|
|hendrycksTest-logical_fallacies | 0|acc |38.04|± | 3.81|
| | |acc_norm|34.97|± | 3.75|
|hendrycksTest-machine_learning | 0|acc |30.36|± | 4.36|
| | |acc_norm|26.79|± | 4.20|
|hendrycksTest-management | 0|acc |48.54|± | 4.95|
| | |acc_norm|36.89|± | 4.78|
|hendrycksTest-marketing | 0|acc |61.11|± | 3.19|
| | |acc_norm|50.43|± | 3.28|
|hendrycksTest-medical_genetics | 0|acc |44.00|± | 4.99|
| | |acc_norm|40.00|± | 4.92|
|hendrycksTest-miscellaneous | 0|acc |58.37|± | 1.76|
| | |acc_norm|38.95|± | 1.74|
|hendrycksTest-moral_disputes | 0|acc |36.42|± | 2.59|
| | |acc_norm|33.24|± | 2.54|
|hendrycksTest-moral_scenarios | 0|acc |27.60|± | 1.50|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |39.54|± | 2.80|
| | |acc_norm|43.79|± | 2.84|
|hendrycksTest-philosophy | 0|acc |40.19|± | 2.78|
| | |acc_norm|35.37|± | 2.72|
|hendrycksTest-prehistory | 0|acc |40.12|± | 2.73|
| | |acc_norm|27.78|± | 2.49|
|hendrycksTest-professional_accounting | 0|acc |30.14|± | 2.74|
| | |acc_norm|29.43|± | 2.72|
|hendrycksTest-professional_law | 0|acc |29.66|± | 1.17|
| | |acc_norm|28.55|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |33.82|± | 2.87|
| | |acc_norm|27.94|± | 2.73|
|hendrycksTest-professional_psychology | 0|acc |38.40|± | 1.97|
| | |acc_norm|29.90|± | 1.85|
|hendrycksTest-public_relations | 0|acc |39.09|± | 4.67|
| | |acc_norm|22.73|± | 4.01|
|hendrycksTest-security_studies | 0|acc |40.82|± | 3.15|
| | |acc_norm|31.02|± | 2.96|
|hendrycksTest-sociology | 0|acc |47.76|± | 3.53|
| | |acc_norm|42.79|± | 3.50|
|hendrycksTest-us_foreign_policy | 0|acc |56.00|± | 4.99|
| | |acc_norm|45.00|± | 5.00|
|hendrycksTest-virology | 0|acc |39.76|± | 3.81|
| | |acc_norm|28.92|± | 3.53|
|hendrycksTest-world_religions | 0|acc |62.57|± | 3.71|
| | |acc_norm|51.46|± | 3.83|
## llama-7B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |54.65|± | 1.11|
|pawsx_en| 0|acc |61.85|± | 1.09|
|pawsx_es| 0|acc |56.10|± | 1.11|
|pawsx_fr| 0|acc |52.95|± | 1.12|
|pawsx_ja| 0|acc |56.70|± | 1.11|
|pawsx_ko| 0|acc |49.70|± | 1.12|
|pawsx_zh| 0|acc |49.10|± | 1.12|
## llama-7B_question_answering_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|------------|----:|---|-----:|
|headqa_en | 0|acc |32.42|± | 0.89|
| | |acc_norm |35.92|± | 0.92|
|headqa_es | 0|acc |28.26|± | 0.86|
| | |acc_norm |32.42|± | 0.89|
|logiqa | 0|acc |21.81|± | 1.62|
| | |acc_norm |30.26|± | 1.80|
|squad2 | 1|exact | 9.42| | |
| | |f1 |19.45| | |
| | |HasAns_exact|18.49| | |
| | |HasAns_f1 |38.58| | |
| | |NoAns_exact | 0.37| | |
| | |NoAns_f1 | 0.37| | |
| | |best_exact |50.07| | |
| | |best_f1 |50.08| | |
|triviaqa | 1|acc | 0.00|± | 0.00|
|truthfulqa_mc| 1|mc1 |21.05|± | 1.43|
| | |mc2 |34.14|± | 1.31|
|webqs | 0|acc | 0.00|± | 0.00|
## llama-7B_reading_comprehension_0-shot.json
|Task|Version|Metric|Value| |Stderr|
|----|------:|------|----:|---|-----:|
|coqa| 1|f1 |75.21|± | 1.53|
| | |em |62.67|± | 1.88|
|drop| 1|em | 3.59|± | 0.19|
| | |f1 |11.35|± | 0.23|
|race| 1|acc |39.90|± | 1.52|
## llama-7B_unscramble_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|----------------|------:|------|----:|---|-----:|
|anagrams1 | 0|acc | 0|± | 0|
|anagrams2 | 0|acc | 0|± | 0|
|cycle_letters | 0|acc | 0|± | 0|
|random_insertion| 0|acc | 0|± | 0|
|reversed_words | 0|acc | 0|± | 0|
## llama-7B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 48.8|± | 2.24|
|xcopa_ht| 0|acc | 51.0|± | 2.24|
|xcopa_id| 0|acc | 54.6|± | 2.23|
|xcopa_it| 0|acc | 62.0|± | 2.17|
|xcopa_qu| 0|acc | 51.4|± | 2.24|
|xcopa_sw| 0|acc | 50.8|± | 2.24|
|xcopa_ta| 0|acc | 55.2|± | 2.23|
|xcopa_th| 0|acc | 55.8|± | 2.22|
|xcopa_tr| 0|acc | 55.6|± | 2.22|
|xcopa_vi| 0|acc | 51.6|± | 2.24|
|xcopa_zh| 0|acc | 56.2|± | 2.22|
## llama-7B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.57|± | 0.67|
|xnli_bg| 0|acc |36.99|± | 0.68|
|xnli_de| 0|acc |44.77|± | 0.70|
|xnli_el| 0|acc |34.93|± | 0.67|
|xnli_en| 0|acc |51.06|± | 0.71|
|xnli_es| 0|acc |40.62|± | 0.69|
|xnli_fr| 0|acc |43.75|± | 0.70|
|xnli_hi| 0|acc |36.11|± | 0.68|
|xnli_ru| 0|acc |39.36|± | 0.69|
|xnli_sw| 0|acc |33.71|± | 0.67|
|xnli_th| 0|acc |34.51|± | 0.67|
|xnli_tr| 0|acc |35.59|± | 0.68|
|xnli_ur| 0|acc |33.39|± | 0.67|
|xnli_vi| 0|acc |35.59|± | 0.68|
|xnli_zh| 0|acc |36.23|± | 0.68|
## llama-7B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |48.31|± | 1.29|
|xstory_cloze_en| 0|acc |74.78|± | 1.12|
|xstory_cloze_es| 0|acc |65.12|± | 1.23|
|xstory_cloze_eu| 0|acc |50.10|± | 1.29|
|xstory_cloze_hi| 0|acc |52.68|± | 1.28|
|xstory_cloze_id| 0|acc |52.08|± | 1.29|
|xstory_cloze_my| 0|acc |48.71|± | 1.29|
|xstory_cloze_ru| 0|acc |61.35|± | 1.25|
|xstory_cloze_sw| 0|acc |50.36|± | 1.29|
|xstory_cloze_te| 0|acc |52.88|± | 1.28|
|xstory_cloze_zh| 0|acc |54.33|± | 1.28|
## llama-7B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |84.95|± | 0.74|
|xwinograd_fr| 0|acc |72.29|± | 4.94|
|xwinograd_jp| 0|acc |58.92|± | 1.59|
|xwinograd_pt| 0|acc |70.72|± | 2.81|
|xwinograd_ru| 0|acc |64.44|± | 2.70|
|xwinograd_zh| 0|acc |63.69|± | 2.14|
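The tables in this README are a human-readable rendering of the per-task JSON result files committed alongside it. As an illustration only (not part of this commit), here is a small sketch of how such a table could be regenerated from one of the JSON files; `render_table` is a hypothetical helper, and the uniform ×100 scaling is a simplification that only makes sense for accuracy-style metrics, not for perplexity.

```python
# Illustrative sketch, not part of the commit: turn a harness results JSON
# into a markdown table roughly in the style of this README.
import json


def render_table(path: str) -> str:
    """Render the 'results' block of a harness output file as a markdown table."""
    with open(path) as f:
        data = json.load(f)
    lines = ["|Task|Version|Metric|Value|Stderr|", "|----|------:|------|----:|-----:|"]
    for task, metrics in data["results"].items():
        version = data["versions"].get(task, "")
        for name, value in metrics.items():
            if name.endswith("_stderr"):
                continue  # stderr values are emitted next to their paired metric
            stderr = metrics.get(name + "_stderr")
            stderr_cell = f"{100 * stderr:.2f}" if stderr is not None else ""
            lines.append(f"|{task}|{version}|{name}|{100 * value:.2f}|{stderr_cell}|")
    return "\n".join(lines)


print(render_table("results/llama/llama-7B/llama-7B_anli_0-shot.json"))
```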
results/llama/llama-7B/llama-7B_anli_0-shot.json (new file, mode 100644)
{
  "results": {
    "anli_r1": {"acc": 0.348, "acc_stderr": 0.015070604603768408},
    "anli_r2": {"acc": 0.337, "acc_stderr": 0.014955087918653593},
    "anli_r3": {"acc": 0.36583333333333334, "acc_stderr": 0.01391021206270117}
  },
  "versions": {"anli_r1": 0, "anli_r2": 0, "anli_r3": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_arithmetic_5-shot.json (new file, mode 100644)
{
  "results": {
    "arithmetic_3ds": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_1dc": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_2da": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_4ds": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_3da": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_2ds": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_4da": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_5ds": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_2dm": {"acc": 0.0, "acc_stderr": 0.0},
    "arithmetic_5da": {"acc": 0.0, "acc_stderr": 0.0}
  },
  "versions": {"arithmetic_3ds": 0, "arithmetic_1dc": 0, "arithmetic_2da": 0, "arithmetic_4ds": 0, "arithmetic_3da": 0, "arithmetic_2ds": 0, "arithmetic_4da": 0, "arithmetic_5ds": 0, "arithmetic_2dm": 0, "arithmetic_5da": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 5,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_bbh_3-shot.json (new file, mode 100644)
{
  "results": {
    "bigbench_tracking_shuffled_objects_five_objects": {"multiple_choice_grade": 0.1824, "multiple_choice_grade_stderr": 0.010927017514830547},
    "bigbench_logical_deduction_seven_objects": {"multiple_choice_grade": 0.24571428571428572, "multiple_choice_grade_stderr": 0.01628337995683342},
    "bigbench_date_understanding": {"multiple_choice_grade": 0.6205962059620597, "multiple_choice_grade_stderr": 0.02529481360676469},
    "bigbench_navigate": {"multiple_choice_grade": 0.495, "multiple_choice_grade_stderr": 0.015818508944436645},
    "bigbench_geometric_shapes": {"multiple_choice_grade": 0.17827298050139276, "multiple_choice_grade_stderr": 0.02022856303248108, "exact_str_match": 0.0, "exact_str_match_stderr": 0.0},
    "bigbench_dyck_languages": {"multiple_choice_grade": 0.154, "multiple_choice_grade_stderr": 0.011419913065098684},
    "bigbench_temporal_sequences": {"multiple_choice_grade": 0.272, "multiple_choice_grade_stderr": 0.014078856992462611},
    "bigbench_snarks": {"multiple_choice_grade": 0.5082872928176796, "multiple_choice_grade_stderr": 0.03726268022638988},
    "bigbench_disambiguation_qa": {"multiple_choice_grade": 0.35271317829457366, "multiple_choice_grade_stderr": 0.029805242804674153},
    "bigbench_tracking_shuffled_objects_seven_objects": {"multiple_choice_grade": 0.13714285714285715, "multiple_choice_grade_stderr": 0.008225477923226985},
    "bigbench_ruin_names": {"multiple_choice_grade": 0.29910714285714285, "multiple_choice_grade_stderr": 0.021656359273376977},
    "bigbench_movie_recommendation": {"multiple_choice_grade": 0.404, "multiple_choice_grade_stderr": 0.021966635293832918},
    "bigbench_salient_translation_error_detection": {"multiple_choice_grade": 0.1653306613226453, "multiple_choice_grade_stderr": 0.011764848862417502},
    "bigbench_logical_deduction_five_objects": {"multiple_choice_grade": 0.29, "multiple_choice_grade_stderr": 0.020313179231745183},
    "bigbench_causal_judgement": {"multiple_choice_grade": 0.4842105263157895, "multiple_choice_grade_stderr": 0.036351509398643456},
    "bigbench_hyperbaton": {"multiple_choice_grade": 0.49508, "multiple_choice_grade_stderr": 0.0022359820804999713},
    "bigbench_sports_understanding": {"multiple_choice_grade": 0.5, "multiple_choice_grade_stderr": 0.015931324696929153},
    "bigbench_logical_deduction_three_objects": {"multiple_choice_grade": 0.3933333333333333, "multiple_choice_grade_stderr": 0.028250090846760875},
    "bigbench_tracking_shuffled_objects_three_objects": {"multiple_choice_grade": 0.3933333333333333, "multiple_choice_grade_stderr": 0.028250090846760875},
    "bigbench_formal_fallacies_syllogisms_negation": {"multiple_choice_grade": 0.5134507042253521, "multiple_choice_grade_stderr": 0.004194535955193854},
    "bigbench_reasoning_about_colored_objects": {"multiple_choice_grade": 0.346, "multiple_choice_grade_stderr": 0.010639483037236658}
  },
  "versions": {"bigbench_tracking_shuffled_objects_five_objects": 0, "bigbench_logical_deduction_seven_objects": 0, "bigbench_date_understanding": 0, "bigbench_navigate": 0,
    "bigbench_geometric_shapes": 0, "bigbench_dyck_languages": 0, "bigbench_temporal_sequences": 0, "bigbench_snarks": 0, "bigbench_disambiguation_qa": 0,
    "bigbench_tracking_shuffled_objects_seven_objects": 0, "bigbench_ruin_names": 0, "bigbench_movie_recommendation": 0, "bigbench_salient_translation_error_detection": 0,
    "bigbench_logical_deduction_five_objects": 0, "bigbench_causal_judgement": 0, "bigbench_hyperbaton": 0, "bigbench_sports_understanding": 0,
    "bigbench_logical_deduction_three_objects": 0, "bigbench_tracking_shuffled_objects_three_objects": 0, "bigbench_formal_fallacies_syllogisms_negation": 0,
    "bigbench_reasoning_about_colored_objects": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 3,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_blimp_0-shot.json (new file, mode 100644)
{
  "results": {
    "blimp_wh_vs_that_no_gap_long_distance": {"acc": 0.812, "acc_stderr": 0.01236158601510377},
    "blimp_ellipsis_n_bar_1": {"acc": 0.668, "acc_stderr": 0.014899597242811478},
    "blimp_distractor_agreement_relative_clause": {"acc": 0.314, "acc_stderr": 0.014683991951087966},
    "blimp_determiner_noun_agreement_with_adjective_1": {"acc": 0.577, "acc_stderr": 0.01563058909047635},
    "blimp_principle_A_reconstruction": {"acc": 0.673, "acc_stderr": 0.014842213153411245},
    "blimp_determiner_noun_agreement_2": {"acc": 0.598, "acc_stderr": 0.015512467135715077},
    "blimp_npi_present_1": {"acc": 0.534, "acc_stderr": 0.015782683329937618},
    "blimp_existential_there_quantifiers_2": {"acc": 0.674, "acc_stderr": 0.01483050720454104},
    "blimp_existential_there_subject_raising": {"acc": 0.696, "acc_stderr": 0.014553205687950438},
    "blimp_tough_vs_raising_1": {"acc": 0.298, "acc_stderr": 0.014470846741134717},
    "blimp_wh_questions_subject_gap_long_distance": {"acc": 0.746, "acc_stderr": 0.01377220656516854},
    "blimp_left_branch_island_echo_question": {"acc": 0.835, "acc_stderr": 0.011743632866916164},
    "blimp_only_npi_licensor_present": {"acc": 0.814, "acc_stderr": 0.012310790208412803},
    "blimp_adjunct_island": {"acc": 0.539, "acc_stderr": 0.01577110420128319},
    "blimp_coordinate_structure_constraint_object_extraction": {"acc": 0.629, "acc_stderr": 0.015283736211823188},
    "blimp_irregular_plural_subject_verb_agreement_1": {"acc": 0.653, "acc_stderr": 0.01506047203170662},
    "blimp_passive_2": {"acc": 0.703, "acc_stderr": 0.0144568322948011},
    "blimp_drop_argument": {"acc": 0.701, "acc_stderr": 0.014484778521220461},
    "blimp_wh_vs_that_with_gap_long_distance": {"acc": 0.227, "acc_stderr": 0.013253174964763925},
    "blimp_existential_there_quantifiers_1": {"acc": 0.683, "acc_stderr": 0.014721675438880224},
    "blimp_coordinate_structure_constraint_complex_left_branch": {"acc": 0.682, "acc_stderr": 0.014734079309311901},
    "blimp_ellipsis_n_bar_2": {"acc": 0.794, "acc_stderr": 0.012795613612786548},
    "blimp_sentential_subject_island": {"acc": 0.606, "acc_stderr": 0.01545972195749338},
    "blimp_determiner_noun_agreement_with_adj_irregular_2": {"acc": 0.591, "acc_stderr": 0.015555094373257946},
    "blimp_npi_present_2": {"acc": 0.53, "acc_stderr": 0.015790799515836763},
    "blimp_wh_questions_subject_gap": {"acc": 0.72, "acc_stderr": 0.014205696104091519},
    "blimp_determiner_noun_agreement_irregular_1": {"acc": 0.572, "acc_stderr": 0.01565442624502927},
    "blimp_superlative_quantifiers_1": {"acc": 0.612, "acc_stderr": 0.015417317979911076},
    "blimp_left_branch_island_simple_question": {"acc": 0.74, "acc_stderr": 0.013877773329774164},
    "blimp_irregular_past_participle_adjectives": {"acc": 0.429, "acc_stderr": 0.01565899754787024},
    "blimp_principle_A_domain_2": {"acc": 0.646, "acc_stderr": 0.015129868238451772},
    "blimp_regular_plural_subject_verb_agreement_1": {"acc": 0.645, "acc_stderr": 0.015139491543780529},
    "blimp_principle_A_case_1": {"acc": 0.985, "acc_stderr": 0.003845749574502997},
    "blimp_principle_A_case_2": {"acc": 0.554, "acc_stderr": 0.015726771166750357},
    "blimp_matrix_question_npi_licensor_present": {"acc": 0.117, "acc_stderr": 0.010169287802713327},
    "blimp_determiner_noun_agreement_with_adj_irregular_1": {"acc": 0.563, "acc_stderr": 0.015693223928730377},
    "blimp_principle_A_domain_1": {"acc": 0.962, "acc_stderr": 0.006049181150584942},
    "blimp_superlative_quantifiers_2": {"acc": 0.561, "acc_stderr": 0.01570113134540077},
    "blimp_wh_island": {"acc": 0.275, "acc_stderr": 0.014127086556490531},
    "blimp_only_npi_scope": {"acc": 0.266, "acc_stderr": 0.013979965645145156},
    "blimp_regular_plural_subject_verb_agreement_2": {"acc": 0.705, "acc_stderr": 0.014428554438445512},
    "blimp_complex_NP_island": {"acc": 0.416, "acc_stderr": 0.015594460144140603},
    "blimp_sentential_negation_npi_scope": {"acc": 0.588, "acc_stderr": 0.015572363292015093},
    "blimp_transitive": {"acc": 0.698, "acc_stderr": 0.014526080235459543},
    "blimp_wh_vs_that_with_gap": {"acc": 0.239, "acc_stderr": 0.013493000446937587},
    "blimp_wh_questions_object_gap": {"acc": 0.67, "acc_stderr": 0.014876872027456736},
    "blimp_sentential_negation_npi_licensor_present": {"acc": 0.94, "acc_stderr": 0.007513751157474913},
    "blimp_expletive_it_object_raising": {"acc": 0.659, "acc_stderr": 0.0149981313484027},
    "blimp_determiner_noun_agreement_1": {"acc": 0.636, "acc_stderr": 0.015222868840522017},
    "blimp_anaphor_gender_agreement": {"acc": 0.448, "acc_stderr": 0.01573351656634784},
    "blimp_irregular_plural_subject_verb_agreement_2": {"acc": 0.7, "acc_stderr": 0.014498627873361425},
    "blimp_passive_1": {"acc": 0.702, "acc_stderr": 0.014470846741134705},
    "blimp_determiner_noun_agreement_irregular_2": {"acc": 0.602, "acc_stderr": 0.015486634102858913},
    "blimp_irregular_past_participle_verbs": {"acc": 0.725, "acc_stderr": 0.014127086556490523},
    "blimp_existential_there_object_raising": {"acc": 0.788, "acc_stderr": 0.012931481864938055},
    "blimp_determiner_noun_agreement_with_adj_2": {"acc": 0.54, "acc_stderr": 0.015768596914394375},
    "blimp_principle_A_domain_3": {"acc": 0.501, "acc_stderr": 0.015819268290576817},
    "blimp_causative": {"acc": 0.508, "acc_stderr": 0.015817274929209008},
    "blimp_tough_vs_raising_2": {"acc": 0.768, "acc_stderr": 0.01335493745228157},
    "blimp_wh_vs_that_no_gap": {"acc": 0.848, "acc_stderr": 0.011358918303475294},
    "blimp_anaphor_number_agreement": {"acc": 0.659, "acc_stderr": 0.014998131348402704},
    "blimp_principle_A_c_command": {"acc": 0.39, "acc_stderr": 0.01543172505386661},
    "blimp_distractor_agreement_relational_noun": {"acc": 0.441, "acc_stderr": 0.015708779894242676},
    "blimp_intransitive": {"acc": 0.592, "acc_stderr": 0.015549205052920673},
    "blimp_animate_subject_passive": {"acc": 0.626, "acc_stderr": 0.015308767369006363},
    "blimp_animate_subject_trans": {"acc": 0.761, "acc_stderr": 0.01349300044693759},
    "blimp_inchoative": {"acc": 0.42, "acc_stderr": 0.015615500115072957}
  },
  "versions": {"blimp_wh_vs_that_no_gap_long_distance": 0, "blimp_ellipsis_n_bar_1": 0, "blimp_distractor_agreement_relative_clause": 0, "blimp_determiner_noun_agreement_with_adjective_1": 0,
    "blimp_principle_A_reconstruction": 0, "blimp_determiner_noun_agreement_2": 0, "blimp_npi_present_1": 0, "blimp_existential_there_quantifiers_2": 0,
    "blimp_existential_there_subject_raising": 0, "blimp_tough_vs_raising_1": 0, "blimp_wh_questions_subject_gap_long_distance": 0, "blimp_left_branch_island_echo_question": 0,
    "blimp_only_npi_licensor_present": 0, "blimp_adjunct_island": 0, "blimp_coordinate_structure_constraint_object_extraction": 0, "blimp_irregular_plural_subject_verb_agreement_1": 0,
    "blimp_passive_2": 0, "blimp_drop_argument": 0, "blimp_wh_vs_that_with_gap_long_distance": 0, "blimp_existential_there_quantifiers_1": 0,
    "blimp_coordinate_structure_constraint_complex_left_branch": 0, "blimp_ellipsis_n_bar_2": 0, "blimp_sentential_subject_island": 0, "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
    "blimp_npi_present_2": 0, "blimp_wh_questions_subject_gap": 0, "blimp_determiner_noun_agreement_irregular_1": 0, "blimp_superlative_quantifiers_1": 0,
    "blimp_left_branch_island_simple_question": 0, "blimp_irregular_past_participle_adjectives": 0, "blimp_principle_A_domain_2": 0, "blimp_regular_plural_subject_verb_agreement_1": 0,
    "blimp_principle_A_case_1": 0, "blimp_principle_A_case_2": 0, "blimp_matrix_question_npi_licensor_present": 0, "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
    "blimp_principle_A_domain_1": 0, "blimp_superlative_quantifiers_2": 0, "blimp_wh_island": 0, "blimp_only_npi_scope": 0,
    "blimp_regular_plural_subject_verb_agreement_2": 0, "blimp_complex_NP_island": 0, "blimp_sentential_negation_npi_scope": 0, "blimp_transitive": 0,
    "blimp_wh_vs_that_with_gap": 0, "blimp_wh_questions_object_gap": 0, "blimp_sentential_negation_npi_licensor_present": 0, "blimp_expletive_it_object_raising": 0,
    "blimp_determiner_noun_agreement_1": 0, "blimp_anaphor_gender_agreement": 0, "blimp_irregular_plural_subject_verb_agreement_2": 0, "blimp_passive_1": 0,
    "blimp_determiner_noun_agreement_irregular_2": 0, "blimp_irregular_past_participle_verbs": 0, "blimp_existential_there_object_raising": 0, "blimp_determiner_noun_agreement_with_adj_2": 0,
    "blimp_principle_A_domain_3": 0, "blimp_causative": 0, "blimp_tough_vs_raising_2": 0, "blimp_wh_vs_that_no_gap": 0,
    "blimp_anaphor_number_agreement": 0, "blimp_principle_A_c_command": 0, "blimp_distractor_agreement_relational_noun": 0, "blimp_intransitive": 0,
    "blimp_animate_subject_passive": 0, "blimp_animate_subject_trans": 0, "blimp_inchoative": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_common_sense_reasoning_0-shot.json (new file, mode 100644)
{
  "results": {
    "piqa": {"acc": 0.7818280739934712, "acc_stderr": 0.009636081958374381, "acc_norm": 0.7742110990206746, "acc_norm_stderr": 0.00975498067091731},
    "wsc273": {"acc": 0.8095238095238095, "acc_stderr": 0.023809523809523777},
    "arc_easy": {"acc": 0.6738215488215489, "acc_stderr": 0.009619849417035172, "acc_norm": 0.5248316498316499, "acc_norm_stderr": 0.010247123122159281},
    "hellaswag": {"acc": 0.563931487751444, "acc_stderr": 0.004948824501355491, "acc_norm": 0.7298346942840072, "acc_norm_stderr": 0.004431375549911366},
    "winogrande": {"acc": 0.6692975532754538, "acc_stderr": 0.013222435887002705},
    "prost": {"acc": 0.2568851409052092, "acc_stderr": 0.003192056839011391, "acc_norm": 0.28031596925704527, "acc_norm_stderr": 0.0032814667207950675},
    "swag": {"acc": 0.5546835949215235, "acc_stderr": 0.0035138865053857436, "acc_norm": 0.6687493751874438, "acc_norm_stderr": 0.003327673972187388},
    "boolq": {"acc": 0.7305810397553517, "acc_stderr": 0.007759626474907443},
    "arc_challenge": {"acc": 0.3822525597269625, "acc_stderr": 0.014200454049979293, "acc_norm": 0.4138225255972696, "acc_norm_stderr": 0.014392730009221009},
    "mc_taco": {"em": 0.11261261261261261, "f1": 0.4827075067316446},
    "copa": {"acc": 0.84, "acc_stderr": 0.036845294917747094},
    "openbookqa": {"acc": 0.282, "acc_stderr": 0.020143572847290774, "acc_norm": 0.424, "acc_norm_stderr": 0.022122993778135404}
  },
  "versions": {"piqa": 0, "wsc273": 0, "arc_easy": 0, "hellaswag": 0, "winogrande": 0, "prost": 0, "swag": 0, "boolq": 1, "arc_challenge": 0, "mc_taco": 0, "copa": 0, "openbookqa": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_glue_0-shot.json (new file, mode 100644)
{
  "results": {
    "mrpc": {"acc": 0.6838235294117647, "acc_stderr": 0.023048336668420204, "f1": 0.8122270742358079, "f1_stderr": 0.01624762253426993},
    "sst": {"acc": 0.5298165137614679, "acc_stderr": 0.016911703415318852},
    "wnli": {"acc": 0.5633802816901409, "acc_stderr": 0.0592793555841297},
    "mnli_mismatched": {"acc": 0.3572009764035802, "acc_stderr": 0.0048327582938812235},
    "qnli": {"acc": 0.49569833424858134, "acc_stderr": 0.006765160168388141},
    "rte": {"acc": 0.5306859205776173, "acc_stderr": 0.03003973059219781},
    "qqp": {"acc": 0.3683650754390304, "acc_stderr": 0.002398975385820536, "f1": 0.5380844713755992, "f1_stderr": 0.0025560675394743124},
    "mnli": {"acc": 0.34396332144676517, "acc_stderr": 0.0047950937299233165},
    "cola": {"mcc": 0.0, "mcc_stderr": 0.0}
  },
  "versions": {"mrpc": 0, "sst": 0, "wnli": 1, "mnli_mismatched": 0, "qnli": 0, "rte": 0, "qqp": 0, "mnli": 0, "cola": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_gsm8k_8-shot.json (new file, mode 100644)
{
  "results": {
    "gsm8k": {"acc": 0.0803639120545868, "acc_stderr": 0.007488258573239077}
  },
  "versions": {"gsm8k": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 8,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_human_alignment_0-shot.json (new file, mode 100644)
{
  "results": {
    "ethics_virtue": {"acc": 0.20984924623115578, "acc_stderr": 0.005773721023799748, "em": 0.0},
    "crows_pairs_french_race_color": {"likelihood_difference": 12.048913043478262, "likelihood_difference_stderr": 0.7332463392189781, "pct_stereotype": 0.4326086956521739, "pct_stereotype_stderr": 0.023125046645341776},
    "ethics_utilitarianism_original": {"acc": 0.9586106489184693, "acc_stderr": 0.002872952014248801},
    "crows_pairs_english_nationality": {"likelihood_difference": 6.762586805555555, "likelihood_difference_stderr": 0.5868865852525466, "pct_stereotype": 0.5370370370370371, "pct_stereotype_stderr": 0.03400603625538272},
    "crows_pairs_english_socioeconomic": {"likelihood_difference": 6.401644736842106, "likelihood_difference_stderr": 0.5420413190484897, "pct_stereotype": 0.5684210526315789, "pct_stereotype_stderr": 0.03602751443822843},
    "crows_pairs_french_socioeconomic": {"likelihood_difference": 9.80843431122449, "likelihood_difference_stderr": 1.0151042209820862, "pct_stereotype": 0.5204081632653061, "pct_stereotype_stderr": 0.03577590557703757},
    "crows_pairs_english_religion": {"likelihood_difference": 7.219594594594595, "likelihood_difference_stderr": 0.759154104063707, "pct_stereotype": 0.6666666666666666, "pct_stereotype_stderr": 0.04494665749754944},
    "ethics_justice": {"acc": 0.4996301775147929, "acc_stderr": 0.009617160470756728, "em": 0.0014792899408284023},
    "crows_pairs_english_autre": {"likelihood_difference": 11.011363636363637, "likelihood_difference_stderr": 5.8907614264514025, "pct_stereotype": 0.45454545454545453, "pct_stereotype_stderr": 0.15745916432444335},
    "toxigen": {"acc": 0.4308510638297872, "acc_stderr": 0.016160089171486036, "acc_norm": 0.4319148936170213, "acc_norm_stderr": 0.016164899004911828},
    "crows_pairs_french_autre": {"likelihood_difference": 7.512019230769231, "likelihood_difference_stderr": 2.0958404773406696, "pct_stereotype": 0.6153846153846154, "pct_stereotype_stderr": 0.14044168141158106},
    "ethics_cm": {"acc": 0.5691119691119692, "acc_stderr": 0.007945870163705206},
    "crows_pairs_english_gender": {"likelihood_difference": 7.9173828125, "likelihood_difference_stderr": 0.5501949212762886, "pct_stereotype": 0.53125, "pct_stereotype_stderr": 0.0279398950447155},
    "crows_pairs_english_race_color": {"likelihood_difference": 6.246493602362205, "likelihood_difference_stderr": 0.3239007651371134, "pct_stereotype": 0.46653543307086615, "pct_stereotype_stderr": 0.022155988267174086},
    "crows_pairs_english_age": {"likelihood_difference": 5.9423076923076925, "likelihood_difference_stderr": 0.7902909296461826, "pct_stereotype": 0.5164835164835165, "pct_stereotype_stderr": 0.05267597952306975},
    "ethics_utilitarianism": {"acc": 0.4981281198003328, "acc_stderr": 0.007211571268099885},
    "crows_pairs_english_sexual_orientation": {"likelihood_difference": 8.304771505376344, "likelihood_difference_stderr": 0.8427804261467623, "pct_stereotype": 0.6236559139784946, "pct_stereotype_stderr": 0.05050927755267201},
    "ethics_deontology": {"acc": 0.5058398220244716, "acc_stderr": 0.008338557598970859, "em": 0.002224694104560623},
    "crows_pairs_french_religion": {"likelihood_difference": 9.585326086956522, "likelihood_difference_stderr": 0.8749663998788697, "pct_stereotype": 0.43478260869565216, "pct_stereotype_stderr": 0.04642922286356426},
    "crows_pairs_french_gender": {"likelihood_difference": 11.798968068535826, "likelihood_difference_stderr": 0.8713501661430004, "pct_stereotype": 0.5202492211838006, "pct_stereotype_stderr": 0.0279279188851323},
    "crows_pairs_french_nationality": {"likelihood_difference": 10.416501976284586, "likelihood_difference_stderr": 0.9065784742122508, "pct_stereotype": 0.40711462450592883, "pct_stereotype_stderr": 0.030948774049323072},
    "crows_pairs_english_physical_appearance": {"likelihood_difference": 4.512586805555555, "likelihood_difference_stderr": 0.6931576110749077, "pct_stereotype": 0.5, "pct_stereotype_stderr": 0.05933908290969268},
    "crows_pairs_french_age": {"likelihood_difference": 11.939583333333333, "likelihood_difference_stderr": 1.5376984338772959, "pct_stereotype": 0.35555555555555557, "pct_stereotype_stderr": 0.05074011803597719},
    "crows_pairs_english_disability": {"likelihood_difference": 9.669711538461538, "likelihood_difference_stderr": 1.1386178272217904, "pct_stereotype": 0.6615384615384615, "pct_stereotype_stderr": 0.05914829422780653},
    "crows_pairs_french_sexual_orientation": {"likelihood_difference": 7.605769230769231, "likelihood_difference_stderr": 0.7938984905689758, "pct_stereotype": 0.6703296703296703, "pct_stereotype_stderr": 0.04955219508596587},
    "crows_pairs_french_physical_appearance": {"likelihood_difference": 7.045138888888889, "likelihood_difference_stderr": 0.9484318157143898, "pct_stereotype": 0.5555555555555556, "pct_stereotype_stderr": 0.05897165471491952},
    "crows_pairs_french_disability": {"likelihood_difference": 10.147727272727273, "likelihood_difference_stderr": 1.3907137676702652, "pct_stereotype": 0.42424242424242425, "pct_stereotype_stderr": 0.06130137276858363}
  },
  "versions": {"ethics_virtue": 0, "crows_pairs_french_race_color": 0, "ethics_utilitarianism_original": 0, "crows_pairs_english_nationality": 0,
    "crows_pairs_english_socioeconomic": 0, "crows_pairs_french_socioeconomic": 0, "crows_pairs_english_religion": 0, "ethics_justice": 0,
    "crows_pairs_english_autre": 0, "toxigen": 0, "crows_pairs_french_autre": 0, "ethics_cm": 0,
    "crows_pairs_english_gender": 0, "crows_pairs_english_race_color": 0, "crows_pairs_english_age": 0, "ethics_utilitarianism": 0,
    "crows_pairs_english_sexual_orientation": 0, "ethics_deontology": 0, "crows_pairs_french_religion": 0, "crows_pairs_french_gender": 0,
    "crows_pairs_french_nationality": 0, "crows_pairs_english_physical_appearance": 0, "crows_pairs_french_age": 0, "crows_pairs_english_disability": 0,
    "crows_pairs_french_sexual_orientation": 0, "crows_pairs_french_physical_appearance": 0, "crows_pairs_french_disability": 0},
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_lambada_0-shot.json
0 → 100644
{
  "results": {
    "lambada_openai_mt_it": {"ppl": 3653680.5734235523, "ppl_stderr": 197082.9860932525, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_standard": {"ppl": 2460346.8572795168, "ppl_stderr": 81216.56551688322, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai_mt_es": {"ppl": 3818890.4545065593, "ppl_stderr": 197999.05318216747, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai": {"ppl": 2817465.092468485, "ppl_stderr": 138319.08822004002, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai_mt_fr": {"ppl": 2111186.115467981, "ppl_stderr": 111724.42842108487, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai_mt_de": {"ppl": 1805613.6770583114, "ppl_stderr": 97892.78908113715, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_standard_cloze": {"ppl": 6710057.2411258025, "ppl_stderr": 169833.90998542923, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai_mt_en": {"ppl": 2817465.092468485, "ppl_stderr": 138319.08822004002, "acc": 0.0, "acc_stderr": 0.0},
    "lambada_openai_cloze": {"ppl": 255777.71115985065, "ppl_stderr": 11345.7709705634, "acc": 0.00038812342324859306, "acc_stderr": 0.00027441806845051746}
  },
  "versions": {
    "lambada_openai_mt_it": 0, "lambada_standard": 0, "lambada_openai_mt_es": 0,
    "lambada_openai": 0, "lambada_openai_mt_fr": 0, "lambada_openai_mt_de": 0,
    "lambada_standard_cloze": 0, "lambada_openai_mt_en": 0, "lambada_openai_cloze": 0
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
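Every result file added in this commit shares the same three top-level keys (results, versions, config). The following is a minimal standard-library sketch for inspecting one of them, assuming it is run from the repository root; the path below is the lambada file above, and any of the other JSON files in this commit works the same way.

import json

# Path of one of the result files added in this commit (assumed run from the repo root).
path = "results/llama/llama-7B/llama-7B_lambada_0-shot.json"

with open(path) as fh:
    data = json.load(fh)

# Each task maps to a flat dict of metric -> value; print them as a small table,
# together with the task version recorded alongside the results.
for task, metrics in sorted(data["results"].items()):
    line = ", ".join(f"{name}={value:.4g}" for name, value in metrics.items())
    print(f"{task:<28} v{data['versions'][task]}  {line}")

print("config:", data["config"]["model"], "| num_fewshot =", data["config"]["num_fewshot"])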
results/llama/llama-7B/llama-7B_mathematical_reasoning_0-shot.json
0 → 100644
{
  "results": {
    "mathqa": {"acc": 0.26767169179229483, "acc_stderr": 0.008105031808599684, "acc_norm": 0.27872696817420434, "acc_norm_stderr": 0.008208048863665952},
    "math_asdiv": {"acc": 0.0, "acc_stderr": 0.0},
    "gsm8k": {"acc": 0.0, "acc_stderr": 0.0},
    "math_num_theory": {"acc": 0.007407407407407408, "acc_stderr": 0.003693382168437238},
    "math_precalc": {"acc": 0.003663003663003663, "acc_stderr": 0.002587757368193461},
    "drop": {"em": 0.04268036912751678, "em_stderr": 0.002070056585023236, "f1": 0.1215950083892614, "f1_stderr": 0.0024765528531984883},
    "math_geometry": {"acc": 0.008350730688935281, "acc_stderr": 0.004162242110295851},
    "math_counting_and_prob": {"acc": 0.016877637130801686, "acc_stderr": 0.0059228268948526815},
    "math_intermediate_algebra": {"acc": 0.006644518272425249, "acc_stderr": 0.0027050844483854013},
    "math_prealgebra": {"acc": 0.012629161882893225, "acc_stderr": 0.003785888218263002},
    "math_algebra": {"acc": 0.016849199663016005, "acc_stderr": 0.0037372948497597248}
  },
  "versions": {
    "mathqa": 0, "math_asdiv": 0, "gsm8k": 0, "math_num_theory": 1,
    "math_precalc": 1, "drop": 1, "math_geometry": 1, "math_counting_and_prob": 1,
    "math_intermediate_algebra": 1, "math_prealgebra": 1, "math_algebra": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_mathematical_reasoning_few_shot_5-shot.json
0 → 100644
{
  "results": {
    "mathqa": {"acc": 0.2820770519262982, "acc_stderr": 0.008238030326915545, "acc_norm": 0.2877721943048576, "acc_norm_stderr": 0.008287708494779904},
    "math_prealgebra": {"acc": 0.001148105625717566, "acc_stderr": 0.0011481056257175704},
    "math_geometry": {"acc": 0.0, "acc_stderr": 0.0},
    "math_intermediate_algebra": {"acc": 0.0, "acc_stderr": 0.0},
    "math_counting_and_prob": {"acc": 0.0, "acc_stderr": 0.0},
    "math_num_theory": {"acc": 0.0, "acc_stderr": 0.0},
    "gsm8k": {"acc": 0.0, "acc_stderr": 0.0},
    "drop": {"em": 0.012374161073825503, "em_stderr": 0.0011321233703992673, "f1": 0.020981543624161086, "f1_stderr": 0.001252441423790731},
    "math_precalc": {"acc": 0.0, "acc_stderr": 0.0},
    "math_algebra": {"acc": 0.0, "acc_stderr": 0.0}
  },
  "versions": {
    "mathqa": 0, "math_prealgebra": 1, "math_geometry": 1, "math_intermediate_algebra": 1,
    "math_counting_and_prob": 1, "math_num_theory": 1, "gsm8k": 0, "drop": 1,
    "math_precalc": 1, "math_algebra": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 5,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_mmlu_5-shot.json
0 → 100644
{
  "results": {
    "hendrycksTest-high_school_geography": {"acc": 0.4292929292929293, "acc_stderr": 0.035265527246011986, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.03427308652999934},
    "hendrycksTest-philosophy": {"acc": 0.40192926045016075, "acc_stderr": 0.027846476005930477, "acc_norm": 0.3536977491961415, "acc_norm_stderr": 0.02715520810320088},
    "hendrycksTest-world_religions": {"acc": 0.6257309941520468, "acc_stderr": 0.03711601185389481, "acc_norm": 0.5146198830409356, "acc_norm_stderr": 0.038331852752130254},
    "hendrycksTest-college_biology": {"acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554858},
    "hendrycksTest-electrical_engineering": {"acc": 0.3586206896551724, "acc_stderr": 0.03996629574876719, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185554},
    "hendrycksTest-global_facts": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824},
    "hendrycksTest-high_school_government_and_politics": {"acc": 0.48186528497409326, "acc_stderr": 0.03606065001832917, "acc_norm": 0.37305699481865284, "acc_norm_stderr": 0.03490205592048573},
    "hendrycksTest-moral_scenarios": {"acc": 0.2759776536312849, "acc_stderr": 0.014950103002475353, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588},
    "hendrycksTest-econometrics": {"acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936},
    "hendrycksTest-international_law": {"acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.045077322787750874},
    "hendrycksTest-us_foreign_policy": {"acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
    "hendrycksTest-high_school_macroeconomics": {"acc": 0.31794871794871793, "acc_stderr": 0.02361088430892786, "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.023290888053772742},
    "hendrycksTest-virology": {"acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.035294868015111155},
    "hendrycksTest-high_school_mathematics": {"acc": 0.22592592592592592, "acc_stderr": 0.025497532639609542, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.02813325257881564},
    "hendrycksTest-clinical_knowledge": {"acc": 0.3886792452830189, "acc_stderr": 0.03000048544867599, "acc_norm": 0.38113207547169814, "acc_norm_stderr": 0.029890609686286627},
    "hendrycksTest-professional_psychology": {"acc": 0.3839869281045752, "acc_stderr": 0.01967580813528152, "acc_norm": 0.29901960784313725, "acc_norm_stderr": 0.01852175621542302},
    "hendrycksTest-formal_logic": {"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.042639068927951315},
    "hendrycksTest-management": {"acc": 0.4854368932038835, "acc_stderr": 0.04948637324026637, "acc_norm": 0.36893203883495146, "acc_norm_stderr": 0.0477761518115674},
    "hendrycksTest-human_sexuality": {"acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.366412213740458, "acc_norm_stderr": 0.042258754519696386},
    "hendrycksTest-high_school_world_history": {"acc": 0.3924050632911392, "acc_stderr": 0.03178471874564729, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.030781549102026216},
    "hendrycksTest-medical_genetics": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309},
    "hendrycksTest-computer_security": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
    "hendrycksTest-miscellaneous": {"acc": 0.5836526181353767, "acc_stderr": 0.0176279480304303, "acc_norm": 0.3895274584929757, "acc_norm_stderr": 0.017438082556264597},
    "hendrycksTest-public_relations": {"acc": 0.39090909090909093, "acc_stderr": 0.046737523336702384, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735},
    "hendrycksTest-college_physics": {"acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177},
    "hendrycksTest-professional_accounting": {"acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.29432624113475175, "acc_norm_stderr": 0.027187127011503793},
    "hendrycksTest-logical_fallacies": {"acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3496932515337423, "acc_norm_stderr": 0.037466683254700206},
    "hendrycksTest-business_ethics": {"acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332},
    "hendrycksTest-high_school_chemistry": {"acc": 0.2512315270935961, "acc_stderr": 0.030516530732694436, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.03210494433751458},
    "hendrycksTest-astronomy": {"acc": 0.45394736842105265, "acc_stderr": 0.04051646342874143, "acc_norm": 0.4605263157894737, "acc_norm_stderr": 0.04056242252249033},
    "hendrycksTest-high_school_us_history": {"acc": 0.4264705882352941, "acc_stderr": 0.03471157907953424, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.032566854844603886},
    "hendrycksTest-college_chemistry": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
    "hendrycksTest-abstract_algebra": {"acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808},
    "hendrycksTest-moral_disputes": {"acc": 0.36416184971098264, "acc_stderr": 0.025906632631016117, "acc_norm": 0.33236994219653176, "acc_norm_stderr": 0.02536116874968821},
    "hendrycksTest-college_computer_science": {"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128},
    "hendrycksTest-professional_law": {"acc": 0.2966101694915254, "acc_stderr": 0.011665946586082849, "acc_norm": 0.28552803129074317, "acc_norm_stderr": 0.011535751586665664},
    "hendrycksTest-college_mathematics": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505},
    "hendrycksTest-high_school_microeconomics": {"acc": 0.3865546218487395, "acc_stderr": 0.0316314580755238, "acc_norm": 0.36554621848739494, "acc_norm_stderr": 0.0312821770636846},
    "hendrycksTest-high_school_european_history": {"acc": 0.40606060606060607, "acc_stderr": 0.03834816355401181, "acc_norm": 0.3696969696969697, "acc_norm_stderr": 0.03769430314512568},
    "hendrycksTest-high_school_biology": {"acc": 0.3580645161290323, "acc_stderr": 0.027273890594300642, "acc_norm": 0.3580645161290323, "acc_norm_stderr": 0.02727389059430063},
    "hendrycksTest-security_studies": {"acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.31020408163265306, "acc_norm_stderr": 0.029613459872484375},
    "hendrycksTest-high_school_psychology": {"acc": 0.46605504587155966, "acc_stderr": 0.02138786335035399, "acc_norm": 0.30825688073394497, "acc_norm_stderr": 0.01979836669836726},
    "hendrycksTest-conceptual_physics": {"acc": 0.3276595744680851, "acc_stderr": 0.030683020843231004, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496228},
    "hendrycksTest-human_aging": {"acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.25112107623318386, "acc_norm_stderr": 0.02910522083322462},
    "hendrycksTest-prehistory": {"acc": 0.4012345679012346, "acc_stderr": 0.0272725828498398, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02492200116888633},
    "hendrycksTest-sociology": {"acc": 0.47761194029850745, "acc_stderr": 0.035319879302087305, "acc_norm": 0.42786069651741293, "acc_norm_stderr": 0.03498541988407795},
    "hendrycksTest-marketing": {"acc": 0.6111111111111112, "acc_stderr": 0.031937057262002924, "acc_norm": 0.5042735042735043, "acc_norm_stderr": 0.03275489264382132},
    "hendrycksTest-high_school_computer_science": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365},
    "hendrycksTest-machine_learning": {"acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762},
    "hendrycksTest-elementary_mathematics": {"acc": 0.3201058201058201, "acc_stderr": 0.024026846392873506, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484865},
    "hendrycksTest-nutrition": {"acc": 0.3954248366013072, "acc_stderr": 0.027996723180631435, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.028408302020332694},
    "hendrycksTest-anatomy": {"acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.03885004245800254},
    "hendrycksTest-jurisprudence": {"acc": 0.4351851851851852, "acc_stderr": 0.04792898170907062, "acc_norm": 0.5, "acc_norm_stderr": 0.04833682445228318},
    "hendrycksTest-college_medicine": {"acc": 0.37572254335260113, "acc_stderr": 0.036928207672648664, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267439},
    "hendrycksTest-high_school_statistics": {"acc": 0.3425925925925926, "acc_stderr": 0.03236585252602156, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.03236585252602156},
    "hendrycksTest-high_school_physics": {"acc": 0.2052980132450331, "acc_stderr": 0.03297986648473834, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.036313298039696525},
    "hendrycksTest-professional_medicine": {"acc": 0.3382352941176471, "acc_stderr": 0.028739328513983576, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.027257202606114948}
  },
  "versions": {
    "hendrycksTest-high_school_geography": 0, "hendrycksTest-philosophy": 0, "hendrycksTest-world_religions": 0,
    "hendrycksTest-college_biology": 0, "hendrycksTest-electrical_engineering": 0, "hendrycksTest-global_facts": 0,
    "hendrycksTest-high_school_government_and_politics": 0, "hendrycksTest-moral_scenarios": 0, "hendrycksTest-econometrics": 0,
    "hendrycksTest-international_law": 0, "hendrycksTest-us_foreign_policy": 0, "hendrycksTest-high_school_macroeconomics": 0,
    "hendrycksTest-virology": 0, "hendrycksTest-high_school_mathematics": 0, "hendrycksTest-clinical_knowledge": 0,
    "hendrycksTest-professional_psychology": 0, "hendrycksTest-formal_logic": 0, "hendrycksTest-management": 0,
    "hendrycksTest-human_sexuality": 0, "hendrycksTest-high_school_world_history": 0, "hendrycksTest-medical_genetics": 0,
    "hendrycksTest-computer_security": 0, "hendrycksTest-miscellaneous": 0, "hendrycksTest-public_relations": 0,
    "hendrycksTest-college_physics": 0, "hendrycksTest-professional_accounting": 0, "hendrycksTest-logical_fallacies": 0,
    "hendrycksTest-business_ethics": 0, "hendrycksTest-high_school_chemistry": 0, "hendrycksTest-astronomy": 0,
    "hendrycksTest-high_school_us_history": 0, "hendrycksTest-college_chemistry": 0, "hendrycksTest-abstract_algebra": 0,
    "hendrycksTest-moral_disputes": 0, "hendrycksTest-college_computer_science": 0, "hendrycksTest-professional_law": 0,
    "hendrycksTest-college_mathematics": 0, "hendrycksTest-high_school_microeconomics": 0, "hendrycksTest-high_school_european_history": 0,
    "hendrycksTest-high_school_biology": 0, "hendrycksTest-security_studies": 0, "hendrycksTest-high_school_psychology": 0,
    "hendrycksTest-conceptual_physics": 0, "hendrycksTest-human_aging": 0, "hendrycksTest-prehistory": 0,
    "hendrycksTest-sociology": 0, "hendrycksTest-marketing": 0, "hendrycksTest-high_school_computer_science": 0,
    "hendrycksTest-machine_learning": 0, "hendrycksTest-elementary_mathematics": 0, "hendrycksTest-nutrition": 0,
    "hendrycksTest-anatomy": 0, "hendrycksTest-jurisprudence": 0, "hendrycksTest-college_medicine": 0,
    "hendrycksTest-high_school_statistics": 0, "hendrycksTest-high_school_physics": 0, "hendrycksTest-professional_medicine": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 5,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
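The MMLU file above stores one entry per hendrycksTest-* subject rather than a single aggregate score. Below is a small sketch of one way to summarise it: an unweighted mean over the 57 subjects as committed. Published MMLU figures are sometimes weighted by the number of questions per subject, so treat this simple mean only as a quick summary, not the canonical metric.

import json
from statistics import mean

# Assumed run from the repository root.
path = "results/llama/llama-7B/llama-7B_mmlu_5-shot.json"
with open(path) as fh:
    results = json.load(fh)["results"]

# Collect per-subject accuracies for every MMLU subtask.
accs = [m["acc"] for t, m in results.items() if t.startswith("hendrycksTest-")]
norms = [m["acc_norm"] for t, m in results.items() if t.startswith("hendrycksTest-")]

print(f"{len(accs)} MMLU subjects")
print(f"unweighted mean acc      = {mean(accs):.4f}")
print(f"unweighted mean acc_norm = {mean(norms):.4f}")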
results/llama/llama-7B/llama-7B_pawsx_0-shot.json
0 → 100644
{
  "results": {
    "pawsx_en": {"acc": 0.6185, "acc_stderr": 0.010864524561478635},
    "pawsx_es": {"acc": 0.561, "acc_stderr": 0.011099599116647334},
    "pawsx_zh": {"acc": 0.491, "acc_stderr": 0.01118132420626029},
    "pawsx_fr": {"acc": 0.5295, "acc_stderr": 0.01116365480451166},
    "pawsx_de": {"acc": 0.5465, "acc_stderr": 0.011134669525078668},
    "pawsx_ko": {"acc": 0.497, "acc_stderr": 0.011182934722804558},
    "pawsx_ja": {"acc": 0.567, "acc_stderr": 0.011082279027990133}
  },
  "versions": {
    "pawsx_en": 0, "pawsx_es": 0, "pawsx_zh": 0, "pawsx_fr": 0, "pawsx_de": 0, "pawsx_ko": 0, "pawsx_ja": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
results/llama/llama-7B/llama-7B_question_answering_0-shot.json
0 → 100644
{
  "results": {
    "webqs": {"acc": 0.0, "acc_stderr": 0.0},
    "truthfulqa_mc": {"mc1": 0.21052631578947367, "mc1_stderr": 0.014271740645964186, "mc2": 0.3413779054949544, "mc2_stderr": 0.013147086422190785},
    "headqa_en": {"acc": 0.3242159008023341, "acc_stderr": 0.008940599111132593, "acc_norm": 0.35922684172137126, "acc_norm_stderr": 0.009163935584608705},
    "triviaqa": {"acc": 0.0, "acc_stderr": 0.0},
    "headqa_es": {"acc": 0.2826404084609774, "acc_stderr": 0.00860064580960105, "acc_norm": 0.3242159008023341, "acc_norm_stderr": 0.008940599111132597},
    "logiqa": {"acc": 0.21812596006144394, "acc_stderr": 0.01619814925841932, "acc_norm": 0.30261136712749614, "acc_norm_stderr": 0.018018696598158843},
    "squad2": {"exact": 9.416322749094585, "f1": 19.449005859216243, "HasAns_exact": 18.488529014844804, "HasAns_f1": 38.58266642484388, "NoAns_exact": 0.3700588730025231, "NoAns_f1": 0.3700588730025231, "best_exact": 50.07159100480081, "best_f1": 50.0801228586961}
  },
  "versions": {
    "webqs": 0, "truthfulqa_mc": 1, "headqa_en": 0, "triviaqa": 1, "headqa_es": 0, "logiqa": 0, "squad2": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
    "num_fewshot": 0,
    "batch_size": "auto",
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
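All of the llama-7B files in this commit live under results/llama/llama-7B/. The following is a rough standard-library sketch, assumed to run from the repository root, that sweeps that directory and flattens every non-stderr metric into one list; this can help when comparing against the other model sizes under results/llama/.

import glob
import json

rows = []
for path in sorted(glob.glob("results/llama/llama-7B/*.json")):
    with open(path) as fh:
        report = json.load(fh)
    shots = report["config"]["num_fewshot"]
    for task, metrics in report["results"].items():
        for name, value in metrics.items():
            # Skip the bootstrap standard errors; keep only the point estimates.
            if name.endswith("_stderr"):
                continue
            rows.append((task, shots, name, value))

for task, shots, name, value in rows:
    print(f"{task:<40} {shots}-shot  {name:<12} {value}")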