Unverified Commit e53eb332 authored by Stella Biderman, committed by GitHub

Merge pull request #477 from juletx/results

Add results of various models in json and md format
parents d1327193 92a50856
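The files below are raw output dumps from the EleutherAI lm-evaluation-harness; each `config` block records the model, few-shot setting, batching, and device used for the run. As a minimal sketch, the first file (LLaMA-30B on gsm8k, 8-shot) could be reproduced roughly as follows, assuming the harness's `lm_eval.evaluator.simple_evaluate` entry point accepts keyword arguments mirroring the stored `config`; the exact keyword names and the output filename are assumptions and may differ across harness versions.

```python
# Minimal sketch (assumption: simple_evaluate accepts keyword arguments that
# mirror the "config" block stored in each results file).
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
    tasks=["gsm8k"],
    num_fewshot=8,
    batch_size="auto",
    device="cuda",
    no_cache=True,
)

# Hypothetical output filename, chosen to match the naming convention of the
# Markdown summaries later in this pull request.
with open("llama-30B_gsm8k_8-shot.json", "w") as f:
    json.dump(results, f, indent=2)
```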
{
"results": {
"gsm8k": {
"acc": 0.30477634571645185,
"acc_stderr": 0.012679297549515422
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"crows_pairs_french_disability": {
"likelihood_difference": 10.202651515151516,
"likelihood_difference_stderr": 1.261700816634343,
"pct_stereotype": 0.5606060606060606,
"pct_stereotype_stderr": 0.06156009014560979
},
"crows_pairs_french_religion": {
"likelihood_difference": 7.940217391304348,
"likelihood_difference_stderr": 0.938898141048901,
"pct_stereotype": 0.5652173913043478,
"pct_stereotype_stderr": 0.04642922286356427
},
"crows_pairs_french_sexual_orientation": {
"likelihood_difference": 9.890796703296703,
"likelihood_difference_stderr": 1.6112974194891465,
"pct_stereotype": 0.7142857142857143,
"pct_stereotype_stderr": 0.04761904761904758
},
"toxigen": {
"acc": 0.42659574468085104,
"acc_stderr": 0.01614008877637632,
"acc_norm": 0.4319148936170213,
"acc_norm_stderr": 0.016164899004911828
},
"crows_pairs_english_gender": {
"likelihood_difference": 6.6615234375,
"likelihood_difference_stderr": 0.4284975339207996,
"pct_stereotype": 0.540625,
"pct_stereotype_stderr": 0.02790206840430007
},
"crows_pairs_english_age": {
"likelihood_difference": 5.1291208791208796,
"likelihood_difference_stderr": 0.5813404620923356,
"pct_stereotype": 0.5824175824175825,
"pct_stereotype_stderr": 0.05198368783767557
},
"crows_pairs_english_disability": {
"likelihood_difference": 8.886538461538462,
"likelihood_difference_stderr": 1.0342476212707912,
"pct_stereotype": 0.5384615384615384,
"pct_stereotype_stderr": 0.06231481440776789
},
"crows_pairs_french_age": {
"likelihood_difference": 8.925,
"likelihood_difference_stderr": 1.01086298976785,
"pct_stereotype": 0.4,
"pct_stereotype_stderr": 0.05192907868894985
},
"ethics_utilitarianism": {
"acc": 0.5012479201331115,
"acc_stderr": 0.00721159934497283
},
"crows_pairs_english_physical_appearance": {
"likelihood_difference": 5.401041666666667,
"likelihood_difference_stderr": 0.5913652974915496,
"pct_stereotype": 0.5277777777777778,
"pct_stereotype_stderr": 0.05924743948371486
},
"crows_pairs_french_socioeconomic": {
"likelihood_difference": 8.312898596938776,
"likelihood_difference_stderr": 0.8737467813045966,
"pct_stereotype": 0.5255102040816326,
"pct_stereotype_stderr": 0.03575911069046443
},
"crows_pairs_english_nationality": {
"likelihood_difference": 5.872829861111111,
"likelihood_difference_stderr": 0.3994396285401545,
"pct_stereotype": 0.5324074074074074,
"pct_stereotype_stderr": 0.03402801581358966
},
"ethics_cm": {
"acc": 0.5750321750321751,
"acc_stderr": 0.007932032541825585
},
"crows_pairs_french_gender": {
"likelihood_difference": 13.732768691588785,
"likelihood_difference_stderr": 1.1030097530113459,
"pct_stereotype": 0.5015576323987538,
"pct_stereotype_stderr": 0.027950714088670354
},
"crows_pairs_french_nationality": {
"likelihood_difference": 9.851037549407115,
"likelihood_difference_stderr": 0.8908345552184256,
"pct_stereotype": 0.38735177865612647,
"pct_stereotype_stderr": 0.03068725875850367
},
"ethics_deontology": {
"acc": 0.5417130144605117,
"acc_stderr": 0.008310055982844088,
"em": 0.06117908787541713
},
"ethics_utilitarianism_original": {
"acc": 0.9396838602329451,
"acc_stderr": 0.0034337651785718414
},
"crows_pairs_english_sexual_orientation": {
"likelihood_difference": 7.547715053763441,
"likelihood_difference_stderr": 0.7682550004765589,
"pct_stereotype": 0.6344086021505376,
"pct_stereotype_stderr": 0.05020981279330232
},
"crows_pairs_english_religion": {
"likelihood_difference": 8.075731981981981,
"likelihood_difference_stderr": 0.9438303669276185,
"pct_stereotype": 0.6216216216216216,
"pct_stereotype_stderr": 0.04624128233851482
},
"ethics_justice": {
"acc": 0.5170118343195266,
"acc_stderr": 0.009611595027307154,
"em": 0.013313609467455622
},
"ethics_virtue": {
"acc": 0.5181909547738693,
"acc_stderr": 0.007084831046245509,
"em": 0.0814070351758794
},
"crows_pairs_english_race_color": {
"likelihood_difference": 7.68214812992126,
"likelihood_difference_stderr": 0.3913516470344277,
"pct_stereotype": 0.5610236220472441,
"pct_stereotype_stderr": 0.022039775660119297
},
"crows_pairs_english_autre": {
"likelihood_difference": 11.380681818181818,
"likelihood_difference_stderr": 3.487665507491904,
"pct_stereotype": 0.6363636363636364,
"pct_stereotype_stderr": 0.15212000482437738
},
"crows_pairs_french_race_color": {
"likelihood_difference": 10.611684782608696,
"likelihood_difference_stderr": 0.7668117638923473,
"pct_stereotype": 0.41739130434782606,
"pct_stereotype_stderr": 0.023017271312104015
},
"crows_pairs_french_physical_appearance": {
"likelihood_difference": 8.217881944444445,
"likelihood_difference_stderr": 1.3267643213128657,
"pct_stereotype": 0.5694444444444444,
"pct_stereotype_stderr": 0.05876396677084613
},
"crows_pairs_french_autre": {
"likelihood_difference": 6.3798076923076925,
"likelihood_difference_stderr": 1.6568389364513447,
"pct_stereotype": 0.6153846153846154,
"pct_stereotype_stderr": 0.1404416814115811
},
"crows_pairs_english_socioeconomic": {
"likelihood_difference": 7.303947368421053,
"likelihood_difference_stderr": 0.5463280290787818,
"pct_stereotype": 0.5368421052631579,
"pct_stereotype_stderr": 0.036270781985214155
}
},
"versions": {
"crows_pairs_french_disability": 0,
"crows_pairs_french_religion": 0,
"crows_pairs_french_sexual_orientation": 0,
"toxigen": 0,
"crows_pairs_english_gender": 0,
"crows_pairs_english_age": 0,
"crows_pairs_english_disability": 0,
"crows_pairs_french_age": 0,
"ethics_utilitarianism": 0,
"crows_pairs_english_physical_appearance": 0,
"crows_pairs_french_socioeconomic": 0,
"crows_pairs_english_nationality": 0,
"ethics_cm": 0,
"crows_pairs_french_gender": 0,
"crows_pairs_french_nationality": 0,
"ethics_deontology": 0,
"ethics_utilitarianism_original": 0,
"crows_pairs_english_sexual_orientation": 0,
"crows_pairs_english_religion": 0,
"ethics_justice": 0,
"ethics_virtue": 0,
"crows_pairs_english_race_color": 0,
"crows_pairs_english_autre": 0,
"crows_pairs_french_race_color": 0,
"crows_pairs_french_physical_appearance": 0,
"crows_pairs_french_autre": 0,
"crows_pairs_english_socioeconomic": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"math_prealgebra": {
"acc": 0.04133180252583238,
"acc_stderr": 0.006748646916387575
},
"drop": {
"em": 0.0382760067114094,
"em_stderr": 0.0019648445106113135,
"f1": 0.13911493288590454,
"f1_stderr": 0.0024846240125468515
},
"math_intermediate_algebra": {
"acc": 0.008859357696566999,
"acc_stderr": 0.0031200782932944743
},
"gsm8k": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_num_theory": {
"acc": 0.02962962962962963,
"acc_stderr": 0.007303608618028771
},
"math_algebra": {
"acc": 0.02948609941027801,
"acc_stderr": 0.004912099985374022
},
"math_precalc": {
"acc": 0.018315018315018316,
"acc_stderr": 0.005743696731653661
},
"math_geometry": {
"acc": 0.014613778705636743,
"acc_stderr": 0.005488713443686309
},
"math_counting_and_prob": {
"acc": 0.04008438818565401,
"acc_stderr": 0.009019315660749231
},
"math_asdiv": {
"acc": 0.0,
"acc_stderr": 0.0
},
"mathqa": {
"acc": 0.30586264656616413,
"acc_stderr": 0.00843502782274867,
"acc_norm": 0.3088777219430486,
"acc_norm_stderr": 0.008458071062361336
}
},
"versions": {
"math_prealgebra": 1,
"drop": 1,
"math_intermediate_algebra": 1,
"gsm8k": 0,
"math_asdiv": 0,
"math_num_theory": 1,
"math_algebra": 1,
"math_precalc": 1,
"math_geometry": 1,
"math_counting_and_prob": 1,
"mathqa": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"math_prealgebra": {
"acc": 0.001148105625717566,
"acc_stderr": 0.001148105625717572
},
"math_intermediate_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_counting_and_prob": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_geometry": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_precalc": {
"acc": 0.0,
"acc_stderr": 0.0
},
"drop": {
"em": 0.008389261744966443,
"em_stderr": 0.0009340543216866975,
"f1": 0.016472315436241603,
"f1_stderr": 0.001049526866424092
},
"gsm8k": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_num_theory": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"mathqa": {
"acc": 0.3474036850921273,
"acc_stderr": 0.008716459359487392,
"acc_norm": 0.34539363484087104,
"acc_norm_stderr": 0.008704580930350191
}
},
"versions": {
"math_prealgebra": 1,
"math_intermediate_algebra": 1,
"math_counting_and_prob": 1,
"math_geometry": 1,
"math_precalc": 1,
"drop": 1,
"mathqa": 0,
"gsm8k": 0,
"math_num_theory": 1,
"math_algebra": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
"num_fewshot": 5,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"hendrycksTest-high_school_world_history": {
"acc": 0.6962025316455697,
"acc_stderr": 0.029936696387138598,
"acc_norm": 0.569620253164557,
"acc_norm_stderr": 0.032230171959375976
},
"hendrycksTest-formal_logic": {
"acc": 0.42063492063492064,
"acc_stderr": 0.04415438226743743,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.043758884927270605
},
"hendrycksTest-human_aging": {
"acc": 0.672645739910314,
"acc_stderr": 0.03149384670994131,
"acc_norm": 0.3632286995515695,
"acc_norm_stderr": 0.032277904428505
},
"hendrycksTest-international_law": {
"acc": 0.7024793388429752,
"acc_stderr": 0.04173349148083499,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"hendrycksTest-security_studies": {
"acc": 0.5714285714285714,
"acc_stderr": 0.031680911612338825,
"acc_norm": 0.40408163265306124,
"acc_norm_stderr": 0.0314147080258659
},
"hendrycksTest-medical_genetics": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"hendrycksTest-econometrics": {
"acc": 0.3508771929824561,
"acc_stderr": 0.044895393502707,
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.043727482902780064
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.5153846153846153,
"acc_stderr": 0.025339003010106515,
"acc_norm": 0.4153846153846154,
"acc_norm_stderr": 0.024985354923102332
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"hendrycksTest-logical_fallacies": {
"acc": 0.6993865030674846,
"acc_stderr": 0.03602511318806771,
"acc_norm": 0.5398773006134969,
"acc_norm_stderr": 0.039158572914369714
},
"hendrycksTest-prehistory": {
"acc": 0.6635802469135802,
"acc_stderr": 0.026289734945952926,
"acc_norm": 0.42901234567901236,
"acc_norm_stderr": 0.027538925613470867
},
"hendrycksTest-professional_psychology": {
"acc": 0.5882352941176471,
"acc_stderr": 0.019910377463105932,
"acc_norm": 0.43300653594771243,
"acc_norm_stderr": 0.02004544247332422
},
"hendrycksTest-professional_accounting": {
"acc": 0.3971631205673759,
"acc_stderr": 0.029189805673587105,
"acc_norm": 0.33687943262411346,
"acc_norm_stderr": 0.02819553487396673
},
"hendrycksTest-college_biology": {
"acc": 0.6111111111111112,
"acc_stderr": 0.04076663253918567,
"acc_norm": 0.4236111111111111,
"acc_norm_stderr": 0.04132125019723369
},
"hendrycksTest-high_school_biology": {
"acc": 0.6709677419354839,
"acc_stderr": 0.02672949906834996,
"acc_norm": 0.5451612903225806,
"acc_norm_stderr": 0.028327743091561074
},
"hendrycksTest-philosophy": {
"acc": 0.6752411575562701,
"acc_stderr": 0.02659678228769704,
"acc_norm": 0.5016077170418006,
"acc_norm_stderr": 0.02839794490780661
},
"hendrycksTest-high_school_european_history": {
"acc": 0.696969696969697,
"acc_stderr": 0.03588624800091707,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.03872592983524754
},
"hendrycksTest-college_medicine": {
"acc": 0.5144508670520231,
"acc_stderr": 0.03810871630454764,
"acc_norm": 0.43352601156069365,
"acc_norm_stderr": 0.03778621079092055
},
"hendrycksTest-professional_medicine": {
"acc": 0.5551470588235294,
"acc_stderr": 0.03018753206032938,
"acc_norm": 0.35661764705882354,
"acc_norm_stderr": 0.02909720956841195
},
"hendrycksTest-moral_scenarios": {
"acc": 0.34301675977653634,
"acc_stderr": 0.015876912673057724,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.39901477832512317,
"acc_stderr": 0.03445487686264716,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998573
},
"hendrycksTest-high_school_physics": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.8082901554404145,
"acc_stderr": 0.028408953626245282,
"acc_norm": 0.6113989637305699,
"acc_norm_stderr": 0.03517739796373132
},
"hendrycksTest-high_school_geography": {
"acc": 0.7575757575757576,
"acc_stderr": 0.030532892233932026,
"acc_norm": 0.5505050505050505,
"acc_norm_stderr": 0.0354413249194797
},
"hendrycksTest-global_facts": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"hendrycksTest-professional_law": {
"acc": 0.4002607561929596,
"acc_stderr": 0.012513582529136213,
"acc_norm": 0.3435462842242503,
"acc_norm_stderr": 0.012128961174190158
},
"hendrycksTest-college_mathematics": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-college_physics": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171452,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.04533838195929774
},
"hendrycksTest-high_school_statistics": {
"acc": 0.4351851851851852,
"acc_stderr": 0.03381200005643525,
"acc_norm": 0.35648148148148145,
"acc_norm_stderr": 0.032664783315272714
},
"hendrycksTest-machine_learning": {
"acc": 0.4017857142857143,
"acc_stderr": 0.04653333146973646,
"acc_norm": 0.30357142857142855,
"acc_norm_stderr": 0.04364226155841044
},
"hendrycksTest-public_relations": {
"acc": 0.6454545454545455,
"acc_stderr": 0.045820048415054174,
"acc_norm": 0.4090909090909091,
"acc_norm_stderr": 0.047093069786618966
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"hendrycksTest-high_school_psychology": {
"acc": 0.7706422018348624,
"acc_stderr": 0.018025349724618684,
"acc_norm": 0.5541284403669725,
"acc_norm_stderr": 0.021311335009708582
},
"hendrycksTest-virology": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333045,
"acc_norm": 0.3433734939759036,
"acc_norm_stderr": 0.03696584317010601
},
"hendrycksTest-marketing": {
"acc": 0.8461538461538461,
"acc_stderr": 0.023636873317489294,
"acc_norm": 0.7649572649572649,
"acc_norm_stderr": 0.027778835904935437
},
"hendrycksTest-human_sexuality": {
"acc": 0.7022900763358778,
"acc_stderr": 0.04010358942462203,
"acc_norm": 0.46564885496183206,
"acc_norm_stderr": 0.04374928560599738
},
"hendrycksTest-sociology": {
"acc": 0.7611940298507462,
"acc_stderr": 0.03014777593540922,
"acc_norm": 0.6616915422885572,
"acc_norm_stderr": 0.033455630703391914
},
"hendrycksTest-college_computer_science": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-conceptual_physics": {
"acc": 0.5106382978723404,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.3276595744680851,
"acc_norm_stderr": 0.030683020843231004
},
"hendrycksTest-anatomy": {
"acc": 0.5185185185185185,
"acc_stderr": 0.043163785995113245,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04244633238353228
},
"hendrycksTest-miscellaneous": {
"acc": 0.8186462324393359,
"acc_stderr": 0.013778693778464062,
"acc_norm": 0.6143039591315453,
"acc_norm_stderr": 0.017406476619212907
},
"hendrycksTest-jurisprudence": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04803752235190193
},
"hendrycksTest-moral_disputes": {
"acc": 0.6184971098265896,
"acc_stderr": 0.026152198619726792,
"acc_norm": 0.4595375722543353,
"acc_norm_stderr": 0.026830805998952236
},
"hendrycksTest-high_school_us_history": {
"acc": 0.7205882352941176,
"acc_stderr": 0.031493281045079556,
"acc_norm": 0.553921568627451,
"acc_norm_stderr": 0.03488845451304974
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.25925925925925924,
"acc_stderr": 0.026719240783712177,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.02831753349606648
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.5840336134453782,
"acc_stderr": 0.032016501007396114,
"acc_norm": 0.4831932773109244,
"acc_norm_stderr": 0.03246013680375308
},
"hendrycksTest-astronomy": {
"acc": 0.5723684210526315,
"acc_stderr": 0.04026097083296564,
"acc_norm": 0.5657894736842105,
"acc_norm_stderr": 0.04033565667848319
},
"hendrycksTest-world_religions": {
"acc": 0.8128654970760234,
"acc_stderr": 0.029913127232368043,
"acc_norm": 0.7660818713450293,
"acc_norm_stderr": 0.03246721765117825
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.5320754716981132,
"acc_stderr": 0.03070948699255654,
"acc_norm": 0.4641509433962264,
"acc_norm_stderr": 0.030693675018458003
},
"hendrycksTest-college_chemistry": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-abstract_algebra": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-business_ethics": {
"acc": 0.67,
"acc_stderr": 0.04725815626252609,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.4417989417989418,
"acc_stderr": 0.02557625706125384,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.024870815251057075
},
"hendrycksTest-management": {
"acc": 0.7184466019417476,
"acc_stderr": 0.044532548363264673,
"acc_norm": 0.5533980582524272,
"acc_norm_stderr": 0.04922424153458933
},
"hendrycksTest-electrical_engineering": {
"acc": 0.5172413793103449,
"acc_stderr": 0.04164188720169375,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.040573247344190336
},
"hendrycksTest-nutrition": {
"acc": 0.6111111111111112,
"acc_stderr": 0.02791405551046801,
"acc_norm": 0.5032679738562091,
"acc_norm_stderr": 0.028629305194003543
},
"hendrycksTest-computer_security": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
}
},
"versions": {
"hendrycksTest-high_school_world_history": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-philosophy": 0,
"hendrycksTest-high_school_european_history": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-high_school_statistics": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-public_relations": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-high_school_microeconomics": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-world_religions": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-business_ethics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-management": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-nutrition": 0,
"hendrycksTest-computer_security": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B,use_accelerate=True",
"num_fewshot": 5,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"pawsx_fr": {
"acc": 0.5285,
"acc_stderr": 0.01116495423642881
},
"pawsx_de": {
"acc": 0.582,
"acc_stderr": 0.011031720148042082
},
"pawsx_en": {
"acc": 0.5875,
"acc_stderr": 0.011010562712487564
},
"pawsx_ja": {
"acc": 0.4675,
"acc_stderr": 0.011159486640120933
},
"pawsx_ko": {
"acc": 0.457,
"acc_stderr": 0.011141704034140798
},
"pawsx_zh": {
"acc": 0.459,
"acc_stderr": 0.011145474902641256
},
"pawsx_es": {
"acc": 0.558,
"acc_stderr": 0.011107641056719623
}
},
"versions": {
"pawsx_fr": 0,
"pawsx_de": 0,
"pawsx_en": 0,
"pawsx_ja": 0,
"pawsx_ko": 0,
"pawsx_zh": 0,
"pawsx_es": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xcopa_vi": {
"acc": 0.524,
"acc_stderr": 0.0223572738810164
},
"xcopa_id": {
"acc": 0.606,
"acc_stderr": 0.021874299301689253
},
"xcopa_it": {
"acc": 0.714,
"acc_stderr": 0.020229346329177524
},
"xcopa_ht": {
"acc": 0.518,
"acc_stderr": 0.02236856511738799
},
"xcopa_zh": {
"acc": 0.622,
"acc_stderr": 0.021706550824518184
},
"xcopa_sw": {
"acc": 0.524,
"acc_stderr": 0.0223572738810164
},
"xcopa_et": {
"acc": 0.472,
"acc_stderr": 0.02234794983266809
},
"xcopa_qu": {
"acc": 0.494,
"acc_stderr": 0.022381462412439324
},
"xcopa_th": {
"acc": 0.546,
"acc_stderr": 0.02228814759117695
},
"xcopa_tr": {
"acc": 0.522,
"acc_stderr": 0.022361396739207888
},
"xcopa_ta": {
"acc": 0.532,
"acc_stderr": 0.022337186479044292
}
},
"versions": {
"xcopa_vi": 0,
"xcopa_id": 0,
"xcopa_it": 0,
"xcopa_ht": 0,
"xcopa_zh": 0,
"xcopa_sw": 0,
"xcopa_et": 0,
"xcopa_qu": 0,
"xcopa_th": 0,
"xcopa_tr": 0,
"xcopa_ta": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xnli_ar": {
"acc": 0.3449101796407186,
"acc_stderr": 0.006716266425755
},
"xnli_bg": {
"acc": 0.3852295409181637,
"acc_stderr": 0.006876077627982856
},
"xnli_de": {
"acc": 0.43872255489021955,
"acc_stderr": 0.007011456767132425
},
"xnli_el": {
"acc": 0.34910179640718564,
"acc_stderr": 0.00673530182747736
},
"xnli_en": {
"acc": 0.4818363273453094,
"acc_stderr": 0.007060049324579861
},
"xnli_es": {
"acc": 0.4023952095808383,
"acc_stderr": 0.006928798318208028
},
"xnli_fr": {
"acc": 0.4295409181636727,
"acc_stderr": 0.006994215414803201
},
"xnli_hi": {
"acc": 0.3646706586826347,
"acc_stderr": 0.0068010246867294885
},
"xnli_ru": {
"acc": 0.3812375249500998,
"acc_stderr": 0.0068625305186053856
},
"xnli_sw": {
"acc": 0.3409181636726547,
"acc_stderr": 0.006697600297167045
},
"xnli_th": {
"acc": 0.3397205588822355,
"acc_stderr": 0.0066918975980483925
},
"xnli_tr": {
"acc": 0.3652694610778443,
"acc_stderr": 0.00680339776716209
},
"xnli_ur": {
"acc": 0.34311377245508984,
"acc_stderr": 0.006707931789556032
},
"xnli_vi": {
"acc": 0.356686626746507,
"acc_stderr": 0.0067682935643592285
},
"xnli_zh": {
"acc": 0.33512974051896205,
"acc_stderr": 0.006669594382503632
}
},
"versions": {
"xnli_ar": 0,
"xnli_bg": 0,
"xnli_de": 0,
"xnli_el": 0,
"xnli_en": 0,
"xnli_es": 0,
"xnli_fr": 0,
"xnli_hi": 0,
"xnli_ru": 0,
"xnli_sw": 0,
"xnli_th": 0,
"xnli_tr": 0,
"xnli_ur": 0,
"xnli_vi": 0,
"xnli_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xstory_cloze_en": {
"acc": 0.7816015883520847,
"acc_stderr": 0.010632343054700505
},
"xstory_cloze_te": {
"acc": 0.5320979483785573,
"acc_stderr": 0.012840584503982028
},
"xstory_cloze_zh": {
"acc": 0.585704831237591,
"acc_stderr": 0.012676689821720669
},
"xstory_cloze_id": {
"acc": 0.5923229649238915,
"acc_stderr": 0.01264587648804028
},
"xstory_cloze_ar": {
"acc": 0.5089344804765056,
"acc_stderr": 0.012865070917320802
},
"xstory_cloze_my": {
"acc": 0.4877564526803441,
"acc_stderr": 0.012863267059205548
},
"xstory_cloze_hi": {
"acc": 0.5665122435473197,
"acc_stderr": 0.012752771973917616
},
"xstory_cloze_eu": {
"acc": 0.513567174056916,
"acc_stderr": 0.012862387586650079
},
"xstory_cloze_sw": {
"acc": 0.5062872270019855,
"acc_stderr": 0.01286610802121821
},
"xstory_cloze_es": {
"acc": 0.7081403044341495,
"acc_stderr": 0.011699256037649366
},
"xstory_cloze_ru": {
"acc": 0.6671078755790867,
"acc_stderr": 0.012127221798743731
}
},
"versions": {
"xstory_cloze_en": 0,
"xstory_cloze_te": 0,
"xstory_cloze_zh": 0,
"xstory_cloze_id": 0,
"xstory_cloze_ar": 0,
"xstory_cloze_my": 0,
"xstory_cloze_hi": 0,
"xstory_cloze_eu": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_es": 0,
"xstory_cloze_ru": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_en": {
"acc": 0.873978494623656,
"acc_stderr": 0.006884218449880497
},
"xwinograd_ru": {
"acc": 0.6698412698412698,
"acc_stderr": 0.026538875646287704
},
"xwinograd_jp": {
"acc": 0.6736183524504692,
"acc_stderr": 0.015149108150588548
},
"xwinograd_pt": {
"acc": 0.7680608365019012,
"acc_stderr": 0.02607559386030469
},
"xwinograd_zh": {
"acc": 0.7123015873015873,
"acc_stderr": 0.02018443961183448
},
"xwinograd_fr": {
"acc": 0.7349397590361446,
"acc_stderr": 0.04874064133109369
}
},
"versions": {
"xwinograd_en": 0,
"xwinograd_ru": 0,
"xwinograd_jp": 0,
"xwinograd_pt": 0,
"xwinograd_zh": 0,
"xwinograd_fr": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/30B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
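The Markdown summaries that follow restate these JSON results as pipe tables. Below is a minimal sketch of such a conversion using a hypothetical helper (not the harness's own table writer); it assumes the JSON layout shown above and prints accuracy-style metrics as percentages.

```python
# Hypothetical helper: turn a results JSON (like the blocks above) into the
# "| Task |Version|Metric|Value| |Stderr|" pipe-table format used below.
import json


def results_to_markdown(path):
    with open(path) as f:
        data = json.load(f)
    lines = [
        "| Task |Version|Metric|Value| |Stderr|",
        "|------|------:|------|----:|---|-----:|",
    ]
    for task, metrics in sorted(data["results"].items()):
        version = data["versions"][task]
        for metric, value in metrics.items():
            if metric.endswith("_stderr"):
                continue  # stderr values are folded into their metric's row
            stderr = metrics.get(metric + "_stderr")
            stderr_str = f"{100 * stderr:.2f}" if stderr is not None else ""
            lines.append(
                f"|{task}| {version}|{metric}|{100 * value:.2f}|± |{stderr_str}|"
            )
    return "\n".join(lines)


if __name__ == "__main__":
    # Example usage against one of the files summarized below.
    print(results_to_markdown("llama-7B_xcopa_0-shot.json"))
```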
# llama-7B
## llama-7B_anli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|anli_r1| 0|acc |34.80|± | 1.51|
|anli_r2| 0|acc |33.70|± | 1.50|
|anli_r3| 0|acc |36.58|± | 1.39|
## llama-7B_arithmetic_5-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------------|------:|------|----:|---|-----:|
|arithmetic_1dc| 0|acc | 0|± | 0|
|arithmetic_2da| 0|acc | 0|± | 0|
|arithmetic_2dm| 0|acc | 0|± | 0|
|arithmetic_2ds| 0|acc | 0|± | 0|
|arithmetic_3da| 0|acc | 0|± | 0|
|arithmetic_3ds| 0|acc | 0|± | 0|
|arithmetic_4da| 0|acc | 0|± | 0|
|arithmetic_4ds| 0|acc | 0|± | 0|
|arithmetic_5da| 0|acc | 0|± | 0|
|arithmetic_5ds| 0|acc | 0|± | 0|
## llama-7B_bbh_3-shot.json
| Task |Version| Metric |Value| |Stderr|
|------------------------------------------------|------:|---------------------|----:|---|-----:|
|bigbench_causal_judgement | 0|multiple_choice_grade|48.42|± | 3.64|
|bigbench_date_understanding | 0|multiple_choice_grade|62.06|± | 2.53|
|bigbench_disambiguation_qa | 0|multiple_choice_grade|35.27|± | 2.98|
|bigbench_dyck_languages | 0|multiple_choice_grade|15.40|± | 1.14|
|bigbench_formal_fallacies_syllogisms_negation | 0|multiple_choice_grade|51.35|± | 0.42|
|bigbench_geometric_shapes | 0|multiple_choice_grade|17.83|± | 2.02|
| | |exact_str_match | 0.00|± | 0.00|
|bigbench_hyperbaton | 0|multiple_choice_grade|49.51|± | 0.22|
|bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|29.00|± | 2.03|
|bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|24.57|± | 1.63|
|bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|39.33|± | 2.83|
|bigbench_movie_recommendation | 0|multiple_choice_grade|40.40|± | 2.20|
|bigbench_navigate | 0|multiple_choice_grade|49.50|± | 1.58|
|bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|34.60|± | 1.06|
|bigbench_ruin_names | 0|multiple_choice_grade|29.91|± | 2.17|
|bigbench_salient_translation_error_detection | 0|multiple_choice_grade|16.53|± | 1.18|
|bigbench_snarks | 0|multiple_choice_grade|50.83|± | 3.73|
|bigbench_sports_understanding | 0|multiple_choice_grade|50.00|± | 1.59|
|bigbench_temporal_sequences | 0|multiple_choice_grade|27.20|± | 1.41|
|bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|18.24|± | 1.09|
|bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|13.71|± | 0.82|
|bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|39.33|± | 2.83|
## llama-7B_blimp_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------------------------------------------------|------:|------|----:|---|-----:|
|blimp_adjunct_island | 0|acc | 53.9|± | 1.58|
|blimp_anaphor_gender_agreement | 0|acc | 44.8|± | 1.57|
|blimp_anaphor_number_agreement | 0|acc | 65.9|± | 1.50|
|blimp_animate_subject_passive | 0|acc | 62.6|± | 1.53|
|blimp_animate_subject_trans | 0|acc | 76.1|± | 1.35|
|blimp_causative | 0|acc | 50.8|± | 1.58|
|blimp_complex_NP_island | 0|acc | 41.6|± | 1.56|
|blimp_coordinate_structure_constraint_complex_left_branch| 0|acc | 68.2|± | 1.47|
|blimp_coordinate_structure_constraint_object_extraction | 0|acc | 62.9|± | 1.53|
|blimp_determiner_noun_agreement_1 | 0|acc | 63.6|± | 1.52|
|blimp_determiner_noun_agreement_2 | 0|acc | 59.8|± | 1.55|
|blimp_determiner_noun_agreement_irregular_1 | 0|acc | 57.2|± | 1.57|
|blimp_determiner_noun_agreement_irregular_2 | 0|acc | 60.2|± | 1.55|
|blimp_determiner_noun_agreement_with_adj_2 | 0|acc | 54.0|± | 1.58|
|blimp_determiner_noun_agreement_with_adj_irregular_1 | 0|acc | 56.3|± | 1.57|
|blimp_determiner_noun_agreement_with_adj_irregular_2 | 0|acc | 59.1|± | 1.56|
|blimp_determiner_noun_agreement_with_adjective_1 | 0|acc | 57.7|± | 1.56|
|blimp_distractor_agreement_relational_noun | 0|acc | 44.1|± | 1.57|
|blimp_distractor_agreement_relative_clause | 0|acc | 31.4|± | 1.47|
|blimp_drop_argument | 0|acc | 70.1|± | 1.45|
|blimp_ellipsis_n_bar_1 | 0|acc | 66.8|± | 1.49|
|blimp_ellipsis_n_bar_2 | 0|acc | 79.4|± | 1.28|
|blimp_existential_there_object_raising | 0|acc | 78.8|± | 1.29|
|blimp_existential_there_quantifiers_1 | 0|acc | 68.3|± | 1.47|
|blimp_existential_there_quantifiers_2 | 0|acc | 67.4|± | 1.48|
|blimp_existential_there_subject_raising | 0|acc | 69.6|± | 1.46|
|blimp_expletive_it_object_raising | 0|acc | 65.9|± | 1.50|
|blimp_inchoative | 0|acc | 42.0|± | 1.56|
|blimp_intransitive | 0|acc | 59.2|± | 1.55|
|blimp_irregular_past_participle_adjectives | 0|acc | 42.9|± | 1.57|
|blimp_irregular_past_participle_verbs | 0|acc | 72.5|± | 1.41|
|blimp_irregular_plural_subject_verb_agreement_1 | 0|acc | 65.3|± | 1.51|
|blimp_irregular_plural_subject_verb_agreement_2 | 0|acc | 70.0|± | 1.45|
|blimp_left_branch_island_echo_question | 0|acc | 83.5|± | 1.17|
|blimp_left_branch_island_simple_question | 0|acc | 74.0|± | 1.39|
|blimp_matrix_question_npi_licensor_present | 0|acc | 11.7|± | 1.02|
|blimp_npi_present_1 | 0|acc | 53.4|± | 1.58|
|blimp_npi_present_2 | 0|acc | 53.0|± | 1.58|
|blimp_only_npi_licensor_present | 0|acc | 81.4|± | 1.23|
|blimp_only_npi_scope | 0|acc | 26.6|± | 1.40|
|blimp_passive_1 | 0|acc | 70.2|± | 1.45|
|blimp_passive_2 | 0|acc | 70.3|± | 1.45|
|blimp_principle_A_c_command | 0|acc | 39.0|± | 1.54|
|blimp_principle_A_case_1 | 0|acc | 98.5|± | 0.38|
|blimp_principle_A_case_2 | 0|acc | 55.4|± | 1.57|
|blimp_principle_A_domain_1 | 0|acc | 96.2|± | 0.60|
|blimp_principle_A_domain_2 | 0|acc | 64.6|± | 1.51|
|blimp_principle_A_domain_3 | 0|acc | 50.1|± | 1.58|
|blimp_principle_A_reconstruction | 0|acc | 67.3|± | 1.48|
|blimp_regular_plural_subject_verb_agreement_1 | 0|acc | 64.5|± | 1.51|
|blimp_regular_plural_subject_verb_agreement_2 | 0|acc | 70.5|± | 1.44|
|blimp_sentential_negation_npi_licensor_present | 0|acc | 94.0|± | 0.75|
|blimp_sentential_negation_npi_scope | 0|acc | 58.8|± | 1.56|
|blimp_sentential_subject_island | 0|acc | 60.6|± | 1.55|
|blimp_superlative_quantifiers_1 | 0|acc | 61.2|± | 1.54|
|blimp_superlative_quantifiers_2 | 0|acc | 56.1|± | 1.57|
|blimp_tough_vs_raising_1 | 0|acc | 29.8|± | 1.45|
|blimp_tough_vs_raising_2 | 0|acc | 76.8|± | 1.34|
|blimp_transitive | 0|acc | 69.8|± | 1.45|
|blimp_wh_island | 0|acc | 27.5|± | 1.41|
|blimp_wh_questions_object_gap | 0|acc | 67.0|± | 1.49|
|blimp_wh_questions_subject_gap | 0|acc | 72.0|± | 1.42|
|blimp_wh_questions_subject_gap_long_distance | 0|acc | 74.6|± | 1.38|
|blimp_wh_vs_that_no_gap | 0|acc | 84.8|± | 1.14|
|blimp_wh_vs_that_no_gap_long_distance | 0|acc | 81.2|± | 1.24|
|blimp_wh_vs_that_with_gap | 0|acc | 23.9|± | 1.35|
|blimp_wh_vs_that_with_gap_long_distance | 0|acc | 22.7|± | 1.33|
## llama-7B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |38.23|± | 1.42|
| | |acc_norm|41.38|± | 1.44|
|arc_easy | 0|acc |67.38|± | 0.96|
| | |acc_norm|52.48|± | 1.02|
|boolq | 1|acc |73.06|± | 0.78|
|copa | 0|acc |84.00|± | 3.68|
|hellaswag | 0|acc |56.39|± | 0.49|
| | |acc_norm|72.98|± | 0.44|
|mc_taco | 0|em |11.26| | |
| | |f1 |48.27| | |
|openbookqa | 0|acc |28.20|± | 2.01|
| | |acc_norm|42.40|± | 2.21|
|piqa | 0|acc |78.18|± | 0.96|
| | |acc_norm|77.42|± | 0.98|
|prost | 0|acc |25.69|± | 0.32|
| | |acc_norm|28.03|± | 0.33|
|swag | 0|acc |55.47|± | 0.35|
| | |acc_norm|66.87|± | 0.33|
|winogrande | 0|acc |66.93|± | 1.32|
|wsc273 | 0|acc |80.95|± | 2.38|
## llama-7B_glue_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|cola | 0|mcc | 0.00|± | 0.00|
|mnli | 0|acc |34.40|± | 0.48|
|mnli_mismatched| 0|acc |35.72|± | 0.48|
|mrpc | 0|acc |68.38|± | 2.30|
| | |f1 |81.22|± | 1.62|
|qnli | 0|acc |49.57|± | 0.68|
|qqp | 0|acc |36.84|± | 0.24|
| | |f1 |53.81|± | 0.26|
|rte | 0|acc |53.07|± | 3.00|
|sst | 0|acc |52.98|± | 1.69|
|wnli | 1|acc |56.34|± | 5.93|
## llama-7B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 8.04|± | 0.75|
## llama-7B_human_alignment_0-shot.json
| Task |Version| Metric | Value | |Stderr|
|---------------------------------------|------:|---------------------|------:|---|-----:|
|crows_pairs_english_age | 0|likelihood_difference| 594.23|± | 79.03|
| | |pct_stereotype | 51.65|± | 5.27|
|crows_pairs_english_autre | 0|likelihood_difference|1101.14|± |589.08|
| | |pct_stereotype | 45.45|± | 15.75|
|crows_pairs_english_disability | 0|likelihood_difference| 966.97|± |113.86|
| | |pct_stereotype | 66.15|± | 5.91|
|crows_pairs_english_gender | 0|likelihood_difference| 791.74|± | 55.02|
| | |pct_stereotype | 53.12|± | 2.79|
|crows_pairs_english_nationality | 0|likelihood_difference| 676.26|± | 58.69|
| | |pct_stereotype | 53.70|± | 3.40|
|crows_pairs_english_physical_appearance| 0|likelihood_difference| 451.26|± | 69.32|
| | |pct_stereotype | 50.00|± | 5.93|
|crows_pairs_english_race_color | 0|likelihood_difference| 624.65|± | 32.39|
| | |pct_stereotype | 46.65|± | 2.22|
|crows_pairs_english_religion | 0|likelihood_difference| 721.96|± | 75.92|
| | |pct_stereotype | 66.67|± | 4.49|
|crows_pairs_english_sexual_orientation | 0|likelihood_difference| 830.48|± | 84.28|
| | |pct_stereotype | 62.37|± | 5.05|
|crows_pairs_english_socioeconomic | 0|likelihood_difference| 640.16|± | 54.20|
| | |pct_stereotype | 56.84|± | 3.60|
|crows_pairs_french_age | 0|likelihood_difference|1193.96|± |153.77|
| | |pct_stereotype | 35.56|± | 5.07|
|crows_pairs_french_autre | 0|likelihood_difference| 751.20|± |209.58|
| | |pct_stereotype | 61.54|± | 14.04|
|crows_pairs_french_disability | 0|likelihood_difference|1014.77|± |139.07|
| | |pct_stereotype | 42.42|± | 6.13|
|crows_pairs_french_gender | 0|likelihood_difference|1179.90|± | 87.14|
| | |pct_stereotype | 52.02|± | 2.79|
|crows_pairs_french_nationality | 0|likelihood_difference|1041.65|± | 90.66|
| | |pct_stereotype | 40.71|± | 3.09|
|crows_pairs_french_physical_appearance | 0|likelihood_difference| 704.51|± | 94.84|
| | |pct_stereotype | 55.56|± | 5.90|
|crows_pairs_french_race_color | 0|likelihood_difference|1204.89|± | 73.32|
| | |pct_stereotype | 43.26|± | 2.31|
|crows_pairs_french_religion | 0|likelihood_difference| 958.53|± | 87.50|
| | |pct_stereotype | 43.48|± | 4.64|
|crows_pairs_french_sexual_orientation | 0|likelihood_difference| 760.58|± | 79.39|
| | |pct_stereotype | 67.03|± | 4.96|
|crows_pairs_french_socioeconomic | 0|likelihood_difference| 980.84|± |101.51|
| | |pct_stereotype | 52.04|± | 3.58|
|ethics_cm | 0|acc | 56.91|± | 0.79|
|ethics_deontology | 0|acc | 50.58|± | 0.83|
| | |em | 0.22| | |
|ethics_justice | 0|acc | 49.96|± | 0.96|
| | |em | 0.15| | |
|ethics_utilitarianism | 0|acc | 49.81|± | 0.72|
|ethics_utilitarianism_original | 0|acc | 95.86|± | 0.29|
|ethics_virtue | 0|acc | 20.98|± | 0.58|
| | |em | 0.00| | |
|toxigen | 0|acc | 43.09|± | 1.62|
| | |acc_norm | 43.19|± | 1.62|
## llama-7B_lambada_0-shot.json
| Task |Version|Metric| Value | | Stderr |
|----------------------|------:|------|---------:|---|--------:|
|lambada_openai | 0|ppl |2817465.09|± |138319.09|
| | |acc | 0.00|± | 0.00|
|lambada_openai_cloze | 0|ppl | 255777.71|± | 11345.77|
| | |acc | 0.04|± | 0.03|
|lambada_openai_mt_de | 0|ppl |1805613.68|± | 97892.79|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_en | 0|ppl |2817465.09|± |138319.09|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_es | 0|ppl |3818890.45|± |197999.05|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_fr | 0|ppl |2111186.12|± |111724.43|
| | |acc | 0.00|± | 0.00|
|lambada_openai_mt_it | 0|ppl |3653680.57|± |197082.99|
| | |acc | 0.00|± | 0.00|
|lambada_standard | 0|ppl |2460346.86|± | 81216.57|
| | |acc | 0.00|± | 0.00|
|lambada_standard_cloze| 0|ppl |6710057.24|± |169833.91|
| | |acc | 0.00|± | 0.00|
## llama-7B_mathematical_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 4.27|± | 0.21|
| | |f1 |12.16|± | 0.25|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 1.68|± | 0.37|
|math_asdiv | 0|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 1.69|± | 0.59|
|math_geometry | 1|acc | 0.84|± | 0.42|
|math_intermediate_algebra| 1|acc | 0.66|± | 0.27|
|math_num_theory | 1|acc | 0.74|± | 0.37|
|math_prealgebra | 1|acc | 1.26|± | 0.38|
|math_precalc | 1|acc | 0.37|± | 0.26|
|mathqa | 0|acc |26.77|± | 0.81|
| | |acc_norm|27.87|± | 0.82|
## llama-7B_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 1.24|± | 0.11|
| | |f1 | 2.10|± | 0.13|
|gsm8k | 0|acc | 0.00|± | 0.00|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.00|± | 0.00|
|math_prealgebra | 1|acc | 0.11|± | 0.11|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |28.21|± | 0.82|
| | |acc_norm|28.78|± | 0.83|
## llama-7B_mmlu_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------------------------------|------:|--------|----:|---|-----:|
|hendrycksTest-abstract_algebra | 0|acc |23.00|± | 4.23|
| | |acc_norm|26.00|± | 4.41|
|hendrycksTest-anatomy | 0|acc |38.52|± | 4.20|
| | |acc_norm|28.15|± | 3.89|
|hendrycksTest-astronomy | 0|acc |45.39|± | 4.05|
| | |acc_norm|46.05|± | 4.06|
|hendrycksTest-business_ethics | 0|acc |53.00|± | 5.02|
| | |acc_norm|46.00|± | 5.01|
|hendrycksTest-clinical_knowledge | 0|acc |38.87|± | 3.00|
| | |acc_norm|38.11|± | 2.99|
|hendrycksTest-college_biology | 0|acc |31.94|± | 3.90|
| | |acc_norm|29.17|± | 3.80|
|hendrycksTest-college_chemistry | 0|acc |33.00|± | 4.73|
| | |acc_norm|30.00|± | 4.61|
|hendrycksTest-college_computer_science | 0|acc |33.00|± | 4.73|
| | |acc_norm|28.00|± | 4.51|
|hendrycksTest-college_mathematics | 0|acc |32.00|± | 4.69|
| | |acc_norm|32.00|± | 4.69|
|hendrycksTest-college_medicine | 0|acc |37.57|± | 3.69|
| | |acc_norm|30.64|± | 3.51|
|hendrycksTest-college_physics | 0|acc |23.53|± | 4.22|
| | |acc_norm|32.35|± | 4.66|
|hendrycksTest-computer_security | 0|acc |37.00|± | 4.85|
| | |acc_norm|44.00|± | 4.99|
|hendrycksTest-conceptual_physics | 0|acc |32.77|± | 3.07|
| | |acc_norm|21.70|± | 2.69|
|hendrycksTest-econometrics | 0|acc |28.95|± | 4.27|
| | |acc_norm|26.32|± | 4.14|
|hendrycksTest-electrical_engineering | 0|acc |35.86|± | 4.00|
| | |acc_norm|32.41|± | 3.90|
|hendrycksTest-elementary_mathematics | 0|acc |32.01|± | 2.40|
| | |acc_norm|29.10|± | 2.34|
|hendrycksTest-formal_logic | 0|acc |30.95|± | 4.13|
| | |acc_norm|34.92|± | 4.26|
|hendrycksTest-global_facts | 0|acc |32.00|± | 4.69|
| | |acc_norm|29.00|± | 4.56|
|hendrycksTest-high_school_biology | 0|acc |35.81|± | 2.73|
| | |acc_norm|35.81|± | 2.73|
|hendrycksTest-high_school_chemistry | 0|acc |25.12|± | 3.05|
| | |acc_norm|29.56|± | 3.21|
|hendrycksTest-high_school_computer_science | 0|acc |41.00|± | 4.94|
| | |acc_norm|34.00|± | 4.76|
|hendrycksTest-high_school_european_history | 0|acc |40.61|± | 3.83|
| | |acc_norm|36.97|± | 3.77|
|hendrycksTest-high_school_geography | 0|acc |42.93|± | 3.53|
| | |acc_norm|36.36|± | 3.43|
|hendrycksTest-high_school_government_and_politics| 0|acc |48.19|± | 3.61|
| | |acc_norm|37.31|± | 3.49|
|hendrycksTest-high_school_macroeconomics | 0|acc |31.79|± | 2.36|
| | |acc_norm|30.26|± | 2.33|
|hendrycksTest-high_school_mathematics | 0|acc |22.59|± | 2.55|
| | |acc_norm|30.74|± | 2.81|
|hendrycksTest-high_school_microeconomics | 0|acc |38.66|± | 3.16|
| | |acc_norm|36.55|± | 3.13|
|hendrycksTest-high_school_physics | 0|acc |20.53|± | 3.30|
| | |acc_norm|27.15|± | 3.63|
|hendrycksTest-high_school_psychology | 0|acc |46.61|± | 2.14|
| | |acc_norm|30.83|± | 1.98|
|hendrycksTest-high_school_statistics | 0|acc |34.26|± | 3.24|
| | |acc_norm|34.26|± | 3.24|
|hendrycksTest-high_school_us_history | 0|acc |42.65|± | 3.47|
| | |acc_norm|31.37|± | 3.26|
|hendrycksTest-high_school_world_history | 0|acc |39.24|± | 3.18|
| | |acc_norm|33.76|± | 3.08|
|hendrycksTest-human_aging | 0|acc |37.22|± | 3.24|
| | |acc_norm|25.11|± | 2.91|
|hendrycksTest-human_sexuality | 0|acc |51.15|± | 4.38|
| | |acc_norm|36.64|± | 4.23|
|hendrycksTest-international_law | 0|acc |38.84|± | 4.45|
| | |acc_norm|57.85|± | 4.51|
|hendrycksTest-jurisprudence | 0|acc |43.52|± | 4.79|
| | |acc_norm|50.00|± | 4.83|
|hendrycksTest-logical_fallacies | 0|acc |38.04|± | 3.81|
| | |acc_norm|34.97|± | 3.75|
|hendrycksTest-machine_learning | 0|acc |30.36|± | 4.36|
| | |acc_norm|26.79|± | 4.20|
|hendrycksTest-management | 0|acc |48.54|± | 4.95|
| | |acc_norm|36.89|± | 4.78|
|hendrycksTest-marketing | 0|acc |61.11|± | 3.19|
| | |acc_norm|50.43|± | 3.28|
|hendrycksTest-medical_genetics | 0|acc |44.00|± | 4.99|
| | |acc_norm|40.00|± | 4.92|
|hendrycksTest-miscellaneous | 0|acc |58.37|± | 1.76|
| | |acc_norm|38.95|± | 1.74|
|hendrycksTest-moral_disputes | 0|acc |36.42|± | 2.59|
| | |acc_norm|33.24|± | 2.54|
|hendrycksTest-moral_scenarios | 0|acc |27.60|± | 1.50|
| | |acc_norm|27.26|± | 1.49|
|hendrycksTest-nutrition | 0|acc |39.54|± | 2.80|
| | |acc_norm|43.79|± | 2.84|
|hendrycksTest-philosophy | 0|acc |40.19|± | 2.78|
| | |acc_norm|35.37|± | 2.72|
|hendrycksTest-prehistory | 0|acc |40.12|± | 2.73|
| | |acc_norm|27.78|± | 2.49|
|hendrycksTest-professional_accounting | 0|acc |30.14|± | 2.74|
| | |acc_norm|29.43|± | 2.72|
|hendrycksTest-professional_law | 0|acc |29.66|± | 1.17|
| | |acc_norm|28.55|± | 1.15|
|hendrycksTest-professional_medicine | 0|acc |33.82|± | 2.87|
| | |acc_norm|27.94|± | 2.73|
|hendrycksTest-professional_psychology | 0|acc |38.40|± | 1.97|
| | |acc_norm|29.90|± | 1.85|
|hendrycksTest-public_relations | 0|acc |39.09|± | 4.67|
| | |acc_norm|22.73|± | 4.01|
|hendrycksTest-security_studies | 0|acc |40.82|± | 3.15|
| | |acc_norm|31.02|± | 2.96|
|hendrycksTest-sociology | 0|acc |47.76|± | 3.53|
| | |acc_norm|42.79|± | 3.50|
|hendrycksTest-us_foreign_policy | 0|acc |56.00|± | 4.99|
| | |acc_norm|45.00|± | 5.00|
|hendrycksTest-virology | 0|acc |39.76|± | 3.81|
| | |acc_norm|28.92|± | 3.53|
|hendrycksTest-world_religions | 0|acc |62.57|± | 3.71|
| | |acc_norm|51.46|± | 3.83|
## llama-7B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |54.65|± | 1.11|
|pawsx_en| 0|acc |61.85|± | 1.09|
|pawsx_es| 0|acc |56.10|± | 1.11|
|pawsx_fr| 0|acc |52.95|± | 1.12|
|pawsx_ja| 0|acc |56.70|± | 1.11|
|pawsx_ko| 0|acc |49.70|± | 1.12|
|pawsx_zh| 0|acc |49.10|± | 1.12|
## llama-7B_question_answering_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|------------|----:|---|-----:|
|headqa_en | 0|acc |32.42|± | 0.89|
| | |acc_norm |35.92|± | 0.92|
|headqa_es | 0|acc |28.26|± | 0.86|
| | |acc_norm |32.42|± | 0.89|
|logiqa | 0|acc |21.81|± | 1.62|
| | |acc_norm |30.26|± | 1.80|
|squad2 | 1|exact | 9.42| | |
| | |f1 |19.45| | |
| | |HasAns_exact|18.49| | |
| | |HasAns_f1 |38.58| | |
| | |NoAns_exact | 0.37| | |
| | |NoAns_f1 | 0.37| | |
| | |best_exact |50.07| | |
| | |best_f1 |50.08| | |
|triviaqa | 1|acc | 0.00|± | 0.00|
|truthfulqa_mc| 1|mc1 |21.05|± | 1.43|
| | |mc2 |34.14|± | 1.31|
|webqs | 0|acc | 0.00|± | 0.00|
## llama-7B_reading_comprehension_0-shot.json
|Task|Version|Metric|Value| |Stderr|
|----|------:|------|----:|---|-----:|
|coqa| 1|f1 |75.21|± | 1.53|
| | |em |62.67|± | 1.88|
|drop| 1|em | 3.59|± | 0.19|
| | |f1 |11.35|± | 0.23|
|race| 1|acc |39.90|± | 1.52|
## llama-7B_unscramble_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|----------------|------:|------|----:|---|-----:|
|anagrams1 | 0|acc | 0|± | 0|
|anagrams2 | 0|acc | 0|± | 0|
|cycle_letters | 0|acc | 0|± | 0|
|random_insertion| 0|acc | 0|± | 0|
|reversed_words | 0|acc | 0|± | 0|
## llama-7B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 48.8|± | 2.24|
|xcopa_ht| 0|acc | 51.0|± | 2.24|
|xcopa_id| 0|acc | 54.6|± | 2.23|
|xcopa_it| 0|acc | 62.0|± | 2.17|
|xcopa_qu| 0|acc | 51.4|± | 2.24|
|xcopa_sw| 0|acc | 50.8|± | 2.24|
|xcopa_ta| 0|acc | 55.2|± | 2.23|
|xcopa_th| 0|acc | 55.8|± | 2.22|
|xcopa_tr| 0|acc | 55.6|± | 2.22|
|xcopa_vi| 0|acc | 51.6|± | 2.24|
|xcopa_zh| 0|acc | 56.2|± | 2.22|
## llama-7B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.57|± | 0.67|
|xnli_bg| 0|acc |36.99|± | 0.68|
|xnli_de| 0|acc |44.77|± | 0.70|
|xnli_el| 0|acc |34.93|± | 0.67|
|xnli_en| 0|acc |51.06|± | 0.71|
|xnli_es| 0|acc |40.62|± | 0.69|
|xnli_fr| 0|acc |43.75|± | 0.70|
|xnli_hi| 0|acc |36.11|± | 0.68|
|xnli_ru| 0|acc |39.36|± | 0.69|
|xnli_sw| 0|acc |33.71|± | 0.67|
|xnli_th| 0|acc |34.51|± | 0.67|
|xnli_tr| 0|acc |35.59|± | 0.68|
|xnli_ur| 0|acc |33.39|± | 0.67|
|xnli_vi| 0|acc |35.59|± | 0.68|
|xnli_zh| 0|acc |36.23|± | 0.68|
## llama-7B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |48.31|± | 1.29|
|xstory_cloze_en| 0|acc |74.78|± | 1.12|
|xstory_cloze_es| 0|acc |65.12|± | 1.23|
|xstory_cloze_eu| 0|acc |50.10|± | 1.29|
|xstory_cloze_hi| 0|acc |52.68|± | 1.28|
|xstory_cloze_id| 0|acc |52.08|± | 1.29|
|xstory_cloze_my| 0|acc |48.71|± | 1.29|
|xstory_cloze_ru| 0|acc |61.35|± | 1.25|
|xstory_cloze_sw| 0|acc |50.36|± | 1.29|
|xstory_cloze_te| 0|acc |52.88|± | 1.28|
|xstory_cloze_zh| 0|acc |54.33|± | 1.28|
## llama-7B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |84.95|± | 0.74|
|xwinograd_fr| 0|acc |72.29|± | 4.94|
|xwinograd_jp| 0|acc |58.92|± | 1.59|
|xwinograd_pt| 0|acc |70.72|± | 2.81|
|xwinograd_ru| 0|acc |64.44|± | 2.70|
|xwinograd_zh| 0|acc |63.69|± | 2.14|
{
"results": {
"anli_r1": {
"acc": 0.348,
"acc_stderr": 0.015070604603768408
},
"anli_r2": {
"acc": 0.337,
"acc_stderr": 0.014955087918653593
},
"anli_r3": {
"acc": 0.36583333333333334,
"acc_stderr": 0.01391021206270117
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"arithmetic_3ds": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_1dc": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_2da": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_4ds": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_3da": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_2ds": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_4da": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_5ds": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_2dm": {
"acc": 0.0,
"acc_stderr": 0.0
},
"arithmetic_5da": {
"acc": 0.0,
"acc_stderr": 0.0
}
},
"versions": {
"arithmetic_3ds": 0,
"arithmetic_1dc": 0,
"arithmetic_2da": 0,
"arithmetic_4ds": 0,
"arithmetic_3da": 0,
"arithmetic_2ds": 0,
"arithmetic_4da": 0,
"arithmetic_5ds": 0,
"arithmetic_2dm": 0,
"arithmetic_5da": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 5,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"bigbench_tracking_shuffled_objects_five_objects": {
"multiple_choice_grade": 0.1824,
"multiple_choice_grade_stderr": 0.010927017514830547
},
"bigbench_logical_deduction_seven_objects": {
"multiple_choice_grade": 0.24571428571428572,
"multiple_choice_grade_stderr": 0.01628337995683342
},
"bigbench_date_understanding": {
"multiple_choice_grade": 0.6205962059620597,
"multiple_choice_grade_stderr": 0.02529481360676469
},
"bigbench_navigate": {
"multiple_choice_grade": 0.495,
"multiple_choice_grade_stderr": 0.015818508944436645
},
"bigbench_geometric_shapes": {
"multiple_choice_grade": 0.17827298050139276,
"multiple_choice_grade_stderr": 0.02022856303248108,
"exact_str_match": 0.0,
"exact_str_match_stderr": 0.0
},
"bigbench_dyck_languages": {
"multiple_choice_grade": 0.154,
"multiple_choice_grade_stderr": 0.011419913065098684
},
"bigbench_temporal_sequences": {
"multiple_choice_grade": 0.272,
"multiple_choice_grade_stderr": 0.014078856992462611
},
"bigbench_snarks": {
"multiple_choice_grade": 0.5082872928176796,
"multiple_choice_grade_stderr": 0.03726268022638988
},
"bigbench_disambiguation_qa": {
"multiple_choice_grade": 0.35271317829457366,
"multiple_choice_grade_stderr": 0.029805242804674153
},
"bigbench_tracking_shuffled_objects_seven_objects": {
"multiple_choice_grade": 0.13714285714285715,
"multiple_choice_grade_stderr": 0.008225477923226985
},
"bigbench_ruin_names": {
"multiple_choice_grade": 0.29910714285714285,
"multiple_choice_grade_stderr": 0.021656359273376977
},
"bigbench_movie_recommendation": {
"multiple_choice_grade": 0.404,
"multiple_choice_grade_stderr": 0.021966635293832918
},
"bigbench_salient_translation_error_detection": {
"multiple_choice_grade": 0.1653306613226453,
"multiple_choice_grade_stderr": 0.011764848862417502
},
"bigbench_logical_deduction_five_objects": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.020313179231745183
},
"bigbench_causal_judgement": {
"multiple_choice_grade": 0.4842105263157895,
"multiple_choice_grade_stderr": 0.036351509398643456
},
"bigbench_hyperbaton": {
"multiple_choice_grade": 0.49508,
"multiple_choice_grade_stderr": 0.0022359820804999713
},
"bigbench_sports_understanding": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.015931324696929153
},
"bigbench_logical_deduction_three_objects": {
"multiple_choice_grade": 0.3933333333333333,
"multiple_choice_grade_stderr": 0.028250090846760875
},
"bigbench_tracking_shuffled_objects_three_objects": {
"multiple_choice_grade": 0.3933333333333333,
"multiple_choice_grade_stderr": 0.028250090846760875
},
"bigbench_formal_fallacies_syllogisms_negation": {
"multiple_choice_grade": 0.5134507042253521,
"multiple_choice_grade_stderr": 0.004194535955193854
},
"bigbench_reasoning_about_colored_objects": {
"multiple_choice_grade": 0.346,
"multiple_choice_grade_stderr": 0.010639483037236658
}
},
"versions": {
"bigbench_tracking_shuffled_objects_five_objects": 0,
"bigbench_logical_deduction_seven_objects": 0,
"bigbench_date_understanding": 0,
"bigbench_navigate": 0,
"bigbench_geometric_shapes": 0,
"bigbench_dyck_languages": 0,
"bigbench_temporal_sequences": 0,
"bigbench_snarks": 0,
"bigbench_disambiguation_qa": 0,
"bigbench_tracking_shuffled_objects_seven_objects": 0,
"bigbench_ruin_names": 0,
"bigbench_movie_recommendation": 0,
"bigbench_salient_translation_error_detection": 0,
"bigbench_logical_deduction_five_objects": 0,
"bigbench_causal_judgement": 0,
"bigbench_hyperbaton": 0,
"bigbench_sports_understanding": 0,
"bigbench_logical_deduction_three_objects": 0,
"bigbench_tracking_shuffled_objects_three_objects": 0,
"bigbench_formal_fallacies_syllogisms_negation": 0,
"bigbench_reasoning_about_colored_objects": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 3,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"blimp_wh_vs_that_no_gap_long_distance": {
"acc": 0.812,
"acc_stderr": 0.01236158601510377
},
"blimp_ellipsis_n_bar_1": {
"acc": 0.668,
"acc_stderr": 0.014899597242811478
},
"blimp_distractor_agreement_relative_clause": {
"acc": 0.314,
"acc_stderr": 0.014683991951087966
},
"blimp_determiner_noun_agreement_with_adjective_1": {
"acc": 0.577,
"acc_stderr": 0.01563058909047635
},
"blimp_principle_A_reconstruction": {
"acc": 0.673,
"acc_stderr": 0.014842213153411245
},
"blimp_determiner_noun_agreement_2": {
"acc": 0.598,
"acc_stderr": 0.015512467135715077
},
"blimp_npi_present_1": {
"acc": 0.534,
"acc_stderr": 0.015782683329937618
},
"blimp_existential_there_quantifiers_2": {
"acc": 0.674,
"acc_stderr": 0.01483050720454104
},
"blimp_existential_there_subject_raising": {
"acc": 0.696,
"acc_stderr": 0.014553205687950438
},
"blimp_tough_vs_raising_1": {
"acc": 0.298,
"acc_stderr": 0.014470846741134717
},
"blimp_wh_questions_subject_gap_long_distance": {
"acc": 0.746,
"acc_stderr": 0.01377220656516854
},
"blimp_left_branch_island_echo_question": {
"acc": 0.835,
"acc_stderr": 0.011743632866916164
},
"blimp_only_npi_licensor_present": {
"acc": 0.814,
"acc_stderr": 0.012310790208412803
},
"blimp_adjunct_island": {
"acc": 0.539,
"acc_stderr": 0.01577110420128319
},
"blimp_coordinate_structure_constraint_object_extraction": {
"acc": 0.629,
"acc_stderr": 0.015283736211823188
},
"blimp_irregular_plural_subject_verb_agreement_1": {
"acc": 0.653,
"acc_stderr": 0.01506047203170662
},
"blimp_passive_2": {
"acc": 0.703,
"acc_stderr": 0.0144568322948011
},
"blimp_drop_argument": {
"acc": 0.701,
"acc_stderr": 0.014484778521220461
},
"blimp_wh_vs_that_with_gap_long_distance": {
"acc": 0.227,
"acc_stderr": 0.013253174964763925
},
"blimp_existential_there_quantifiers_1": {
"acc": 0.683,
"acc_stderr": 0.014721675438880224
},
"blimp_coordinate_structure_constraint_complex_left_branch": {
"acc": 0.682,
"acc_stderr": 0.014734079309311901
},
"blimp_ellipsis_n_bar_2": {
"acc": 0.794,
"acc_stderr": 0.012795613612786548
},
"blimp_sentential_subject_island": {
"acc": 0.606,
"acc_stderr": 0.01545972195749338
},
"blimp_determiner_noun_agreement_with_adj_irregular_2": {
"acc": 0.591,
"acc_stderr": 0.015555094373257946
},
"blimp_npi_present_2": {
"acc": 0.53,
"acc_stderr": 0.015790799515836763
},
"blimp_wh_questions_subject_gap": {
"acc": 0.72,
"acc_stderr": 0.014205696104091519
},
"blimp_determiner_noun_agreement_irregular_1": {
"acc": 0.572,
"acc_stderr": 0.01565442624502927
},
"blimp_superlative_quantifiers_1": {
"acc": 0.612,
"acc_stderr": 0.015417317979911076
},
"blimp_left_branch_island_simple_question": {
"acc": 0.74,
"acc_stderr": 0.013877773329774164
},
"blimp_irregular_past_participle_adjectives": {
"acc": 0.429,
"acc_stderr": 0.01565899754787024
},
"blimp_principle_A_domain_2": {
"acc": 0.646,
"acc_stderr": 0.015129868238451772
},
"blimp_regular_plural_subject_verb_agreement_1": {
"acc": 0.645,
"acc_stderr": 0.015139491543780529
},
"blimp_principle_A_case_1": {
"acc": 0.985,
"acc_stderr": 0.003845749574502997
},
"blimp_principle_A_case_2": {
"acc": 0.554,
"acc_stderr": 0.015726771166750357
},
"blimp_matrix_question_npi_licensor_present": {
"acc": 0.117,
"acc_stderr": 0.010169287802713327
},
"blimp_determiner_noun_agreement_with_adj_irregular_1": {
"acc": 0.563,
"acc_stderr": 0.015693223928730377
},
"blimp_principle_A_domain_1": {
"acc": 0.962,
"acc_stderr": 0.006049181150584942
},
"blimp_superlative_quantifiers_2": {
"acc": 0.561,
"acc_stderr": 0.01570113134540077
},
"blimp_wh_island": {
"acc": 0.275,
"acc_stderr": 0.014127086556490531
},
"blimp_only_npi_scope": {
"acc": 0.266,
"acc_stderr": 0.013979965645145156
},
"blimp_regular_plural_subject_verb_agreement_2": {
"acc": 0.705,
"acc_stderr": 0.014428554438445512
},
"blimp_complex_NP_island": {
"acc": 0.416,
"acc_stderr": 0.015594460144140603
},
"blimp_sentential_negation_npi_scope": {
"acc": 0.588,
"acc_stderr": 0.015572363292015093
},
"blimp_transitive": {
"acc": 0.698,
"acc_stderr": 0.014526080235459543
},
"blimp_wh_vs_that_with_gap": {
"acc": 0.239,
"acc_stderr": 0.013493000446937587
},
"blimp_wh_questions_object_gap": {
"acc": 0.67,
"acc_stderr": 0.014876872027456736
},
"blimp_sentential_negation_npi_licensor_present": {
"acc": 0.94,
"acc_stderr": 0.007513751157474913
},
"blimp_expletive_it_object_raising": {
"acc": 0.659,
"acc_stderr": 0.0149981313484027
},
"blimp_determiner_noun_agreement_1": {
"acc": 0.636,
"acc_stderr": 0.015222868840522017
},
"blimp_anaphor_gender_agreement": {
"acc": 0.448,
"acc_stderr": 0.01573351656634784
},
"blimp_irregular_plural_subject_verb_agreement_2": {
"acc": 0.7,
"acc_stderr": 0.014498627873361425
},
"blimp_passive_1": {
"acc": 0.702,
"acc_stderr": 0.014470846741134705
},
"blimp_determiner_noun_agreement_irregular_2": {
"acc": 0.602,
"acc_stderr": 0.015486634102858913
},
"blimp_irregular_past_participle_verbs": {
"acc": 0.725,
"acc_stderr": 0.014127086556490523
},
"blimp_existential_there_object_raising": {
"acc": 0.788,
"acc_stderr": 0.012931481864938055
},
"blimp_determiner_noun_agreement_with_adj_2": {
"acc": 0.54,
"acc_stderr": 0.015768596914394375
},
"blimp_principle_A_domain_3": {
"acc": 0.501,
"acc_stderr": 0.015819268290576817
},
"blimp_causative": {
"acc": 0.508,
"acc_stderr": 0.015817274929209008
},
"blimp_tough_vs_raising_2": {
"acc": 0.768,
"acc_stderr": 0.01335493745228157
},
"blimp_wh_vs_that_no_gap": {
"acc": 0.848,
"acc_stderr": 0.011358918303475294
},
"blimp_anaphor_number_agreement": {
"acc": 0.659,
"acc_stderr": 0.014998131348402704
},
"blimp_principle_A_c_command": {
"acc": 0.39,
"acc_stderr": 0.01543172505386661
},
"blimp_distractor_agreement_relational_noun": {
"acc": 0.441,
"acc_stderr": 0.015708779894242676
},
"blimp_intransitive": {
"acc": 0.592,
"acc_stderr": 0.015549205052920673
},
"blimp_animate_subject_passive": {
"acc": 0.626,
"acc_stderr": 0.015308767369006363
},
"blimp_animate_subject_trans": {
"acc": 0.761,
"acc_stderr": 0.01349300044693759
},
"blimp_inchoative": {
"acc": 0.42,
"acc_stderr": 0.015615500115072957
}
},
"versions": {
"blimp_wh_vs_that_no_gap_long_distance": 0,
"blimp_ellipsis_n_bar_1": 0,
"blimp_distractor_agreement_relative_clause": 0,
"blimp_determiner_noun_agreement_with_adjective_1": 0,
"blimp_principle_A_reconstruction": 0,
"blimp_determiner_noun_agreement_2": 0,
"blimp_npi_present_1": 0,
"blimp_existential_there_quantifiers_2": 0,
"blimp_existential_there_subject_raising": 0,
"blimp_tough_vs_raising_1": 0,
"blimp_wh_questions_subject_gap_long_distance": 0,
"blimp_left_branch_island_echo_question": 0,
"blimp_only_npi_licensor_present": 0,
"blimp_adjunct_island": 0,
"blimp_coordinate_structure_constraint_object_extraction": 0,
"blimp_irregular_plural_subject_verb_agreement_1": 0,
"blimp_passive_2": 0,
"blimp_drop_argument": 0,
"blimp_wh_vs_that_with_gap_long_distance": 0,
"blimp_existential_there_quantifiers_1": 0,
"blimp_coordinate_structure_constraint_complex_left_branch": 0,
"blimp_ellipsis_n_bar_2": 0,
"blimp_sentential_subject_island": 0,
"blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
"blimp_npi_present_2": 0,
"blimp_wh_questions_subject_gap": 0,
"blimp_determiner_noun_agreement_irregular_1": 0,
"blimp_superlative_quantifiers_1": 0,
"blimp_left_branch_island_simple_question": 0,
"blimp_irregular_past_participle_adjectives": 0,
"blimp_principle_A_domain_2": 0,
"blimp_regular_plural_subject_verb_agreement_1": 0,
"blimp_principle_A_case_1": 0,
"blimp_principle_A_case_2": 0,
"blimp_matrix_question_npi_licensor_present": 0,
"blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
"blimp_principle_A_domain_1": 0,
"blimp_superlative_quantifiers_2": 0,
"blimp_wh_island": 0,
"blimp_only_npi_scope": 0,
"blimp_regular_plural_subject_verb_agreement_2": 0,
"blimp_complex_NP_island": 0,
"blimp_sentential_negation_npi_scope": 0,
"blimp_transitive": 0,
"blimp_wh_vs_that_with_gap": 0,
"blimp_wh_questions_object_gap": 0,
"blimp_sentential_negation_npi_licensor_present": 0,
"blimp_expletive_it_object_raising": 0,
"blimp_determiner_noun_agreement_1": 0,
"blimp_anaphor_gender_agreement": 0,
"blimp_irregular_plural_subject_verb_agreement_2": 0,
"blimp_passive_1": 0,
"blimp_determiner_noun_agreement_irregular_2": 0,
"blimp_irregular_past_participle_verbs": 0,
"blimp_existential_there_object_raising": 0,
"blimp_determiner_noun_agreement_with_adj_2": 0,
"blimp_principle_A_domain_3": 0,
"blimp_causative": 0,
"blimp_tough_vs_raising_2": 0,
"blimp_wh_vs_that_no_gap": 0,
"blimp_anaphor_number_agreement": 0,
"blimp_principle_A_c_command": 0,
"blimp_distractor_agreement_relational_noun": 0,
"blimp_intransitive": 0,
"blimp_animate_subject_passive": 0,
"blimp_animate_subject_trans": 0,
"blimp_inchoative": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"piqa": {
"acc": 0.7818280739934712,
"acc_stderr": 0.009636081958374381,
"acc_norm": 0.7742110990206746,
"acc_norm_stderr": 0.00975498067091731
},
"wsc273": {
"acc": 0.8095238095238095,
"acc_stderr": 0.023809523809523777
},
"arc_easy": {
"acc": 0.6738215488215489,
"acc_stderr": 0.009619849417035172,
"acc_norm": 0.5248316498316499,
"acc_norm_stderr": 0.010247123122159281
},
"hellaswag": {
"acc": 0.563931487751444,
"acc_stderr": 0.004948824501355491,
"acc_norm": 0.7298346942840072,
"acc_norm_stderr": 0.004431375549911366
},
"winogrande": {
"acc": 0.6692975532754538,
"acc_stderr": 0.013222435887002705
},
"prost": {
"acc": 0.2568851409052092,
"acc_stderr": 0.003192056839011391,
"acc_norm": 0.28031596925704527,
"acc_norm_stderr": 0.0032814667207950675
},
"swag": {
"acc": 0.5546835949215235,
"acc_stderr": 0.0035138865053857436,
"acc_norm": 0.6687493751874438,
"acc_norm_stderr": 0.003327673972187388
},
"boolq": {
"acc": 0.7305810397553517,
"acc_stderr": 0.007759626474907443
},
"arc_challenge": {
"acc": 0.3822525597269625,
"acc_stderr": 0.014200454049979293,
"acc_norm": 0.4138225255972696,
"acc_norm_stderr": 0.014392730009221009
},
"mc_taco": {
"em": 0.11261261261261261,
"f1": 0.4827075067316446
},
"copa": {
"acc": 0.84,
"acc_stderr": 0.036845294917747094
},
"openbookqa": {
"acc": 0.282,
"acc_stderr": 0.020143572847290774,
"acc_norm": 0.424,
"acc_norm_stderr": 0.022122993778135404
}
},
"versions": {
"piqa": 0,
"wsc273": 0,
"arc_easy": 0,
"hellaswag": 0,
"winogrande": 0,
"prost": 0,
"swag": 0,
"boolq": 1,
"arc_challenge": 0,
"mc_taco": 0,
"copa": 0,
"openbookqa": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"mrpc": {
"acc": 0.6838235294117647,
"acc_stderr": 0.023048336668420204,
"f1": 0.8122270742358079,
"f1_stderr": 0.01624762253426993
},
"sst": {
"acc": 0.5298165137614679,
"acc_stderr": 0.016911703415318852
},
"wnli": {
"acc": 0.5633802816901409,
"acc_stderr": 0.0592793555841297
},
"mnli_mismatched": {
"acc": 0.3572009764035802,
"acc_stderr": 0.0048327582938812235
},
"qnli": {
"acc": 0.49569833424858134,
"acc_stderr": 0.006765160168388141
},
"rte": {
"acc": 0.5306859205776173,
"acc_stderr": 0.03003973059219781
},
"qqp": {
"acc": 0.3683650754390304,
"acc_stderr": 0.002398975385820536,
"f1": 0.5380844713755992,
"f1_stderr": 0.0025560675394743124
},
"mnli": {
"acc": 0.34396332144676517,
"acc_stderr": 0.0047950937299233165
},
"cola": {
"mcc": 0.0,
"mcc_stderr": 0.0
}
},
"versions": {
"mrpc": 0,
"sst": 0,
"wnli": 1,
"mnli_mismatched": 0,
"qnli": 0,
"rte": 0,
"qqp": 0,
"mnli": 0,
"cola": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"gsm8k": {
"acc": 0.0803639120545868,
"acc_stderr": 0.007488258573239077
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"ethics_virtue": {
"acc": 0.20984924623115578,
"acc_stderr": 0.005773721023799748,
"em": 0.0
},
"crows_pairs_french_race_color": {
"likelihood_difference": 12.048913043478262,
"likelihood_difference_stderr": 0.7332463392189781,
"pct_stereotype": 0.4326086956521739,
"pct_stereotype_stderr": 0.023125046645341776
},
"ethics_utilitarianism_original": {
"acc": 0.9586106489184693,
"acc_stderr": 0.002872952014248801
},
"crows_pairs_english_nationality": {
"likelihood_difference": 6.762586805555555,
"likelihood_difference_stderr": 0.5868865852525466,
"pct_stereotype": 0.5370370370370371,
"pct_stereotype_stderr": 0.03400603625538272
},
"crows_pairs_english_socioeconomic": {
"likelihood_difference": 6.401644736842106,
"likelihood_difference_stderr": 0.5420413190484897,
"pct_stereotype": 0.5684210526315789,
"pct_stereotype_stderr": 0.03602751443822843
},
"crows_pairs_french_socioeconomic": {
"likelihood_difference": 9.80843431122449,
"likelihood_difference_stderr": 1.0151042209820862,
"pct_stereotype": 0.5204081632653061,
"pct_stereotype_stderr": 0.03577590557703757
},
"crows_pairs_english_religion": {
"likelihood_difference": 7.219594594594595,
"likelihood_difference_stderr": 0.759154104063707,
"pct_stereotype": 0.6666666666666666,
"pct_stereotype_stderr": 0.04494665749754944
},
"ethics_justice": {
"acc": 0.4996301775147929,
"acc_stderr": 0.009617160470756728,
"em": 0.0014792899408284023
},
"crows_pairs_english_autre": {
"likelihood_difference": 11.011363636363637,
"likelihood_difference_stderr": 5.8907614264514025,
"pct_stereotype": 0.45454545454545453,
"pct_stereotype_stderr": 0.15745916432444335
},
"toxigen": {
"acc": 0.4308510638297872,
"acc_stderr": 0.016160089171486036,
"acc_norm": 0.4319148936170213,
"acc_norm_stderr": 0.016164899004911828
},
"crows_pairs_french_autre": {
"likelihood_difference": 7.512019230769231,
"likelihood_difference_stderr": 2.0958404773406696,
"pct_stereotype": 0.6153846153846154,
"pct_stereotype_stderr": 0.14044168141158106
},
"ethics_cm": {
"acc": 0.5691119691119692,
"acc_stderr": 0.007945870163705206
},
"crows_pairs_english_gender": {
"likelihood_difference": 7.9173828125,
"likelihood_difference_stderr": 0.5501949212762886,
"pct_stereotype": 0.53125,
"pct_stereotype_stderr": 0.0279398950447155
},
"crows_pairs_english_race_color": {
"likelihood_difference": 6.246493602362205,
"likelihood_difference_stderr": 0.3239007651371134,
"pct_stereotype": 0.46653543307086615,
"pct_stereotype_stderr": 0.022155988267174086
},
"crows_pairs_english_age": {
"likelihood_difference": 5.9423076923076925,
"likelihood_difference_stderr": 0.7902909296461826,
"pct_stereotype": 0.5164835164835165,
"pct_stereotype_stderr": 0.05267597952306975
},
"ethics_utilitarianism": {
"acc": 0.4981281198003328,
"acc_stderr": 0.007211571268099885
},
"crows_pairs_english_sexual_orientation": {
"likelihood_difference": 8.304771505376344,
"likelihood_difference_stderr": 0.8427804261467623,
"pct_stereotype": 0.6236559139784946,
"pct_stereotype_stderr": 0.05050927755267201
},
"ethics_deontology": {
"acc": 0.5058398220244716,
"acc_stderr": 0.008338557598970859,
"em": 0.002224694104560623
},
"crows_pairs_french_religion": {
"likelihood_difference": 9.585326086956522,
"likelihood_difference_stderr": 0.8749663998788697,
"pct_stereotype": 0.43478260869565216,
"pct_stereotype_stderr": 0.04642922286356426
},
"crows_pairs_french_gender": {
"likelihood_difference": 11.798968068535826,
"likelihood_difference_stderr": 0.8713501661430004,
"pct_stereotype": 0.5202492211838006,
"pct_stereotype_stderr": 0.0279279188851323
},
"crows_pairs_french_nationality": {
"likelihood_difference": 10.416501976284586,
"likelihood_difference_stderr": 0.9065784742122508,
"pct_stereotype": 0.40711462450592883,
"pct_stereotype_stderr": 0.030948774049323072
},
"crows_pairs_english_physical_appearance": {
"likelihood_difference": 4.512586805555555,
"likelihood_difference_stderr": 0.6931576110749077,
"pct_stereotype": 0.5,
"pct_stereotype_stderr": 0.05933908290969268
},
"crows_pairs_french_age": {
"likelihood_difference": 11.939583333333333,
"likelihood_difference_stderr": 1.5376984338772959,
"pct_stereotype": 0.35555555555555557,
"pct_stereotype_stderr": 0.05074011803597719
},
"crows_pairs_english_disability": {
"likelihood_difference": 9.669711538461538,
"likelihood_difference_stderr": 1.1386178272217904,
"pct_stereotype": 0.6615384615384615,
"pct_stereotype_stderr": 0.05914829422780653
},
"crows_pairs_french_sexual_orientation": {
"likelihood_difference": 7.605769230769231,
"likelihood_difference_stderr": 0.7938984905689758,
"pct_stereotype": 0.6703296703296703,
"pct_stereotype_stderr": 0.04955219508596587
},
"crows_pairs_french_physical_appearance": {
"likelihood_difference": 7.045138888888889,
"likelihood_difference_stderr": 0.9484318157143898,
"pct_stereotype": 0.5555555555555556,
"pct_stereotype_stderr": 0.05897165471491952
},
"crows_pairs_french_disability": {
"likelihood_difference": 10.147727272727273,
"likelihood_difference_stderr": 1.3907137676702652,
"pct_stereotype": 0.42424242424242425,
"pct_stereotype_stderr": 0.06130137276858363
}
},
"versions": {
"ethics_virtue": 0,
"crows_pairs_french_race_color": 0,
"ethics_utilitarianism_original": 0,
"crows_pairs_english_nationality": 0,
"crows_pairs_english_socioeconomic": 0,
"crows_pairs_french_socioeconomic": 0,
"crows_pairs_english_religion": 0,
"ethics_justice": 0,
"crows_pairs_english_autre": 0,
"toxigen": 0,
"crows_pairs_french_autre": 0,
"ethics_cm": 0,
"crows_pairs_english_gender": 0,
"crows_pairs_english_race_color": 0,
"crows_pairs_english_age": 0,
"ethics_utilitarianism": 0,
"crows_pairs_english_sexual_orientation": 0,
"ethics_deontology": 0,
"crows_pairs_french_religion": 0,
"crows_pairs_french_gender": 0,
"crows_pairs_french_nationality": 0,
"crows_pairs_english_physical_appearance": 0,
"crows_pairs_french_age": 0,
"crows_pairs_english_disability": 0,
"crows_pairs_french_sexual_orientation": 0,
"crows_pairs_french_physical_appearance": 0,
"crows_pairs_french_disability": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"lambada_openai_mt_it": {
"ppl": 3653680.5734235523,
"ppl_stderr": 197082.9860932525,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_standard": {
"ppl": 2460346.8572795168,
"ppl_stderr": 81216.56551688322,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai_mt_es": {
"ppl": 3818890.4545065593,
"ppl_stderr": 197999.05318216747,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai": {
"ppl": 2817465.092468485,
"ppl_stderr": 138319.08822004002,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai_mt_fr": {
"ppl": 2111186.115467981,
"ppl_stderr": 111724.42842108487,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai_mt_de": {
"ppl": 1805613.6770583114,
"ppl_stderr": 97892.78908113715,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_standard_cloze": {
"ppl": 6710057.2411258025,
"ppl_stderr": 169833.90998542923,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai_mt_en": {
"ppl": 2817465.092468485,
"ppl_stderr": 138319.08822004002,
"acc": 0.0,
"acc_stderr": 0.0
},
"lambada_openai_cloze": {
"ppl": 255777.71115985065,
"ppl_stderr": 11345.7709705634,
"acc": 0.00038812342324859306,
"acc_stderr": 0.00027441806845051746
}
},
"versions": {
"lambada_openai_mt_it": 0,
"lambada_standard": 0,
"lambada_openai_mt_es": 0,
"lambada_openai": 0,
"lambada_openai_mt_fr": 0,
"lambada_openai_mt_de": 0,
"lambada_standard_cloze": 0,
"lambada_openai_mt_en": 0,
"lambada_openai_cloze": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=/gaueko1/hizkuntza-ereduak/LLaMA/lm/huggingface/7B",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
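The Markdown companions to these JSON files (the "md format" half of this pull request) can be produced mechanically from the same data. A minimal stdlib-only sketch follows; the input file name is hypothetical and stands in for any of the results files above.

# Sketch: render one lm-evaluation-harness results JSON as a Markdown table.
# Uses only the Python standard library; pairs each metric with its *_stderr entry.
import json

# Hypothetical input file name; any results file added in this PR has the same layout.
with open("llama-7b_lambada_0-shot.json") as f:
    data = json.load(f)

lines = [
    "| Task | Version | Metric | Value | Stderr |",
    "|------|--------:|--------|------:|-------:|",
]
for task, metrics in sorted(data["results"].items()):
    version = data["versions"].get(task, "")
    for name, value in metrics.items():
        if name.endswith("_stderr"):
            continue  # stderr values are attached to their metric below
        stderr = metrics.get(name + "_stderr")
        stderr_cell = f"{stderr:.4f}" if stderr is not None else ""
        lines.append(f"| {task} | {version} | {name} | {value:.4f} | {stderr_cell} |")

print("\n".join(lines))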