Commit e495e3a0 authored by gk

Merge branch 'master' into big-refactor-test

parents 6d355b85 9d06c953
{
"results": {
"hendrycksTest-college_mathematics": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_physics": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008937,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.03445406271987054
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2909090909090909,
"acc_stderr": 0.03546563019624337,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.03567969772268047
},
"arc_easy": {
"acc": 0.7167508417508418,
"acc_stderr": 0.009245632200075455,
"acc_norm": 0.672979797979798,
"acc_norm_stderr": 0.009626235849372198
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322004,
"acc_norm": 0.20175438596491227,
"acc_norm_stderr": 0.037752050135836386
},
"hendrycksTest-professional_law": {
"acc": 0.2790091264667536,
"acc_stderr": 0.011455208832803538,
"acc_norm": 0.2953063885267275,
"acc_norm_stderr": 0.011651061936208818
},
"hendrycksTest-human_aging": {
"acc": 0.28699551569506726,
"acc_stderr": 0.030360379710291954,
"acc_norm": 0.2242152466367713,
"acc_norm_stderr": 0.027991534258519527
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-abstract_algebra": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.26424870466321243,
"acc_stderr": 0.03182155050916647,
"acc_norm": 0.26424870466321243,
"acc_norm_stderr": 0.03182155050916647
},
"hendrycksTest-college_computer_science": {
"acc": 0.23,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.29831932773109243,
"acc_stderr": 0.029719142876342853,
"acc_norm": 0.36134453781512604,
"acc_norm_stderr": 0.03120469122515001
},
"arc_challenge": {
"acc": 0.3720136518771331,
"acc_stderr": 0.014124597881844461,
"acc_norm": 0.40102389078498296,
"acc_norm_stderr": 0.014322255790719864
},
"hendrycksTest-nutrition": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02699254433929723,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.027996723180631435
},
"sciq": {
"acc": 0.926,
"acc_stderr": 0.008282064512704159,
"acc_norm": 0.873,
"acc_norm_stderr": 0.01053479862085575
},
"hendrycksTest-jurisprudence": {
"acc": 0.3055555555555556,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.0478034362693679
},
"hendrycksTest-sociology": {
"acc": 0.2885572139303483,
"acc_stderr": 0.03203841040213321,
"acc_norm": 0.2736318407960199,
"acc_norm_stderr": 0.031524391865554016
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.24150943396226415,
"acc_stderr": 0.02634148037111837,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.02749566368372407
},
"hendrycksTest-international_law": {
"acc": 0.256198347107438,
"acc_stderr": 0.03984979653302871,
"acc_norm": 0.49586776859504134,
"acc_norm_stderr": 0.04564198767432754
},
"hendrycksTest-virology": {
"acc": 0.3253012048192771,
"acc_stderr": 0.036471685236832266,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.035716092300534796
},
"hendrycksTest-college_physics": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.043898699568087805
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03010833071801162,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280458
},
"hendrycksTest-moral_disputes": {
"acc": 0.3208092485549133,
"acc_stderr": 0.02513100023364791,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153183
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2916666666666667,
"acc_stderr": 0.030998666304560534,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802749
},
"winogrande": {
"acc": 0.6874506708760852,
"acc_stderr": 0.013027563620748837
},
"hendrycksTest-philosophy": {
"acc": 0.2508038585209003,
"acc_stderr": 0.024619771956697168,
"acc_norm": 0.3504823151125402,
"acc_norm_stderr": 0.027098652621301747
},
"wsc": {
"acc": 0.5480769230769231,
"acc_stderr": 0.049038186969314335
},
"hendrycksTest-astronomy": {
"acc": 0.2894736842105263,
"acc_stderr": 0.03690677986137283,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.03988903703336285
},
"hendrycksTest-computer_security": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"hendrycksTest-high_school_psychology": {
"acc": 0.28440366972477066,
"acc_stderr": 0.019342036587702588,
"acc_norm": 0.25871559633027524,
"acc_norm_stderr": 0.01877605231961962
},
"hendrycksTest-college_chemistry": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-management": {
"acc": 0.2912621359223301,
"acc_stderr": 0.04498676320572922,
"acc_norm": 0.33980582524271846,
"acc_norm_stderr": 0.046897659372781335
},
"hendrycksTest-miscellaneous": {
"acc": 0.3269476372924649,
"acc_stderr": 0.016774908180131463,
"acc_norm": 0.2937420178799489,
"acc_norm_stderr": 0.016287759388491675
},
"hendrycksTest-high_school_world_history": {
"acc": 0.3080168776371308,
"acc_stderr": 0.030052389335605695,
"acc_norm": 0.32489451476793246,
"acc_norm_stderr": 0.030486039389105293
},
"lambada_openai": {
"ppl": 3.2877565882479303,
"ppl_stderr": 0.06361523543774811,
"acc": 0.7389869978653212,
"acc_stderr": 0.006118733561625588
},
"hendrycksTest-electrical_engineering": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.04057324734419034
},
"hendrycksTest-high_school_us_history": {
"acc": 0.27450980392156865,
"acc_stderr": 0.03132179803083292,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.03228210387037892
},
"hendrycksTest-college_medicine": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483099,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"hendrycksTest-high_school_geography": {
"acc": 0.26262626262626265,
"acc_stderr": 0.03135305009533084,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.03318477333845331
},
"hendrycksTest-professional_medicine": {
"acc": 0.2647058823529412,
"acc_stderr": 0.026799562024887667,
"acc_norm": 0.2977941176470588,
"acc_norm_stderr": 0.02777829870154544
},
"hendrycksTest-machine_learning": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952686,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755807
},
"hendrycksTest-logical_fallacies": {
"acc": 0.2392638036809816,
"acc_stderr": 0.03351953879521271,
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"hendrycksTest-college_biology": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"hendrycksTest-professional_accounting": {
"acc": 0.20212765957446807,
"acc_stderr": 0.023956668237850226,
"acc_norm": 0.22695035460992907,
"acc_norm_stderr": 0.02498710636564297
},
"hendrycksTest-business_ethics": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"piqa": {
"acc": 0.7878128400435256,
"acc_stderr": 0.009539299828174051,
"acc_norm": 0.7986942328618063,
"acc_norm_stderr": 0.009355431098990426
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2948717948717949,
"acc_stderr": 0.023119362758232294,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923714
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"hendrycksTest-human_sexuality": {
"acc": 0.366412213740458,
"acc_stderr": 0.042258754519696386,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.04118438565806299
},
"hendrycksTest-high_school_biology": {
"acc": 0.26129032258064516,
"acc_stderr": 0.024993053397764826,
"acc_norm": 0.3193548387096774,
"acc_norm_stderr": 0.026522709674667768
},
"hendrycksTest-security_studies": {
"acc": 0.3836734693877551,
"acc_stderr": 0.03113088039623595,
"acc_norm": 0.32653061224489793,
"acc_norm_stderr": 0.030021056238440307
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.21851851851851853,
"acc_stderr": 0.025195752251823793,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.0284934650910286
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.26455026455026454,
"acc_stderr": 0.022717467897708628,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.02293097307163336
},
"hendrycksTest-conceptual_physics": {
"acc": 0.2553191489361702,
"acc_stderr": 0.028504856470514196,
"acc_norm": 0.2297872340425532,
"acc_norm_stderr": 0.027501752944412417
},
"hendrycksTest-prehistory": {
"acc": 0.24382716049382716,
"acc_stderr": 0.023891879541959593,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.022779719088733396
},
"hendrycksTest-medical_genetics": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"hendrycksTest-anatomy": {
"acc": 0.2740740740740741,
"acc_stderr": 0.03853254836552003,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"hendrycksTest-professional_psychology": {
"acc": 0.26143790849673204,
"acc_stderr": 0.017776947157528037,
"acc_norm": 0.28594771241830064,
"acc_norm_stderr": 0.01828048507295467
},
"hendrycksTest-global_facts": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"hendrycksTest-moral_scenarios": {
"acc": 0.2212290502793296,
"acc_stderr": 0.013882164598887275,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249588
},
"hendrycksTest-marketing": {
"acc": 0.2905982905982906,
"acc_stderr": 0.029745048572674057,
"acc_norm": 0.2905982905982906,
"acc_norm_stderr": 0.029745048572674054
},
"hendrycksTest-formal_logic": {
"acc": 0.30158730158730157,
"acc_stderr": 0.04104947269903394,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.040406101782088394
},
"hendrycksTest-public_relations": {
"acc": 0.34545454545454546,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.15454545454545454,
"acc_norm_stderr": 0.03462262571262667
},
"logiqa": {
"acc": 0.22734254992319508,
"acc_stderr": 0.016439067675117748,
"acc_norm": 0.2872503840245776,
"acc_norm_stderr": 0.017747701948846593
},
"hendrycksTest-world_religions": {
"acc": 0.3391812865497076,
"acc_stderr": 0.036310534964889056,
"acc_norm": 0.3742690058479532,
"acc_norm_stderr": 0.03711601185389481
}
},
"versions": {
"hendrycksTest-college_mathematics": 0,
"hendrycksTest-high_school_physics": 0,
"hendrycksTest-high_school_european_history": 0,
"arc_easy": 0,
"hendrycksTest-econometrics": 0,
"hendrycksTest-professional_law": 0,
"hendrycksTest-human_aging": 0,
"hendrycksTest-high_school_computer_science": 0,
"hendrycksTest-abstract_algebra": 0,
"hendrycksTest-high_school_government_and_politics": 0,
"hendrycksTest-college_computer_science": 0,
"hendrycksTest-high_school_microeconomics": 0,
"arc_challenge": 0,
"hendrycksTest-nutrition": 0,
"sciq": 0,
"hendrycksTest-jurisprudence": 0,
"hendrycksTest-sociology": 0,
"hendrycksTest-clinical_knowledge": 0,
"hendrycksTest-international_law": 0,
"hendrycksTest-virology": 0,
"hendrycksTest-college_physics": 0,
"hendrycksTest-high_school_chemistry": 0,
"hendrycksTest-moral_disputes": 0,
"hendrycksTest-high_school_statistics": 0,
"winogrande": 0,
"hendrycksTest-philosophy": 0,
"wsc": 0,
"hendrycksTest-astronomy": 0,
"hendrycksTest-computer_security": 0,
"hendrycksTest-high_school_psychology": 0,
"hendrycksTest-college_chemistry": 0,
"hendrycksTest-management": 0,
"hendrycksTest-miscellaneous": 0,
"hendrycksTest-high_school_world_history": 0,
"lambada_openai": 0,
"hendrycksTest-electrical_engineering": 0,
"hendrycksTest-high_school_us_history": 0,
"hendrycksTest-college_medicine": 0,
"hendrycksTest-high_school_geography": 0,
"hendrycksTest-professional_medicine": 0,
"hendrycksTest-machine_learning": 0,
"hendrycksTest-logical_fallacies": 0,
"hendrycksTest-college_biology": 0,
"hendrycksTest-professional_accounting": 0,
"hendrycksTest-business_ethics": 0,
"piqa": 0,
"hendrycksTest-high_school_macroeconomics": 0,
"hendrycksTest-us_foreign_policy": 0,
"hendrycksTest-human_sexuality": 0,
"hendrycksTest-high_school_biology": 0,
"hendrycksTest-security_studies": 0,
"hendrycksTest-high_school_mathematics": 0,
"hendrycksTest-elementary_mathematics": 0,
"hendrycksTest-conceptual_physics": 0,
"hendrycksTest-prehistory": 0,
"hendrycksTest-medical_genetics": 0,
"hendrycksTest-anatomy": 0,
"hendrycksTest-professional_psychology": 0,
"hendrycksTest-global_facts": 0,
"hendrycksTest-moral_scenarios": 0,
"hendrycksTest-marketing": 0,
"hendrycksTest-formal_logic": 0,
"hendrycksTest-public_relations": 0,
"logiqa": 0,
"hendrycksTest-world_religions": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=facebook/opt-66b,use_accelerate=True,device_map_option=sequential,max_memory_per_gpu=40GIB",
"num_fewshot": 0,
"batch_size": 1,
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
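The block above is a raw results file from the EleutherAI lm-evaluation-harness. A minimal sketch of how such a file is produced, assuming the v0.3-era Python API (`evaluator.simple_evaluate` with the `hf-causal` model type) that the `config` section reflects; the short task list and the output file name are placeholders:

```python
# Minimal sketch, assuming the v0.3-era lm-evaluation-harness API that the
# "config" block above reflects; the short task list is a placeholder.
import json
from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal",
    model_args="pretrained=facebook/opt-66b,use_accelerate=True",
    tasks=["arc_easy", "arc_challenge", "hendrycksTest-college_mathematics"],
    num_fewshot=0,
    batch_size=1,
    device="cuda",
    no_cache=False,
    limit=None,
    bootstrap_iters=100000,
)

# simple_evaluate returns the same "results"/"versions"/"config" layout
# seen in the JSON above.
with open("opt-66b_0-shot.json", "w") as f:  # hypothetical output path
    json.dump(results, f, indent=2)
```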
# xglm-1.7B
## xglm-1.7B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |20.99|± | 1.19|
| | |acc_norm|24.32|± | 1.25|
|arc_easy | 0|acc |53.62|± | 1.02|
| | |acc_norm|47.90|± | 1.03|
|boolq | 1|acc |58.56|± | 0.86|
|copa | 0|acc |68.00|± | 4.69|
|hellaswag | 0|acc |36.18|± | 0.48|
| | |acc_norm|45.80|± | 0.50|
|mc_taco | 0|em |12.91| | |
| | |f1 |34.52| | |
|openbookqa | 0|acc |17.00|± | 1.68|
| | |acc_norm|29.80|± | 2.05|
|piqa | 0|acc |69.70|± | 1.07|
| | |acc_norm|70.35|± | 1.07|
|prost | 0|acc |22.69|± | 0.31|
| | |acc_norm|27.21|± | 0.33|
|swag | 0|acc |45.97|± | 0.35|
| | |acc_norm|62.19|± | 0.34|
|winogrande | 0|acc |54.93|± | 1.40|
|wsc273 | 0|acc |68.13|± | 2.83|
## xglm-1.7B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 0.99|± | 0.27|
## xglm-1.7B_mathematical_reasoning_few_shot_5-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------------------|------:|--------|----:|---|-----:|
|drop | 1|em | 0.67|± | 0.08|
| | |f1 | 3.44|± | 0.13|
|gsm8k | 0|acc | 0.83|± | 0.25|
|math_algebra | 1|acc | 0.00|± | 0.00|
|math_counting_and_prob | 1|acc | 0.00|± | 0.00|
|math_geometry | 1|acc | 0.00|± | 0.00|
|math_intermediate_algebra| 1|acc | 0.00|± | 0.00|
|math_num_theory | 1|acc | 0.00|± | 0.00|
|math_prealgebra | 1|acc | 0.00|± | 0.00|
|math_precalc | 1|acc | 0.00|± | 0.00|
|mathqa | 0|acc |22.91|± | 0.77|
| | |acc_norm|21.44|± | 0.75|
## xglm-1.7B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |57.55|± | 1.11|
|pawsx_en| 0|acc |52.65|± | 1.12|
|pawsx_es| 0|acc |53.80|± | 1.12|
|pawsx_fr| 0|acc |47.35|± | 1.12|
|pawsx_ja| 0|acc |46.10|± | 1.11|
|pawsx_ko| 0|acc |51.40|± | 1.12|
|pawsx_zh| 0|acc |48.10|± | 1.12|
## xglm-1.7B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 56.8|± | 2.22|
|xcopa_ht| 0|acc | 55.8|± | 2.22|
|xcopa_id| 0|acc | 64.6|± | 2.14|
|xcopa_it| 0|acc | 54.0|± | 2.23|
|xcopa_qu| 0|acc | 52.2|± | 2.24|
|xcopa_sw| 0|acc | 56.6|± | 2.22|
|xcopa_ta| 0|acc | 55.2|± | 2.23|
|xcopa_th| 0|acc | 58.2|± | 2.21|
|xcopa_tr| 0|acc | 53.4|± | 2.23|
|xcopa_vi| 0|acc | 63.0|± | 2.16|
|xcopa_zh| 0|acc | 58.0|± | 2.21|
## xglm-1.7B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.51|± | 0.67|
|xnli_bg| 0|acc |44.73|± | 0.70|
|xnli_de| 0|acc |45.33|± | 0.70|
|xnli_el| 0|acc |40.10|± | 0.69|
|xnli_en| 0|acc |49.68|± | 0.71|
|xnli_es| 0|acc |43.61|± | 0.70|
|xnli_fr| 0|acc |45.73|± | 0.70|
|xnli_hi| 0|acc |42.61|± | 0.70|
|xnli_ru| 0|acc |45.97|± | 0.70|
|xnli_sw| 0|acc |42.00|± | 0.70|
|xnli_th| 0|acc |41.70|± | 0.70|
|xnli_tr| 0|acc |42.95|± | 0.70|
|xnli_ur| 0|acc |39.50|± | 0.69|
|xnli_vi| 0|acc |45.03|± | 0.70|
|xnli_zh| 0|acc |33.77|± | 0.67|
## xglm-1.7B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |52.48|± | 1.29|
|xstory_cloze_en| 0|acc |64.33|± | 1.23|
|xstory_cloze_es| 0|acc |59.23|± | 1.26|
|xstory_cloze_eu| 0|acc |56.12|± | 1.28|
|xstory_cloze_hi| 0|acc |55.79|± | 1.28|
|xstory_cloze_id| 0|acc |57.97|± | 1.27|
|xstory_cloze_my| 0|acc |53.81|± | 1.28|
|xstory_cloze_ru| 0|acc |59.83|± | 1.26|
|xstory_cloze_sw| 0|acc |55.99|± | 1.28|
|xstory_cloze_te| 0|acc |58.04|± | 1.27|
|xstory_cloze_zh| 0|acc |56.19|± | 1.28|
## xglm-1.7B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |71.05|± | 0.94|
|xwinograd_fr| 0|acc |60.24|± | 5.40|
|xwinograd_jp| 0|acc |60.58|± | 1.58|
|xwinograd_pt| 0|acc |63.88|± | 2.97|
|xwinograd_ru| 0|acc |59.68|± | 2.77|
|xwinograd_zh| 0|acc |69.84|± | 2.05|
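The tables in this section are the harness's standard pipe-table rendering of the JSON files that follow. A self-contained sketch that reproduces the same layout from a results file (a hand-rolled formatter, not the harness's own `make_table` helper); values are scaled to percentages as in the tables above:

```python
# Hand-rolled sketch (not the harness's make_table helper) that renders a
# results JSON like the blocks below as the pipe tables above.
import json

def to_table(path: str) -> str:
    with open(path) as f:
        data = json.load(f)
    rows = ["| Task |Version|Metric|Value| |Stderr|",
            "|------|------:|------|----:|---|-----:|"]
    for task in sorted(data["results"]):
        metrics = data["results"][task]
        version = data["versions"][task]
        first = True
        for name, value in metrics.items():
            if name.endswith("_stderr"):
                continue  # stderr is shown beside its metric, not as a row
            stderr = metrics.get(name + "_stderr")
            rows.append("|{}|{}|{}|{:.2f}|{}|{}|".format(
                task if first else "",
                version if first else "",
                name,
                100 * value,  # tables report percentages, e.g. 0.6970 -> 69.70
                "±" if stderr is not None else "",
                f"{100 * stderr:.2f}" if stderr is not None else ""))
            first = False
    return "\n".join(rows)

print(to_table("xglm-1.7B_common_sense_reasoning_0-shot.json"))
```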
{
"results": {
"copa": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034
},
"piqa": {
"acc": 0.6969532100108814,
"acc_stderr": 0.010722648689531515,
"acc_norm": 0.7034820457018498,
"acc_norm_stderr": 0.010656078922661134
},
"prost": {
"acc": 0.22694278394534587,
"acc_stderr": 0.003060110855833208,
"acc_norm": 0.27209649871904357,
"acc_norm_stderr": 0.0032514084657504338
},
"arc_easy": {
"acc": 0.5361952861952862,
"acc_stderr": 0.01023286555034672,
"acc_norm": 0.47895622895622897,
"acc_norm_stderr": 0.01025069260202258
},
"hellaswag": {
"acc": 0.3617805218084047,
"acc_stderr": 0.004795337009118189,
"acc_norm": 0.45797649870543716,
"acc_norm_stderr": 0.004972126523031943
},
"mc_taco": {
"em": 0.12912912912912913,
"f1": 0.34519977153598014
},
"winogrande": {
"acc": 0.5493291239147593,
"acc_stderr": 0.013983928869040239
},
"wsc273": {
"acc": 0.6813186813186813,
"acc_stderr": 0.02825328818739863
},
"swag": {
"acc": 0.4596621013695891,
"acc_stderr": 0.0035235690445916223,
"acc_norm": 0.6219134259722083,
"acc_norm_stderr": 0.003428398656668824
},
"boolq": {
"acc": 0.5856269113149847,
"acc_stderr": 0.00861586377642113
},
"openbookqa": {
"acc": 0.17,
"acc_stderr": 0.016815633531393426,
"acc_norm": 0.298,
"acc_norm_stderr": 0.02047511809298897
},
"arc_challenge": {
"acc": 0.2098976109215017,
"acc_stderr": 0.011900548748047446,
"acc_norm": 0.2431740614334471,
"acc_norm_stderr": 0.012536554144587089
}
},
"versions": {
"copa": 0,
"piqa": 0,
"prost": 0,
"arc_easy": 0,
"hellaswag": 0,
"mc_taco": 0,
"winogrande": 0,
"wsc273": 0,
"swag": 0,
"boolq": 1,
"openbookqa": 0,
"arc_challenge": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"gsm8k": {
"acc": 0.009855951478392721,
"acc_stderr": 0.00272107657704166
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
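A sanity check on the stderr columns (our derivation, not harness code): for a 0/1 metric, the reported `acc_stderr` matches the sample standard error sqrt(p(1-p)/(n-1)). GSM8K's test split has 1,319 problems, and 13 correct answers reproduce the numbers above:

```python
# Our derivation, not harness code: acc_stderr values in these files match
# the sample standard error sqrt(p*(1-p)/(n-1)). GSM8K's test split has
# n = 1319, and 13/1319 reproduces the acc in the JSON block above.
import math

n, correct = 1319, 13
p = correct / n
stderr = math.sqrt(p * (1 - p) / (n - 1))
print(p, stderr)  # ~0.009856, ~0.002721 -- matching the JSON above
```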
{
"results": {
"math_prealgebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"gsm8k": {
"acc": 0.008339651250947688,
"acc_stderr": 0.002504942226860518
},
"math_num_theory": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_precalc": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_geometry": {
"acc": 0.0,
"acc_stderr": 0.0
},
"drop": {
"em": 0.006711409395973154,
"em_stderr": 0.0008361500895152447,
"f1": 0.03435402684563763,
"f1_stderr": 0.0012720691502648663
},
"math_intermediate_algebra": {
"acc": 0.0,
"acc_stderr": 0.0
},
"math_counting_and_prob": {
"acc": 0.0,
"acc_stderr": 0.0
},
"mathqa": {
"acc": 0.22914572864321608,
"acc_stderr": 0.007693830518376538,
"acc_norm": 0.21440536013400335,
"acc_norm_stderr": 0.0075130739863118485
}
},
"versions": {
"math_prealgebra": 1,
"gsm8k": 0,
"math_num_theory": 1,
"math_precalc": 1,
"mathqa": 0,
"math_algebra": 1,
"math_geometry": 1,
"drop": 1,
"math_intermediate_algebra": 1,
"math_counting_and_prob": 1
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 5,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"pawsx_fr": {
"acc": 0.4735,
"acc_stderr": 0.011167418260963935
},
"pawsx_de": {
"acc": 0.5755,
"acc_stderr": 0.011054907529701135
},
"pawsx_ko": {
"acc": 0.514,
"acc_stderr": 0.011178751372184865
},
"pawsx_ja": {
"acc": 0.461,
"acc_stderr": 0.011149065020234333
},
"pawsx_en": {
"acc": 0.5265,
"acc_stderr": 0.011167418260963933
},
"pawsx_es": {
"acc": 0.538,
"acc_stderr": 0.011150792352341666
},
"pawsx_zh": {
"acc": 0.481,
"acc_stderr": 0.011175058879956061
}
},
"versions": {
"pawsx_fr": 0,
"pawsx_de": 0,
"pawsx_ko": 0,
"pawsx_ja": 0,
"pawsx_en": 0,
"pawsx_es": 0,
"pawsx_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xcopa_id": {
"acc": 0.646,
"acc_stderr": 0.02140758204791645
},
"xcopa_ta": {
"acc": 0.552,
"acc_stderr": 0.02226169729227013
},
"xcopa_ht": {
"acc": 0.558,
"acc_stderr": 0.022231970696321122
},
"xcopa_it": {
"acc": 0.54,
"acc_stderr": 0.022311333245289663
},
"xcopa_tr": {
"acc": 0.534,
"acc_stderr": 0.02233126442325838
},
"xcopa_th": {
"acc": 0.582,
"acc_stderr": 0.022080014812228137
},
"xcopa_qu": {
"acc": 0.522,
"acc_stderr": 0.022361396739207878
},
"xcopa_zh": {
"acc": 0.58,
"acc_stderr": 0.02209471322976178
},
"xcopa_vi": {
"acc": 0.63,
"acc_stderr": 0.02161328916516579
},
"xcopa_et": {
"acc": 0.568,
"acc_stderr": 0.022175109265613165
},
"xcopa_sw": {
"acc": 0.566,
"acc_stderr": 0.022187215803029008
}
},
"versions": {
"xcopa_id": 0,
"xcopa_ta": 0,
"xcopa_ht": 0,
"xcopa_it": 0,
"xcopa_tr": 0,
"xcopa_th": 0,
"xcopa_qu": 0,
"xcopa_zh": 0,
"xcopa_vi": 0,
"xcopa_et": 0,
"xcopa_sw": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xnli_bg": {
"acc": 0.4473053892215569,
"acc_stderr": 0.0070253693946827935
},
"xnli_hi": {
"acc": 0.42614770459081835,
"acc_stderr": 0.006987223294820979
},
"xnli_el": {
"acc": 0.40099800399201596,
"acc_stderr": 0.006924839696959944
},
"xnli_en": {
"acc": 0.49680638722554893,
"acc_stderr": 0.00706456831954508
},
"xnli_ar": {
"acc": 0.33512974051896205,
"acc_stderr": 0.006669594382503631
},
"xnli_ru": {
"acc": 0.4596806387225549,
"acc_stderr": 0.00704170545485625
},
"xnli_fr": {
"acc": 0.45728542914171655,
"acc_stderr": 0.007038885597058048
},
"xnli_tr": {
"acc": 0.4295409181636727,
"acc_stderr": 0.006994215414803208
},
"xnli_th": {
"acc": 0.4169660678642715,
"acc_stderr": 0.006966614137458995
},
"xnli_vi": {
"acc": 0.4502994011976048,
"acc_stderr": 0.007029723996054755
},
"xnli_de": {
"acc": 0.4532934131736527,
"acc_stderr": 0.0070338214783393326
},
"xnli_sw": {
"acc": 0.41996007984031936,
"acc_stderr": 0.006973606391328806
},
"xnli_zh": {
"acc": 0.3377245508982036,
"acc_stderr": 0.006682287063203171
},
"xnli_es": {
"acc": 0.436127744510978,
"acc_stderr": 0.007006832004922492
},
"xnli_ur": {
"acc": 0.39500998003992016,
"acc_stderr": 0.0069072094196003676
}
},
"versions": {
"xnli_bg": 0,
"xnli_hi": 0,
"xnli_el": 0,
"xnli_en": 0,
"xnli_ar": 0,
"xnli_ru": 0,
"xnli_fr": 0,
"xnli_tr": 0,
"xnli_th": 0,
"xnli_vi": 0,
"xnli_de": 0,
"xnli_sw": 0,
"xnli_zh": 0,
"xnli_es": 0,
"xnli_ur": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xstory_cloze_hi": {
"acc": 0.5579086697551291,
"acc_stderr": 0.012780536370279769
},
"xstory_cloze_sw": {
"acc": 0.5598941098610192,
"acc_stderr": 0.012774475160716338
},
"xstory_cloze_zh": {
"acc": 0.5618795499669094,
"acc_stderr": 0.012768206616277759
},
"xstory_cloze_my": {
"acc": 0.5380542686962276,
"acc_stderr": 0.0128298047203217
},
"xstory_cloze_eu": {
"acc": 0.5612177365982793,
"acc_stderr": 0.012770319186938004
},
"xstory_cloze_id": {
"acc": 0.5797485109199206,
"acc_stderr": 0.012702405649149104
},
"xstory_cloze_te": {
"acc": 0.5804103242885507,
"acc_stderr": 0.012699642268200749
},
"xstory_cloze_en": {
"acc": 0.6432825943084051,
"acc_stderr": 0.01232748767711036
},
"xstory_cloze_es": {
"acc": 0.5923229649238915,
"acc_stderr": 0.012645876488040282
},
"xstory_cloze_ar": {
"acc": 0.5248180013236268,
"acc_stderr": 0.012851264962354841
},
"xstory_cloze_ru": {
"acc": 0.5982792852415619,
"acc_stderr": 0.012616114526927917
}
},
"versions": {
"xstory_cloze_hi": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_zh": 0,
"xstory_cloze_my": 0,
"xstory_cloze_eu": 0,
"xstory_cloze_id": 0,
"xstory_cloze_te": 0,
"xstory_cloze_en": 0,
"xstory_cloze_es": 0,
"xstory_cloze_ar": 0,
"xstory_cloze_ru": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_pt": {
"acc": 0.6387832699619772,
"acc_stderr": 0.029676320268041578
},
"xwinograd_zh": {
"acc": 0.6984126984126984,
"acc_stderr": 0.020463437846223773
},
"xwinograd_jp": {
"acc": 0.6058394160583942,
"acc_stderr": 0.015788199459722305
},
"xwinograd_fr": {
"acc": 0.6024096385542169,
"acc_stderr": 0.05404517824786813
},
"xwinograd_en": {
"acc": 0.7105376344086022,
"acc_stderr": 0.009407441676993788
},
"xwinograd_ru": {
"acc": 0.5968253968253968,
"acc_stderr": 0.02768250629102932
}
},
"versions": {
"xwinograd_pt": 0,
"xwinograd_zh": 0,
"xwinograd_jp": 0,
"xwinograd_fr": 0,
"xwinograd_en": 0,
"xwinograd_ru": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-1.7B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
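The multilingual suites above report only per-language numbers. A small sketch of an unweighted macro-average across languages (the averaging itself is our addition, not something the harness emits):

```python
# Illustration: unweighted macro-average over the per-language accuracies in
# a multilingual block such as the xstory_cloze JSON above. The harness only
# reports per-language numbers; averaging them is our assumption.
import json

def macro_avg_acc(path: str, prefix: str) -> float:
    with open(path) as f:
        data = json.load(f)
    accs = [m["acc"] for task, m in data["results"].items()
            if task.startswith(prefix)]
    return sum(accs) / len(accs)

# For the xglm-1.7B xstory_cloze results above this comes out near 0.573.
print(macro_avg_acc("xglm-1.7B_xstory_cloze_0-shot.json", "xstory_cloze"))
```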
# xglm-2.9B
## xglm-2.9B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |23.46|± | 1.24|
| | |acc_norm|27.39|± | 1.30|
|arc_easy | 0|acc |56.65|± | 1.02|
| | |acc_norm|53.37|± | 1.02|
|boolq | 1|acc |61.44|± | 0.85|
|copa | 0|acc |74.00|± | 4.41|
|hellaswag | 0|acc |40.92|± | 0.49|
| | |acc_norm|53.70|± | 0.50|
|mc_taco | 0|em |11.94| | |
| | |f1 |47.80| | |
|openbookqa | 0|acc |21.60|± | 1.84|
| | |acc_norm|33.20|± | 2.11|
|piqa | 0|acc |71.27|± | 1.06|
| | |acc_norm|73.01|± | 1.04|
|prost | 0|acc |21.92|± | 0.30|
| | |acc_norm|26.64|± | 0.32|
|swag | 0|acc |48.49|± | 0.35|
| | |acc_norm|65.78|± | 0.34|
|winogrande | 0|acc |54.62|± | 1.40|
|wsc273 | 0|acc |71.06|± | 2.75|
## xglm-2.9B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |50.65|± | 1.12|
|pawsx_en| 0|acc |54.75|± | 1.11|
|pawsx_es| 0|acc |53.15|± | 1.12|
|pawsx_fr| 0|acc |49.70|± | 1.12|
|pawsx_ja| 0|acc |50.95|± | 1.12|
|pawsx_ko| 0|acc |46.75|± | 1.12|
|pawsx_zh| 0|acc |53.70|± | 1.12|
## xglm-2.9B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 58.2|± | 2.21|
|xcopa_ht| 0|acc | 55.8|± | 2.22|
|xcopa_id| 0|acc | 66.8|± | 2.11|
|xcopa_it| 0|acc | 60.2|± | 2.19|
|xcopa_qu| 0|acc | 50.2|± | 2.24|
|xcopa_sw| 0|acc | 58.8|± | 2.20|
|xcopa_ta| 0|acc | 54.2|± | 2.23|
|xcopa_th| 0|acc | 57.0|± | 2.22|
|xcopa_tr| 0|acc | 56.6|± | 2.22|
|xcopa_vi| 0|acc | 65.2|± | 2.13|
|xcopa_zh| 0|acc | 60.0|± | 2.19|
## xglm-2.9B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.65|± | 0.67|
|xnli_bg| 0|acc |45.97|± | 0.70|
|xnli_de| 0|acc |48.32|± | 0.71|
|xnli_el| 0|acc |41.40|± | 0.70|
|xnli_en| 0|acc |51.08|± | 0.71|
|xnli_es| 0|acc |46.67|± | 0.70|
|xnli_fr| 0|acc |45.03|± | 0.70|
|xnli_hi| 0|acc |44.03|± | 0.70|
|xnli_ru| 0|acc |45.29|± | 0.70|
|xnli_sw| 0|acc |44.43|± | 0.70|
|xnli_th| 0|acc |41.98|± | 0.70|
|xnli_tr| 0|acc |44.97|± | 0.70|
|xnli_ur| 0|acc |40.10|± | 0.69|
|xnli_vi| 0|acc |45.99|± | 0.70|
|xnli_zh| 0|acc |34.81|± | 0.67|
## xglm-2.9B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |53.87|± | 1.28|
|xstory_cloze_en| 0|acc |67.31|± | 1.21|
|xstory_cloze_es| 0|acc |60.95|± | 1.26|
|xstory_cloze_eu| 0|acc |56.32|± | 1.28|
|xstory_cloze_hi| 0|acc |57.51|± | 1.27|
|xstory_cloze_id| 0|acc |61.35|± | 1.25|
|xstory_cloze_my| 0|acc |55.20|± | 1.28|
|xstory_cloze_ru| 0|acc |62.21|± | 1.25|
|xstory_cloze_sw| 0|acc |56.72|± | 1.28|
|xstory_cloze_te| 0|acc |60.03|± | 1.26|
|xstory_cloze_zh| 0|acc |57.64|± | 1.27|
## xglm-2.9B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |75.61|± | 0.89|
|xwinograd_fr| 0|acc |59.04|± | 5.43|
|xwinograd_jp| 0|acc |64.65|± | 1.54|
|xwinograd_pt| 0|acc |66.16|± | 2.92|
|xwinograd_ru| 0|acc |62.86|± | 2.73|
|xwinograd_zh| 0|acc |71.63|± | 2.01|
{
"results": {
"piqa": {
"acc": 0.7127312295973884,
"acc_stderr": 0.010557291761528637,
"acc_norm": 0.7301414581066377,
"acc_norm_stderr": 0.010356595421852193
},
"openbookqa": {
"acc": 0.216,
"acc_stderr": 0.01842190906141194,
"acc_norm": 0.332,
"acc_norm_stderr": 0.021081766571222856
},
"arc_challenge": {
"acc": 0.23464163822525597,
"acc_stderr": 0.01238387356076868,
"acc_norm": 0.2738907849829352,
"acc_norm_stderr": 0.013032004972989505
},
"arc_easy": {
"acc": 0.5664983164983165,
"acc_stderr": 0.010168640625454107,
"acc_norm": 0.5336700336700336,
"acc_norm_stderr": 0.010236494647406476
},
"boolq": {
"acc": 0.6143730886850153,
"acc_stderr": 0.008513189460768057
},
"wsc273": {
"acc": 0.7106227106227107,
"acc_stderr": 0.027495860234525278
},
"copa": {
"acc": 0.74,
"acc_stderr": 0.04408440022768077
},
"hellaswag": {
"acc": 0.4091814379605656,
"acc_stderr": 0.004906779523192668,
"acc_norm": 0.5370444134634534,
"acc_norm_stderr": 0.004976067726432559
},
"prost": {
"acc": 0.21920367207514946,
"acc_stderr": 0.003022497462586152,
"acc_norm": 0.2664389410760034,
"acc_norm_stderr": 0.0032299078734217036
},
"swag": {
"acc": 0.4849045286414076,
"acc_stderr": 0.0035334805738792946,
"acc_norm": 0.6578026592022393,
"acc_norm_stderr": 0.0033544154500719224
},
"mc_taco": {
"em": 0.11936936936936937,
"f1": 0.4779548809969738
},
"winogrande": {
"acc": 0.5461720599842147,
"acc_stderr": 0.013992441563707058
}
},
"versions": {
"piqa": 0,
"openbookqa": 0,
"arc_challenge": 0,
"arc_easy": 0,
"boolq": 1,
"wsc273": 0,
"copa": 0,
"hellaswag": 0,
"prost": 0,
"swag": 0,
"mc_taco": 0,
"winogrande": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"pawsx_zh": {
"acc": 0.537,
"acc_stderr": 0.011152474561478182
},
"pawsx_de": {
"acc": 0.5065,
"acc_stderr": 0.0111821910061423
},
"pawsx_en": {
"acc": 0.5475,
"acc_stderr": 0.011132557743886095
},
"pawsx_es": {
"acc": 0.5315,
"acc_stderr": 0.011160921022883278
},
"pawsx_fr": {
"acc": 0.497,
"acc_stderr": 0.011182934722804556
},
"pawsx_ja": {
"acc": 0.5095,
"acc_stderr": 0.011181117282805214
},
"pawsx_ko": {
"acc": 0.4675,
"acc_stderr": 0.011159486640120933
}
},
"versions": {
"pawsx_zh": 0,
"pawsx_de": 0,
"pawsx_en": 0,
"pawsx_es": 0,
"pawsx_fr": 0,
"pawsx_ja": 0,
"pawsx_ko": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xcopa_zh": {
"acc": 0.6,
"acc_stderr": 0.021930844120728505
},
"xcopa_ht": {
"acc": 0.558,
"acc_stderr": 0.02223197069632112
},
"xcopa_id": {
"acc": 0.668,
"acc_stderr": 0.021081766571222856
},
"xcopa_tr": {
"acc": 0.566,
"acc_stderr": 0.02218721580302901
},
"xcopa_it": {
"acc": 0.602,
"acc_stderr": 0.021912377885779967
},
"xcopa_qu": {
"acc": 0.502,
"acc_stderr": 0.02238289498648353
},
"xcopa_sw": {
"acc": 0.588,
"acc_stderr": 0.022033677993740865
},
"xcopa_th": {
"acc": 0.57,
"acc_stderr": 0.02216263442665284
},
"xcopa_vi": {
"acc": 0.652,
"acc_stderr": 0.021323728632807494
},
"xcopa_ta": {
"acc": 0.542,
"acc_stderr": 0.022303966774269938
},
"xcopa_et": {
"acc": 0.582,
"acc_stderr": 0.022080014812228137
}
},
"versions": {
"xcopa_zh": 0,
"xcopa_ht": 0,
"xcopa_id": 0,
"xcopa_tr": 0,
"xcopa_it": 0,
"xcopa_qu": 0,
"xcopa_sw": 0,
"xcopa_th": 0,
"xcopa_vi": 0,
"xcopa_ta": 0,
"xcopa_et": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xnli_ar": {
"acc": 0.33652694610778444,
"acc_stderr": 0.006676456919028458
},
"xnli_bg": {
"acc": 0.4596806387225549,
"acc_stderr": 0.007041705454856254
},
"xnli_de": {
"acc": 0.48323353293413174,
"acc_stderr": 0.007060739327060854
},
"xnli_el": {
"acc": 0.41397205588822356,
"acc_stderr": 0.00695935771309272
},
"xnli_en": {
"acc": 0.5107784431137724,
"acc_stderr": 0.007063070754956929
},
"xnli_es": {
"acc": 0.4666666666666667,
"acc_stderr": 0.0070489955857553875
},
"xnli_fr": {
"acc": 0.4502994011976048,
"acc_stderr": 0.00702972399605476
},
"xnli_hi": {
"acc": 0.4403193612774451,
"acc_stderr": 0.007014206007644934
},
"xnli_ru": {
"acc": 0.4528942115768463,
"acc_stderr": 0.007033289986695003
},
"xnli_sw": {
"acc": 0.444311377245509,
"acc_stderr": 0.007020757195791273
},
"xnli_th": {
"acc": 0.41976047904191616,
"acc_stderr": 0.006973148443615152
},
"xnli_tr": {
"acc": 0.4497005988023952,
"acc_stderr": 0.007028873660193274
},
"xnli_ur": {
"acc": 0.40099800399201596,
"acc_stderr": 0.006924839696959946
},
"xnli_vi": {
"acc": 0.4598802395209581,
"acc_stderr": 0.00704193305036814
},
"xnli_zh": {
"acc": 0.34810379241516964,
"acc_stderr": 0.006730821739872395
}
},
"versions": {
"xnli_ar": 0,
"xnli_bg": 0,
"xnli_de": 0,
"xnli_el": 0,
"xnli_en": 0,
"xnli_es": 0,
"xnli_fr": 0,
"xnli_hi": 0,
"xnli_ru": 0,
"xnli_sw": 0,
"xnli_th": 0,
"xnli_tr": 0,
"xnli_ur": 0,
"xnli_vi": 0,
"xnli_zh": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xstory_cloze_es": {
"acc": 0.6095301125082727,
"acc_stderr": 0.012554600076548371
},
"xstory_cloze_sw": {
"acc": 0.5671740569159497,
"acc_stderr": 0.01275047450298583
},
"xstory_cloze_en": {
"acc": 0.6730641958967571,
"acc_stderr": 0.012071771683911351
},
"xstory_cloze_zh": {
"acc": 0.5764394440767704,
"acc_stderr": 0.01271587138288145
},
"xstory_cloze_id": {
"acc": 0.613500992720053,
"acc_stderr": 0.012531219943771486
},
"xstory_cloze_eu": {
"acc": 0.5632031767041694,
"acc_stderr": 0.01276391225017363
},
"xstory_cloze_te": {
"acc": 0.600264725347452,
"acc_stderr": 0.01260576407762715
},
"xstory_cloze_ru": {
"acc": 0.6221045665122436,
"acc_stderr": 0.012477542072994664
},
"xstory_cloze_hi": {
"acc": 0.5751158173395102,
"acc_stderr": 0.01272109407352333
},
"xstory_cloze_ar": {
"acc": 0.5387160820648577,
"acc_stderr": 0.01282849335327155
},
"xstory_cloze_my": {
"acc": 0.5519523494374586,
"acc_stderr": 0.012797478885304733
}
},
"versions": {
"xstory_cloze_es": 0,
"xstory_cloze_sw": 0,
"xstory_cloze_en": 0,
"xstory_cloze_zh": 0,
"xstory_cloze_id": 0,
"xstory_cloze_eu": 0,
"xstory_cloze_te": 0,
"xstory_cloze_ru": 0,
"xstory_cloze_hi": 0,
"xstory_cloze_ar": 0,
"xstory_cloze_my": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"xwinograd_fr": {
"acc": 0.5903614457831325,
"acc_stderr": 0.05430658329539147
},
"xwinograd_en": {
"acc": 0.7561290322580645,
"acc_stderr": 0.008907584394182084
},
"xwinograd_zh": {
"acc": 0.7162698412698413,
"acc_stderr": 0.02010051064884106
},
"xwinograd_ru": {
"acc": 0.6285714285714286,
"acc_stderr": 0.027267803028895015
},
"xwinograd_jp": {
"acc": 0.6465067778936392,
"acc_stderr": 0.015445228301221376
},
"xwinograd_pt": {
"acc": 0.6615969581749049,
"acc_stderr": 0.02923231657730264
}
},
"versions": {
"xwinograd_fr": 0,
"xwinograd_en": 0,
"xwinograd_zh": 0,
"xwinograd_ru": 0,
"xwinograd_jp": 0,
"xwinograd_pt": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-2.9B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
# xglm-4.5B
## xglm-4.5B_common_sense_reasoning_0-shot.json
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |27.13|± | 1.30|
| | |acc_norm|28.16|± | 1.31|
|arc_easy | 0|acc |60.31|± | 1.00|
| | |acc_norm|57.24|± | 1.02|
|boolq | 1|acc |61.19|± | 0.85|
|copa | 0|acc |81.00|± | 3.94|
|hellaswag | 0|acc |43.77|± | 0.50|
| | |acc_norm|58.24|± | 0.49|
|mc_taco | 0|em |15.39| | |
| | |f1 |43.51| | |
|openbookqa | 0|acc |23.20|± | 1.89|
| | |acc_norm|34.40|± | 2.13|
|piqa | 0|acc |72.74|± | 1.04|
| | |acc_norm|72.96|± | 1.04|
|prost | 0|acc |26.43|± | 0.32|
| | |acc_norm|26.28|± | 0.32|
|swag | 0|acc |49.65|± | 0.35|
| | |acc_norm|67.87|± | 0.33|
|winogrande | 0|acc |56.12|± | 1.39|
|wsc273 | 0|acc |71.79|± | 2.73|
## xglm-4.5B_gsm8k_8-shot.json
|Task |Version|Metric|Value| |Stderr|
|-----|------:|------|----:|---|-----:|
|gsm8k| 0|acc | 0.08|± | 0.08|
## xglm-4.5B_pawsx_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|pawsx_de| 0|acc |52.65|± | 1.12|
|pawsx_en| 0|acc |55.40|± | 1.11|
|pawsx_es| 0|acc |51.05|± | 1.12|
|pawsx_fr| 0|acc |51.60|± | 1.12|
|pawsx_ja| 0|acc |47.75|± | 1.12|
|pawsx_ko| 0|acc |49.10|± | 1.12|
|pawsx_zh| 0|acc |54.60|± | 1.11|
## xglm-4.5B_xcopa_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|--------|------:|------|----:|---|-----:|
|xcopa_et| 0|acc | 55.0|± | 2.23|
|xcopa_ht| 0|acc | 51.2|± | 2.24|
|xcopa_id| 0|acc | 67.0|± | 2.10|
|xcopa_it| 0|acc | 61.6|± | 2.18|
|xcopa_qu| 0|acc | 50.0|± | 2.24|
|xcopa_sw| 0|acc | 56.2|± | 2.22|
|xcopa_ta| 0|acc | 55.6|± | 2.22|
|xcopa_th| 0|acc | 55.2|± | 2.23|
|xcopa_tr| 0|acc | 57.2|± | 2.21|
|xcopa_vi| 0|acc | 66.0|± | 2.12|
|xcopa_zh| 0|acc | 61.6|± | 2.18|
## xglm-4.5B_xnli_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|-------|------:|------|----:|---|-----:|
|xnli_ar| 0|acc |33.59|± | 0.67|
|xnli_bg| 0|acc |45.61|± | 0.70|
|xnli_de| 0|acc |47.11|± | 0.71|
|xnli_el| 0|acc |39.84|± | 0.69|
|xnli_en| 0|acc |53.63|± | 0.70|
|xnli_es| 0|acc |47.68|± | 0.71|
|xnli_fr| 0|acc |47.31|± | 0.71|
|xnli_hi| 0|acc |42.50|± | 0.70|
|xnli_ru| 0|acc |46.15|± | 0.70|
|xnli_sw| 0|acc |39.58|± | 0.69|
|xnli_th| 0|acc |39.68|± | 0.69|
|xnli_tr| 0|acc |44.85|± | 0.70|
|xnli_ur| 0|acc |37.47|± | 0.68|
|xnli_vi| 0|acc |45.87|± | 0.70|
|xnli_zh| 0|acc |34.77|± | 0.67|
## xglm-4.5B_xstory_cloze_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|---------------|------:|------|----:|---|-----:|
|xstory_cloze_ar| 0|acc |53.67|± | 1.28|
|xstory_cloze_en| 0|acc |69.16|± | 1.19|
|xstory_cloze_es| 0|acc |62.81|± | 1.24|
|xstory_cloze_eu| 0|acc |53.74|± | 1.28|
|xstory_cloze_hi| 0|acc |56.85|± | 1.27|
|xstory_cloze_id| 0|acc |60.42|± | 1.26|
|xstory_cloze_my| 0|acc |50.76|± | 1.29|
|xstory_cloze_ru| 0|acc |62.74|± | 1.24|
|xstory_cloze_sw| 0|acc |55.06|± | 1.28|
|xstory_cloze_te| 0|acc |57.05|± | 1.27|
|xstory_cloze_zh| 0|acc |58.17|± | 1.27|
## xglm-4.5B_xwinograd_0-shot.json
| Task |Version|Metric|Value| |Stderr|
|------------|------:|------|----:|---|-----:|
|xwinograd_en| 0|acc |76.26|± | 0.88|
|xwinograd_fr| 0|acc |60.24|± | 5.40|
|xwinograd_jp| 0|acc |62.67|± | 1.56|
|xwinograd_pt| 0|acc |64.64|± | 2.95|
|xwinograd_ru| 0|acc |62.22|± | 2.74|
|xwinograd_zh| 0|acc |70.63|± | 2.03|
{
"results": {
"copa": {
"acc": 0.81,
"acc_stderr": 0.03942772444036623
},
"arc_challenge": {
"acc": 0.2713310580204778,
"acc_stderr": 0.012993807727545796,
"acc_norm": 0.2815699658703072,
"acc_norm_stderr": 0.01314337673500902
},
"mc_taco": {
"em": 0.1539039039039039,
"f1": 0.435083658174568
},
"prost": {
"acc": 0.26430401366353545,
"acc_stderr": 0.003221619340165698,
"acc_norm": 0.2627561912894962,
"acc_norm_stderr": 0.003215549484247182
},
"piqa": {
"acc": 0.7274211099020674,
"acc_stderr": 0.01038925680329602,
"acc_norm": 0.7295973884657236,
"acc_norm_stderr": 0.010363167031620785
},
"wsc273": {
"acc": 0.717948717948718,
"acc_stderr": 0.027285147081637318
},
"winogrande": {
"acc": 0.5611681136543015,
"acc_stderr": 0.013946933444507032
},
"boolq": {
"acc": 0.6119266055045871,
"acc_stderr": 0.008523130584760844
},
"openbookqa": {
"acc": 0.232,
"acc_stderr": 0.018896193591952045,
"acc_norm": 0.344,
"acc_norm_stderr": 0.02126575803797874
},
"arc_easy": {
"acc": 0.6031144781144782,
"acc_stderr": 0.010039236800583206,
"acc_norm": 0.5723905723905723,
"acc_norm_stderr": 0.010151683397430673
},
"hellaswag": {
"acc": 0.43766182035451107,
"acc_stderr": 0.004950848456984543,
"acc_norm": 0.5823541127265485,
"acc_norm_stderr": 0.004921632645102376
},
"swag": {
"acc": 0.49650104968509445,
"acc_stderr": 0.0035350054881690377,
"acc_norm": 0.6786963910826752,
"acc_norm_stderr": 0.0033016139730438565
}
},
"versions": {
"copa": 0,
"arc_challenge": 0,
"mc_taco": 0,
"prost": 0,
"piqa": 0,
"wsc273": 0,
"winogrande": 0,
"boolq": 1,
"openbookqa": 0,
"arc_easy": 0,
"hellaswag": 0,
"swag": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-4.5B,use_accelerate=True",
"num_fewshot": 0,
"batch_size": "auto",
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
{
"results": {
"gsm8k": {
"acc": 0.000758150113722517,
"acc_stderr": 0.0007581501137225263
}
},
"versions": {
"gsm8k": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=facebook/xglm-4.5B,use_accelerate=True",
"num_fewshot": 8,
"batch_size": "auto",
"device": "cuda",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
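Finally, a sketch for comparing one metric across the result files in this commit, using the file names from the section headings above:

```python
# Side-by-side comparison of one metric across result files in this commit;
# file names are taken from the section headings above.
import json

FILES = {
    "xglm-1.7B": "xglm-1.7B_gsm8k_8-shot.json",
    "xglm-4.5B": "xglm-4.5B_gsm8k_8-shot.json",
}

for model, path in FILES.items():
    with open(path) as f:
        r = json.load(f)["results"]["gsm8k"]
    # Prints 0.99% ± 0.27 and 0.08% ± 0.08, matching the tables above.
    print(f"{model}: acc = {100 * r['acc']:.2f}% ± {100 * r['acc_stderr']:.2f}")
```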