lm1-misc-pile/1b566b66b/1b566b66bpile/evaluation/rankeval/lm1-1b5-66b-results_lm-eval_global_step125429_2023-01-24-13-57-10_4shots.json
{
"results": {
"anli_r1": {
"acc": 0.329,
"acc_stderr": 0.014865395385928366
},
"anli_r2": {
"acc": 0.367,
"acc_stderr": 0.015249378464171752
},
"anli_r3": {
"acc": 0.35,
"acc_stderr": 0.013774667009018556
},
"cb": {
"acc": 0.42857142857142855,
"acc_stderr": 0.06672848092813058,
"f1": 0.30170664757130927
},
"copa": {
"acc": 0.72,
"acc_stderr": 0.04512608598542127
},
"hellaswag": {
"acc": 0.3701453893646684,
"acc_stderr": 0.004818566366066927,
"acc_norm": 0.4627564230233021,
"acc_norm_stderr": 0.004975919665116536
},
"rte": {
"acc": 0.51985559566787,
"acc_stderr": 0.030072723167317184
},
"winogrande": {
"acc": 0.5477505919494869,
"acc_stderr": 0.013988256216606015
},
"storycloze_2016": {
"acc": 0.6638161411010155,
"acc_stderr": 0.01092424747257778
},
"boolq": {
"acc": 0.5489296636085627,
"acc_stderr": 0.008703080962379617
},
"arc_easy": {
"acc": 0.5816498316498316,
"acc_stderr": 0.010122061470742856,
"acc_norm": 0.5824915824915825,
"acc_norm_stderr": 0.01011918737777604
},
"arc_challenge": {
"acc": 0.24488054607508533,
"acc_stderr": 0.012566273985131356,
"acc_norm": 0.2935153583617747,
"acc_norm_stderr": 0.013307250444941125
},
"sciq": {
"acc": 0.919,
"acc_stderr": 0.00863212103214,
"acc_norm": 0.92,
"acc_norm_stderr": 0.008583336977753653
},
"piqa": {
"acc": 0.6920565832426551,
"acc_stderr": 0.010770892367463683,
"acc_norm": 0.6991294885745375,
"acc_norm_stderr": 0.010700745724145973
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}