lm1-misc-pile/3b92b62b6/evaluation/rankeval/lm1-3b9-26b-results_lm-eval_global_step24424_2023-01-23-20-00-31_1shots.json
{
"results": {
"anli_r1": {
"acc": 0.317,
"acc_stderr": 0.014721675438880224
},
"anli_r2": {
"acc": 0.334,
"acc_stderr": 0.014922019523732958
},
"anli_r3": {
"acc": 0.3433333333333333,
"acc_stderr": 0.01371263383046586
},
"cb": {
"acc": 0.48214285714285715,
"acc_stderr": 0.06737697508644648,
"f1": 0.3421052631578947
},
"copa": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845
},
"hellaswag": {
"acc": 0.3366859191396136,
"acc_stderr": 0.004716106475905092,
"acc_norm": 0.3980282812188807,
"acc_norm_stderr": 0.0048849095444771
},
"rte": {
"acc": 0.5234657039711191,
"acc_stderr": 0.030063300411902652
},
"winogrande": {
"acc": 0.500394632991318,
"acc_stderr": 0.014052481306049516
},
"storycloze_2016": {
"acc": 0.6242650988776055,
"acc_stderr": 0.011199651092754325
},
"boolq": {
"acc": 0.5116207951070336,
"acc_stderr": 0.008742692742551265
},
"arc_easy": {
"acc": 0.5252525252525253,
"acc_stderr": 0.010246690042583856,
"acc_norm": 0.4957912457912458,
"acc_norm_stderr": 0.010259420038764091
},
"arc_challenge": {
"acc": 0.22440273037542663,
"acc_stderr": 0.012191404938603835,
"acc_norm": 0.2568259385665529,
"acc_norm_stderr": 0.012766923794116796
},
"sciq": {
"acc": 0.87,
"acc_stderr": 0.01064016979249936,
"acc_norm": 0.868,
"acc_norm_stderr": 0.010709373963528019
},
"piqa": {
"acc": 0.6719260065288357,
"acc_stderr": 0.010954487135124235,
"acc_norm": 0.6637649619151251,
"acc_norm_stderr": 0.011022346708970236
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}