{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 984,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.003048780487804878, |
|
"grad_norm": 0.0672889974881053, |
|
"learning_rate": 5.050505050505052e-07, |
|
"loss": 0.5879, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.006097560975609756, |
|
"grad_norm": 0.06658887990303611, |
|
"learning_rate": 1.0101010101010103e-06, |
|
"loss": 0.5344, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.009146341463414634, |
|
"grad_norm": 0.06759856887025624, |
|
"learning_rate": 1.5151515151515152e-06, |
|
"loss": 0.5734, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.012195121951219513, |
|
"grad_norm": 0.07151981947202596, |
|
"learning_rate": 2.0202020202020206e-06, |
|
"loss": 0.6133, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01524390243902439, |
|
"grad_norm": 0.07851977916778793, |
|
"learning_rate": 2.5252525252525253e-06, |
|
"loss": 0.5983, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.018292682926829267, |
|
"grad_norm": 0.0800165592411649, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 0.6898, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.021341463414634148, |
|
"grad_norm": 0.07309066641656121, |
|
"learning_rate": 3.5353535353535352e-06, |
|
"loss": 0.6453, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.024390243902439025, |
|
"grad_norm": 0.07569404816722357, |
|
"learning_rate": 4.040404040404041e-06, |
|
"loss": 0.6345, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.027439024390243903, |
|
"grad_norm": 0.07647277110891933, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.6177, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03048780487804878, |
|
"grad_norm": 0.07743239710231267, |
|
"learning_rate": 5.050505050505051e-06, |
|
"loss": 0.6586, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03353658536585366, |
|
"grad_norm": 0.06456324181839977, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 0.5362, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.036585365853658534, |
|
"grad_norm": 0.07310464363002138, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.5606, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.039634146341463415, |
|
"grad_norm": 0.06589675953784346, |
|
"learning_rate": 6.565656565656567e-06, |
|
"loss": 0.5444, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.042682926829268296, |
|
"grad_norm": 0.06985336076319228, |
|
"learning_rate": 7.0707070707070704e-06, |
|
"loss": 0.5802, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04573170731707317, |
|
"grad_norm": 0.07816492456588642, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.6828, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 0.08029559940720289, |
|
"learning_rate": 8.080808080808082e-06, |
|
"loss": 0.6571, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.051829268292682924, |
|
"grad_norm": 0.0716618460526677, |
|
"learning_rate": 8.585858585858587e-06, |
|
"loss": 0.5627, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.054878048780487805, |
|
"grad_norm": 0.07391850146450858, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.5875, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.057926829268292686, |
|
"grad_norm": 0.0730095019246016, |
|
"learning_rate": 9.595959595959595e-06, |
|
"loss": 0.5981, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.06097560975609756, |
|
"grad_norm": 0.08299791488120488, |
|
"learning_rate": 1.0101010101010101e-05, |
|
"loss": 0.6851, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06402439024390244, |
|
"grad_norm": 0.08322761474768019, |
|
"learning_rate": 1.0606060606060607e-05, |
|
"loss": 0.6895, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06707317073170732, |
|
"grad_norm": 0.06946779021320976, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.5958, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0701219512195122, |
|
"grad_norm": 0.0840644563494239, |
|
"learning_rate": 1.1616161616161616e-05, |
|
"loss": 0.6865, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.07317073170731707, |
|
"grad_norm": 0.08103942510491499, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 0.6385, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07621951219512195, |
|
"grad_norm": 0.06960617777809865, |
|
"learning_rate": 1.2626262626262628e-05, |
|
"loss": 0.5766, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07926829268292683, |
|
"grad_norm": 0.06350551869608764, |
|
"learning_rate": 1.3131313131313134e-05, |
|
"loss": 0.53, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.08231707317073171, |
|
"grad_norm": 0.06954405913739202, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 0.5804, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08536585365853659, |
|
"grad_norm": 0.05801979787649268, |
|
"learning_rate": 1.4141414141414141e-05, |
|
"loss": 0.5463, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08841463414634146, |
|
"grad_norm": 0.0707113147835271, |
|
"learning_rate": 1.4646464646464647e-05, |
|
"loss": 0.6901, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.09146341463414634, |
|
"grad_norm": 0.07199565636119566, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 0.5657, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09451219512195122, |
|
"grad_norm": 0.06031348351647801, |
|
"learning_rate": 1.565656565656566e-05, |
|
"loss": 0.5793, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 0.051381021893855267, |
|
"learning_rate": 1.6161616161616165e-05, |
|
"loss": 0.5032, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.10060975609756098, |
|
"grad_norm": 0.05224030536199245, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.535, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.10365853658536585, |
|
"grad_norm": 0.057566908214629696, |
|
"learning_rate": 1.7171717171717173e-05, |
|
"loss": 0.5695, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.10670731707317073, |
|
"grad_norm": 0.050553154622884115, |
|
"learning_rate": 1.7676767676767676e-05, |
|
"loss": 0.4747, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10975609756097561, |
|
"grad_norm": 0.0555320047190129, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 0.5131, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.11280487804878049, |
|
"grad_norm": 0.04980278635904654, |
|
"learning_rate": 1.8686868686868688e-05, |
|
"loss": 0.5091, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.11585365853658537, |
|
"grad_norm": 0.05349296511174711, |
|
"learning_rate": 1.919191919191919e-05, |
|
"loss": 0.6005, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11890243902439024, |
|
"grad_norm": 0.05531305808997776, |
|
"learning_rate": 1.9696969696969697e-05, |
|
"loss": 0.584, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.12195121951219512, |
|
"grad_norm": 0.05341724693526889, |
|
"learning_rate": 2.0202020202020203e-05, |
|
"loss": 0.5734, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.125, |
|
"grad_norm": 0.04936955763599991, |
|
"learning_rate": 2.070707070707071e-05, |
|
"loss": 0.4993, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12804878048780488, |
|
"grad_norm": 0.05063294439179141, |
|
"learning_rate": 2.1212121212121215e-05, |
|
"loss": 0.5256, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.13109756097560976, |
|
"grad_norm": 0.04558814730407359, |
|
"learning_rate": 2.171717171717172e-05, |
|
"loss": 0.4779, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.13414634146341464, |
|
"grad_norm": 0.052246397012185086, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.4885, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.13719512195121952, |
|
"grad_norm": 0.049611145112304265, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 0.5412, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1402439024390244, |
|
"grad_norm": 0.04616911363965657, |
|
"learning_rate": 2.3232323232323232e-05, |
|
"loss": 0.4997, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.14329268292682926, |
|
"grad_norm": 0.04431786715654772, |
|
"learning_rate": 2.3737373737373738e-05, |
|
"loss": 0.4489, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 0.049509312908459314, |
|
"learning_rate": 2.4242424242424244e-05, |
|
"loss": 0.5677, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.14939024390243902, |
|
"grad_norm": 0.043303149705511926, |
|
"learning_rate": 2.474747474747475e-05, |
|
"loss": 0.5461, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.1524390243902439, |
|
"grad_norm": 0.04346369573012801, |
|
"learning_rate": 2.5252525252525256e-05, |
|
"loss": 0.4852, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.15548780487804878, |
|
"grad_norm": 0.04010543981497035, |
|
"learning_rate": 2.575757575757576e-05, |
|
"loss": 0.4827, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.15853658536585366, |
|
"grad_norm": 0.04273578550839348, |
|
"learning_rate": 2.6262626262626268e-05, |
|
"loss": 0.456, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.16158536585365854, |
|
"grad_norm": 0.04899472557064514, |
|
"learning_rate": 2.676767676767677e-05, |
|
"loss": 0.5793, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.16463414634146342, |
|
"grad_norm": 0.04318797168345568, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 0.4915, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.1676829268292683, |
|
"grad_norm": 0.05006798307827536, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.4948, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.17073170731707318, |
|
"grad_norm": 0.04034250617082293, |
|
"learning_rate": 2.8282828282828282e-05, |
|
"loss": 0.4916, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.17378048780487804, |
|
"grad_norm": 0.03657488246202251, |
|
"learning_rate": 2.878787878787879e-05, |
|
"loss": 0.4937, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.17682926829268292, |
|
"grad_norm": 0.03213988489946405, |
|
"learning_rate": 2.9292929292929294e-05, |
|
"loss": 0.4011, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1798780487804878, |
|
"grad_norm": 0.03474129460321165, |
|
"learning_rate": 2.9797979797979796e-05, |
|
"loss": 0.4638, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.18292682926829268, |
|
"grad_norm": 0.038909552802220075, |
|
"learning_rate": 3.0303030303030306e-05, |
|
"loss": 0.5359, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.18597560975609756, |
|
"grad_norm": 0.03437975920891206, |
|
"learning_rate": 3.080808080808081e-05, |
|
"loss": 0.4677, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.18902439024390244, |
|
"grad_norm": 0.03564686452785141, |
|
"learning_rate": 3.131313131313132e-05, |
|
"loss": 0.4852, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.19207317073170732, |
|
"grad_norm": 0.03402175099897292, |
|
"learning_rate": 3.181818181818182e-05, |
|
"loss": 0.478, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 0.04071628630413561, |
|
"learning_rate": 3.232323232323233e-05, |
|
"loss": 0.4804, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.19817073170731708, |
|
"grad_norm": 0.03133737153925272, |
|
"learning_rate": 3.282828282828283e-05, |
|
"loss": 0.4796, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.20121951219512196, |
|
"grad_norm": 0.030606542892269543, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.4543, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.20426829268292682, |
|
"grad_norm": 0.028220765431020804, |
|
"learning_rate": 3.3838383838383844e-05, |
|
"loss": 0.445, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.2073170731707317, |
|
"grad_norm": 0.030351542499101217, |
|
"learning_rate": 3.434343434343435e-05, |
|
"loss": 0.411, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.21036585365853658, |
|
"grad_norm": 0.03059399901204726, |
|
"learning_rate": 3.484848484848485e-05, |
|
"loss": 0.515, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.21341463414634146, |
|
"grad_norm": 0.03296963214213472, |
|
"learning_rate": 3.535353535353535e-05, |
|
"loss": 0.499, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.21646341463414634, |
|
"grad_norm": 0.030683874962795153, |
|
"learning_rate": 3.5858585858585855e-05, |
|
"loss": 0.4457, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.21951219512195122, |
|
"grad_norm": 0.03279445122686611, |
|
"learning_rate": 3.6363636363636364e-05, |
|
"loss": 0.4637, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.2225609756097561, |
|
"grad_norm": 0.027670174967853636, |
|
"learning_rate": 3.686868686868687e-05, |
|
"loss": 0.4637, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.22560975609756098, |
|
"grad_norm": 0.03204511638784144, |
|
"learning_rate": 3.7373737373737376e-05, |
|
"loss": 0.4684, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.22865853658536586, |
|
"grad_norm": 0.02692814426837153, |
|
"learning_rate": 3.787878787878788e-05, |
|
"loss": 0.4564, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.23170731707317074, |
|
"grad_norm": 0.03159646644993244, |
|
"learning_rate": 3.838383838383838e-05, |
|
"loss": 0.501, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2347560975609756, |
|
"grad_norm": 0.02614313741800732, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 0.4242, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.23780487804878048, |
|
"grad_norm": 0.026446688459674766, |
|
"learning_rate": 3.939393939393939e-05, |
|
"loss": 0.5611, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.24085365853658536, |
|
"grad_norm": 0.029235728604449545, |
|
"learning_rate": 3.98989898989899e-05, |
|
"loss": 0.4523, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 0.029403985571445752, |
|
"learning_rate": 4.0404040404040405e-05, |
|
"loss": 0.5413, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.24695121951219512, |
|
"grad_norm": 0.0248895165085513, |
|
"learning_rate": 4.0909090909090915e-05, |
|
"loss": 0.4353, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.02514252171483617, |
|
"learning_rate": 4.141414141414142e-05, |
|
"loss": 0.4019, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2530487804878049, |
|
"grad_norm": 0.02526428258646879, |
|
"learning_rate": 4.191919191919192e-05, |
|
"loss": 0.4026, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.25609756097560976, |
|
"grad_norm": 0.02774638271334102, |
|
"learning_rate": 4.242424242424243e-05, |
|
"loss": 0.4443, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.25914634146341464, |
|
"grad_norm": 0.02552350005556174, |
|
"learning_rate": 4.292929292929293e-05, |
|
"loss": 0.532, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2621951219512195, |
|
"grad_norm": 0.02337099188494046, |
|
"learning_rate": 4.343434343434344e-05, |
|
"loss": 0.3992, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.2652439024390244, |
|
"grad_norm": 0.023989118355155412, |
|
"learning_rate": 4.3939393939393944e-05, |
|
"loss": 0.4473, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.2682926829268293, |
|
"grad_norm": 0.02687814732230884, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 0.4594, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.27134146341463417, |
|
"grad_norm": 0.027896900163840974, |
|
"learning_rate": 4.494949494949495e-05, |
|
"loss": 0.4866, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.27439024390243905, |
|
"grad_norm": 0.025924142515604206, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 0.4598, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2774390243902439, |
|
"grad_norm": 0.02824961803928781, |
|
"learning_rate": 4.595959595959596e-05, |
|
"loss": 0.4895, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2804878048780488, |
|
"grad_norm": 0.025064170396782423, |
|
"learning_rate": 4.6464646464646464e-05, |
|
"loss": 0.3995, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.28353658536585363, |
|
"grad_norm": 0.026567289363745077, |
|
"learning_rate": 4.696969696969697e-05, |
|
"loss": 0.3898, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2865853658536585, |
|
"grad_norm": 0.0235303526093093, |
|
"learning_rate": 4.7474747474747476e-05, |
|
"loss": 0.5007, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.2896341463414634, |
|
"grad_norm": 0.0232550129311821, |
|
"learning_rate": 4.797979797979798e-05, |
|
"loss": 0.4353, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 0.027355889003192944, |
|
"learning_rate": 4.848484848484849e-05, |
|
"loss": 0.4228, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.29573170731707316, |
|
"grad_norm": 0.026046752710899224, |
|
"learning_rate": 4.898989898989899e-05, |
|
"loss": 0.4911, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.29878048780487804, |
|
"grad_norm": 0.024133091415805927, |
|
"learning_rate": 4.94949494949495e-05, |
|
"loss": 0.4194, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.3018292682926829, |
|
"grad_norm": 0.027499375160356283, |
|
"learning_rate": 5e-05, |
|
"loss": 0.4401, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.3048780487804878, |
|
"grad_norm": 0.02426792472675091, |
|
"learning_rate": 4.99998424846941e-05, |
|
"loss": 0.3926, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.3079268292682927, |
|
"grad_norm": 0.028683991879347375, |
|
"learning_rate": 4.999936994076129e-05, |
|
"loss": 0.4317, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.31097560975609756, |
|
"grad_norm": 0.02515593602285064, |
|
"learning_rate": 4.999858237415621e-05, |
|
"loss": 0.4921, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.31402439024390244, |
|
"grad_norm": 0.02731378150973361, |
|
"learning_rate": 4.999747979480314e-05, |
|
"loss": 0.3999, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.3170731707317073, |
|
"grad_norm": 0.02469325829557163, |
|
"learning_rate": 4.999606221659595e-05, |
|
"loss": 0.4291, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3201219512195122, |
|
"grad_norm": 0.025887949421610226, |
|
"learning_rate": 4.999432965739785e-05, |
|
"loss": 0.4064, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3231707317073171, |
|
"grad_norm": 0.029562890120902404, |
|
"learning_rate": 4.999228213904123e-05, |
|
"loss": 0.4704, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.32621951219512196, |
|
"grad_norm": 0.026468507235623037, |
|
"learning_rate": 4.998991968732729e-05, |
|
"loss": 0.4291, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.32926829268292684, |
|
"grad_norm": 0.028415208011529337, |
|
"learning_rate": 4.9987242332025846e-05, |
|
"loss": 0.4445, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3323170731707317, |
|
"grad_norm": 0.02690846850933201, |
|
"learning_rate": 4.998425010687484e-05, |
|
"loss": 0.4413, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.3353658536585366, |
|
"grad_norm": 0.029434783335942422, |
|
"learning_rate": 4.9980943049579975e-05, |
|
"loss": 0.5259, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3384146341463415, |
|
"grad_norm": 0.02732698412255557, |
|
"learning_rate": 4.997732120181423e-05, |
|
"loss": 0.458, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 0.02817361324202221, |
|
"learning_rate": 4.997338460921731e-05, |
|
"loss": 0.5046, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3445121951219512, |
|
"grad_norm": 0.026598418378303214, |
|
"learning_rate": 4.9969133321395104e-05, |
|
"loss": 0.4538, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3475609756097561, |
|
"grad_norm": 0.026712012837418288, |
|
"learning_rate": 4.996456739191905e-05, |
|
"loss": 0.4333, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.35060975609756095, |
|
"grad_norm": 0.02664382392284045, |
|
"learning_rate": 4.995968687832545e-05, |
|
"loss": 0.3973, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.35365853658536583, |
|
"grad_norm": 0.026832454561682675, |
|
"learning_rate": 4.995449184211474e-05, |
|
"loss": 0.3963, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3567073170731707, |
|
"grad_norm": 0.03128497596944388, |
|
"learning_rate": 4.994898234875075e-05, |
|
"loss": 0.4612, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.3597560975609756, |
|
"grad_norm": 0.027920438805892456, |
|
"learning_rate": 4.994315846765984e-05, |
|
"loss": 0.4803, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3628048780487805, |
|
"grad_norm": 0.02790719813073735, |
|
"learning_rate": 4.993702027223004e-05, |
|
"loss": 0.3983, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.36585365853658536, |
|
"grad_norm": 0.02954525910398473, |
|
"learning_rate": 4.993056783981013e-05, |
|
"loss": 0.53, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.36890243902439024, |
|
"grad_norm": 0.03279451861565122, |
|
"learning_rate": 4.9923801251708655e-05, |
|
"loss": 0.4329, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3719512195121951, |
|
"grad_norm": 0.02969073542607525, |
|
"learning_rate": 4.9916720593192925e-05, |
|
"loss": 0.495, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.375, |
|
"grad_norm": 0.030046395052057563, |
|
"learning_rate": 4.990932595348789e-05, |
|
"loss": 0.5039, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3780487804878049, |
|
"grad_norm": 0.030594799975909838, |
|
"learning_rate": 4.9901617425775067e-05, |
|
"loss": 0.5416, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.38109756097560976, |
|
"grad_norm": 0.02419088359613875, |
|
"learning_rate": 4.9893595107191355e-05, |
|
"loss": 0.3206, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.38414634146341464, |
|
"grad_norm": 0.028896084169673618, |
|
"learning_rate": 4.988525909882779e-05, |
|
"loss": 0.4127, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.3871951219512195, |
|
"grad_norm": 0.04001825579621416, |
|
"learning_rate": 4.9876609505728266e-05, |
|
"loss": 0.4392, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 0.14115804141301908, |
|
"learning_rate": 4.9867646436888274e-05, |
|
"loss": 0.4594, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.3932926829268293, |
|
"grad_norm": 0.029050747953028554, |
|
"learning_rate": 4.985837000525343e-05, |
|
"loss": 0.4337, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.39634146341463417, |
|
"grad_norm": 0.02920084405101805, |
|
"learning_rate": 4.984878032771815e-05, |
|
"loss": 0.4225, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.39939024390243905, |
|
"grad_norm": 0.03011718239999231, |
|
"learning_rate": 4.983887752512412e-05, |
|
"loss": 0.4538, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.4024390243902439, |
|
"grad_norm": 0.027897119508686768, |
|
"learning_rate": 4.982866172225875e-05, |
|
"loss": 0.4071, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.4054878048780488, |
|
"grad_norm": 0.03577152763319291, |
|
"learning_rate": 4.981813304785369e-05, |
|
"loss": 0.5895, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.40853658536585363, |
|
"grad_norm": 0.03277686846451814, |
|
"learning_rate": 4.980729163458312e-05, |
|
"loss": 0.5017, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.4115853658536585, |
|
"grad_norm": 0.03217006570314937, |
|
"learning_rate": 4.979613761906212e-05, |
|
"loss": 0.4384, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.4146341463414634, |
|
"grad_norm": 0.030949220084112952, |
|
"learning_rate": 4.978467114184495e-05, |
|
"loss": 0.4815, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.4176829268292683, |
|
"grad_norm": 0.030260238522408403, |
|
"learning_rate": 4.977289234742325e-05, |
|
"loss": 0.4143, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.42073170731707316, |
|
"grad_norm": 0.028522608089292575, |
|
"learning_rate": 4.976080138422428e-05, |
|
"loss": 0.402, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.42378048780487804, |
|
"grad_norm": 0.029049158089749227, |
|
"learning_rate": 4.974839840460895e-05, |
|
"loss": 0.3804, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.4268292682926829, |
|
"grad_norm": 0.0298218460769968, |
|
"learning_rate": 4.973568356487001e-05, |
|
"loss": 0.4665, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4298780487804878, |
|
"grad_norm": 0.030435705321632405, |
|
"learning_rate": 4.972265702523e-05, |
|
"loss": 0.4775, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4329268292682927, |
|
"grad_norm": 0.033498527194517305, |
|
"learning_rate": 4.970931894983927e-05, |
|
"loss": 0.4643, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.43597560975609756, |
|
"grad_norm": 0.024129978615902534, |
|
"learning_rate": 4.969566950677391e-05, |
|
"loss": 0.387, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 0.033295978068974476, |
|
"learning_rate": 4.9681708868033616e-05, |
|
"loss": 0.4444, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4420731707317073, |
|
"grad_norm": 0.0318133787810746, |
|
"learning_rate": 4.9667437209539525e-05, |
|
"loss": 0.3922, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4451219512195122, |
|
"grad_norm": 0.031838890935437666, |
|
"learning_rate": 4.965285471113201e-05, |
|
"loss": 0.43, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.4481707317073171, |
|
"grad_norm": 0.031279587433315055, |
|
"learning_rate": 4.9637961556568404e-05, |
|
"loss": 0.4794, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.45121951219512196, |
|
"grad_norm": 0.029605363976328944, |
|
"learning_rate": 4.9622757933520694e-05, |
|
"loss": 0.4084, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.45426829268292684, |
|
"grad_norm": 0.03328501411562014, |
|
"learning_rate": 4.9607244033573156e-05, |
|
"loss": 0.3862, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4573170731707317, |
|
"grad_norm": 0.02977331587790026, |
|
"learning_rate": 4.959142005221991e-05, |
|
"loss": 0.4317, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4603658536585366, |
|
"grad_norm": 0.03445716661980318, |
|
"learning_rate": 4.957528618886249e-05, |
|
"loss": 0.3587, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4634146341463415, |
|
"grad_norm": 0.03349464222579777, |
|
"learning_rate": 4.9558842646807354e-05, |
|
"loss": 0.4175, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.46646341463414637, |
|
"grad_norm": 0.03172092320527115, |
|
"learning_rate": 4.954208963326327e-05, |
|
"loss": 0.4291, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.4695121951219512, |
|
"grad_norm": 0.03495378537426485, |
|
"learning_rate": 4.9525027359338696e-05, |
|
"loss": 0.4943, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.4725609756097561, |
|
"grad_norm": 0.02829629512141535, |
|
"learning_rate": 4.9507656040039186e-05, |
|
"loss": 0.4044, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.47560975609756095, |
|
"grad_norm": 0.03372581762913699, |
|
"learning_rate": 4.9489975894264636e-05, |
|
"loss": 0.4583, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.47865853658536583, |
|
"grad_norm": 0.031007930096541795, |
|
"learning_rate": 4.947198714480653e-05, |
|
"loss": 0.3455, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.4817073170731707, |
|
"grad_norm": 0.03294155045988215, |
|
"learning_rate": 4.9453690018345144e-05, |
|
"loss": 0.4384, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.4847560975609756, |
|
"grad_norm": 0.032312853442576034, |
|
"learning_rate": 4.9435084745446666e-05, |
|
"loss": 0.4049, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 0.030987604019126144, |
|
"learning_rate": 4.941617156056032e-05, |
|
"loss": 0.4172, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.49085365853658536, |
|
"grad_norm": 0.02617581898410179, |
|
"learning_rate": 4.939695070201541e-05, |
|
"loss": 0.3645, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.49390243902439024, |
|
"grad_norm": 0.03584401963601039, |
|
"learning_rate": 4.937742241201826e-05, |
|
"loss": 0.4821, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4969512195121951, |
|
"grad_norm": 0.03254199704880417, |
|
"learning_rate": 4.9357586936649255e-05, |
|
"loss": 0.4286, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.03301611426756909, |
|
"learning_rate": 4.933744452585966e-05, |
|
"loss": 0.4156, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.5030487804878049, |
|
"grad_norm": 0.036375576445087335, |
|
"learning_rate": 4.931699543346854e-05, |
|
"loss": 0.5381, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.5060975609756098, |
|
"grad_norm": 0.02629100047771681, |
|
"learning_rate": 4.929623991715947e-05, |
|
"loss": 0.3748, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.5091463414634146, |
|
"grad_norm": 0.03271065383289755, |
|
"learning_rate": 4.927517823847738e-05, |
|
"loss": 0.424, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.5121951219512195, |
|
"grad_norm": 0.02977415364330819, |
|
"learning_rate": 4.9253810662825216e-05, |
|
"loss": 0.4107, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.5152439024390244, |
|
"grad_norm": 0.029592038707982913, |
|
"learning_rate": 4.923213745946059e-05, |
|
"loss": 0.4416, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.5182926829268293, |
|
"grad_norm": 0.034123248186661195, |
|
"learning_rate": 4.92101589014924e-05, |
|
"loss": 0.4724, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.5213414634146342, |
|
"grad_norm": 0.030117950572343562, |
|
"learning_rate": 4.918787526587739e-05, |
|
"loss": 0.4342, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.524390243902439, |
|
"grad_norm": 0.02784489931949698, |
|
"learning_rate": 4.9165286833416666e-05, |
|
"loss": 0.414, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.5274390243902439, |
|
"grad_norm": 0.028838060629043785, |
|
"learning_rate": 4.9142393888752125e-05, |
|
"loss": 0.3891, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.5304878048780488, |
|
"grad_norm": 0.03039770773924711, |
|
"learning_rate": 4.91191967203629e-05, |
|
"loss": 0.4696, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5335365853658537, |
|
"grad_norm": 0.027932971891971924, |
|
"learning_rate": 4.909569562056172e-05, |
|
"loss": 0.3999, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 0.032440243318377006, |
|
"learning_rate": 4.9071890885491224e-05, |
|
"loss": 0.4376, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5396341463414634, |
|
"grad_norm": 0.028239012069124093, |
|
"learning_rate": 4.9047782815120214e-05, |
|
"loss": 0.4173, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5426829268292683, |
|
"grad_norm": 0.03145028792920836, |
|
"learning_rate": 4.9023371713239895e-05, |
|
"loss": 0.4447, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.5457317073170732, |
|
"grad_norm": 0.0331574276058771, |
|
"learning_rate": 4.899865788746005e-05, |
|
"loss": 0.4591, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5487804878048781, |
|
"grad_norm": 0.03236199256446896, |
|
"learning_rate": 4.897364164920514e-05, |
|
"loss": 0.4119, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.551829268292683, |
|
"grad_norm": 0.031426884352228006, |
|
"learning_rate": 4.8948323313710405e-05, |
|
"loss": 0.3928, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5548780487804879, |
|
"grad_norm": 0.029533984825273876, |
|
"learning_rate": 4.892270320001786e-05, |
|
"loss": 0.4302, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5579268292682927, |
|
"grad_norm": 0.0275533238930569, |
|
"learning_rate": 4.889678163097233e-05, |
|
"loss": 0.3847, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5609756097560976, |
|
"grad_norm": 0.02879278222254772, |
|
"learning_rate": 4.88705589332173e-05, |
|
"loss": 0.3578, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5640243902439024, |
|
"grad_norm": 0.02856312748685412, |
|
"learning_rate": 4.884403543719089e-05, |
|
"loss": 0.3671, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5670731707317073, |
|
"grad_norm": 0.03325654158942621, |
|
"learning_rate": 4.881721147712162e-05, |
|
"loss": 0.4458, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5701219512195121, |
|
"grad_norm": 0.03203686650016824, |
|
"learning_rate": 4.879008739102423e-05, |
|
"loss": 0.4267, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.573170731707317, |
|
"grad_norm": 0.031235969957028178, |
|
"learning_rate": 4.876266352069542e-05, |
|
"loss": 0.4254, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5762195121951219, |
|
"grad_norm": 0.03192624303186046, |
|
"learning_rate": 4.873494021170953e-05, |
|
"loss": 0.38, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5792682926829268, |
|
"grad_norm": 0.030168180809586265, |
|
"learning_rate": 4.870691781341422e-05, |
|
"loss": 0.3605, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5823170731707317, |
|
"grad_norm": 0.032779575767083055, |
|
"learning_rate": 4.867859667892599e-05, |
|
"loss": 0.4632, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 0.031635000676225315, |
|
"learning_rate": 4.864997716512584e-05, |
|
"loss": 0.4022, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5884146341463414, |
|
"grad_norm": 0.03300217321436167, |
|
"learning_rate": 4.8621059632654684e-05, |
|
"loss": 0.3976, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5914634146341463, |
|
"grad_norm": 0.036533189955443766, |
|
"learning_rate": 4.859184444590882e-05, |
|
"loss": 0.4193, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5945121951219512, |
|
"grad_norm": 0.03429837316646944, |
|
"learning_rate": 4.856233197303539e-05, |
|
"loss": 0.4551, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5975609756097561, |
|
"grad_norm": 0.042055757960286724, |
|
"learning_rate": 4.853252258592769e-05, |
|
"loss": 0.5209, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.600609756097561, |
|
"grad_norm": 0.032413833464997835, |
|
"learning_rate": 4.85024166602205e-05, |
|
"loss": 0.4542, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.6036585365853658, |
|
"grad_norm": 0.032486592113239954, |
|
"learning_rate": 4.847201457528533e-05, |
|
"loss": 0.4547, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.6067073170731707, |
|
"grad_norm": 0.03146274594158332, |
|
"learning_rate": 4.84413167142257e-05, |
|
"loss": 0.4303, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.6097560975609756, |
|
"grad_norm": 0.03300614850893232, |
|
"learning_rate": 4.841032346387223e-05, |
|
"loss": 0.3996, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.6128048780487805, |
|
"grad_norm": 0.03033077869518619, |
|
"learning_rate": 4.8379035214777836e-05, |
|
"loss": 0.3868, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.6158536585365854, |
|
"grad_norm": 0.041107227614117905, |
|
"learning_rate": 4.834745236121276e-05, |
|
"loss": 0.4447, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.6189024390243902, |
|
"grad_norm": 0.03258067072978055, |
|
"learning_rate": 4.8315575301159644e-05, |
|
"loss": 0.4083, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.6219512195121951, |
|
"grad_norm": 0.03324141495975344, |
|
"learning_rate": 4.8283404436308464e-05, |
|
"loss": 0.3783, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 0.0346026019862594, |
|
"learning_rate": 4.8250940172051506e-05, |
|
"loss": 0.4676, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6280487804878049, |
|
"grad_norm": 0.034218054427013485, |
|
"learning_rate": 4.821818291747826e-05, |
|
"loss": 0.4161, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.6310975609756098, |
|
"grad_norm": 0.03578241935145395, |
|
"learning_rate": 4.8185133085370246e-05, |
|
"loss": 0.4546, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 0.029220203681492543, |
|
"learning_rate": 4.815179109219581e-05, |
|
"loss": 0.3681, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.6371951219512195, |
|
"grad_norm": 0.034325656907965206, |
|
"learning_rate": 4.81181573581049e-05, |
|
"loss": 0.4236, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.6402439024390244, |
|
"grad_norm": 0.031648709603629754, |
|
"learning_rate": 4.808423230692374e-05, |
|
"loss": 0.4113, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6432926829268293, |
|
"grad_norm": 0.04023544907777028, |
|
"learning_rate": 4.805001636614951e-05, |
|
"loss": 0.4344, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6463414634146342, |
|
"grad_norm": 0.033804893131760355, |
|
"learning_rate": 4.801550996694498e-05, |
|
"loss": 0.4015, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.649390243902439, |
|
"grad_norm": 0.030694782148620977, |
|
"learning_rate": 4.798071354413302e-05, |
|
"loss": 0.4318, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6524390243902439, |
|
"grad_norm": 0.032401947613213765, |
|
"learning_rate": 4.7945627536191166e-05, |
|
"loss": 0.4608, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6554878048780488, |
|
"grad_norm": 0.034950291203224994, |
|
"learning_rate": 4.791025238524608e-05, |
|
"loss": 0.4746, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6585365853658537, |
|
"grad_norm": 0.03170711324948168, |
|
"learning_rate": 4.787458853706798e-05, |
|
"loss": 0.4547, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6615853658536586, |
|
"grad_norm": 0.03091501658528955, |
|
"learning_rate": 4.783863644106502e-05, |
|
"loss": 0.4375, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6646341463414634, |
|
"grad_norm": 0.035732798404897494, |
|
"learning_rate": 4.780239655027764e-05, |
|
"loss": 0.3974, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6676829268292683, |
|
"grad_norm": 0.035871702747555335, |
|
"learning_rate": 4.7765869321372836e-05, |
|
"loss": 0.4421, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6707317073170732, |
|
"grad_norm": 0.030446573090157556, |
|
"learning_rate": 4.772905521463841e-05, |
|
"loss": 0.3724, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6737804878048781, |
|
"grad_norm": 0.03069146337023481, |
|
"learning_rate": 4.769195469397719e-05, |
|
"loss": 0.4074, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.676829268292683, |
|
"grad_norm": 0.030890634848987315, |
|
"learning_rate": 4.765456822690116e-05, |
|
"loss": 0.4885, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6798780487804879, |
|
"grad_norm": 0.03259691598463501, |
|
"learning_rate": 4.7616896284525594e-05, |
|
"loss": 0.4435, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.03610520773760829, |
|
"learning_rate": 4.7578939341563095e-05, |
|
"loss": 0.4422, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6859756097560976, |
|
"grad_norm": 0.030129525739252205, |
|
"learning_rate": 4.754069787631761e-05, |
|
"loss": 0.4183, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6890243902439024, |
|
"grad_norm": 0.03226262495076605, |
|
"learning_rate": 4.7502172370678434e-05, |
|
"loss": 0.414, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6920731707317073, |
|
"grad_norm": 0.038231135681208804, |
|
"learning_rate": 4.7463363310114106e-05, |
|
"loss": 0.5366, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6951219512195121, |
|
"grad_norm": 0.03350943712591451, |
|
"learning_rate": 4.742427118366632e-05, |
|
"loss": 0.5113, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.698170731707317, |
|
"grad_norm": 0.033377477689505115, |
|
"learning_rate": 4.738489648394373e-05, |
|
"loss": 0.3785, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.7012195121951219, |
|
"grad_norm": 0.02912803902171078, |
|
"learning_rate": 4.7345239707115764e-05, |
|
"loss": 0.3561, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.7042682926829268, |
|
"grad_norm": 0.03279893881426138, |
|
"learning_rate": 4.7305301352906376e-05, |
|
"loss": 0.392, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.7073170731707317, |
|
"grad_norm": 0.033540891496370996, |
|
"learning_rate": 4.7265081924587715e-05, |
|
"loss": 0.4123, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.7103658536585366, |
|
"grad_norm": 0.037955097184342194, |
|
"learning_rate": 4.722458192897384e-05, |
|
"loss": 0.3895, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.7134146341463414, |
|
"grad_norm": 0.03823305877451305, |
|
"learning_rate": 4.7183801876414294e-05, |
|
"loss": 0.4963, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.7164634146341463, |
|
"grad_norm": 0.0325437922967289, |
|
"learning_rate": 4.7142742280787654e-05, |
|
"loss": 0.395, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.7195121951219512, |
|
"grad_norm": 0.033910959532630194, |
|
"learning_rate": 4.7101403659495114e-05, |
|
"loss": 0.4213, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.7225609756097561, |
|
"grad_norm": 0.03988837387160677, |
|
"learning_rate": 4.705978653345392e-05, |
|
"loss": 0.4515, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.725609756097561, |
|
"grad_norm": 0.033382737192550964, |
|
"learning_rate": 4.701789142709081e-05, |
|
"loss": 0.4293, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.7286585365853658, |
|
"grad_norm": 0.03683967072126718, |
|
"learning_rate": 4.697571886833544e-05, |
|
"loss": 0.4869, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 0.03389014227863287, |
|
"learning_rate": 4.693326938861366e-05, |
|
"loss": 0.432, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.7347560975609756, |
|
"grad_norm": 0.03338844412509684, |
|
"learning_rate": 4.689054352284094e-05, |
|
"loss": 0.3734, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.7378048780487805, |
|
"grad_norm": 0.03350579155466827, |
|
"learning_rate": 4.684754180941545e-05, |
|
"loss": 0.4609, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.7408536585365854, |
|
"grad_norm": 0.0360391450589852, |
|
"learning_rate": 4.680426479021147e-05, |
|
"loss": 0.4488, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.7439024390243902, |
|
"grad_norm": 0.0325562245901543, |
|
"learning_rate": 4.676071301057243e-05, |
|
"loss": 0.3746, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.7469512195121951, |
|
"grad_norm": 0.037989790962959104, |
|
"learning_rate": 4.6716887019304075e-05, |
|
"loss": 0.4802, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.03446224282201849, |
|
"learning_rate": 4.6672787368667556e-05, |
|
"loss": 0.3878, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7530487804878049, |
|
"grad_norm": 0.038240603812219866, |
|
"learning_rate": 4.662841461437247e-05, |
|
"loss": 0.4311, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7560975609756098, |
|
"grad_norm": 0.03293308938612019, |
|
"learning_rate": 4.658376931556986e-05, |
|
"loss": 0.4569, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7591463414634146, |
|
"grad_norm": 0.03479760626237619, |
|
"learning_rate": 4.653885203484515e-05, |
|
"loss": 0.3793, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.7621951219512195, |
|
"grad_norm": 0.037388404653232496, |
|
"learning_rate": 4.649366333821109e-05, |
|
"loss": 0.4452, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7652439024390244, |
|
"grad_norm": 0.0352761753839379, |
|
"learning_rate": 4.644820379510058e-05, |
|
"loss": 0.4568, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.7682926829268293, |
|
"grad_norm": 0.035571316960728315, |
|
"learning_rate": 4.640247397835953e-05, |
|
"loss": 0.4964, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7713414634146342, |
|
"grad_norm": 0.042108811767305916, |
|
"learning_rate": 4.635647446423963e-05, |
|
"loss": 0.4635, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.774390243902439, |
|
"grad_norm": 0.03740018008517267, |
|
"learning_rate": 4.631020583239107e-05, |
|
"loss": 0.4075, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7774390243902439, |
|
"grad_norm": 0.03406740378233361, |
|
"learning_rate": 4.626366866585528e-05, |
|
"loss": 0.4123, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.03534462880308758, |
|
"learning_rate": 4.621686355105752e-05, |
|
"loss": 0.5181, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7835365853658537, |
|
"grad_norm": 0.039032255535887364, |
|
"learning_rate": 4.616979107779958e-05, |
|
"loss": 0.4219, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7865853658536586, |
|
"grad_norm": 0.03228976154931515, |
|
"learning_rate": 4.612245183925225e-05, |
|
"loss": 0.4264, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7896341463414634, |
|
"grad_norm": 0.03359739152114295, |
|
"learning_rate": 4.607484643194788e-05, |
|
"loss": 0.39, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7926829268292683, |
|
"grad_norm": 0.04618842209757573, |
|
"learning_rate": 4.602697545577292e-05, |
|
"loss": 0.4484, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7957317073170732, |
|
"grad_norm": 0.03767698901292077, |
|
"learning_rate": 4.597883951396027e-05, |
|
"loss": 0.4427, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7987804878048781, |
|
"grad_norm": 0.033711281520042095, |
|
"learning_rate": 4.593043921308175e-05, |
|
"loss": 0.3961, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.801829268292683, |
|
"grad_norm": 0.03517505501582254, |
|
"learning_rate": 4.588177516304042e-05, |
|
"loss": 0.4626, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.8048780487804879, |
|
"grad_norm": 0.041222873153210056, |
|
"learning_rate": 4.5832847977062874e-05, |
|
"loss": 0.489, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.8079268292682927, |
|
"grad_norm": 0.036670715661490515, |
|
"learning_rate": 4.578365827169159e-05, |
|
"loss": 0.4201, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.8109756097560976, |
|
"grad_norm": 0.03429356814619947, |
|
"learning_rate": 4.5734206666777075e-05, |
|
"loss": 0.4228, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.8140243902439024, |
|
"grad_norm": 0.03454066782608884, |
|
"learning_rate": 4.5684493785470105e-05, |
|
"loss": 0.33, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.8170731707317073, |
|
"grad_norm": 0.042630367804240565, |
|
"learning_rate": 4.563452025421386e-05, |
|
"loss": 0.4113, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.8201219512195121, |
|
"grad_norm": 0.03518819540379091, |
|
"learning_rate": 4.558428670273601e-05, |
|
"loss": 0.4554, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.823170731707317, |
|
"grad_norm": 0.03059671220624392, |
|
"learning_rate": 4.5533793764040844e-05, |
|
"loss": 0.3427, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.8262195121951219, |
|
"grad_norm": 0.03994211546654242, |
|
"learning_rate": 4.5483042074401185e-05, |
|
"loss": 0.4469, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.042677969772066116, |
|
"learning_rate": 4.5432032273350484e-05, |
|
"loss": 0.4816, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.8323170731707317, |
|
"grad_norm": 0.03453477271523532, |
|
"learning_rate": 4.538076500367469e-05, |
|
"loss": 0.4339, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.8353658536585366, |
|
"grad_norm": 0.033868608488940716, |
|
"learning_rate": 4.532924091140417e-05, |
|
"loss": 0.4343, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.8384146341463414, |
|
"grad_norm": 0.040360291012410086, |
|
"learning_rate": 4.527746064580559e-05, |
|
"loss": 0.4379, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.8414634146341463, |
|
"grad_norm": 0.03915688403846989, |
|
"learning_rate": 4.522542485937369e-05, |
|
"loss": 0.4507, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.8445121951219512, |
|
"grad_norm": 0.03296011686492542, |
|
"learning_rate": 4.51731342078231e-05, |
|
"loss": 0.4712, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.8475609756097561, |
|
"grad_norm": 0.03431476570558208, |
|
"learning_rate": 4.5120589350080045e-05, |
|
"loss": 0.4128, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.850609756097561, |
|
"grad_norm": 0.03296989189683222, |
|
"learning_rate": 4.5067790948274094e-05, |
|
"loss": 0.4259, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.8536585365853658, |
|
"grad_norm": 0.03888989456612432, |
|
"learning_rate": 4.501473966772974e-05, |
|
"loss": 0.4363, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8567073170731707, |
|
"grad_norm": 0.036013879189621865, |
|
"learning_rate": 4.4961436176958096e-05, |
|
"loss": 0.3842, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.8597560975609756, |
|
"grad_norm": 0.03538488532386994, |
|
"learning_rate": 4.49078811476484e-05, |
|
"loss": 0.4286, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8628048780487805, |
|
"grad_norm": 0.029695892400757717, |
|
"learning_rate": 4.485407525465961e-05, |
|
"loss": 0.326, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8658536585365854, |
|
"grad_norm": 0.03618612518943503, |
|
"learning_rate": 4.480001917601185e-05, |
|
"loss": 0.4609, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8689024390243902, |
|
"grad_norm": 0.03408841436436889, |
|
"learning_rate": 4.47457135928779e-05, |
|
"loss": 0.3644, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8719512195121951, |
|
"grad_norm": 0.03633183392407226, |
|
"learning_rate": 4.469115918957462e-05, |
|
"loss": 0.4926, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.875, |
|
"grad_norm": 0.03228844576866147, |
|
"learning_rate": 4.463635665355427e-05, |
|
"loss": 0.4056, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.03273543543687427, |
|
"learning_rate": 4.458130667539592e-05, |
|
"loss": 0.5236, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8810975609756098, |
|
"grad_norm": 0.0367258255673261, |
|
"learning_rate": 4.4526009948796703e-05, |
|
"loss": 0.4307, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8841463414634146, |
|
"grad_norm": 0.03703121197597588, |
|
"learning_rate": 4.447046717056308e-05, |
|
"loss": 0.414, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8871951219512195, |
|
"grad_norm": 0.035618837315914385, |
|
"learning_rate": 4.4414679040602066e-05, |
|
"loss": 0.4478, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8902439024390244, |
|
"grad_norm": 0.03548278274265021, |
|
"learning_rate": 4.435864626191242e-05, |
|
"loss": 0.3895, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8932926829268293, |
|
"grad_norm": 0.037705061087907554, |
|
"learning_rate": 4.430236954057575e-05, |
|
"loss": 0.471, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8963414634146342, |
|
"grad_norm": 0.034130715604090416, |
|
"learning_rate": 4.4245849585747654e-05, |
|
"loss": 0.4321, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.899390243902439, |
|
"grad_norm": 0.035178382604340114, |
|
"learning_rate": 4.4189087109648786e-05, |
|
"loss": 0.4426, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.9024390243902439, |
|
"grad_norm": 0.033104141044304335, |
|
"learning_rate": 4.413208282755583e-05, |
|
"loss": 0.4569, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.9054878048780488, |
|
"grad_norm": 0.033911276401756896, |
|
"learning_rate": 4.4074837457792565e-05, |
|
"loss": 0.3703, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.9085365853658537, |
|
"grad_norm": 0.035965621143592474, |
|
"learning_rate": 4.4017351721720713e-05, |
|
"loss": 0.4585, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.9115853658536586, |
|
"grad_norm": 0.03218777441907488, |
|
"learning_rate": 4.395962634373097e-05, |
|
"loss": 0.3582, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.9146341463414634, |
|
"grad_norm": 0.0410793082328498, |
|
"learning_rate": 4.3901662051233746e-05, |
|
"loss": 0.4138, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.9176829268292683, |
|
"grad_norm": 0.03352096380275321, |
|
"learning_rate": 4.384345957465014e-05, |
|
"loss": 0.3597, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.9207317073170732, |
|
"grad_norm": 0.03722187766731886, |
|
"learning_rate": 4.3785019647402596e-05, |
|
"loss": 0.3942, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.9237804878048781, |
|
"grad_norm": 0.0371788079208431, |
|
"learning_rate": 4.372634300590578e-05, |
|
"loss": 0.3483, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 0.03611699462478195, |
|
"learning_rate": 4.36674303895572e-05, |
|
"loss": 0.4075, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.9298780487804879, |
|
"grad_norm": 0.03723998605579338, |
|
"learning_rate": 4.360828254072796e-05, |
|
"loss": 0.4151, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.9329268292682927, |
|
"grad_norm": 0.03959076079536807, |
|
"learning_rate": 4.35489002047534e-05, |
|
"loss": 0.4752, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.9359756097560976, |
|
"grad_norm": 0.03766438805908114, |
|
"learning_rate": 4.348928412992365e-05, |
|
"loss": 0.4568, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.9390243902439024, |
|
"grad_norm": 0.03778927701157187, |
|
"learning_rate": 4.3429435067474255e-05, |
|
"loss": 0.487, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.9420731707317073, |
|
"grad_norm": 0.036336021394423754, |
|
"learning_rate": 4.336935377157668e-05, |
|
"loss": 0.4099, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.9451219512195121, |
|
"grad_norm": 0.0431887089760307, |
|
"learning_rate": 4.330904099932883e-05, |
|
"loss": 0.497, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.948170731707317, |
|
"grad_norm": 0.039944309237872226, |
|
"learning_rate": 4.324849751074549e-05, |
|
"loss": 0.443, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.9512195121951219, |
|
"grad_norm": 0.03854560157459489, |
|
"learning_rate": 4.318772406874873e-05, |
|
"loss": 0.3739, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.9542682926829268, |
|
"grad_norm": 0.03734951750084128, |
|
"learning_rate": 4.312672143915836e-05, |
|
"loss": 0.5006, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.9573170731707317, |
|
"grad_norm": 0.03689350747693368, |
|
"learning_rate": 4.3065490390682186e-05, |
|
"loss": 0.3988, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.9603658536585366, |
|
"grad_norm": 0.038314638731371316, |
|
"learning_rate": 4.30040316949064e-05, |
|
"loss": 0.424, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9634146341463414, |
|
"grad_norm": 0.04066874086781242, |
|
"learning_rate": 4.294234612628584e-05, |
|
"loss": 0.3774, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9664634146341463, |
|
"grad_norm": 0.04039695985914129, |
|
"learning_rate": 4.288043446213418e-05, |
|
"loss": 0.4616, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9695121951219512, |
|
"grad_norm": 0.03660869126341122, |
|
"learning_rate": 4.2818297482614215e-05, |
|
"loss": 0.4401, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9725609756097561, |
|
"grad_norm": 0.03562869147236029, |
|
"learning_rate": 4.275593597072796e-05, |
|
"loss": 0.361, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 0.03711094322444415, |
|
"learning_rate": 4.269335071230683e-05, |
|
"loss": 0.3837, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9786585365853658, |
|
"grad_norm": 0.03892312477131053, |
|
"learning_rate": 4.263054249600172e-05, |
|
"loss": 0.4111, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9817073170731707, |
|
"grad_norm": 0.03865864546953116, |
|
"learning_rate": 4.2567512113273053e-05, |
|
"loss": 0.4433, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9847560975609756, |
|
"grad_norm": 0.03867539491346883, |
|
"learning_rate": 4.250426035838083e-05, |
|
"loss": 0.4212, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9878048780487805, |
|
"grad_norm": 0.040465376955033085, |
|
"learning_rate": 4.2440788028374624e-05, |
|
"loss": 0.4628, |
|
"step": 324 |
|
}, |
|
{
"epoch": 0.9908536585365854,
"grad_norm": 0.03704119504678098,
"learning_rate": 4.23770959230835e-05,
"loss": 0.4128,
"step": 325
},
{
"epoch": 0.9939024390243902,
"grad_norm": 0.03839322577794025,
"learning_rate": 4.231318484510598e-05,
"loss": 0.44,
"step": 326
},
{
"epoch": 0.9969512195121951,
"grad_norm": 0.03315102993940294,
"learning_rate": 4.2249055599799905e-05,
"loss": 0.4046,
"step": 327
},
{
"epoch": 1.0,
"grad_norm": 0.03625310265220718,
"learning_rate": 4.218470899527229e-05,
"loss": 0.4283,
"step": 328
},
{
"epoch": 1.0030487804878048,
"grad_norm": 0.039661819040880925,
"learning_rate": 4.212014584236914e-05,
"loss": 0.4333,
"step": 329
},
{
"epoch": 1.0060975609756098,
"grad_norm": 0.036493365868269324,
"learning_rate": 4.2055366954665245e-05,
"loss": 0.3777,
"step": 330
},
{
"epoch": 1.0091463414634145,
"grad_norm": 0.035949114268750526,
"learning_rate": 4.19903731484539e-05,
"loss": 0.4056,
"step": 331
},
{
"epoch": 1.0121951219512195,
"grad_norm": 0.038016945564005054,
"learning_rate": 4.1925165242736645e-05,
"loss": 0.3595,
"step": 332
},
{
"epoch": 1.0152439024390243,
"grad_norm": 0.043854287453910595,
"learning_rate": 4.1859744059212945e-05,
"loss": 0.3806,
"step": 333
},
{
"epoch": 1.0182926829268293,
"grad_norm": 0.032489628100366166,
"learning_rate": 4.1794110422269825e-05,
"loss": 0.3716,
"step": 334
},
{
"epoch": 1.021341463414634,
"grad_norm": 0.034846328027673276,
"learning_rate": 4.172826515897146e-05,
"loss": 0.378,
"step": 335
},
{
"epoch": 1.024390243902439,
"grad_norm": 0.03617467028978075,
"learning_rate": 4.16622090990488e-05,
"loss": 0.4319,
"step": 336
},
{
"epoch": 1.0274390243902438,
"grad_norm": 0.036298981743650975,
"learning_rate": 4.159594307488909e-05,
"loss": 0.4165,
"step": 337
},
{
"epoch": 1.0304878048780488,
"grad_norm": 0.04008248827120437,
"learning_rate": 4.152946792152537e-05,
"loss": 0.4598,
"step": 338
},
{
"epoch": 1.0335365853658536,
"grad_norm": 0.03413084170917385,
"learning_rate": 4.146278447662597e-05,
"loss": 0.3911,
"step": 339
},
{
"epoch": 1.0365853658536586,
"grad_norm": 0.0401261349416301,
"learning_rate": 4.1395893580483946e-05,
"loss": 0.4359,
"step": 340
},
{
"epoch": 1.0396341463414633,
"grad_norm": 0.03712587827307479,
"learning_rate": 4.13287960760065e-05,
"loss": 0.3907,
"step": 341
},
{
"epoch": 1.0426829268292683,
"grad_norm": 0.03662865750568463,
"learning_rate": 4.1261492808704336e-05,
"loss": 0.3809,
"step": 342
},
{
"epoch": 1.045731707317073,
"grad_norm": 0.03818973631464467,
"learning_rate": 4.1193984626681055e-05,
"loss": 0.392,
"step": 343
},
{
"epoch": 1.048780487804878,
"grad_norm": 0.039592831441071695,
"learning_rate": 4.112627238062239e-05,
"loss": 0.4329,
"step": 344
},
{
"epoch": 1.0518292682926829,
"grad_norm": 0.034611760797123796,
"learning_rate": 4.105835692378557e-05,
"loss": 0.3833,
"step": 345
},
{
"epoch": 1.0548780487804879,
"grad_norm": 0.041529978989313134,
"learning_rate": 4.0990239111988495e-05,
"loss": 0.4961,
"step": 346
},
{
"epoch": 1.0579268292682926,
"grad_norm": 0.03915634524818885,
"learning_rate": 4.092191980359902e-05,
"loss": 0.367,
"step": 347
},
{
"epoch": 1.0609756097560976,
"grad_norm": 0.039821471806743135,
"learning_rate": 4.085339985952407e-05,
"loss": 0.4019,
"step": 348
},
{
"epoch": 1.0640243902439024,
"grad_norm": 0.0381647553606081,
"learning_rate": 4.0784680143198836e-05,
"loss": 0.3899,
"step": 349
},
{
"epoch": 1.0670731707317074,
"grad_norm": 0.038813602489438916,
"learning_rate": 4.071576152057592e-05,
"loss": 0.4504,
"step": 350
},
{
"epoch": 1.0701219512195121,
"grad_norm": 0.03759452805764784,
"learning_rate": 4.064664486011433e-05,
"loss": 0.4096,
"step": 351
},
{
"epoch": 1.0731707317073171,
"grad_norm": 0.03559570239193551,
"learning_rate": 4.057733103276862e-05,
"loss": 0.4448,
"step": 352
},
{
"epoch": 1.076219512195122,
"grad_norm": 0.03934894672014586,
"learning_rate": 4.05078209119779e-05,
"loss": 0.4315,
"step": 353
},
{
"epoch": 1.079268292682927,
"grad_norm": 0.03536865777657963,
"learning_rate": 4.04381153736548e-05,
"loss": 0.3166,
"step": 354
},
{
"epoch": 1.0823170731707317,
"grad_norm": 0.06112220739940428,
"learning_rate": 4.036821529617445e-05,
"loss": 0.4604,
"step": 355
},
{
"epoch": 1.0853658536585367,
"grad_norm": 0.042098722892453,
"learning_rate": 4.0298121560363425e-05,
"loss": 0.467,
"step": 356
},
{
"epoch": 1.0884146341463414,
"grad_norm": 0.033546501683242795,
"learning_rate": 4.0227835049488615e-05,
"loss": 0.4055,
"step": 357
},
{
"epoch": 1.0914634146341464,
"grad_norm": 0.040738677792441114,
"learning_rate": 4.015735664924613e-05,
"loss": 0.4435,
"step": 358
},
{
"epoch": 1.0945121951219512,
"grad_norm": 0.03997113538785167,
"learning_rate": 4.00866872477501e-05,
"loss": 0.4176,
"step": 359
},
{
"epoch": 1.0975609756097562,
"grad_norm": 0.037012001536335506,
"learning_rate": 4.0015827735521525e-05,
"loss": 0.4083,
"step": 360
},
{
"epoch": 1.100609756097561,
"grad_norm": 0.03848505076268115,
"learning_rate": 3.994477900547703e-05,
"loss": 0.4417,
"step": 361
},
{
"epoch": 1.103658536585366,
"grad_norm": 0.041316815401570055,
"learning_rate": 3.98735419529176e-05,
"loss": 0.4736,
"step": 362
},
{
"epoch": 1.1067073170731707,
"grad_norm": 0.03911772664638055,
"learning_rate": 3.9802117475517333e-05,
"loss": 0.4158,
"step": 363
},
{
"epoch": 1.1097560975609757,
"grad_norm": 0.03522394343528472,
"learning_rate": 3.973050647331209e-05,
"loss": 0.4062,
"step": 364
},
{
"epoch": 1.1128048780487805,
"grad_norm": 0.03658235791600257,
"learning_rate": 3.9658709848688204e-05,
"loss": 0.3585,
"step": 365
},
{
"epoch": 1.1158536585365855,
"grad_norm": 0.03982898125923274,
"learning_rate": 3.9586728506371036e-05,
"loss": 0.4337,
"step": 366
},
{
"epoch": 1.1189024390243902,
"grad_norm": 0.037684991163154595,
"learning_rate": 3.951456335341365e-05,
"loss": 0.3462,
"step": 367
},
{
"epoch": 1.1219512195121952,
"grad_norm": 0.0397290039170204,
"learning_rate": 3.944221529918534e-05,
"loss": 0.4048,
"step": 368
},
{
"epoch": 1.125,
"grad_norm": 0.040291593478486015,
"learning_rate": 3.9369685255360175e-05,
"loss": 0.4069,
"step": 369
},
{
"epoch": 1.1280487804878048,
"grad_norm": 0.03850647603937439,
"learning_rate": 3.929697413590551e-05,
"loss": 0.3627,
"step": 370
},
{
"epoch": 1.1310975609756098,
"grad_norm": 0.038135080456534534,
"learning_rate": 3.92240828570705e-05,
"loss": 0.4451,
"step": 371
},
{
"epoch": 1.1341463414634148,
"grad_norm": 0.03816254670509036,
"learning_rate": 3.9151012337374495e-05,
"loss": 0.4146,
"step": 372
},
{
"epoch": 1.1371951219512195,
"grad_norm": 0.044412156062597616,
"learning_rate": 3.907776349759552e-05,
"loss": 0.5105,
"step": 373
},
{
"epoch": 1.1402439024390243,
"grad_norm": 0.04640808798970655,
"learning_rate": 3.900433726075865e-05,
"loss": 0.455,
"step": 374
},
{
"epoch": 1.1432926829268293,
"grad_norm": 0.038192479317907146,
"learning_rate": 3.893073455212438e-05,
"loss": 0.3909,
"step": 375
},
{
"epoch": 1.146341463414634,
"grad_norm": 0.04165289486551481,
"learning_rate": 3.885695629917696e-05,
"loss": 0.4276,
"step": 376
},
{
"epoch": 1.149390243902439,
"grad_norm": 0.04119881876804503,
"learning_rate": 3.878300343161272e-05,
"loss": 0.3558,
"step": 377
},
{
"epoch": 1.1524390243902438,
"grad_norm": 0.040337725300083456,
"learning_rate": 3.8708876881328335e-05,
"loss": 0.3624,
"step": 378
},
{
"epoch": 1.1554878048780488,
"grad_norm": 0.04080570289033096,
"learning_rate": 3.863457758240912e-05,
"loss": 0.4121,
"step": 379
},
{
"epoch": 1.1585365853658536,
"grad_norm": 0.046098474965416324,
"learning_rate": 3.8560106471117205e-05,
"loss": 0.442,
"step": 380
},
{
"epoch": 1.1615853658536586,
"grad_norm": 0.04336491573458105,
"learning_rate": 3.848546448587979e-05,
"loss": 0.4463,
"step": 381
},
{
"epoch": 1.1646341463414633,
"grad_norm": 0.0372102108830308,
"learning_rate": 3.8410652567277274e-05,
"loss": 0.4164,
"step": 382
},
{
"epoch": 1.1676829268292683,
"grad_norm": 0.0411184143025625,
"learning_rate": 3.8335671658031443e-05,
"loss": 0.4219,
"step": 383
},
{
"epoch": 1.170731707317073,
"grad_norm": 0.03761498632507343,
"learning_rate": 3.826052270299356e-05,
"loss": 0.3702,
"step": 384
},
{
"epoch": 1.173780487804878,
"grad_norm": 0.04054206319013793,
"learning_rate": 3.81852066491325e-05,
"loss": 0.4193,
"step": 385
},
{
"epoch": 1.1768292682926829,
"grad_norm": 0.04409636671558726,
"learning_rate": 3.810972444552273e-05,
"loss": 0.4673,
"step": 386
},
{
"epoch": 1.1798780487804879,
"grad_norm": 0.05497073494441137,
"learning_rate": 3.8034077043332463e-05,
"loss": 0.393,
"step": 387
},
{
"epoch": 1.1829268292682926,
"grad_norm": 0.035394828898099424,
"learning_rate": 3.795826539581159e-05,
"loss": 0.3822,
"step": 388
},
{
"epoch": 1.1859756097560976,
"grad_norm": 0.04315585167542129,
"learning_rate": 3.78822904582797e-05,
"loss": 0.3952,
"step": 389
},
{
"epoch": 1.1890243902439024,
"grad_norm": 0.04172163659394637,
"learning_rate": 3.7806153188114026e-05,
"loss": 0.4454,
"step": 390
},
{
"epoch": 1.1920731707317074,
"grad_norm": 0.038328909237282424,
"learning_rate": 3.7729854544737405e-05,
"loss": 0.4894,
"step": 391
},
{
"epoch": 1.1951219512195121,
"grad_norm": 0.04453016859851793,
"learning_rate": 3.765339548960618e-05,
"loss": 0.4578,
"step": 392
},
{
"epoch": 1.1981707317073171,
"grad_norm": 0.03985170342712862,
"learning_rate": 3.7576776986198065e-05,
"loss": 0.4054,
"step": 393
},
{
"epoch": 1.201219512195122,
"grad_norm": 0.0363919638493405,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.4151,
"step": 394
},
{
"epoch": 1.204268292682927,
"grad_norm": 0.04296486186004094,
"learning_rate": 3.742306549849605e-05,
"loss": 0.4092,
"step": 395
},
{
"epoch": 1.2073170731707317,
"grad_norm": 0.043399261442856184,
"learning_rate": 3.7345974451155105e-05,
"loss": 0.4375,
"step": 396
},
{
"epoch": 1.2103658536585367,
"grad_norm": 0.04546310712631861,
"learning_rate": 3.72687278294188e-05,
"loss": 0.3944,
"step": 397
},
{
"epoch": 1.2134146341463414,
"grad_norm": 0.03869718679031596,
"learning_rate": 3.719132660668912e-05,
"loss": 0.38,
"step": 398
},
{
"epoch": 1.2164634146341464,
"grad_norm": 0.04423908976746331,
"learning_rate": 3.711377175831626e-05,
"loss": 0.4413,
"step": 399
},
{
"epoch": 1.2195121951219512,
"grad_norm": 0.03992059124798192,
"learning_rate": 3.703606426158627e-05,
"loss": 0.3975,
"step": 400
},
{
"epoch": 1.2225609756097562,
"grad_norm": 0.04234926941131931,
"learning_rate": 3.695820509570876e-05,
"loss": 0.4182,
"step": 401
},
{
"epoch": 1.225609756097561,
"grad_norm": 0.03752584303283549,
"learning_rate": 3.688019524180457e-05,
"loss": 0.4336,
"step": 402
},
{
"epoch": 1.228658536585366,
"grad_norm": 0.040254488930115774,
"learning_rate": 3.680203568289335e-05,
"loss": 0.3102,
"step": 403
},
{
"epoch": 1.2317073170731707,
"grad_norm": 0.04126452897719161,
"learning_rate": 3.6723727403881284e-05,
"loss": 0.4864,
"step": 404
},
{
"epoch": 1.2347560975609757,
"grad_norm": 0.0397168573533502,
"learning_rate": 3.664527139154854e-05,
"loss": 0.39,
"step": 405
},
{
"epoch": 1.2378048780487805,
"grad_norm": 0.04394740053425026,
"learning_rate": 3.656666863453697e-05,
"loss": 0.4457,
"step": 406
},
{
"epoch": 1.2408536585365852,
"grad_norm": 0.040165587324300686,
"learning_rate": 3.6487920123337526e-05,
"loss": 0.4072,
"step": 407
},
{
"epoch": 1.2439024390243902,
"grad_norm": 0.039573002429541836,
"learning_rate": 3.6409026850277904e-05,
"loss": 0.4209,
"step": 408
},
{
"epoch": 1.2469512195121952,
"grad_norm": 0.04325973648718029,
"learning_rate": 3.632998980950993e-05,
"loss": 0.4865,
"step": 409
},
{
"epoch": 1.25,
"grad_norm": 0.045634562526459706,
"learning_rate": 3.625080999699711e-05,
"loss": 0.3904,
"step": 410
},
{
"epoch": 1.2530487804878048,
"grad_norm": 0.04352830294344096,
"learning_rate": 3.617148841050202e-05,
"loss": 0.435,
"step": 411
},
{
"epoch": 1.2560975609756098,
"grad_norm": 0.04556248919890376,
"learning_rate": 3.609202604957379e-05,
"loss": 0.4506,
"step": 412
},
{
"epoch": 1.2591463414634148,
"grad_norm": 0.038127895927304076,
"learning_rate": 3.601242391553546e-05,
"loss": 0.3466,
"step": 413
},
{
"epoch": 1.2621951219512195,
"grad_norm": 0.03894730371478106,
"learning_rate": 3.593268301147139e-05,
"loss": 0.4473,
"step": 414
},
{
"epoch": 1.2652439024390243,
"grad_norm": 0.03946795235138391,
"learning_rate": 3.585280434221461e-05,
"loss": 0.456,
"step": 415
},
{
"epoch": 1.2682926829268293,
"grad_norm": 0.04521220263241203,
"learning_rate": 3.5772788914334165e-05,
"loss": 0.4479,
"step": 416
},
{
"epoch": 1.2713414634146343,
"grad_norm": 0.04497082961254816,
"learning_rate": 3.5692637736122425e-05,
"loss": 0.4769,
"step": 417
},
{
"epoch": 1.274390243902439,
"grad_norm": 0.041729507940111016,
"learning_rate": 3.5612351817582375e-05,
"loss": 0.4383,
"step": 418
},
{
"epoch": 1.2774390243902438,
"grad_norm": 0.04030254563385526,
"learning_rate": 3.5531932170414896e-05,
"loss": 0.3696,
"step": 419
},
{
"epoch": 1.2804878048780488,
"grad_norm": 0.04350184053951066,
"learning_rate": 3.545137980800601e-05,
"loss": 0.4084,
"step": 420
},
{
"epoch": 1.2835365853658536,
"grad_norm": 0.046162386459619606,
"learning_rate": 3.5370695745414137e-05,
"loss": 0.4124,
"step": 421
},
{
"epoch": 1.2865853658536586,
"grad_norm": 0.04399090038019498,
"learning_rate": 3.5289880999357235e-05,
"loss": 0.433,
"step": 422
},
{
"epoch": 1.2896341463414633,
"grad_norm": 0.0398583981998761,
"learning_rate": 3.520893658820007e-05,
"loss": 0.484,
"step": 423
},
{
"epoch": 1.2926829268292683,
"grad_norm": 0.049515626274668,
"learning_rate": 3.512786353194134e-05,
"loss": 0.4273,
"step": 424
},
{
"epoch": 1.295731707317073,
"grad_norm": 0.051021698402422024,
"learning_rate": 3.504666285220083e-05,
"loss": 0.3773,
"step": 425
},
{
"epoch": 1.298780487804878,
"grad_norm": 0.04752736955001905,
"learning_rate": 3.496533557220651e-05,
"loss": 0.38,
"step": 426
},
{
"epoch": 1.3018292682926829,
"grad_norm": 0.03855841961124006,
"learning_rate": 3.488388271678173e-05,
"loss": 0.3691,
"step": 427
},
{
"epoch": 1.3048780487804879,
"grad_norm": 0.042396695569388354,
"learning_rate": 3.480230531233216e-05,
"loss": 0.3916,
"step": 428
},
{
"epoch": 1.3079268292682926,
"grad_norm": 0.04273608922242111,
"learning_rate": 3.472060438683302e-05,
"loss": 0.3981,
"step": 429
},
{
"epoch": 1.3109756097560976,
"grad_norm": 0.049523730946746974,
"learning_rate": 3.4638780969816e-05,
"loss": 0.42,
"step": 430
},
{
"epoch": 1.3140243902439024,
"grad_norm": 0.040081605113161875,
"learning_rate": 3.455683609235633e-05,
"loss": 0.3995,
"step": 431
},
{
"epoch": 1.3170731707317074,
"grad_norm": 0.045376936177541204,
"learning_rate": 3.447477078705983e-05,
"loss": 0.3677,
"step": 432
},
{
"epoch": 1.3201219512195121,
"grad_norm": 0.04334373283790506,
"learning_rate": 3.439258608804983e-05,
"loss": 0.506,
"step": 433
},
{
"epoch": 1.3231707317073171,
"grad_norm": 0.04180544711303642,
"learning_rate": 3.431028303095415e-05,
"loss": 0.4131,
"step": 434
},
{
"epoch": 1.326219512195122,
"grad_norm": 0.040548583882156285,
"learning_rate": 3.422786265289211e-05,
"loss": 0.4098,
"step": 435
},
{
"epoch": 1.329268292682927,
"grad_norm": 0.0481011172486242,
"learning_rate": 3.414532599246138e-05,
"loss": 0.4053,
"step": 436
},
{
"epoch": 1.3323170731707317,
"grad_norm": 0.04072709256507153,
"learning_rate": 3.4062674089724944e-05,
"loss": 0.4115,
"step": 437
},
{
"epoch": 1.3353658536585367,
"grad_norm": 0.04386676391625629,
"learning_rate": 3.3979907986197996e-05,
"loss": 0.4421,
"step": 438
},
{
"epoch": 1.3384146341463414,
"grad_norm": 0.04109122248944048,
"learning_rate": 3.389702872483477e-05,
"loss": 0.4108,
"step": 439
},
{
"epoch": 1.3414634146341464,
"grad_norm": 0.03919269914070091,
"learning_rate": 3.381403735001546e-05,
"loss": 0.3833,
"step": 440
},
{
"epoch": 1.3445121951219512,
"grad_norm": 0.04239685630936478,
"learning_rate": 3.3730934907533e-05,
"loss": 0.4242,
"step": 441
},
{
"epoch": 1.3475609756097562,
"grad_norm": 0.038258355769465166,
"learning_rate": 3.364772244457991e-05,
"loss": 0.3938,
"step": 442
},
{
"epoch": 1.350609756097561,
"grad_norm": 0.04096483905702994,
"learning_rate": 3.356440100973513e-05,
"loss": 0.4106,
"step": 443
},
{
"epoch": 1.3536585365853657,
"grad_norm": 0.047393278834682626,
"learning_rate": 3.348097165295076e-05,
"loss": 0.5145,
"step": 444
},
{
"epoch": 1.3567073170731707,
"grad_norm": 0.04633439099806606,
"learning_rate": 3.3397435425538845e-05,
"loss": 0.3849,
"step": 445
},
{
"epoch": 1.3597560975609757,
"grad_norm": 0.039579677850455526,
"learning_rate": 3.331379338015815e-05,
"loss": 0.383,
"step": 446
},
{
"epoch": 1.3628048780487805,
"grad_norm": 0.039540731669071356,
"learning_rate": 3.3230046570800864e-05,
"loss": 0.3098,
"step": 447
},
{
"epoch": 1.3658536585365852,
"grad_norm": 0.044430392635034954,
"learning_rate": 3.3146196052779314e-05,
"loss": 0.4134,
"step": 448
},
{
"epoch": 1.3689024390243902,
"grad_norm": 0.0461209856378901,
"learning_rate": 3.3062242882712724e-05,
"loss": 0.4762,
"step": 449
},
{
"epoch": 1.3719512195121952,
"grad_norm": 0.04095847664841931,
"learning_rate": 3.297818811851382e-05,
"loss": 0.3562,
"step": 450
},
{
"epoch": 1.375,
"grad_norm": 0.04167488392555382,
"learning_rate": 3.289403281937554e-05,
"loss": 0.3533,
"step": 451
},
{
"epoch": 1.3780487804878048,
"grad_norm": 0.04319674461159365,
"learning_rate": 3.280977804575773e-05,
"loss": 0.3939,
"step": 452
},
{
"epoch": 1.3810975609756098,
"grad_norm": 0.04464600230901868,
"learning_rate": 3.272542485937369e-05,
"loss": 0.4278,
"step": 453
},
{
"epoch": 1.3841463414634148,
"grad_norm": 0.044632110302958494,
"learning_rate": 3.2640974323176846e-05,
"loss": 0.4559,
"step": 454
},
{
"epoch": 1.3871951219512195,
"grad_norm": 0.041606750321008176,
"learning_rate": 3.255642750134738e-05,
"loss": 0.3438,
"step": 455
},
{
"epoch": 1.3902439024390243,
"grad_norm": 0.046396118274942486,
"learning_rate": 3.2471785459278756e-05,
"loss": 0.3536,
"step": 456
},
{
"epoch": 1.3932926829268293,
"grad_norm": 0.04967698041003248,
"learning_rate": 3.2387049263564365e-05,
"loss": 0.4603,
"step": 457
},
{
"epoch": 1.3963414634146343,
"grad_norm": 0.04423036929072222,
"learning_rate": 3.2302219981984014e-05,
"loss": 0.4108,
"step": 458
},
{
"epoch": 1.399390243902439,
"grad_norm": 0.04692015853572872,
"learning_rate": 3.2217298683490525e-05,
"loss": 0.44,
"step": 459
},
{
"epoch": 1.4024390243902438,
"grad_norm": 0.051126417646320296,
"learning_rate": 3.2132286438196245e-05,
"loss": 0.3984,
"step": 460
},
{
"epoch": 1.4054878048780488,
"grad_norm": 0.05132235831429306,
"learning_rate": 3.2047184317359566e-05,
"loss": 0.4838,
"step": 461
},
{
"epoch": 1.4085365853658536,
"grad_norm": 0.0420060500286704,
"learning_rate": 3.19619933933714e-05,
"loss": 0.4112,
"step": 462
},
{
"epoch": 1.4115853658536586,
"grad_norm": 0.042290123347956725,
"learning_rate": 3.187671473974172e-05,
"loss": 0.3994,
"step": 463
},
{
"epoch": 1.4146341463414633,
"grad_norm": 0.040748174494785384,
"learning_rate": 3.179134943108597e-05,
"loss": 0.3853,
"step": 464
},
{
"epoch": 1.4176829268292683,
"grad_norm": 0.04059345235444476,
"learning_rate": 3.1705898543111575e-05,
"loss": 0.4084,
"step": 465
},
{
"epoch": 1.420731707317073,
"grad_norm": 0.055103011348927346,
"learning_rate": 3.1620363152604354e-05,
"loss": 0.4206,
"step": 466
},
{
"epoch": 1.423780487804878,
"grad_norm": 0.04158726319207225,
"learning_rate": 3.153474433741496e-05,
"loss": 0.3494,
"step": 467
},
{
"epoch": 1.4268292682926829,
"grad_norm": 0.04706275403580267,
"learning_rate": 3.14490431764453e-05,
"loss": 0.4605,
"step": 468
},
{
"epoch": 1.4298780487804879,
"grad_norm": 0.044939505391916955,
"learning_rate": 3.136326074963494e-05,
"loss": 0.3611,
"step": 469
},
{
"epoch": 1.4329268292682926,
"grad_norm": 0.046842293410835734,
"learning_rate": 3.1277398137947516e-05,
"loss": 0.5025,
"step": 470
},
{
"epoch": 1.4359756097560976,
"grad_norm": 0.05269740887022689,
"learning_rate": 3.119145642335705e-05,
"loss": 0.4378,
"step": 471
},
{
"epoch": 1.4390243902439024,
"grad_norm": 0.04498556893820305,
"learning_rate": 3.110543668883438e-05,
"loss": 0.5231,
"step": 472
},
{
"epoch": 1.4420731707317074,
"grad_norm": 0.041432180188026094,
"learning_rate": 3.10193400183335e-05,
"loss": 0.4327,
"step": 473
},
{
"epoch": 1.4451219512195121,
"grad_norm": 0.05125275934132454,
"learning_rate": 3.093316749677788e-05,
"loss": 0.4129,
"step": 474
},
{
"epoch": 1.4481707317073171,
"grad_norm": 0.044066353635275675,
"learning_rate": 3.0846920210046796e-05,
"loss": 0.4087,
"step": 475
},
{
"epoch": 1.451219512195122,
"grad_norm": 0.04350573606760448,
"learning_rate": 3.076059924496167e-05,
"loss": 0.4113,
"step": 476
},
{
"epoch": 1.454268292682927,
"grad_norm": 0.039982387903032636,
"learning_rate": 3.0674205689272375e-05,
"loss": 0.407,
"step": 477
},
{
"epoch": 1.4573170731707317,
"grad_norm": 0.046576254516377445,
"learning_rate": 3.0587740631643495e-05,
"loss": 0.4671,
"step": 478
},
{
"epoch": 1.4603658536585367,
"grad_norm": 0.04440553082667708,
"learning_rate": 3.050120516164062e-05,
"loss": 0.3768,
"step": 479
},
{
"epoch": 1.4634146341463414,
"grad_norm": 0.041495054277947314,
"learning_rate": 3.041460036971664e-05,
"loss": 0.375,
"step": 480
},
{
"epoch": 1.4664634146341464,
"grad_norm": 0.048010338112052395,
"learning_rate": 3.0327927347197967e-05,
"loss": 0.4223,
"step": 481
},
{
"epoch": 1.4695121951219512,
"grad_norm": 0.04399903311566183,
"learning_rate": 3.024118718627083e-05,
"loss": 0.4412,
"step": 482
},
{
"epoch": 1.4725609756097562,
"grad_norm": 0.043332913853267936,
"learning_rate": 3.0154380979967456e-05,
"loss": 0.4122,
"step": 483
},
{
"epoch": 1.475609756097561,
"grad_norm": 0.04979870765528592,
"learning_rate": 3.006750982215234e-05,
"loss": 0.4247,
"step": 484
},
{
"epoch": 1.4786585365853657,
"grad_norm": 0.04830840196710952,
"learning_rate": 2.9980574807508443e-05,
"loss": 0.4054,
"step": 485
},
{
"epoch": 1.4817073170731707,
"grad_norm": 0.04493403266733893,
"learning_rate": 2.98935770315234e-05,
"loss": 0.4192,
"step": 486
},
{
"epoch": 1.4847560975609757,
"grad_norm": 0.04629449676150017,
"learning_rate": 2.9806517590475707e-05,
"loss": 0.4177,
"step": 487
},
{
"epoch": 1.4878048780487805,
"grad_norm": 0.056540345358252,
"learning_rate": 2.9719397581420932e-05,
"loss": 0.3933,
"step": 488
},
{
"epoch": 1.4908536585365852,
"grad_norm": 0.044714125976439284,
"learning_rate": 2.9632218102177862e-05,
"loss": 0.4328,
"step": 489
},
{
"epoch": 1.4939024390243902,
"grad_norm": 0.04581859990723505,
"learning_rate": 2.954498025131468e-05,
"loss": 0.4384,
"step": 490
},
{
"epoch": 1.4969512195121952,
"grad_norm": 0.04460433072726923,
"learning_rate": 2.9457685128135136e-05,
"loss": 0.4242,
"step": 491
},
{
"epoch": 1.5,
"grad_norm": 0.043427642512756305,
"learning_rate": 2.937033383266466e-05,
"loss": 0.4057,
"step": 492
},
{
"epoch": 1.5030487804878048,
"grad_norm": 0.044045306137113785,
"learning_rate": 2.9282927465636547e-05,
"loss": 0.4315,
"step": 493
},
{
"epoch": 1.5060975609756098,
"grad_norm": 0.046958499553942205,
"learning_rate": 2.9195467128478044e-05,
"loss": 0.4443,
"step": 494
},
{
"epoch": 1.5091463414634148,
"grad_norm": 0.04320799042144439,
"learning_rate": 2.9107953923296488e-05,
"loss": 0.3808,
"step": 495
},
{
"epoch": 1.5121951219512195,
"grad_norm": 0.04109444723893145,
"learning_rate": 2.9020388952865423e-05,
"loss": 0.3906,
"step": 496
},
{
"epoch": 1.5152439024390243,
"grad_norm": 0.049646393822732345,
"learning_rate": 2.89327733206107e-05,
"loss": 0.4675,
"step": 497
},
{
"epoch": 1.5182926829268293,
"grad_norm": 0.042206105467635986,
"learning_rate": 2.884510813059657e-05,
"loss": 0.385,
"step": 498
},
{
"epoch": 1.5213414634146343,
"grad_norm": 0.04242629061154276,
"learning_rate": 2.875739448751176e-05,
"loss": 0.4142,
"step": 499
},
{
"epoch": 1.524390243902439,
"grad_norm": 0.04508722521275613,
"learning_rate": 2.866963349665559e-05,
"loss": 0.4049,
"step": 500
},
{
"epoch": 1.5274390243902438,
"grad_norm": 0.04226906162135462,
"learning_rate": 2.8581826263923993e-05,
"loss": 0.3626,
"step": 501
},
{
"epoch": 1.5304878048780488,
"grad_norm": 0.04361915151450245,
"learning_rate": 2.849397389579564e-05,
"loss": 0.4577,
"step": 502
},
{
"epoch": 1.5335365853658538,
"grad_norm": 0.04649015076624045,
"learning_rate": 2.8406077499317912e-05,
"loss": 0.3971,
"step": 503
},
{
"epoch": 1.5365853658536586,
"grad_norm": 0.05228072608444702,
"learning_rate": 2.8318138182093052e-05,
"loss": 0.4525,
"step": 504
},
{
"epoch": 1.5396341463414633,
"grad_norm": 0.03764229090200894,
"learning_rate": 2.8230157052264127e-05,
"loss": 0.31,
"step": 505
},
{
"epoch": 1.5426829268292683,
"grad_norm": 0.04785158948760201,
"learning_rate": 2.8142135218501116e-05,
"loss": 0.4055,
"step": 506
},
{
"epoch": 1.5457317073170733,
"grad_norm": 0.04267301527731101,
"learning_rate": 2.8054073789986883e-05,
"loss": 0.4174,
"step": 507
},
{
"epoch": 1.548780487804878,
"grad_norm": 0.049619578175530116,
"learning_rate": 2.796597387640328e-05,
"loss": 0.431,
"step": 508
},
{
"epoch": 1.5518292682926829,
"grad_norm": 0.04530815374929139,
"learning_rate": 2.7877836587917072e-05,
"loss": 0.4632,
"step": 509
},
{
"epoch": 1.5548780487804879,
"grad_norm": 0.042163759856576145,
"learning_rate": 2.7789663035166034e-05,
"loss": 0.426,
"step": 510
},
{
"epoch": 1.5579268292682928,
"grad_norm": 0.04282500694359744,
"learning_rate": 2.770145432924489e-05,
"loss": 0.4057,
"step": 511
},
{
"epoch": 1.5609756097560976,
"grad_norm": 0.04368531663527112,
"learning_rate": 2.761321158169134e-05,
"loss": 0.4924,
"step": 512
},
{
"epoch": 1.5640243902439024,
"grad_norm": 0.04332755295462451,
"learning_rate": 2.7524935904472054e-05,
"loss": 0.3845,
"step": 513
},
{
"epoch": 1.5670731707317072,
"grad_norm": 0.0502150335523684,
"learning_rate": 2.7436628409968664e-05,
"loss": 0.416,
"step": 514
},
{
"epoch": 1.5701219512195121,
"grad_norm": 0.04826923090280151,
"learning_rate": 2.7348290210963723e-05,
"loss": 0.3989,
"step": 515
},
{
"epoch": 1.5731707317073171,
"grad_norm": 0.04054443877837543,
"learning_rate": 2.7259922420626705e-05,
"loss": 0.4146,
"step": 516
},
{
"epoch": 1.576219512195122,
"grad_norm": 0.04116187892553754,
"learning_rate": 2.7171526152499978e-05,
"loss": 0.3766,
"step": 517
},
{
"epoch": 1.5792682926829267,
"grad_norm": 0.04717761075694039,
"learning_rate": 2.708310252048476e-05,
"loss": 0.439,
"step": 518
},
{
"epoch": 1.5823170731707317,
"grad_norm": 0.045062272355279945,
"learning_rate": 2.6994652638827078e-05,
"loss": 0.3296,
"step": 519
},
{
"epoch": 1.5853658536585367,
"grad_norm": 0.04444154739237893,
"learning_rate": 2.6906177622103752e-05,
"loss": 0.4123,
"step": 520
},
{
"epoch": 1.5884146341463414,
"grad_norm": 0.046567991907138076,
"learning_rate": 2.6817678585208327e-05,
"loss": 0.3991,
"step": 521
},
{
"epoch": 1.5914634146341462,
"grad_norm": 0.043720022432954565,
"learning_rate": 2.672915664333704e-05,
"loss": 0.3864,
"step": 522
},
{
"epoch": 1.5945121951219512,
"grad_norm": 0.0514525696971225,
"learning_rate": 2.6640612911974733e-05,
"loss": 0.4243,
"step": 523
},
{
"epoch": 1.5975609756097562,
"grad_norm": 0.043857465570069955,
"learning_rate": 2.655204850688085e-05,
"loss": 0.4074,
"step": 524
},
{
"epoch": 1.600609756097561,
"grad_norm": 0.04552516973100716,
"learning_rate": 2.6463464544075344e-05,
"loss": 0.4147,
"step": 525
},
{
"epoch": 1.6036585365853657,
"grad_norm": 0.049473995207474815,
"learning_rate": 2.6374862139824618e-05,
"loss": 0.4709,
"step": 526
},
{
"epoch": 1.6067073170731707,
"grad_norm": 0.04820818030504909,
"learning_rate": 2.628624241062744e-05,
"loss": 0.4169,
"step": 527
},
{
"epoch": 1.6097560975609757,
"grad_norm": 0.050450825910037425,
"learning_rate": 2.6197606473200924e-05,
"loss": 0.5224,
"step": 528
},
{
"epoch": 1.6128048780487805,
"grad_norm": 0.04550000497484494,
"learning_rate": 2.610895544446641e-05,
"loss": 0.4492,
"step": 529
},
{
"epoch": 1.6158536585365852,
"grad_norm": 0.04200381471392651,
"learning_rate": 2.6020290441535417e-05,
"loss": 0.3978,
"step": 530
},
{
"epoch": 1.6189024390243902,
"grad_norm": 0.04822670808590297,
"learning_rate": 2.5931612581695537e-05,
"loss": 0.4099,
"step": 531
},
{
"epoch": 1.6219512195121952,
"grad_norm": 0.04500687842031658,
"learning_rate": 2.5842922982396395e-05,
"loss": 0.3042,
"step": 532
},
{
"epoch": 1.625,
"grad_norm": 0.05239766736226129,
"learning_rate": 2.5754222761235535e-05,
"loss": 0.4081,
"step": 533
},
{
"epoch": 1.6280487804878048,
"grad_norm": 0.04602743501218835,
"learning_rate": 2.566551303594437e-05,
"loss": 0.3982,
"step": 534
},
{
"epoch": 1.6310975609756098,
"grad_norm": 0.049006753443694435,
"learning_rate": 2.557679492437404e-05,
"loss": 0.4353,
"step": 535
},
{
"epoch": 1.6341463414634148,
"grad_norm": 0.04588961339707511,
"learning_rate": 2.54880695444814e-05,
"loss": 0.4979,
"step": 536
},
{
"epoch": 1.6371951219512195,
"grad_norm": 0.043562453539744625,
"learning_rate": 2.539933801431487e-05,
"loss": 0.4229,
"step": 537
},
{
"epoch": 1.6402439024390243,
"grad_norm": 0.04108293903835419,
"learning_rate": 2.5310601452000383e-05,
"loss": 0.3284,
"step": 538
},
{
"epoch": 1.6432926829268293,
"grad_norm": 0.04784461798032342,
"learning_rate": 2.5221860975727275e-05,
"loss": 0.416,
"step": 539
},
{
"epoch": 1.6463414634146343,
"grad_norm": 0.055611309808498224,
"learning_rate": 2.5133117703734212e-05,
"loss": 0.4033,
"step": 540
},
{
"epoch": 1.649390243902439,
"grad_norm": 0.042125329738103255,
"learning_rate": 2.504437275429508e-05,
"loss": 0.4342,
"step": 541
},
{
"epoch": 1.6524390243902438,
"grad_norm": 0.043957874421772736,
"learning_rate": 2.4955627245704918e-05,
"loss": 0.3941,
"step": 542
},
{
"epoch": 1.6554878048780488,
"grad_norm": 0.049378467127122065,
"learning_rate": 2.4866882296265794e-05,
"loss": 0.4344,
"step": 543
},
{
"epoch": 1.6585365853658538,
"grad_norm": 0.05139749501618102,
"learning_rate": 2.4778139024272724e-05,
"loss": 0.45,
"step": 544
},
{
"epoch": 1.6615853658536586,
"grad_norm": 0.04540333353423334,
"learning_rate": 2.468939854799962e-05,
"loss": 0.422,
"step": 545
},
{
"epoch": 1.6646341463414633,
"grad_norm": 0.04781977767818636,
"learning_rate": 2.4600661985685132e-05,
"loss": 0.4636,
"step": 546
},
{
"epoch": 1.6676829268292683,
"grad_norm": 0.04791256952355809,
"learning_rate": 2.4511930455518605e-05,
"loss": 0.3755,
"step": 547
},
{
"epoch": 1.6707317073170733,
"grad_norm": 0.04622700218481359,
"learning_rate": 2.4423205075625955e-05,
"loss": 0.4243,
"step": 548
},
{
"epoch": 1.673780487804878,
"grad_norm": 0.0535233632473163,
"learning_rate": 2.433448696405563e-05,
"loss": 0.4589,
"step": 549
},
{
"epoch": 1.6768292682926829,
"grad_norm": 0.04930305119098795,
"learning_rate": 2.424577723876446e-05,
"loss": 0.4351,
"step": 550
},
{
"epoch": 1.6798780487804879,
"grad_norm": 0.050525972340958514,
"learning_rate": 2.415707701760361e-05,
"loss": 0.3527,
"step": 551
},
{
"epoch": 1.6829268292682928,
"grad_norm": 0.04658338095694016,
"learning_rate": 2.4068387418304462e-05,
"loss": 0.419,
"step": 552
},
{
"epoch": 1.6859756097560976,
"grad_norm": 0.0459308673167424,
"learning_rate": 2.397970955846459e-05,
"loss": 0.3947,
"step": 553
},
{
"epoch": 1.6890243902439024,
"grad_norm": 0.04483255537595823,
"learning_rate": 2.3891044555533588e-05,
"loss": 0.3736,
"step": 554
},
{
"epoch": 1.6920731707317072,
"grad_norm": 0.045520663386516586,
"learning_rate": 2.380239352679908e-05,
"loss": 0.39,
"step": 555
},
{
"epoch": 1.6951219512195121,
"grad_norm": 0.05169918882299193,
"learning_rate": 2.3713757589372557e-05,
"loss": 0.5003,
"step": 556
},
{
"epoch": 1.6981707317073171,
"grad_norm": 0.0461102833224252,
"learning_rate": 2.3625137860175385e-05,
"loss": 0.4633,
"step": 557
},
{
"epoch": 1.701219512195122,
"grad_norm": 0.04613663732250752,
"learning_rate": 2.3536535455924655e-05,
"loss": 0.3678,
"step": 558
},
{
"epoch": 1.7042682926829267,
"grad_norm": 0.041673432713452745,
"learning_rate": 2.3447951493119152e-05,
"loss": 0.4118,
"step": 559
},
{
"epoch": 1.7073170731707317,
"grad_norm": 0.04431275177649695,
"learning_rate": 2.3359387088025266e-05,
"loss": 0.3673,
"step": 560
},
{
"epoch": 1.7103658536585367,
"grad_norm": 0.06367990054330565,
"learning_rate": 2.327084335666297e-05,
"loss": 0.4002,
"step": 561
},
{
"epoch": 1.7134146341463414,
"grad_norm": 0.048050644260211994,
"learning_rate": 2.3182321414791672e-05,
"loss": 0.4634,
"step": 562
},
{
"epoch": 1.7164634146341462,
"grad_norm": 0.04923233496697778,
"learning_rate": 2.309382237789625e-05,
"loss": 0.3871,
"step": 563
},
{
"epoch": 1.7195121951219512,
"grad_norm": 0.05364103691899248,
"learning_rate": 2.300534736117292e-05,
"loss": 0.4803,
"step": 564
},
{
"epoch": 1.7225609756097562,
"grad_norm": 0.048579593446969827,
"learning_rate": 2.2916897479515244e-05,
"loss": 0.4196,
"step": 565
},
{
"epoch": 1.725609756097561,
"grad_norm": 0.0461136811151973,
"learning_rate": 2.2828473847500024e-05,
"loss": 0.4513,
"step": 566
},
{
"epoch": 1.7286585365853657,
"grad_norm": 0.04940628622994503,
"learning_rate": 2.27400775793733e-05,
"loss": 0.4208,
"step": 567
},
{
"epoch": 1.7317073170731707,
"grad_norm": 0.04972357687250847,
"learning_rate": 2.265170978903628e-05,
"loss": 0.3931,
"step": 568
},
{
"epoch": 1.7347560975609757,
"grad_norm": 0.0444698806113803,
"learning_rate": 2.256337159003134e-05,
"loss": 0.435,
"step": 569
},
{
"epoch": 1.7378048780487805,
"grad_norm": 0.04255075307190636,
"learning_rate": 2.2475064095527948e-05,
"loss": 0.3906,
"step": 570
},
{
"epoch": 1.7408536585365852,
"grad_norm": 0.04535567324227825,
"learning_rate": 2.238678841830867e-05,
"loss": 0.3901,
"step": 571
},
{
"epoch": 1.7439024390243902,
"grad_norm": 0.041821101755344973,
"learning_rate": 2.229854567075511e-05,
"loss": 0.3818,
"step": 572
},
{
"epoch": 1.7469512195121952,
"grad_norm": 0.04572808806362685,
"learning_rate": 2.2210336964833968e-05,
"loss": 0.4141,
"step": 573
},
{
"epoch": 1.75,
"grad_norm": 0.04622786206882963,
"learning_rate": 2.2122163412082927e-05,
"loss": 0.5004,
"step": 574
},
{
"epoch": 1.7530487804878048,
"grad_norm": 0.041755188750875076,
"learning_rate": 2.203402612359673e-05,
"loss": 0.369,
"step": 575
},
{
"epoch": 1.7560975609756098,
"grad_norm": 0.04754782156490283,
"learning_rate": 2.1945926210013112e-05,
"loss": 0.4536,
"step": 576
},
{
"epoch": 1.7591463414634148,
"grad_norm": 0.050433137791283965,
"learning_rate": 2.185786478149889e-05,
"loss": 0.4506,
"step": 577
},
{
"epoch": 1.7621951219512195,
"grad_norm": 0.04384522351312468,
"learning_rate": 2.1769842947735876e-05,
"loss": 0.3789,
"step": 578
},
{
"epoch": 1.7652439024390243,
"grad_norm": 0.051693646701183,
"learning_rate": 2.1681861817906954e-05,
"loss": 0.4029,
"step": 579
},
{
"epoch": 1.7682926829268293,
"grad_norm": 0.04878335306364114,
"learning_rate": 2.1593922500682087e-05,
"loss": 0.424,
"step": 580
},
{
"epoch": 1.7713414634146343,
"grad_norm": 0.050862021634430564,
"learning_rate": 2.150602610420437e-05,
"loss": 0.3739,
"step": 581
},
{
"epoch": 1.774390243902439,
"grad_norm": 0.04605607998701594,
"learning_rate": 2.141817373607601e-05,
"loss": 0.4437,
"step": 582
},
{
"epoch": 1.7774390243902438,
"grad_norm": 0.051381647631965954,
"learning_rate": 2.1330366503344415e-05,
"loss": 0.3766,
"step": 583
},
{
"epoch": 1.7804878048780488,
"grad_norm": 0.05272844434906908,
"learning_rate": 2.1242605512488248e-05,
"loss": 0.3929,
"step": 584
},
{
"epoch": 1.7835365853658538,
"grad_norm": 0.05311986437136711,
"learning_rate": 2.1154891869403435e-05,
"loss": 0.4092,
"step": 585
},
{
"epoch": 1.7865853658536586,
"grad_norm": 0.05502515824691199,
"learning_rate": 2.1067226679389306e-05,
"loss": 0.4104,
"step": 586
},
{
"epoch": 1.7896341463414633,
"grad_norm": 0.044502014868971776,
"learning_rate": 2.097961104713458e-05,
"loss": 0.3761,
"step": 587
},
{
"epoch": 1.7926829268292683,
"grad_norm": 0.04774171126141972,
"learning_rate": 2.089204607670352e-05,
"loss": 0.4288,
"step": 588
},
{
"epoch": 1.7957317073170733,
"grad_norm": 0.045225558382394004,
"learning_rate": 2.080453287152196e-05,
"loss": 0.4577,
"step": 589
},
{
"epoch": 1.798780487804878,
"grad_norm": 0.04835401158227742,
"learning_rate": 2.0717072534363456e-05,
"loss": 0.4534,
"step": 590
},
{
"epoch": 1.8018292682926829,
"grad_norm": 0.045025439868481315,
"learning_rate": 2.0629666167335344e-05,
"loss": 0.4066,
"step": 591
},
{
"epoch": 1.8048780487804879,
"grad_norm": 0.049456094850128966,
"learning_rate": 2.0542314871864877e-05,
"loss": 0.3749,
"step": 592
},
{
"epoch": 1.8079268292682928,
"grad_norm": 0.04201275484991142,
"learning_rate": 2.0455019748685322e-05,
"loss": 0.3862,
"step": 593
},
{
"epoch": 1.8109756097560976,
"grad_norm": 0.04819807866358233,
"learning_rate": 2.0367781897822147e-05,
"loss": 0.4145,
"step": 594
},
{
"epoch": 1.8140243902439024,
"grad_norm": 0.045799317561101595,
"learning_rate": 2.0280602418579074e-05,
"loss": 0.3821,
"step": 595
},
{
"epoch": 1.8170731707317072,
"grad_norm": 0.04751188506756341,
"learning_rate": 2.01934824095243e-05,
"loss": 0.4254,
"step": 596
},
{
"epoch": 1.8201219512195121,
"grad_norm": 0.04593743100660145,
"learning_rate": 2.0106422968476606e-05,
"loss": 0.3731,
"step": 597
},
{
"epoch": 1.8231707317073171,
"grad_norm": 0.04921016078030088,
"learning_rate": 2.0019425192491563e-05,
"loss": 0.3503,
"step": 598
},
{
"epoch": 1.826219512195122,
"grad_norm": 0.045878356807498875,
"learning_rate": 1.993249017784766e-05,
"loss": 0.3802,
"step": 599
},
{
"epoch": 1.8292682926829267,
"grad_norm": 0.05193684501902608,
"learning_rate": 1.9845619020032553e-05,
"loss": 0.4092,
"step": 600
},
{
"epoch": 1.8323170731707317,
"grad_norm": 0.05027784444090783,
"learning_rate": 1.975881281372917e-05,
"loss": 0.4722,
"step": 601
},
{
"epoch": 1.8353658536585367,
"grad_norm": 0.04514834962956475,
"learning_rate": 1.9672072652802036e-05,
"loss": 0.46,
"step": 602
},
{
"epoch": 1.8384146341463414,
"grad_norm": 0.05451938225894702,
"learning_rate": 1.958539963028337e-05,
"loss": 0.4368,
"step": 603
},
{
"epoch": 1.8414634146341462,
"grad_norm": 0.040106913103446724,
"learning_rate": 1.949879483835939e-05,
"loss": 0.3257,
"step": 604
},
{
"epoch": 1.8445121951219512,
"grad_norm": 0.04926311870177447,
"learning_rate": 1.9412259368356507e-05,
"loss": 0.4183,
"step": 605
},
{
"epoch": 1.8475609756097562,
"grad_norm": 0.04422209752149198,
"learning_rate": 1.9325794310727627e-05,
"loss": 0.3639,
"step": 606
},
{
"epoch": 1.850609756097561,
"grad_norm": 0.048679581480980755,
"learning_rate": 1.923940075503833e-05,
"loss": 0.4157,
"step": 607
},
{
"epoch": 1.8536585365853657,
"grad_norm": 0.05499776323028079,
"learning_rate": 1.9153079789953217e-05,
"loss": 0.4656,
"step": 608
},
{
"epoch": 1.8567073170731707,
"grad_norm": 0.06922562178056044,
"learning_rate": 1.9066832503222128e-05,
"loss": 0.3813,
"step": 609
},
{
"epoch": 1.8597560975609757,
"grad_norm": 0.04395445795822402,
"learning_rate": 1.898065998166651e-05,
"loss": 0.3411,
"step": 610
},
{
"epoch": 1.8628048780487805,
"grad_norm": 0.04207124907862645,
"learning_rate": 1.8894563311165626e-05,
"loss": 0.3592,
"step": 611
},
{
"epoch": 1.8658536585365852,
"grad_norm": 0.05054981775482249,
"learning_rate": 1.8808543576642963e-05,
"loss": 0.4706,
"step": 612
},
{
"epoch": 1.8689024390243902,
"grad_norm": 0.05019817217403753,
"learning_rate": 1.872260186205249e-05,
"loss": 0.4213,
"step": 613
},
{
"epoch": 1.8719512195121952,
"grad_norm": 0.0568861338806866,
"learning_rate": 1.863673925036506e-05,
"loss": 0.3852,
"step": 614
},
{
"epoch": 1.875,
"grad_norm": 0.04821125446126471,
"learning_rate": 1.8550956823554707e-05,
"loss": 0.4304,
"step": 615
},
{
"epoch": 1.8780487804878048,
"grad_norm": 0.047092779650319404,
"learning_rate": 1.8465255662585053e-05,
"loss": 0.4227,
"step": 616
},
{
"epoch": 1.8810975609756098,
"grad_norm": 0.056212137174365545,
"learning_rate": 1.8379636847395655e-05,
"loss": 0.4862,
"step": 617
},
{
"epoch": 1.8841463414634148,
"grad_norm": 0.057774981328737894,
"learning_rate": 1.829410145688843e-05,
"loss": 0.452,
"step": 618
},
{
"epoch": 1.8871951219512195,
"grad_norm": 0.053132370814335673,
"learning_rate": 1.8208650568914033e-05,
"loss": 0.4183,
"step": 619
},
{
"epoch": 1.8902439024390243,
"grad_norm": 0.04462487543154932,
"learning_rate": 1.812328526025829e-05,
"loss": 0.3585,
"step": 620
},
{
"epoch": 1.8932926829268293,
"grad_norm": 0.04333309998936559,
"learning_rate": 1.80380066066286e-05,
"loss": 0.4107,
"step": 621
},
{
"epoch": 1.8963414634146343,
"grad_norm": 0.05489562977125714,
"learning_rate": 1.795281568264044e-05,
"loss": 0.396,
"step": 622
},
{
"epoch": 1.899390243902439,
"grad_norm": 0.048000398098398726,
"learning_rate": 1.7867713561803758e-05,
"loss": 0.4085,
"step": 623
},
{
"epoch": 1.9024390243902438,
"grad_norm": 0.048282605255221815,
"learning_rate": 1.778270131650948e-05,
"loss": 0.3363,
"step": 624
},
{
"epoch": 1.9054878048780488,
"grad_norm": 0.05716822504858685,
"learning_rate": 1.7697780018015985e-05,
"loss": 0.4386,
"step": 625
},
{
"epoch": 1.9085365853658538,
"grad_norm": 0.04794543022496826,
"learning_rate": 1.761295073643564e-05,
"loss": 0.3821,
"step": 626
},
{
"epoch": 1.9115853658536586,
"grad_norm": 0.053218946583905274,
"learning_rate": 1.752821454072124e-05,
"loss": 0.4508,
"step": 627
},
{
"epoch": 1.9146341463414633,
"grad_norm": 0.049908536883833524,
"learning_rate": 1.7443572498652626e-05,
"loss": 0.3659,
"step": 628
},
{
"epoch": 1.9176829268292683,
"grad_norm": 0.04638106740630859,
"learning_rate": 1.735902567682315e-05,
"loss": 0.391,
"step": 629
},
{
"epoch": 1.9207317073170733,
"grad_norm": 0.040265654554186424,
"learning_rate": 1.7274575140626318e-05,
"loss": 0.3738,
"step": 630
},
{
"epoch": 1.923780487804878,
"grad_norm": 0.04596084571609373,
"learning_rate": 1.719022195424227e-05,
"loss": 0.3629,
"step": 631
},
{
"epoch": 1.9268292682926829,
"grad_norm": 0.045024822777817844,
"learning_rate": 1.710596718062446e-05,
"loss": 0.4327,
"step": 632
},
{
"epoch": 1.9298780487804879,
"grad_norm": 0.04732414273870265,
"learning_rate": 1.7021811881486186e-05,
"loss": 0.4537,
"step": 633
},
{
"epoch": 1.9329268292682928,
"grad_norm": 0.059279293870337126,
"learning_rate": 1.6937757117287278e-05,
"loss": 0.384,
"step": 634
},
{
"epoch": 1.9359756097560976,
"grad_norm": 0.054739792171745454,
"learning_rate": 1.685380394722068e-05,
"loss": 0.4748,
"step": 635
},
{
"epoch": 1.9390243902439024,
"grad_norm": 0.05392035736905421,
"learning_rate": 1.676995342919914e-05,
"loss": 0.4268,
"step": 636
},
{
"epoch": 1.9420731707317072,
"grad_norm": 0.054225627701012045,
"learning_rate": 1.6686206619841844e-05,
"loss": 0.4547,
"step": 637
},
{
"epoch": 1.9451219512195121,
"grad_norm": 0.04760347853249887,
"learning_rate": 1.6602564574461154e-05,
"loss": 0.363,
"step": 638
},
{
"epoch": 1.9481707317073171,
"grad_norm": 0.04725199886274887,
"learning_rate": 1.651902834704924e-05,
"loss": 0.4266,
"step": 639
},
{
"epoch": 1.951219512195122,
"grad_norm": 0.04599398148342138,
"learning_rate": 1.6435598990264877e-05,
"loss": 0.4332,
"step": 640
},
{
"epoch": 1.9542682926829267,
"grad_norm": 0.04607811862896777,
"learning_rate": 1.635227755542009e-05,
"loss": 0.3435,
"step": 641
},
{
"epoch": 1.9573170731707317,
"grad_norm": 0.04753790065032661,
"learning_rate": 1.626906509246701e-05,
"loss": 0.3416,
"step": 642
},
{
"epoch": 1.9603658536585367,
"grad_norm": 0.042068522853175805,
"learning_rate": 1.6185962649984542e-05,
"loss": 0.3723,
"step": 643
},
{
"epoch": 1.9634146341463414,
"grad_norm": 0.05315456275684181,
"learning_rate": 1.6102971275165228e-05,
"loss": 0.4321,
"step": 644
},
{
"epoch": 1.9664634146341462,
"grad_norm": 0.04863243472200057,
"learning_rate": 1.6020092013802003e-05,
"loss": 0.4262,
"step": 645
},
{
"epoch": 1.9695121951219512,
"grad_norm": 0.04645463781107319,
"learning_rate": 1.593732591027506e-05,
"loss": 0.4141,
"step": 646
},
{
"epoch": 1.9725609756097562,
"grad_norm": 0.04630974142223754,
"learning_rate": 1.585467400753863e-05,
"loss": 0.4527,
"step": 647
},
{
"epoch": 1.975609756097561,
"grad_norm": 0.04647892123563765,
"learning_rate": 1.57721373471079e-05,
"loss": 0.3633,
"step": 648
},
{
"epoch": 1.9786585365853657,
"grad_norm": 0.054863057319935696,
"learning_rate": 1.5689716969045848e-05,
"loss": 0.4567,
"step": 649
},
{
"epoch": 1.9817073170731707,
"grad_norm": 0.047476319487593684,
"learning_rate": 1.5607413911950177e-05,
"loss": 0.3634,
"step": 650
},
{
"epoch": 1.9847560975609757,
"grad_norm": 0.05498886155793677,
"learning_rate": 1.5525229212940167e-05,
"loss": 0.4834,
"step": 651
},
{
"epoch": 1.9878048780487805,
"grad_norm": 0.04146670137787259,
"learning_rate": 1.544316390764367e-05,
"loss": 0.3592,
"step": 652
},
{
"epoch": 1.9908536585365852,
"grad_norm": 0.04448592709004556,
"learning_rate": 1.5361219030184005e-05,
"loss": 0.3881,
"step": 653
},
{
"epoch": 1.9939024390243902,
"grad_norm": 0.04921396944888872,
"learning_rate": 1.5279395613166986e-05,
"loss": 0.3893,
"step": 654
},
{
"epoch": 1.9969512195121952,
"grad_norm": 0.04399703962269756,
"learning_rate": 1.519769468766784e-05,
"loss": 0.3972,
"step": 655
},
{
"epoch": 2.0,
"grad_norm": 0.05225803349065435,
"learning_rate": 1.5116117283218284e-05,
"loss": 0.3982,
"step": 656
},
{
"epoch": 2.0030487804878048,
"grad_norm": 0.047057971420935575,
"learning_rate": 1.5034664427793485e-05,
"loss": 0.3979,
"step": 657
},
{
"epoch": 2.0060975609756095,
"grad_norm": 0.039924127005037634,
"learning_rate": 1.495333714779918e-05,
"loss": 0.3487,
"step": 658
},
{
"epoch": 2.0091463414634148,
"grad_norm": 0.05227671668342273,
"learning_rate": 1.4872136468058661e-05,
"loss": 0.4956,
"step": 659
},
{
"epoch": 2.0121951219512195,
"grad_norm": 0.04465904605331701,
"learning_rate": 1.4791063411799938e-05,
"loss": 0.3784,
"step": 660
},
{
"epoch": 2.0152439024390243,
"grad_norm": 0.04843350015064368,
"learning_rate": 1.471011900064277e-05,
"loss": 0.4202,
"step": 661
},
{
"epoch": 2.018292682926829,
"grad_norm": 0.04711367212265629,
"learning_rate": 1.4629304254585874e-05,
"loss": 0.4296,
"step": 662
},
{
"epoch": 2.0213414634146343,
"grad_norm": 0.051282377670104844,
"learning_rate": 1.4548620191994e-05,
"loss": 0.405,
"step": 663
},
{
"epoch": 2.024390243902439,
"grad_norm": 0.04516937696168246,
"learning_rate": 1.4468067829585108e-05,
"loss": 0.3849,
"step": 664
},
{
"epoch": 2.027439024390244,
"grad_norm": 0.10404174025420722,
"learning_rate": 1.4387648182417634e-05,
"loss": 0.3091,
"step": 665
},
{
"epoch": 2.0304878048780486,
"grad_norm": 0.04851496450594152,
"learning_rate": 1.4307362263877582e-05,
"loss": 0.3326,
"step": 666
},
{
"epoch": 2.033536585365854,
"grad_norm": 0.04897987402822538,
"learning_rate": 1.4227211085665843e-05,
"loss": 0.3917,
"step": 667
},
{
"epoch": 2.0365853658536586,
"grad_norm": 0.05250855661884853,
"learning_rate": 1.4147195657785394e-05,
"loss": 0.413,
"step": 668
},
{
"epoch": 2.0396341463414633,
"grad_norm": 0.04485267998459715,
"learning_rate": 1.4067316988528617e-05,
"loss": 0.414,
"step": 669
},
{
"epoch": 2.042682926829268,
"grad_norm": 0.05050211588874968,
"learning_rate": 1.3987576084464544e-05,
"loss": 0.4114,
"step": 670
},
{
"epoch": 2.0457317073170733,
"grad_norm": 0.049062688965448895,
"learning_rate": 1.3907973950426223e-05,
"loss": 0.436,
"step": 671
},
{
"epoch": 2.048780487804878,
"grad_norm": 0.04934816967138291,
"learning_rate": 1.3828511589497979e-05,
"loss": 0.3806,
"step": 672
},
{
"epoch": 2.051829268292683,
"grad_norm": 0.04846301665690881,
"learning_rate": 1.37491900030029e-05,
"loss": 0.3782,
"step": 673
},
{
"epoch": 2.0548780487804876,
"grad_norm": 0.0487751457696009,
"learning_rate": 1.3670010190490073e-05,
"loss": 0.3684,
"step": 674
},
{
"epoch": 2.057926829268293,
"grad_norm": 0.05011236219413779,
"learning_rate": 1.3590973149722103e-05,
"loss": 0.4557,
"step": 675
},
{
"epoch": 2.0609756097560976,
"grad_norm": 0.0433306137614156,
"learning_rate": 1.3512079876662476e-05,
"loss": 0.398,
"step": 676
},
{
"epoch": 2.0640243902439024,
"grad_norm": 0.04720276909475184,
"learning_rate": 1.3433331365463042e-05,
"loss": 0.4615,
"step": 677
},
{
"epoch": 2.067073170731707,
"grad_norm": 0.051702815490710055,
"learning_rate": 1.3354728608451462e-05,
"loss": 0.4537,
"step": 678
},
{
"epoch": 2.0701219512195124,
"grad_norm": 0.049061114526012986,
"learning_rate": 1.327627259611873e-05,
"loss": 0.3737,
"step": 679
},
{
"epoch": 2.073170731707317,
"grad_norm": 0.05016460137745102,
"learning_rate": 1.3197964317106645e-05,
"loss": 0.4094,
"step": 680
},
{
"epoch": 2.076219512195122,
"grad_norm": 0.04496857184174542,
"learning_rate": 1.3119804758195443e-05,
"loss": 0.3212,
"step": 681
},
{
"epoch": 2.0792682926829267,
"grad_norm": 0.04565158070603903,
"learning_rate": 1.3041794904291244e-05,
"loss": 0.3478,
"step": 682
},
{
"epoch": 2.082317073170732,
"grad_norm": 0.05118300622487708,
"learning_rate": 1.2963935738413737e-05,
"loss": 0.4798,
"step": 683
},
{
"epoch": 2.0853658536585367,
"grad_norm": 0.041350376374063275,
"learning_rate": 1.2886228241683749e-05,
"loss": 0.3147,
"step": 684
},
{
"epoch": 2.0884146341463414,
"grad_norm": 0.05145828794964563,
"learning_rate": 1.2808673393310888e-05,
"loss": 0.4323,
"step": 685
},
{
"epoch": 2.091463414634146,
"grad_norm": 0.046360376128903494,
"learning_rate": 1.2731272170581204e-05,
"loss": 0.4051,
"step": 686
},
{
"epoch": 2.0945121951219514,
"grad_norm": 0.04566477398251874,
"learning_rate": 1.26540255488449e-05,
"loss": 0.4433,
"step": 687
},
{
"epoch": 2.097560975609756,
"grad_norm": 0.04654212012479589,
"learning_rate": 1.2576934501503957e-05,
"loss": 0.3933,
"step": 688
},
{
"epoch": 2.100609756097561,
"grad_norm": 0.044474358744242146,
"learning_rate": 1.2500000000000006e-05,
"loss": 0.3542,
"step": 689
},
{
"epoch": 2.1036585365853657,
"grad_norm": 0.08086234904184278,
"learning_rate": 1.2423223013801946e-05,
"loss": 0.4152,
"step": 690
},
{
"epoch": 2.106707317073171,
"grad_norm": 0.04578439970593931,
"learning_rate": 1.2346604510393821e-05,
"loss": 0.3802,
"step": 691
},
{
"epoch": 2.1097560975609757,
"grad_norm": 0.045355970498272065,
"learning_rate": 1.2270145455262594e-05,
"loss": 0.4265,
"step": 692
},
{
"epoch": 2.1128048780487805,
"grad_norm": 0.046546974689879166,
"learning_rate": 1.2193846811885978e-05,
"loss": 0.4156,
"step": 693
},
{
"epoch": 2.1158536585365852,
"grad_norm": 0.04928163113897007,
"learning_rate": 1.2117709541720306e-05,
"loss": 0.4878,
"step": 694
},
{
"epoch": 2.1189024390243905,
"grad_norm": 0.04679052446479647,
"learning_rate": 1.204173460418842e-05,
"loss": 0.495,
"step": 695
},
{
"epoch": 2.1219512195121952,
"grad_norm": 0.060718380403180575,
"learning_rate": 1.1965922956667536e-05,
"loss": 0.4401,
"step": 696
},
{
"epoch": 2.125,
"grad_norm": 0.04352857277050566,
"learning_rate": 1.1890275554477278e-05,
"loss": 0.4842,
"step": 697
},
{
"epoch": 2.1280487804878048,
"grad_norm": 0.047483675913447015,
"learning_rate": 1.1814793350867511e-05,
"loss": 0.4075,
"step": 698
},
{
"epoch": 2.1310975609756095,
"grad_norm": 0.044241685897127424,
"learning_rate": 1.173947729700644e-05,
"loss": 0.4077,
"step": 699
},
{
"epoch": 2.1341463414634148,
"grad_norm": 0.04856861739756317,
"learning_rate": 1.1664328341968564e-05,
"loss": 0.4312,
"step": 700
},
{
"epoch": 2.1371951219512195,
"grad_norm": 0.04330635555839946,
"learning_rate": 1.1589347432722734e-05,
"loss": 0.3697,
"step": 701
},
{
"epoch": 2.1402439024390243,
"grad_norm": 0.047497521607233716,
"learning_rate": 1.1514535514120217e-05,
"loss": 0.4053,
"step": 702
},
{
"epoch": 2.143292682926829,
"grad_norm": 0.06097670818580457,
"learning_rate": 1.1439893528882803e-05,
"loss": 0.471,
"step": 703
},
{
"epoch": 2.1463414634146343,
"grad_norm": 0.04556035518561954,
"learning_rate": 1.1365422417590878e-05,
"loss": 0.4122,
"step": 704
},
{
"epoch": 2.149390243902439,
"grad_norm": 0.052903392746045705,
"learning_rate": 1.1291123118671665e-05,
"loss": 0.3995,
"step": 705
},
{
"epoch": 2.152439024390244,
"grad_norm": 0.04777545800892784,
"learning_rate": 1.1216996568387284e-05,
"loss": 0.4108,
"step": 706
},
{
"epoch": 2.1554878048780486,
"grad_norm": 0.04820369744975627,
"learning_rate": 1.1143043700823039e-05,
"loss": 0.5138,
"step": 707
},
{
"epoch": 2.158536585365854,
"grad_norm": 0.04305534978570109,
"learning_rate": 1.1069265447875618e-05,
"loss": 0.3452,
"step": 708
},
{
"epoch": 2.1615853658536586,
"grad_norm": 0.049468850693703,
"learning_rate": 1.0995662739241347e-05,
"loss": 0.4621,
"step": 709
},
{
"epoch": 2.1646341463414633,
"grad_norm": 0.051986125434277566,
"learning_rate": 1.092223650240448e-05,
"loss": 0.3518,
"step": 710
},
{
"epoch": 2.167682926829268,
"grad_norm": 0.045548926482217825,
"learning_rate": 1.0848987662625515e-05,
"loss": 0.3681,
"step": 711
},
{
"epoch": 2.1707317073170733,
"grad_norm": 0.051992076545282505,
"learning_rate": 1.0775917142929501e-05,
"loss": 0.4154,
"step": 712
},
{
"epoch": 2.173780487804878,
"grad_norm": 0.048057122442053686,
"learning_rate": 1.0703025864094494e-05,
"loss": 0.4414,
"step": 713
},
{
"epoch": 2.176829268292683,
"grad_norm": 0.05323915865529368,
"learning_rate": 1.063031474463983e-05, |
|
"loss": 0.409, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.1798780487804876, |
|
"grad_norm": 0.04632056825317641, |
|
"learning_rate": 1.0557784700814663e-05, |
|
"loss": 0.3771, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.182926829268293, |
|
"grad_norm": 0.041087767138815914, |
|
"learning_rate": 1.0485436646586349e-05, |
|
"loss": 0.3533, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.1859756097560976, |
|
"grad_norm": 0.04858659822072648, |
|
"learning_rate": 1.0413271493628965e-05, |
|
"loss": 0.3772, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.1890243902439024, |
|
"grad_norm": 0.04566631819528302, |
|
"learning_rate": 1.03412901513118e-05, |
|
"loss": 0.4482, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.192073170731707, |
|
"grad_norm": 0.05591022083048744, |
|
"learning_rate": 1.0269493526687915e-05, |
|
"loss": 0.4326, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.1951219512195124, |
|
"grad_norm": 0.04838645871812315, |
|
"learning_rate": 1.0197882524482669e-05, |
|
"loss": 0.3609, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.198170731707317, |
|
"grad_norm": 0.047677260894461095, |
|
"learning_rate": 1.0126458047082405e-05, |
|
"loss": 0.4344, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.201219512195122, |
|
"grad_norm": 0.049886331786466606, |
|
"learning_rate": 1.0055220994522977e-05, |
|
"loss": 0.4234, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.2042682926829267, |
|
"grad_norm": 0.05109177654726143, |
|
"learning_rate": 9.984172264478476e-06, |
|
"loss": 0.4465, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.207317073170732, |
|
"grad_norm": 0.05367217987712921, |
|
"learning_rate": 9.913312752249903e-06, |
|
"loss": 0.472, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.2103658536585367, |
|
"grad_norm": 0.04842354910885158, |
|
"learning_rate": 9.842643350753877e-06, |
|
"loss": 0.3958, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.2134146341463414, |
|
"grad_norm": 0.04554936876201704, |
|
"learning_rate": 9.772164950511387e-06, |
|
"loss": 0.3897, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.216463414634146, |
|
"grad_norm": 0.051586649959186465, |
|
"learning_rate": 9.701878439636586e-06, |
|
"loss": 0.434, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.2195121951219514, |
|
"grad_norm": 0.047742332583085245, |
|
"learning_rate": 9.631784703825547e-06, |
|
"loss": 0.4433, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.222560975609756, |
|
"grad_norm": 0.04567788571876452, |
|
"learning_rate": 9.561884626345205e-06, |
|
"loss": 0.4215, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.225609756097561, |
|
"grad_norm": 0.04315451101759397, |
|
"learning_rate": 9.492179088022102e-06, |
|
"loss": 0.3366, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.2286585365853657, |
|
"grad_norm": 0.0488469738860539, |
|
"learning_rate": 9.422668967231379e-06, |
|
"loss": 0.4103, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.231707317073171, |
|
"grad_norm": 0.06002146852937906, |
|
"learning_rate": 9.353355139885673e-06, |
|
"loss": 0.4298, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.2347560975609757, |
|
"grad_norm": 0.044891336885385796, |
|
"learning_rate": 9.284238479424081e-06, |
|
"loss": 0.3422, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.2378048780487805, |
|
"grad_norm": 0.04393420507675689, |
|
"learning_rate": 9.215319856801158e-06, |
|
"loss": 0.3937, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.2408536585365852, |
|
"grad_norm": 0.047101192915488074, |
|
"learning_rate": 9.146600140475945e-06, |
|
"loss": 0.4188, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.2439024390243905, |
|
"grad_norm": 0.058962750452763446, |
|
"learning_rate": 9.078080196400985e-06, |
|
"loss": 0.4283, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.2469512195121952, |
|
"grad_norm": 0.051835093932849644, |
|
"learning_rate": 9.009760888011512e-06, |
|
"loss": 0.4219, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.049881513783743586, |
|
"learning_rate": 8.941643076214437e-06, |
|
"loss": 0.3981, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.2530487804878048, |
|
"grad_norm": 0.04685764539849008, |
|
"learning_rate": 8.873727619377611e-06, |
|
"loss": 0.4235, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.2560975609756095, |
|
"grad_norm": 0.05463586193578815, |
|
"learning_rate": 8.80601537331895e-06, |
|
"loss": 0.4941, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.2591463414634148, |
|
"grad_norm": 0.047851225684963736, |
|
"learning_rate": 8.73850719129566e-06, |
|
"loss": 0.4333, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.2621951219512195, |
|
"grad_norm": 0.04784242396571518, |
|
"learning_rate": 8.671203923993501e-06, |
|
"loss": 0.4482, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.2652439024390243, |
|
"grad_norm": 0.04804999183223516, |
|
"learning_rate": 8.604106419516062e-06, |
|
"loss": 0.3663, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.2682926829268295, |
|
"grad_norm": 0.05381821577825733, |
|
"learning_rate": 8.537215523374038e-06, |
|
"loss": 0.4166, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.2713414634146343, |
|
"grad_norm": 0.04969839859509168, |
|
"learning_rate": 8.470532078474635e-06, |
|
"loss": 0.4872, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.274390243902439, |
|
"grad_norm": 0.04271271764381146, |
|
"learning_rate": 8.404056925110914e-06, |
|
"loss": 0.4392, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.277439024390244, |
|
"grad_norm": 0.04992579157152129, |
|
"learning_rate": 8.3377909009512e-06, |
|
"loss": 0.3657, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.2804878048780486, |
|
"grad_norm": 0.04818703661052087, |
|
"learning_rate": 8.271734841028553e-06, |
|
"loss": 0.3295, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.283536585365854, |
|
"grad_norm": 0.044986665548360116, |
|
"learning_rate": 8.20588957773018e-06, |
|
"loss": 0.3813, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.2865853658536586, |
|
"grad_norm": 0.04234902786859893, |
|
"learning_rate": 8.14025594078706e-06, |
|
"loss": 0.3131, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.2896341463414633, |
|
"grad_norm": 0.045016594884271365, |
|
"learning_rate": 8.074834757263361e-06, |
|
"loss": 0.3419, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.292682926829268, |
|
"grad_norm": 0.047781975300300185, |
|
"learning_rate": 8.00962685154611e-06, |
|
"loss": 0.3678, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.2957317073170733, |
|
"grad_norm": 0.06553586697371824, |
|
"learning_rate": 7.944633045334762e-06, |
|
"loss": 0.4017, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.298780487804878, |
|
"grad_norm": 0.04606601862679692, |
|
"learning_rate": 7.879854157630861e-06, |
|
"loss": 0.3969, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.301829268292683, |
|
"grad_norm": 0.04300968673061053, |
|
"learning_rate": 7.815291004727713e-06, |
|
"loss": 0.3166, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.3048780487804876, |
|
"grad_norm": 0.04765947333057751, |
|
"learning_rate": 7.750944400200102e-06, |
|
"loss": 0.3845, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.307926829268293, |
|
"grad_norm": 0.04497924678023646, |
|
"learning_rate": 7.68681515489402e-06, |
|
"loss": 0.4085, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.3109756097560976, |
|
"grad_norm": 0.05015532439252871, |
|
"learning_rate": 7.622904076916509e-06, |
|
"loss": 0.4564, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.3140243902439024, |
|
"grad_norm": 0.042988505749526125, |
|
"learning_rate": 7.5592119716253855e-06, |
|
"loss": 0.3383, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.317073170731707, |
|
"grad_norm": 0.04305861474154292, |
|
"learning_rate": 7.495739641619176e-06, |
|
"loss": 0.4107, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.3201219512195124, |
|
"grad_norm": 0.04920016219288475, |
|
"learning_rate": 7.432487886726955e-06, |
|
"loss": 0.4005, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.323170731707317, |
|
"grad_norm": 0.048799915611591145, |
|
"learning_rate": 7.369457503998287e-06, |
|
"loss": 0.4202, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.326219512195122, |
|
"grad_norm": 0.044791697449636486, |
|
"learning_rate": 7.3066492876931705e-06, |
|
"loss": 0.4262, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.3292682926829267, |
|
"grad_norm": 0.04249567961276802, |
|
"learning_rate": 7.244064029272049e-06, |
|
"loss": 0.3719, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.332317073170732, |
|
"grad_norm": 0.051851485763910736, |
|
"learning_rate": 7.181702517385788e-06, |
|
"loss": 0.3861, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.3353658536585367, |
|
"grad_norm": 0.044010285547474354, |
|
"learning_rate": 7.119565537865827e-06, |
|
"loss": 0.367, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.3384146341463414, |
|
"grad_norm": 0.043359373882457634, |
|
"learning_rate": 7.057653873714171e-06, |
|
"loss": 0.4148, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.341463414634146, |
|
"grad_norm": 0.05279502635927622, |
|
"learning_rate": 6.995968305093603e-06, |
|
"loss": 0.3696, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.3445121951219514, |
|
"grad_norm": 0.04883108675178682, |
|
"learning_rate": 6.934509609317821e-06, |
|
"loss": 0.402, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.347560975609756, |
|
"grad_norm": 0.043977626763723576, |
|
"learning_rate": 6.8732785608416476e-06, |
|
"loss": 0.3727, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.350609756097561, |
|
"grad_norm": 0.04588882631244444, |
|
"learning_rate": 6.812275931251269e-06, |
|
"loss": 0.3991, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.3536585365853657, |
|
"grad_norm": 0.04496378231139958, |
|
"learning_rate": 6.751502489254521e-06, |
|
"loss": 0.3228, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.3567073170731705, |
|
"grad_norm": 0.04808438680353696, |
|
"learning_rate": 6.6909590006711655e-06, |
|
"loss": 0.4434, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.3597560975609757, |
|
"grad_norm": 0.05620186191151277, |
|
"learning_rate": 6.6306462284233234e-06, |
|
"loss": 0.3159, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.3628048780487805, |
|
"grad_norm": 0.046014700942548746, |
|
"learning_rate": 6.57056493252575e-06, |
|
"loss": 0.3341, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.3658536585365852, |
|
"grad_norm": 0.04317894848169251, |
|
"learning_rate": 6.510715870076354e-06, |
|
"loss": 0.3533, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.3689024390243905, |
|
"grad_norm": 0.0660246778046814, |
|
"learning_rate": 6.451099795246604e-06, |
|
"loss": 0.3837, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.3719512195121952, |
|
"grad_norm": 0.049699048047795104, |
|
"learning_rate": 6.39171745927204e-06, |
|
"loss": 0.4546, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.375, |
|
"grad_norm": 0.04647481627923815, |
|
"learning_rate": 6.332569610442807e-06, |
|
"loss": 0.4114, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.3780487804878048, |
|
"grad_norm": 0.053410613463110365, |
|
"learning_rate": 6.273656994094232e-06, |
|
"loss": 0.4131, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.3810975609756095, |
|
"grad_norm": 0.04635611109855328, |
|
"learning_rate": 6.214980352597399e-06, |
|
"loss": 0.4087, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.3841463414634148, |
|
"grad_norm": 0.04998162717060871, |
|
"learning_rate": 6.1565404253498685e-06, |
|
"loss": 0.4286, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.3871951219512195, |
|
"grad_norm": 0.04858537108941669, |
|
"learning_rate": 6.098337948766255e-06, |
|
"loss": 0.4074, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.3902439024390243, |
|
"grad_norm": 0.04766628235578669, |
|
"learning_rate": 6.040373656269041e-06, |
|
"loss": 0.4047, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.3932926829268295, |
|
"grad_norm": 0.04566031966872623, |
|
"learning_rate": 5.982648278279287e-06, |
|
"loss": 0.3623, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.3963414634146343, |
|
"grad_norm": 0.04220225746659264, |
|
"learning_rate": 5.925162542207441e-06, |
|
"loss": 0.4055, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.399390243902439, |
|
"grad_norm": 0.04753754420384983, |
|
"learning_rate": 5.867917172444165e-06, |
|
"loss": 0.4484, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.402439024390244, |
|
"grad_norm": 0.044606797958348605, |
|
"learning_rate": 5.810912890351219e-06, |
|
"loss": 0.3544, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.4054878048780486, |
|
"grad_norm": 0.05429191520954466, |
|
"learning_rate": 5.75415041425234e-06, |
|
"loss": 0.4157, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.408536585365854, |
|
"grad_norm": 0.04841822893600714, |
|
"learning_rate": 5.697630459424258e-06, |
|
"loss": 0.4028, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.4115853658536586, |
|
"grad_norm": 0.05269493013761591, |
|
"learning_rate": 5.641353738087588e-06, |
|
"loss": 0.3775, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.4146341463414633, |
|
"grad_norm": 0.04113581222389823, |
|
"learning_rate": 5.585320959397935e-06, |
|
"loss": 0.3377, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.417682926829268, |
|
"grad_norm": 0.05014248840706332, |
|
"learning_rate": 5.529532829436923e-06, |
|
"loss": 0.3736, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.4207317073170733, |
|
"grad_norm": 0.05375871646459066, |
|
"learning_rate": 5.473990051203298e-06, |
|
"loss": 0.4044, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.423780487804878, |
|
"grad_norm": 0.04562745676951761, |
|
"learning_rate": 5.418693324604082e-06, |
|
"loss": 0.3851, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.426829268292683, |
|
"grad_norm": 0.04607000577575072, |
|
"learning_rate": 5.36364334644574e-06, |
|
"loss": 0.3204, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.4298780487804876, |
|
"grad_norm": 0.05406018016723344, |
|
"learning_rate": 5.308840810425386e-06, |
|
"loss": 0.4035, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.432926829268293, |
|
"grad_norm": 0.047760483793245496, |
|
"learning_rate": 5.2542864071221025e-06, |
|
"loss": 0.4563, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.4359756097560976, |
|
"grad_norm": 0.0476426880340459, |
|
"learning_rate": 5.199980823988157e-06, |
|
"loss": 0.438, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.4390243902439024, |
|
"grad_norm": 0.04751953066554679, |
|
"learning_rate": 5.145924745340394e-06, |
|
"loss": 0.3799, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.442073170731707, |
|
"grad_norm": 0.046302307478752316, |
|
"learning_rate": 5.092118852351599e-06, |
|
"loss": 0.3826, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.4451219512195124, |
|
"grad_norm": 0.04692143633572673, |
|
"learning_rate": 5.038563823041903e-06, |
|
"loss": 0.3318, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.448170731707317, |
|
"grad_norm": 0.04435149632876141, |
|
"learning_rate": 4.985260332270256e-06, |
|
"loss": 0.3337, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.451219512195122, |
|
"grad_norm": 0.04336607743154857, |
|
"learning_rate": 4.932209051725914e-06, |
|
"loss": 0.3668, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.4542682926829267, |
|
"grad_norm": 0.04553355750341298, |
|
"learning_rate": 4.879410649919952e-06, |
|
"loss": 0.3869, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.457317073170732, |
|
"grad_norm": 0.06575400522455546, |
|
"learning_rate": 4.82686579217691e-06, |
|
"loss": 0.402, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.4603658536585367, |
|
"grad_norm": 0.05025978450812776, |
|
"learning_rate": 4.7745751406263165e-06, |
|
"loss": 0.3956, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.4634146341463414, |
|
"grad_norm": 0.04207130067854438, |
|
"learning_rate": 4.722539354194414e-06, |
|
"loss": 0.2913, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.466463414634146, |
|
"grad_norm": 0.0463599669859819, |
|
"learning_rate": 4.67075908859583e-06, |
|
"loss": 0.3902, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.4695121951219514, |
|
"grad_norm": 0.049170400057323115, |
|
"learning_rate": 4.619234996325314e-06, |
|
"loss": 0.4393, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.472560975609756, |
|
"grad_norm": 0.055986709128824926, |
|
"learning_rate": 4.5679677266495194e-06, |
|
"loss": 0.4592, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.475609756097561, |
|
"grad_norm": 0.04850790136281466, |
|
"learning_rate": 4.516957925598822e-06, |
|
"loss": 0.3765, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.4786585365853657, |
|
"grad_norm": 0.04548455150924458, |
|
"learning_rate": 4.4662062359591585e-06, |
|
"loss": 0.3892, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.4817073170731705, |
|
"grad_norm": 0.048166202410053933, |
|
"learning_rate": 4.415713297263987e-06, |
|
"loss": 0.4386, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.4847560975609757, |
|
"grad_norm": 0.052189487836794404, |
|
"learning_rate": 4.365479745786147e-06, |
|
"loss": 0.3875, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.4878048780487805, |
|
"grad_norm": 0.04797712854047369, |
|
"learning_rate": 4.315506214529899e-06, |
|
"loss": 0.422, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.4908536585365852, |
|
"grad_norm": 0.04730083089291941, |
|
"learning_rate": 4.265793333222928e-06, |
|
"loss": 0.3352, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.4939024390243905, |
|
"grad_norm": 0.04406355303948544, |
|
"learning_rate": 4.216341728308412e-06, |
|
"loss": 0.3597, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.4969512195121952, |
|
"grad_norm": 0.046402880449595424, |
|
"learning_rate": 4.167152022937124e-06, |
|
"loss": 0.47, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.048301472887954396, |
|
"learning_rate": 4.118224836959589e-06, |
|
"loss": 0.3518, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.5030487804878048, |
|
"grad_norm": 0.04996830725135542, |
|
"learning_rate": 4.0695607869182445e-06, |
|
"loss": 0.43, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.5060975609756095, |
|
"grad_norm": 0.05401545257800483, |
|
"learning_rate": 4.02116048603973e-06, |
|
"loss": 0.434, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.5091463414634148, |
|
"grad_norm": 0.05119011813317202, |
|
"learning_rate": 3.973024544227086e-06, |
|
"loss": 0.3394, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.5121951219512195, |
|
"grad_norm": 0.04804793913213503, |
|
"learning_rate": 3.925153568052123e-06, |
|
"loss": 0.3541, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.5152439024390243, |
|
"grad_norm": 0.04570138233548225, |
|
"learning_rate": 3.877548160747768e-06, |
|
"loss": 0.4105, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.5182926829268295, |
|
"grad_norm": 0.05180433023610628, |
|
"learning_rate": 3.830208922200421e-06, |
|
"loss": 0.4355, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.5213414634146343, |
|
"grad_norm": 0.04510265543541877, |
|
"learning_rate": 3.7831364489424816e-06, |
|
"loss": 0.421, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.524390243902439, |
|
"grad_norm": 0.04553571711513919, |
|
"learning_rate": 3.7363313341447326e-06, |
|
"loss": 0.4308, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.527439024390244, |
|
"grad_norm": 0.05157642728956852, |
|
"learning_rate": 3.689794167608937e-06, |
|
"loss": 0.434, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.5304878048780486, |
|
"grad_norm": 0.05098353856392178, |
|
"learning_rate": 3.643525535760378e-06, |
|
"loss": 0.4203, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.533536585365854, |
|
"grad_norm": 0.041115017707901325, |
|
"learning_rate": 3.597526021640471e-06, |
|
"loss": 0.3213, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.5365853658536586, |
|
"grad_norm": 0.04677981809027256, |
|
"learning_rate": 3.5517962048994213e-06, |
|
"loss": 0.3828, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.5396341463414633, |
|
"grad_norm": 0.045752089961378764, |
|
"learning_rate": 3.5063366617889143e-06, |
|
"loss": 0.4117, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.5426829268292686, |
|
"grad_norm": 0.04701033883496135, |
|
"learning_rate": 3.4611479651548457e-06, |
|
"loss": 0.3884, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.5457317073170733, |
|
"grad_norm": 0.048123442696396505, |
|
"learning_rate": 3.4162306844301465e-06, |
|
"loss": 0.4071, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.548780487804878, |
|
"grad_norm": 0.04713491381100571, |
|
"learning_rate": 3.371585385627535e-06, |
|
"loss": 0.449, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.551829268292683, |
|
"grad_norm": 0.045698119900967754, |
|
"learning_rate": 3.327212631332452e-06, |
|
"loss": 0.391, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.5548780487804876, |
|
"grad_norm": 0.045309930585614173, |
|
"learning_rate": 3.283112980695932e-06, |
|
"loss": 0.3458, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.557926829268293, |
|
"grad_norm": 0.04898751105733046, |
|
"learning_rate": 3.239286989427573e-06, |
|
"loss": 0.4192, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.5609756097560976, |
|
"grad_norm": 0.0509124926611376, |
|
"learning_rate": 3.195735209788528e-06, |
|
"loss": 0.4381, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.5640243902439024, |
|
"grad_norm": 0.05226387101278107, |
|
"learning_rate": 3.1524581905845536e-06, |
|
"loss": 0.4069, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.567073170731707, |
|
"grad_norm": 0.052460384189690755, |
|
"learning_rate": 3.1094564771590703e-06, |
|
"loss": 0.4304, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.5701219512195124, |
|
"grad_norm": 0.04576303428634451, |
|
"learning_rate": 3.066730611386337e-06, |
|
"loss": 0.4097, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.573170731707317, |
|
"grad_norm": 0.0454758335824736, |
|
"learning_rate": 3.024281131664569e-06, |
|
"loss": 0.3863, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.576219512195122, |
|
"grad_norm": 0.04635517134671258, |
|
"learning_rate": 2.9821085729091924e-06, |
|
"loss": 0.4123, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.5792682926829267, |
|
"grad_norm": 0.04489456701562646, |
|
"learning_rate": 2.9402134665460853e-06, |
|
"loss": 0.3966, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.5823170731707314, |
|
"grad_norm": 0.05588564719009066, |
|
"learning_rate": 2.898596340504886e-06, |
|
"loss": 0.422, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.5853658536585367, |
|
"grad_norm": 0.05079351877436922, |
|
"learning_rate": 2.857257719212347e-06, |
|
"loss": 0.3938, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.5884146341463414, |
|
"grad_norm": 0.048725942156966844, |
|
"learning_rate": 2.8161981235857143e-06, |
|
"loss": 0.3921, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.591463414634146, |
|
"grad_norm": 0.05354775440497977, |
|
"learning_rate": 2.775418071026156e-06, |
|
"loss": 0.3606, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.5945121951219514, |
|
"grad_norm": 0.04385956729202263, |
|
"learning_rate": 2.7349180754122906e-06, |
|
"loss": 0.3814, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.597560975609756, |
|
"grad_norm": 0.04889690374749316, |
|
"learning_rate": 2.6946986470936353e-06, |
|
"loss": 0.4797, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.600609756097561, |
|
"grad_norm": 0.04295496415305672, |
|
"learning_rate": 2.6547602928842396e-06, |
|
"loss": 0.3818, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.6036585365853657, |
|
"grad_norm": 0.04348352239438333, |
|
"learning_rate": 2.615103516056275e-06, |
|
"loss": 0.3598, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.6067073170731705, |
|
"grad_norm": 0.047044801096703746, |
|
"learning_rate": 2.5757288163336807e-06, |
|
"loss": 0.4477, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.6097560975609757, |
|
"grad_norm": 0.045721796420320274, |
|
"learning_rate": 2.5366366898858935e-06, |
|
"loss": 0.4007, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.6128048780487805, |
|
"grad_norm": 0.04578531053202925, |
|
"learning_rate": 2.4978276293215725e-06, |
|
"loss": 0.3475, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.6158536585365852, |
|
"grad_norm": 0.045594774709890595, |
|
"learning_rate": 2.4593021236823914e-06, |
|
"loss": 0.422, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.6189024390243905, |
|
"grad_norm": 0.04700300164236975, |
|
"learning_rate": 2.4210606584369104e-06, |
|
"loss": 0.3931, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.6219512195121952, |
|
"grad_norm": 0.059320405162608614, |
|
"learning_rate": 2.383103715474408e-06, |
|
"loss": 0.4289, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.625, |
|
"grad_norm": 0.05020753814940063, |
|
"learning_rate": 2.345431773098841e-06, |
|
"loss": 0.4138, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.6280487804878048, |
|
"grad_norm": 0.04601472647317754, |
|
"learning_rate": 2.3080453060228157e-06, |
|
"loss": 0.386, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.6310975609756095, |
|
"grad_norm": 0.04866574147542356, |
|
"learning_rate": 2.270944785361598e-06, |
|
"loss": 0.4103, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.6341463414634148, |
|
"grad_norm": 0.04658640783031281, |
|
"learning_rate": 2.2341306786271695e-06, |
|
"loss": 0.4522, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.6371951219512195, |
|
"grad_norm": 0.0487913591888631, |
|
"learning_rate": 2.197603449722363e-06, |
|
"loss": 0.4711, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.6402439024390243, |
|
"grad_norm": 0.05756759392886848, |
|
"learning_rate": 2.1613635589349756e-06, |
|
"loss": 0.391, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.6432926829268295, |
|
"grad_norm": 0.05197045765940042, |
|
"learning_rate": 2.125411462932023e-06, |
|
"loss": 0.4897, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.6463414634146343, |
|
"grad_norm": 0.0500854850405243, |
|
"learning_rate": 2.089747614753923e-06, |
|
"loss": 0.3817, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.649390243902439, |
|
"grad_norm": 0.04590140661248139, |
|
"learning_rate": 2.0543724638088347e-06, |
|
"loss": 0.417, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.652439024390244, |
|
"grad_norm": 0.048277191569880015, |
|
"learning_rate": 2.019286455866981e-06, |
|
"loss": 0.4101, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.6554878048780486, |
|
"grad_norm": 0.0509909437544304, |
|
"learning_rate": 1.98449003305502e-06, |
|
"loss": 0.4346, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.658536585365854, |
|
"grad_norm": 0.046959947573902566, |
|
"learning_rate": 1.9499836338504886e-06, |
|
"loss": 0.4033, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.6615853658536586, |
|
"grad_norm": 0.044990086629584425, |
|
"learning_rate": 1.91576769307627e-06, |
|
"loss": 0.4074, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.6646341463414633, |
|
"grad_norm": 0.046010218256964684, |
|
"learning_rate": 1.881842641895104e-06, |
|
"loss": 0.3816, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.6676829268292686, |
|
"grad_norm": 0.04704431532728486, |
|
"learning_rate": 1.8482089078041915e-06, |
|
"loss": 0.4273, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.6707317073170733, |
|
"grad_norm": 0.047115973314242464, |
|
"learning_rate": 1.8148669146297565e-06, |
|
"loss": 0.4154, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.673780487804878, |
|
"grad_norm": 0.052659028245325507, |
|
"learning_rate": 1.781817082521739e-06, |
|
"loss": 0.4727, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.676829268292683, |
|
"grad_norm": 0.049586231251581646, |
|
"learning_rate": 1.7490598279484933e-06, |
|
"loss": 0.4617, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.6798780487804876, |
|
"grad_norm": 0.04619796705926887, |
|
"learning_rate": 1.7165955636915392e-06, |
|
"loss": 0.3888, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.682926829268293, |
|
"grad_norm": 0.04857595937650091, |
|
"learning_rate": 1.6844246988403561e-06, |
|
"loss": 0.3977, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.6859756097560976, |
|
"grad_norm": 0.04587078872794076, |
|
"learning_rate": 1.6525476387872384e-06, |
|
"loss": 0.3635, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.6890243902439024, |
|
"grad_norm": 0.04749578656301489, |
|
"learning_rate": 1.6209647852221622e-06, |
|
"loss": 0.4613, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.692073170731707, |
|
"grad_norm": 0.045361324797590356, |
|
"learning_rate": 1.589676536127771e-06, |
|
"loss": 0.3525, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.6951219512195124, |
|
"grad_norm": 0.04894264008622803, |
|
"learning_rate": 1.558683285774304e-06, |
|
"loss": 0.4656, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.698170731707317, |
|
"grad_norm": 0.04596961558750641, |
|
"learning_rate": 1.5279854247146702e-06, |
|
"loss": 0.3885, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.701219512195122, |
|
"grad_norm": 0.04877091621740549, |
|
"learning_rate": 1.497583339779507e-06, |
|
"loss": 0.3995, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.7042682926829267, |
|
"grad_norm": 0.046603063123663, |
|
"learning_rate": 1.4674774140723103e-06, |
|
"loss": 0.3794, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.7073170731707314, |
|
"grad_norm": 0.049078136886864604, |
|
"learning_rate": 1.4376680269646086e-06, |
|
"loss": 0.3831, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.7103658536585367, |
|
"grad_norm": 0.046551620610760534, |
|
"learning_rate": 1.408155554091184e-06, |
|
"loss": 0.3683, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.7134146341463414, |
|
"grad_norm": 0.04769564867133674, |
|
"learning_rate": 1.3789403673453216e-06, |
|
"loss": 0.4364, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.716463414634146, |
|
"grad_norm": 0.05275442418545579, |
|
"learning_rate": 1.3500228348741594e-06, |
|
"loss": 0.4007, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.7195121951219514, |
|
"grad_norm": 0.0487800880348598, |
|
"learning_rate": 1.3214033210740079e-06, |
|
"loss": 0.3825, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.722560975609756, |
|
"grad_norm": 0.052257228902126626, |
|
"learning_rate": 1.2930821865857867e-06, |
|
"loss": 0.4774, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.725609756097561, |
|
"grad_norm": 0.04598036087813201, |
|
"learning_rate": 1.265059788290468e-06, |
|
"loss": 0.4152, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.7286585365853657, |
|
"grad_norm": 0.05163087663792444, |
|
"learning_rate": 1.2373364793045821e-06, |
|
"loss": 0.4959, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.7317073170731705, |
|
"grad_norm": 0.0514975087513403, |
|
"learning_rate": 1.2099126089757746e-06, |
|
"loss": 0.3535, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.7347560975609757, |
|
"grad_norm": 0.043893617709315234, |
|
"learning_rate": 1.1827885228783863e-06, |
|
"loss": 0.4188, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.7378048780487805, |
|
"grad_norm": 0.04818454241881662, |
|
"learning_rate": 1.1559645628091115e-06, |
|
"loss": 0.3196, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.7408536585365852, |
|
"grad_norm": 0.04398367015233411, |
|
"learning_rate": 1.129441066782702e-06, |
|
"loss": 0.3253, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.7439024390243905, |
|
"grad_norm": 0.04017783128096445, |
|
"learning_rate": 1.1032183690276754e-06, |
|
"loss": 0.3725, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.7469512195121952, |
|
"grad_norm": 0.051719053632263305, |
|
"learning_rate": 1.0772967999821387e-06, |
|
"loss": 0.3862, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.04677169001526996, |
|
"learning_rate": 1.0516766862895988e-06, |
|
"loss": 0.4183, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.7530487804878048, |
|
"grad_norm": 0.05608282238185744, |
|
"learning_rate": 1.0263583507948592e-06, |
|
"loss": 0.4227, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.7560975609756095, |
|
"grad_norm": 0.050428851178379905, |
|
"learning_rate": 1.0013421125399519e-06, |
|
"loss": 0.3616, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.7591463414634148, |
|
"grad_norm": 0.05034269092062958, |
|
"learning_rate": 9.766282867601062e-07, |
|
"loss": 0.3844, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.7621951219512195, |
|
"grad_norm": 0.04504676946969238, |
|
"learning_rate": 9.522171848797918e-07, |
|
"loss": 0.391, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.7652439024390243, |
|
"grad_norm": 0.04567291042842459, |
|
"learning_rate": 9.281091145087794e-07, |
|
"loss": 0.3731, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.7682926829268295, |
|
"grad_norm": 0.04760877660304163, |
|
"learning_rate": 9.043043794382811e-07, |
|
"loss": 0.3975, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.7713414634146343, |
|
"grad_norm": 0.04886230944889475, |
|
"learning_rate": 8.808032796371019e-07, |
|
"loss": 0.4345, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.774390243902439, |
|
"grad_norm": 0.046761963685432256, |
|
"learning_rate": 8.5760611124788e-07, |
|
"loss": 0.3587, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.777439024390244, |
|
"grad_norm": 0.05012257767113604, |
|
"learning_rate": 8.34713166583334e-07, |
|
"loss": 0.4435, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.7804878048780486, |
|
"grad_norm": 0.0485857061164425, |
|
"learning_rate": 8.121247341226073e-07, |
|
"loss": 0.4457, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.783536585365854, |
|
"grad_norm": 0.047771627365448335, |
|
"learning_rate": 7.898410985076016e-07, |
|
"loss": 0.3506, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.7865853658536586, |
|
"grad_norm": 0.04524184109786172, |
|
"learning_rate": 7.678625405394157e-07, |
|
"loss": 0.368, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.7896341463414633, |
|
"grad_norm": 0.04638346083359397, |
|
"learning_rate": 7.46189337174788e-07, |
|
"loss": 0.4712, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.7926829268292686, |
|
"grad_norm": 0.04795283141258439, |
|
"learning_rate": 7.248217615226232e-07, |
|
"loss": 0.4073, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.7957317073170733, |
|
"grad_norm": 0.047896666710965284, |
|
"learning_rate": 7.037600828405349e-07, |
|
"loss": 0.4145, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.798780487804878, |
|
"grad_norm": 0.04476687658273201, |
|
"learning_rate": 6.830045665314671e-07, |
|
"loss": 0.3853, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.801829268292683, |
|
"grad_norm": 0.045070434638962945, |
|
"learning_rate": 6.625554741403333e-07, |
|
"loss": 0.3998, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.8048780487804876, |
|
"grad_norm": 0.05006029469711144, |
|
"learning_rate": 6.424130633507497e-07, |
|
"loss": 0.4101, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.807926829268293, |
|
"grad_norm": 0.05376575151245747, |
|
"learning_rate": 6.22577587981743e-07, |
|
"loss": 0.4244, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.8109756097560976, |
|
"grad_norm": 0.051072787008947136, |
|
"learning_rate": 6.030492979845953e-07, |
|
"loss": 0.4588, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.8140243902439024, |
|
"grad_norm": 0.05446211637366452, |
|
"learning_rate": 5.838284394396764e-07, |
|
"loss": 0.425, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.817073170731707, |
|
"grad_norm": 0.04809228041596783, |
|
"learning_rate": 5.649152545533332e-07, |
|
"loss": 0.4626, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.8201219512195124, |
|
"grad_norm": 0.0443872011957413, |
|
"learning_rate": 5.463099816548579e-07, |
|
"loss": 0.3606, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.823170731707317, |
|
"grad_norm": 0.0456727521695989, |
|
"learning_rate": 5.280128551934743e-07, |
|
"loss": 0.3553, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.826219512195122, |
|
"grad_norm": 0.04370934239171925, |
|
"learning_rate": 5.100241057353682e-07, |
|
"loss": 0.3541, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.8292682926829267, |
|
"grad_norm": 0.052759703472349234, |
|
"learning_rate": 4.923439599608227e-07, |
|
"loss": 0.4907, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.8323170731707314, |
|
"grad_norm": 0.04862415168463701, |
|
"learning_rate": 4.749726406613142e-07, |
|
"loss": 0.3889, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.8353658536585367, |
|
"grad_norm": 0.0518156119357567, |
|
"learning_rate": 4.5791036673673846e-07, |
|
"loss": 0.4403, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.8384146341463414, |
|
"grad_norm": 0.04371517773149062, |
|
"learning_rate": 4.411573531926422e-07, |
|
"loss": 0.3311, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.841463414634146, |
|
"grad_norm": 0.0444702244148316, |
|
"learning_rate": 4.2471381113750876e-07, |
|
"loss": 0.3891, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.8445121951219514, |
|
"grad_norm": 0.050165104913087226, |
|
"learning_rate": 4.0857994778009944e-07, |
|
"loss": 0.391, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.847560975609756, |
|
"grad_norm": 0.04413473680201035, |
|
"learning_rate": 3.9275596642685543e-07, |
|
"loss": 0.3764, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.850609756097561, |
|
"grad_norm": 0.04934776501624898, |
|
"learning_rate": 3.7724206647930803e-07, |
|
"loss": 0.3838, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.8536585365853657, |
|
"grad_norm": 0.051994396400409246, |
|
"learning_rate": 3.6203844343160044e-07, |
|
"loss": 0.4299, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.8567073170731705, |
|
"grad_norm": 0.05086329496351862, |
|
"learning_rate": 3.4714528886799494e-07, |
|
"loss": 0.4624, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.8597560975609757, |
|
"grad_norm": 0.04546567367722647, |
|
"learning_rate": 3.3256279046048053e-07, |
|
"loss": 0.4194, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.8628048780487805, |
|
"grad_norm": 0.05351102581600541, |
|
"learning_rate": 3.1829113196638614e-07, |
|
"loss": 0.4779, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.8658536585365852, |
|
"grad_norm": 0.046764695752408325, |
|
"learning_rate": 3.0433049322608767e-07, |
|
"loss": 0.3816, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.8689024390243905, |
|
"grad_norm": 0.04223339280911066, |
|
"learning_rate": 2.906810501607293e-07, |
|
"loss": 0.3986, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.8719512195121952, |
|
"grad_norm": 0.04356884108337029, |
|
"learning_rate": 2.7734297477000627e-07, |
|
"loss": 0.3702, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.875, |
|
"grad_norm": 0.04622932575023138, |
|
"learning_rate": 2.643164351299965e-07, |
|
"loss": 0.4075, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.8780487804878048, |
|
"grad_norm": 0.05178690372987777, |
|
"learning_rate": 2.5160159539105443e-07, |
|
"loss": 0.4318, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.8810975609756095, |
|
"grad_norm": 0.044948193037368314, |
|
"learning_rate": 2.391986157757292e-07, |
|
"loss": 0.4114, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.8841463414634148, |
|
"grad_norm": 0.04997670450445459, |
|
"learning_rate": 2.2710765257674958e-07, |
|
"loss": 0.483, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.8871951219512195, |
|
"grad_norm": 0.05332715471259246, |
|
"learning_rate": 2.153288581550561e-07, |
|
"loss": 0.4037, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.8902439024390243, |
|
"grad_norm": 0.043304309516807464, |
|
"learning_rate": 2.038623809378859e-07, |
|
"loss": 0.4034, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.8932926829268295, |
|
"grad_norm": 0.05034997634317469, |
|
"learning_rate": 1.927083654168854e-07, |
|
"loss": 0.445, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.8963414634146343, |
|
"grad_norm": 0.04656387288683428, |
|
"learning_rate": 1.8186695214631733e-07, |
|
"loss": 0.4002, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.899390243902439, |
|
"grad_norm": 0.05685909635754947, |
|
"learning_rate": 1.7133827774125367e-07, |
|
"loss": 0.4089, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.902439024390244, |
|
"grad_norm": 0.05105550552370391, |
|
"learning_rate": 1.6112247487589105e-07, |
|
"loss": 0.3904, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.9054878048780486, |
|
"grad_norm": 0.04499414356958469, |
|
"learning_rate": 1.512196722818493e-07, |
|
"loss": 0.3904, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.908536585365854, |
|
"grad_norm": 0.04763536407866678, |
|
"learning_rate": 1.4162999474657268e-07, |
|
"loss": 0.381, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.9115853658536586, |
|
"grad_norm": 0.04829185976705493, |
|
"learning_rate": 1.3235356311173397e-07, |
|
"loss": 0.4471, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.9146341463414633, |
|
"grad_norm": 0.046717109208157435, |
|
"learning_rate": 1.2339049427173843e-07, |
|
"loss": 0.46, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.9176829268292686, |
|
"grad_norm": 0.04764383813122349, |
|
"learning_rate": 1.1474090117221947e-07, |
|
"loss": 0.3908, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.9207317073170733, |
|
"grad_norm": 0.04617300216734002, |
|
"learning_rate": 1.0640489280865085e-07, |
|
"loss": 0.4213, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.923780487804878, |
|
"grad_norm": 0.04930526528152621, |
|
"learning_rate": 9.838257422493668e-08, |
|
"loss": 0.4257, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.926829268292683, |
|
"grad_norm": 0.04763207735191656, |
|
"learning_rate": 9.067404651211808e-08, |
|
"loss": 0.369, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.9298780487804876, |
|
"grad_norm": 0.049033279860561683, |
|
"learning_rate": 8.327940680708246e-08, |
|
"loss": 0.4256, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.932926829268293, |
|
"grad_norm": 0.051564111935322204, |
|
"learning_rate": 7.61987482913451e-08, |
|
"loss": 0.4545, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.9359756097560976, |
|
"grad_norm": 0.05251292272917081, |
|
"learning_rate": 6.943216018987508e-08, |
|
"loss": 0.4666, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.9390243902439024, |
|
"grad_norm": 0.04823175777273723, |
|
"learning_rate": 6.297972776996286e-08, |
|
"loss": 0.4674, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.942073170731707, |
|
"grad_norm": 0.04891409789807861, |
|
"learning_rate": 5.6841532340162764e-08, |
|
"loss": 0.3941, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.9451219512195124, |
|
"grad_norm": 0.05083495908280979, |
|
"learning_rate": 5.1017651249252175e-08, |
|
"loss": 0.408, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.948170731707317, |
|
"grad_norm": 0.048667783893621294, |
|
"learning_rate": 4.550815788526008e-08, |
|
"loss": 0.4246, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.951219512195122, |
|
"grad_norm": 0.04938238004854509, |
|
"learning_rate": 4.03131216745567e-08, |
|
"loss": 0.4088, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.9542682926829267, |
|
"grad_norm": 0.044065171800084124, |
|
"learning_rate": 3.543260808095139e-08, |
|
"loss": 0.3315, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.9573170731707314, |
|
"grad_norm": 0.04128399852179956, |
|
"learning_rate": 3.0866678604896116e-08, |
|
"loss": 0.3572, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.9603658536585367, |
|
"grad_norm": 0.05213292796988347, |
|
"learning_rate": 2.6615390782691596e-08, |
|
"loss": 0.4389, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.9634146341463414, |
|
"grad_norm": 0.05712450278932026, |
|
"learning_rate": 2.2678798185771232e-08, |
|
"loss": 0.3561, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.966463414634146, |
|
"grad_norm": 0.04612117104693992, |
|
"learning_rate": 1.905695042002109e-08, |
|
"loss": 0.4228, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.9695121951219514, |
|
"grad_norm": 0.05064073173011995, |
|
"learning_rate": 1.5749893125160954e-08, |
|
"loss": 0.4459, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.972560975609756, |
|
"grad_norm": 0.04260762810954784, |
|
"learning_rate": 1.2757667974155896e-08, |
|
"loss": 0.4114, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.975609756097561, |
|
"grad_norm": 0.06403331793633539, |
|
"learning_rate": 1.0080312672711145e-08, |
|
"loss": 0.4859, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.9786585365853657, |
|
"grad_norm": 0.04714572968910608, |
|
"learning_rate": 7.717860958780798e-09, |
|
"loss": 0.4298, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.9817073170731705, |
|
"grad_norm": 0.04845961498863735, |
|
"learning_rate": 5.670342602148715e-09, |
|
"loss": 0.3839, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.9847560975609757, |
|
"grad_norm": 0.049494663259983175, |
|
"learning_rate": 3.9377834040538185e-09, |
|
"loss": 0.3712, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.9878048780487805, |
|
"grad_norm": 0.04949515354248563, |
|
"learning_rate": 2.5202051968625797e-09, |
|
"loss": 0.3833, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.9908536585365852, |
|
"grad_norm": 0.05067059935337459, |
|
"learning_rate": 1.4176258437970103e-09, |
|
"loss": 0.4035, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.9939024390243905, |
|
"grad_norm": 0.05569874207800916, |
|
"learning_rate": 6.300592387098458e-10, |
|
"loss": 0.4107, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.9969512195121952, |
|
"grad_norm": 0.048017333907845926, |
|
"learning_rate": 1.575153058985812e-10, |
|
"loss": 0.3739, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.05063228298134809, |
|
"learning_rate": 0.0, |
|
"loss": 0.4038, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 984, |
|
"total_flos": 3793981707124736.0, |
|
"train_loss": 0.4249174514315962, |
|
"train_runtime": 22985.8992, |
|
"train_samples_per_second": 1.366, |
|
"train_steps_per_second": 0.043 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 984, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3793981707124736.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|