{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 288,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.003472222222222222, "grad_norm": 0.6381791234016418, "learning_rate": 4e-05, "loss": 1.664, "step": 1 },
    { "epoch": 0.006944444444444444, "grad_norm": 0.6412252187728882, "learning_rate": 8e-05, "loss": 1.6095, "step": 2 },
    { "epoch": 0.010416666666666666, "grad_norm": 0.6355312466621399, "learning_rate": 0.00012, "loss": 1.6431, "step": 3 },
    { "epoch": 0.013888888888888888, "grad_norm": 0.7827576994895935, "learning_rate": 0.00016, "loss": 1.7554, "step": 4 },
    { "epoch": 0.017361111111111112, "grad_norm": 0.7365180850028992, "learning_rate": 0.0002, "loss": 1.6866, "step": 5 },
    { "epoch": 0.020833333333333332, "grad_norm": 0.7607629299163818, "learning_rate": 0.00019929328621908128, "loss": 1.5447, "step": 6 },
    { "epoch": 0.024305555555555556, "grad_norm": 0.7718775272369385, "learning_rate": 0.00019858657243816254, "loss": 1.5777, "step": 7 },
    { "epoch": 0.027777777777777776, "grad_norm": 0.7301136255264282, "learning_rate": 0.00019787985865724384, "loss": 1.3402, "step": 8 },
    { "epoch": 0.03125, "grad_norm": 0.6953989863395691, "learning_rate": 0.0001971731448763251, "loss": 1.4443, "step": 9 },
    { "epoch": 0.034722222222222224, "grad_norm": 0.605755090713501, "learning_rate": 0.00019646643109540637, "loss": 1.3609, "step": 10 },
    { "epoch": 0.03819444444444445, "grad_norm": 0.5867183804512024, "learning_rate": 0.00019575971731448764, "loss": 1.2765, "step": 11 },
    { "epoch": 0.041666666666666664, "grad_norm": 0.5973581075668335, "learning_rate": 0.00019505300353356894, "loss": 1.2585, "step": 12 },
    { "epoch": 0.04513888888888889, "grad_norm": 0.7357228398323059, "learning_rate": 0.0001943462897526502, "loss": 1.2199, "step": 13 },
    { "epoch": 0.04861111111111111, "grad_norm": 0.7804207801818848, "learning_rate": 0.00019363957597173144, "loss": 1.1996, "step": 14 },
    { "epoch": 0.052083333333333336, "grad_norm": 0.813244640827179, "learning_rate": 0.0001929328621908127, "loss": 1.1735, "step": 15 },
    { "epoch": 0.05555555555555555, "grad_norm": 0.7277530431747437, "learning_rate": 0.00019222614840989398, "loss": 1.1094, "step": 16 },
    { "epoch": 0.059027777777777776, "grad_norm": 0.6414357423782349, "learning_rate": 0.00019151943462897527, "loss": 1.1786, "step": 17 },
    { "epoch": 0.0625, "grad_norm": 0.5542577505111694, "learning_rate": 0.00019081272084805654, "loss": 0.9343, "step": 18 },
    { "epoch": 0.06597222222222222, "grad_norm": 0.5263577699661255, "learning_rate": 0.0001901060070671378, "loss": 1.0607, "step": 19 },
    { "epoch": 0.06944444444444445, "grad_norm": 0.49260228872299194, "learning_rate": 0.00018939929328621908, "loss": 0.9934, "step": 20 },
    { "epoch": 0.07291666666666667, "grad_norm": 0.5072307586669922, "learning_rate": 0.00018869257950530037, "loss": 1.0165, "step": 21 },
    { "epoch": 0.0763888888888889, "grad_norm": 0.5154305696487427, "learning_rate": 0.00018798586572438164, "loss": 1.0159, "step": 22 },
    { "epoch": 0.0798611111111111, "grad_norm": 0.5033209919929504, "learning_rate": 0.0001872791519434629, "loss": 1.0102, "step": 23 },
    { "epoch": 0.08333333333333333, "grad_norm": 0.5032006502151489, "learning_rate": 0.00018657243816254417, "loss": 1.0215, "step": 24 },
    { "epoch": 0.08680555555555555, "grad_norm": 0.4746347665786743, "learning_rate": 0.00018586572438162547, "loss": 0.867, "step": 25 },
    { "epoch": 0.09027777777777778, "grad_norm": 0.4780479967594147, "learning_rate": 0.00018515901060070673, "loss": 0.951, "step": 26 },
    { "epoch": 0.09375, "grad_norm": 0.45668691396713257, "learning_rate": 0.000184452296819788, "loss": 0.9464, "step": 27 },
    { "epoch": 0.09722222222222222, "grad_norm": 0.43334755301475525, "learning_rate": 0.00018374558303886927, "loss": 0.8768, "step": 28 },
    { "epoch": 0.10069444444444445, "grad_norm": 0.4458893835544586, "learning_rate": 0.00018303886925795054, "loss": 0.8553, "step": 29 },
    { "epoch": 0.10416666666666667, "grad_norm": 0.4605867564678192, "learning_rate": 0.00018233215547703183, "loss": 0.948, "step": 30 },
    { "epoch": 0.1076388888888889, "grad_norm": 0.44694918394088745, "learning_rate": 0.0001816254416961131, "loss": 0.8755, "step": 31 },
    { "epoch": 0.1111111111111111, "grad_norm": 0.45669859647750854, "learning_rate": 0.00018091872791519434, "loss": 0.8554, "step": 32 },
    { "epoch": 0.11458333333333333, "grad_norm": 0.4380110502243042, "learning_rate": 0.0001802120141342756, "loss": 0.8699, "step": 33 },
    { "epoch": 0.11805555555555555, "grad_norm": 0.4494687020778656, "learning_rate": 0.0001795053003533569, "loss": 0.8463, "step": 34 },
    { "epoch": 0.12152777777777778, "grad_norm": 0.4385037422180176, "learning_rate": 0.00017879858657243817, "loss": 0.7964, "step": 35 },
    { "epoch": 0.125, "grad_norm": 0.42709654569625854, "learning_rate": 0.00017809187279151944, "loss": 0.8675, "step": 36 },
    { "epoch": 0.1284722222222222, "grad_norm": 0.462563157081604, "learning_rate": 0.0001773851590106007, "loss": 0.9125, "step": 37 },
    { "epoch": 0.13194444444444445, "grad_norm": 0.4372367560863495, "learning_rate": 0.00017667844522968197, "loss": 0.8352, "step": 38 },
    { "epoch": 0.13541666666666666, "grad_norm": 0.4251866340637207, "learning_rate": 0.00017597173144876327, "loss": 0.8695, "step": 39 },
    { "epoch": 0.1388888888888889, "grad_norm": 0.43755847215652466, "learning_rate": 0.00017526501766784453, "loss": 0.8544, "step": 40 },
    { "epoch": 0.1423611111111111, "grad_norm": 0.44858112931251526, "learning_rate": 0.0001745583038869258, "loss": 0.835, "step": 41 },
    { "epoch": 0.14583333333333334, "grad_norm": 0.4447510838508606, "learning_rate": 0.00017385159010600707, "loss": 0.8537, "step": 42 },
    { "epoch": 0.14930555555555555, "grad_norm": 0.42954930663108826, "learning_rate": 0.00017314487632508836, "loss": 0.7603, "step": 43 },
    { "epoch": 0.1527777777777778, "grad_norm": 0.4357329308986664, "learning_rate": 0.00017243816254416963, "loss": 0.7315, "step": 44 },
    { "epoch": 0.15625, "grad_norm": 0.4562698304653168, "learning_rate": 0.0001717314487632509, "loss": 0.9103, "step": 45 },
    { "epoch": 0.1597222222222222, "grad_norm": 0.46614545583724976, "learning_rate": 0.00017102473498233216, "loss": 0.832, "step": 46 },
    { "epoch": 0.16319444444444445, "grad_norm": 0.42011672258377075, "learning_rate": 0.00017031802120141343, "loss": 0.7172, "step": 47 },
    { "epoch": 0.16666666666666666, "grad_norm": 0.40585461258888245, "learning_rate": 0.00016961130742049473, "loss": 0.7087, "step": 48 },
    { "epoch": 0.1701388888888889, "grad_norm": 0.4490915834903717, "learning_rate": 0.000168904593639576, "loss": 0.7956, "step": 49 },
    { "epoch": 0.1736111111111111, "grad_norm": 0.4251860976219177, "learning_rate": 0.00016819787985865723, "loss": 0.7763, "step": 50 },
    { "epoch": 0.17708333333333334, "grad_norm": 0.4503968060016632, "learning_rate": 0.0001674911660777385, "loss": 0.833, "step": 51 },
    { "epoch": 0.18055555555555555, "grad_norm": 0.452923059463501, "learning_rate": 0.0001667844522968198, "loss": 0.729, "step": 52 },
    { "epoch": 0.1840277777777778, "grad_norm": 0.45421820878982544, "learning_rate": 0.00016607773851590106, "loss": 0.8678, "step": 53 },
    { "epoch": 0.1875, "grad_norm": 0.47255003452301025, "learning_rate": 0.00016537102473498233, "loss": 0.7856, "step": 54 },
    { "epoch": 0.1909722222222222, "grad_norm": 0.49460238218307495, "learning_rate": 0.0001646643109540636, "loss": 0.8103, "step": 55 },
    { "epoch": 0.19444444444444445, "grad_norm": 0.47668716311454773, "learning_rate": 0.0001639575971731449, "loss": 0.6687, "step": 56 },
    { "epoch": 0.19791666666666666, "grad_norm": 0.4631917178630829, "learning_rate": 0.00016325088339222616, "loss": 0.7362, "step": 57 },
    { "epoch": 0.2013888888888889, "grad_norm": 0.4908764064311981, "learning_rate": 0.00016254416961130743, "loss": 0.7734, "step": 58 },
    { "epoch": 0.2048611111111111, "grad_norm": 0.47517484426498413, "learning_rate": 0.0001618374558303887, "loss": 0.8048, "step": 59 },
    { "epoch": 0.20833333333333334, "grad_norm": 0.4693131744861603, "learning_rate": 0.00016113074204946996, "loss": 0.8148, "step": 60 },
    { "epoch": 0.21180555555555555, "grad_norm": 0.48101750016212463, "learning_rate": 0.00016042402826855126, "loss": 0.7089, "step": 61 },
    { "epoch": 0.2152777777777778, "grad_norm": 0.45942020416259766, "learning_rate": 0.00015971731448763253, "loss": 0.7649, "step": 62 },
    { "epoch": 0.21875, "grad_norm": 0.46566373109817505, "learning_rate": 0.0001590106007067138, "loss": 0.7081, "step": 63 },
    { "epoch": 0.2222222222222222, "grad_norm": 0.4782656133174896, "learning_rate": 0.00015830388692579506, "loss": 0.6578, "step": 64 },
    { "epoch": 0.22569444444444445, "grad_norm": 0.4600412845611572, "learning_rate": 0.00015759717314487635, "loss": 0.6907, "step": 65 },
    { "epoch": 0.22916666666666666, "grad_norm": 0.47648143768310547, "learning_rate": 0.00015689045936395762, "loss": 0.7159, "step": 66 },
    { "epoch": 0.2326388888888889, "grad_norm": 0.4845995008945465, "learning_rate": 0.00015618374558303886, "loss": 0.6186, "step": 67 },
    { "epoch": 0.2361111111111111, "grad_norm": 0.5142258405685425, "learning_rate": 0.00015547703180212013, "loss": 0.7643, "step": 68 },
    { "epoch": 0.23958333333333334, "grad_norm": 0.5000512599945068, "learning_rate": 0.0001547703180212014, "loss": 0.734, "step": 69 },
    { "epoch": 0.24305555555555555, "grad_norm": 0.473004549741745, "learning_rate": 0.0001540636042402827, "loss": 0.689, "step": 70 },
    { "epoch": 0.2465277777777778, "grad_norm": 0.49982285499572754, "learning_rate": 0.00015335689045936396, "loss": 0.7162, "step": 71 },
    { "epoch": 0.25, "grad_norm": 0.48723116517066956, "learning_rate": 0.00015265017667844523, "loss": 0.605, "step": 72 },
    { "epoch": 0.2534722222222222, "grad_norm": 0.4668457508087158, "learning_rate": 0.0001519434628975265, "loss": 0.7956, "step": 73 },
    { "epoch": 0.2569444444444444, "grad_norm": 0.5139826536178589, "learning_rate": 0.0001512367491166078, "loss": 0.6877, "step": 74 },
    { "epoch": 0.2604166666666667, "grad_norm": 0.48903441429138184, "learning_rate": 0.00015053003533568906, "loss": 0.7341, "step": 75 },
    { "epoch": 0.2638888888888889, "grad_norm": 0.4892962872982025, "learning_rate": 0.00014982332155477032, "loss": 0.6833, "step": 76 },
    { "epoch": 0.2673611111111111, "grad_norm": 0.510981023311615, "learning_rate": 0.0001491166077738516, "loss": 0.6567, "step": 77 },
    { "epoch": 0.2708333333333333, "grad_norm": 0.48602378368377686, "learning_rate": 0.00014840989399293289, "loss": 0.6592, "step": 78 },
    { "epoch": 0.2743055555555556, "grad_norm": 0.5149515271186829, "learning_rate": 0.00014770318021201415, "loss": 0.7435, "step": 79 },
    { "epoch": 0.2777777777777778, "grad_norm": 0.4999159872531891, "learning_rate": 0.00014699646643109542, "loss": 0.7397, "step": 80 },
    { "epoch": 0.28125, "grad_norm": 0.5036943554878235, "learning_rate": 0.0001462897526501767, "loss": 0.6791, "step": 81 },
    { "epoch": 0.2847222222222222, "grad_norm": 0.4811699688434601, "learning_rate": 0.00014558303886925796, "loss": 0.6376, "step": 82 },
    { "epoch": 0.2881944444444444, "grad_norm": 0.5215135812759399, "learning_rate": 0.00014487632508833925, "loss": 0.6547, "step": 83 },
    { "epoch": 0.2916666666666667, "grad_norm": 0.5180763006210327, "learning_rate": 0.00014416961130742052, "loss": 0.6667, "step": 84 },
    { "epoch": 0.2951388888888889, "grad_norm": 0.5010943412780762, "learning_rate": 0.00014346289752650176, "loss": 0.6496, "step": 85 },
    { "epoch": 0.2986111111111111, "grad_norm": 0.47832784056663513, "learning_rate": 0.00014275618374558303, "loss": 0.6516, "step": 86 },
    { "epoch": 0.3020833333333333, "grad_norm": 0.5183102488517761, "learning_rate": 0.00014204946996466432, "loss": 0.6822, "step": 87 },
    { "epoch": 0.3055555555555556, "grad_norm": 0.5250104069709778, "learning_rate": 0.0001413427561837456, "loss": 0.6837, "step": 88 },
    { "epoch": 0.3090277777777778, "grad_norm": 0.4821203052997589, "learning_rate": 0.00014063604240282686, "loss": 0.6734, "step": 89 },
    { "epoch": 0.3125, "grad_norm": 0.5341935157775879, "learning_rate": 0.00013992932862190812, "loss": 0.7098, "step": 90 },
    { "epoch": 0.3159722222222222, "grad_norm": 0.5351920127868652, "learning_rate": 0.0001392226148409894, "loss": 0.6806, "step": 91 },
    { "epoch": 0.3194444444444444, "grad_norm": 0.5185657143592834, "learning_rate": 0.00013851590106007068, "loss": 0.6212, "step": 92 },
    { "epoch": 0.3229166666666667, "grad_norm": 0.4946865439414978, "learning_rate": 0.00013780918727915195, "loss": 0.7244, "step": 93 },
    { "epoch": 0.3263888888888889, "grad_norm": 0.51689213514328, "learning_rate": 0.00013710247349823322, "loss": 0.532, "step": 94 },
    { "epoch": 0.3298611111111111, "grad_norm": 0.5387365221977234, "learning_rate": 0.0001363957597173145, "loss": 0.6225, "step": 95 },
    { "epoch": 0.3333333333333333, "grad_norm": 0.5210251212120056, "learning_rate": 0.00013568904593639578, "loss": 0.6565, "step": 96 },
    { "epoch": 0.3368055555555556, "grad_norm": 0.5470120310783386, "learning_rate": 0.00013498233215547705, "loss": 0.6181, "step": 97 },
    { "epoch": 0.3402777777777778, "grad_norm": 0.5134613513946533, "learning_rate": 0.00013427561837455832, "loss": 0.5943, "step": 98 },
    { "epoch": 0.34375, "grad_norm": 0.5114628076553345, "learning_rate": 0.00013356890459363958, "loss": 0.7112, "step": 99 },
    { "epoch": 0.3472222222222222, "grad_norm": 0.5393586158752441, "learning_rate": 0.00013286219081272085, "loss": 0.6513, "step": 100 },
    { "epoch": 0.3506944444444444, "grad_norm": 0.5121696591377258, "learning_rate": 0.00013215547703180215, "loss": 0.6112, "step": 101 },
    { "epoch": 0.3541666666666667, "grad_norm": 0.5529887676239014, "learning_rate": 0.0001314487632508834, "loss": 0.704, "step": 102 },
    { "epoch": 0.3576388888888889, "grad_norm": 0.5504875779151917, "learning_rate": 0.00013074204946996465, "loss": 0.6367, "step": 103 },
    { "epoch": 0.3611111111111111, "grad_norm": 0.5555490255355835, "learning_rate": 0.00013003533568904592, "loss": 0.6988, "step": 104 },
    { "epoch": 0.3645833333333333, "grad_norm": 0.5071545839309692, "learning_rate": 0.00012932862190812722, "loss": 0.6598, "step": 105 },
    { "epoch": 0.3680555555555556, "grad_norm": 0.5332069396972656, "learning_rate": 0.00012862190812720848, "loss": 0.65, "step": 106 },
    { "epoch": 0.3715277777777778, "grad_norm": 0.5629200339317322, "learning_rate": 0.00012791519434628975, "loss": 0.699, "step": 107 },
    { "epoch": 0.375, "grad_norm": 0.5515280961990356, "learning_rate": 0.00012720848056537102, "loss": 0.6418, "step": 108 },
    { "epoch": 0.3784722222222222, "grad_norm": 0.527538001537323, "learning_rate": 0.0001265017667844523, "loss": 0.7302, "step": 109 },
    { "epoch": 0.3819444444444444, "grad_norm": 0.5716756582260132, "learning_rate": 0.00012579505300353358, "loss": 0.7443, "step": 110 },
    { "epoch": 0.3854166666666667, "grad_norm": 0.5540614724159241, "learning_rate": 0.00012508833922261485, "loss": 0.6564, "step": 111 },
    { "epoch": 0.3888888888888889, "grad_norm": 0.5193327069282532, "learning_rate": 0.00012438162544169612, "loss": 0.6783, "step": 112 },
    { "epoch": 0.3923611111111111, "grad_norm": 0.5474222302436829, "learning_rate": 0.00012367491166077738, "loss": 0.6519, "step": 113 },
    { "epoch": 0.3958333333333333, "grad_norm": 0.5058162808418274, "learning_rate": 0.00012296819787985868, "loss": 0.5785, "step": 114 },
    { "epoch": 0.3993055555555556, "grad_norm": 0.5280646085739136, "learning_rate": 0.00012226148409893994, "loss": 0.6308, "step": 115 },
    { "epoch": 0.4027777777777778, "grad_norm": 0.536086916923523, "learning_rate": 0.0001215547703180212, "loss": 0.5696, "step": 116 },
    { "epoch": 0.40625, "grad_norm": 0.5223795771598816, "learning_rate": 0.00012084805653710247, "loss": 0.5861, "step": 117 },
    { "epoch": 0.4097222222222222, "grad_norm": 0.5218377113342285, "learning_rate": 0.00012014134275618376, "loss": 0.6151, "step": 118 },
    { "epoch": 0.4131944444444444, "grad_norm": 0.5461903214454651, "learning_rate": 0.00011943462897526503, "loss": 0.585, "step": 119 },
    { "epoch": 0.4166666666666667, "grad_norm": 0.5451020002365112, "learning_rate": 0.0001187279151943463, "loss": 0.6571, "step": 120 },
    { "epoch": 0.4201388888888889, "grad_norm": 0.5320534706115723, "learning_rate": 0.00011802120141342756, "loss": 0.6191, "step": 121 },
    { "epoch": 0.4236111111111111, "grad_norm": 0.5509814023971558, "learning_rate": 0.00011731448763250883, "loss": 0.542, "step": 122 },
    { "epoch": 0.4270833333333333, "grad_norm": 0.5494756102561951, "learning_rate": 0.00011660777385159012, "loss": 0.7145, "step": 123 },
    { "epoch": 0.4305555555555556, "grad_norm": 0.5778016448020935, "learning_rate": 0.00011590106007067139, "loss": 0.6787, "step": 124 },
    { "epoch": 0.4340277777777778, "grad_norm": 0.5546441674232483, "learning_rate": 0.00011519434628975265, "loss": 0.5897, "step": 125 },
    { "epoch": 0.4375, "grad_norm": 0.5590822100639343, "learning_rate": 0.00011448763250883391, "loss": 0.6331, "step": 126 },
    { "epoch": 0.4409722222222222, "grad_norm": 0.5571174025535583, "learning_rate": 0.00011378091872791521, "loss": 0.6624, "step": 127 },
    { "epoch": 0.4444444444444444, "grad_norm": 0.579007625579834, "learning_rate": 0.00011307420494699648, "loss": 0.6377, "step": 128 },
    { "epoch": 0.4479166666666667, "grad_norm": 0.5956988334655762, "learning_rate": 0.00011236749116607774, "loss": 0.6828, "step": 129 },
    { "epoch": 0.4513888888888889, "grad_norm": 0.5952633023262024, "learning_rate": 0.00011166077738515901, "loss": 0.5603, "step": 130 },
    { "epoch": 0.4548611111111111, "grad_norm": 0.561600387096405, "learning_rate": 0.00011095406360424028, "loss": 0.6311, "step": 131 },
    { "epoch": 0.4583333333333333, "grad_norm": 0.5515158772468567, "learning_rate": 0.00011024734982332157, "loss": 0.7293, "step": 132 },
    { "epoch": 0.4618055555555556, "grad_norm": 0.5889860987663269, "learning_rate": 0.00010954063604240284, "loss": 0.6693, "step": 133 },
    { "epoch": 0.4652777777777778, "grad_norm": 0.5815720558166504, "learning_rate": 0.0001088339222614841, "loss": 0.6881, "step": 134 },
    { "epoch": 0.46875, "grad_norm": 0.6205659508705139, "learning_rate": 0.00010812720848056536, "loss": 0.6732, "step": 135 },
    { "epoch": 0.4722222222222222, "grad_norm": 0.5689711570739746, "learning_rate": 0.00010742049469964666, "loss": 0.6206, "step": 136 },
    { "epoch": 0.4756944444444444, "grad_norm": 0.5453592538833618, "learning_rate": 0.00010671378091872792, "loss": 0.6257, "step": 137 },
    { "epoch": 0.4791666666666667, "grad_norm": 0.5330798625946045, "learning_rate": 0.00010600706713780919, "loss": 0.4674, "step": 138 },
    { "epoch": 0.4826388888888889, "grad_norm": 0.5798875093460083, "learning_rate": 0.00010530035335689046, "loss": 0.6017, "step": 139 },
    { "epoch": 0.4861111111111111, "grad_norm": 0.5574885010719299, "learning_rate": 0.00010459363957597175, "loss": 0.6087, "step": 140 },
    { "epoch": 0.4895833333333333, "grad_norm": 0.5381941199302673, "learning_rate": 0.00010388692579505302, "loss": 0.6543, "step": 141 },
    { "epoch": 0.4930555555555556, "grad_norm": 0.5750802755355835, "learning_rate": 0.00010318021201413429, "loss": 0.6662, "step": 142 },
    { "epoch": 0.4965277777777778, "grad_norm": 0.6241365671157837, "learning_rate": 0.00010247349823321554, "loss": 0.5976, "step": 143 },
    { "epoch": 0.5, "grad_norm": 0.5461493134498596, "learning_rate": 0.00010176678445229681, "loss": 0.6796, "step": 144 },
    { "epoch": 0.5034722222222222, "grad_norm": 0.5892328023910522, "learning_rate": 0.0001010600706713781, "loss": 0.6166, "step": 145 },
    { "epoch": 0.5069444444444444, "grad_norm": 0.5670603513717651, "learning_rate": 0.00010035335689045937, "loss": 0.651, "step": 146 },
    { "epoch": 0.5104166666666666, "grad_norm": 0.5573473572731018, "learning_rate": 9.964664310954064e-05, "loss": 0.6268, "step": 147 },
    { "epoch": 0.5138888888888888, "grad_norm": 0.5641173720359802, "learning_rate": 9.893992932862192e-05, "loss": 0.6515, "step": 148 },
    { "epoch": 0.5173611111111112, "grad_norm": 0.5903286337852478, "learning_rate": 9.823321554770319e-05, "loss": 0.6705, "step": 149 },
    { "epoch": 0.5208333333333334, "grad_norm": 0.5907771587371826, "learning_rate": 9.752650176678447e-05, "loss": 0.6184, "step": 150 },
    { "epoch": 0.5243055555555556, "grad_norm": 0.5515082478523254, "learning_rate": 9.681978798586572e-05, "loss": 0.661, "step": 151 },
    { "epoch": 0.5277777777777778, "grad_norm": 0.5268746614456177, "learning_rate": 9.611307420494699e-05, "loss": 0.6082, "step": 152 },
    { "epoch": 0.53125, "grad_norm": 0.5877247452735901, "learning_rate": 9.540636042402827e-05, "loss": 0.5856, "step": 153 },
    { "epoch": 0.5347222222222222, "grad_norm": 0.5402259826660156, "learning_rate": 9.469964664310954e-05, "loss": 0.574, "step": 154 },
    { "epoch": 0.5381944444444444, "grad_norm": 0.5235267281532288, "learning_rate": 9.399293286219082e-05, "loss": 0.5598, "step": 155 },
    { "epoch": 0.5416666666666666, "grad_norm": 0.5417196750640869, "learning_rate": 9.328621908127209e-05, "loss": 0.6447, "step": 156 },
    { "epoch": 0.5451388888888888, "grad_norm": 0.6259889006614685, "learning_rate": 9.257950530035337e-05, "loss": 0.6227, "step": 157 },
    { "epoch": 0.5486111111111112, "grad_norm": 0.567189633846283, "learning_rate": 9.187279151943463e-05, "loss": 0.5788, "step": 158 },
    { "epoch": 0.5520833333333334, "grad_norm": 0.5860530138015747, "learning_rate": 9.116607773851592e-05, "loss": 0.5026, "step": 159 },
    { "epoch": 0.5555555555555556, "grad_norm": 0.5970867872238159, "learning_rate": 9.045936395759717e-05, "loss": 0.65, "step": 160 },
    { "epoch": 0.5590277777777778, "grad_norm": 0.5621108412742615, "learning_rate": 8.975265017667845e-05, "loss": 0.6118, "step": 161 },
    { "epoch": 0.5625, "grad_norm": 0.5695009827613831, "learning_rate": 8.904593639575972e-05, "loss": 0.5119, "step": 162 },
    { "epoch": 0.5659722222222222, "grad_norm": 0.5741239786148071, "learning_rate": 8.833922261484099e-05, "loss": 0.5273, "step": 163 },
    { "epoch": 0.5694444444444444, "grad_norm": 0.5961455702781677, "learning_rate": 8.763250883392227e-05, "loss": 0.6197, "step": 164 },
    { "epoch": 0.5729166666666666, "grad_norm": 0.5358597636222839, "learning_rate": 8.692579505300353e-05, "loss": 0.5629, "step": 165 },
    { "epoch": 0.5763888888888888, "grad_norm": 0.5704313516616821, "learning_rate": 8.621908127208482e-05, "loss": 0.5838, "step": 166 },
    { "epoch": 0.5798611111111112, "grad_norm": 0.6421118378639221, "learning_rate": 8.551236749116608e-05, "loss": 0.5964, "step": 167 },
    { "epoch": 0.5833333333333334, "grad_norm": 0.5588890314102173, "learning_rate": 8.480565371024736e-05, "loss": 0.4957, "step": 168 },
    { "epoch": 0.5868055555555556, "grad_norm": 0.5954290628433228, "learning_rate": 8.409893992932862e-05, "loss": 0.6364, "step": 169 },
    { "epoch": 0.5902777777777778, "grad_norm": 0.5738086700439453, "learning_rate": 8.33922261484099e-05, "loss": 0.5661, "step": 170 },
    { "epoch": 0.59375, "grad_norm": 0.6033700704574585, "learning_rate": 8.268551236749117e-05, "loss": 0.6128, "step": 171 },
    { "epoch": 0.5972222222222222, "grad_norm": 0.6137632727622986, "learning_rate": 8.197879858657245e-05, "loss": 0.6271, "step": 172 },
    { "epoch": 0.6006944444444444, "grad_norm": 0.5929612517356873, "learning_rate": 8.127208480565371e-05, "loss": 0.6328, "step": 173 },
    { "epoch": 0.6041666666666666, "grad_norm": 0.5914517641067505, "learning_rate": 8.056537102473498e-05, "loss": 0.6406, "step": 174 },
    { "epoch": 0.6076388888888888, "grad_norm": 0.5911087989807129, "learning_rate": 7.985865724381626e-05, "loss": 0.5432, "step": 175 },
    { "epoch": 0.6111111111111112, "grad_norm": 0.6051996350288391, "learning_rate": 7.915194346289753e-05, "loss": 0.576, "step": 176 },
    { "epoch": 0.6145833333333334, "grad_norm": 0.5913227200508118, "learning_rate": 7.844522968197881e-05, "loss": 0.6012, "step": 177 },
    { "epoch": 0.6180555555555556, "grad_norm": 0.5444892048835754, "learning_rate": 7.773851590106007e-05, "loss": 0.5324, "step": 178 },
    { "epoch": 0.6215277777777778, "grad_norm": 0.5951313972473145, "learning_rate": 7.703180212014135e-05, "loss": 0.6932, "step": 179 },
    { "epoch": 0.625, "grad_norm": 0.5791239142417908, "learning_rate": 7.632508833922261e-05, "loss": 0.6144, "step": 180 },
    { "epoch": 0.6284722222222222, "grad_norm": 0.5847236514091492, "learning_rate": 7.56183745583039e-05, "loss": 0.5894, "step": 181 },
    { "epoch": 0.6319444444444444, "grad_norm": 0.6364589929580688, "learning_rate": 7.491166077738516e-05, "loss": 0.5187, "step": 182 },
    { "epoch": 0.6354166666666666, "grad_norm": 0.572428822517395, "learning_rate": 7.420494699646644e-05, "loss": 0.5653, "step": 183 },
    { "epoch": 0.6388888888888888, "grad_norm": 0.5649181008338928, "learning_rate": 7.349823321554771e-05, "loss": 0.5586, "step": 184 },
    { "epoch": 0.6423611111111112, "grad_norm": 0.5847340226173401, "learning_rate": 7.279151943462898e-05, "loss": 0.6389, "step": 185 },
    { "epoch": 0.6458333333333334, "grad_norm": 0.5655092597007751, "learning_rate": 7.208480565371026e-05, "loss": 0.5809, "step": 186 },
    { "epoch": 0.6493055555555556, "grad_norm": 0.5791696310043335, "learning_rate": 7.137809187279151e-05, "loss": 0.6295, "step": 187 },
    { "epoch": 0.6527777777777778, "grad_norm": 0.6031994223594666, "learning_rate": 7.06713780918728e-05, "loss": 0.5962, "step": 188 },
    { "epoch": 0.65625, "grad_norm": 0.6025336980819702, "learning_rate": 6.996466431095406e-05, "loss": 0.6424, "step": 189 },
    { "epoch": 0.6597222222222222, "grad_norm": 0.6521866917610168, "learning_rate": 6.925795053003534e-05, "loss": 0.5857, "step": 190 },
    { "epoch": 0.6631944444444444, "grad_norm": 0.5835008025169373, "learning_rate": 6.855123674911661e-05, "loss": 0.5807, "step": 191 },
    { "epoch": 0.6666666666666666, "grad_norm": 0.5866998434066772, "learning_rate": 6.784452296819789e-05, "loss": 0.5653, "step": 192 },
    { "epoch": 0.6701388888888888, "grad_norm": 0.5725639462471008, "learning_rate": 6.713780918727916e-05, "loss": 0.4654, "step": 193 },
    { "epoch": 0.6736111111111112, "grad_norm": 0.6040775775909424, "learning_rate": 6.643109540636043e-05, "loss": 0.6563, "step": 194 },
    { "epoch": 0.6770833333333334, "grad_norm": 0.6573815941810608, "learning_rate": 6.57243816254417e-05, "loss": 0.5836, "step": 195 },
    { "epoch": 0.6805555555555556, "grad_norm": 0.5753028392791748, "learning_rate": 6.501766784452296e-05, "loss": 0.6142, "step": 196 },
    { "epoch": 0.6840277777777778, "grad_norm": 0.6084535717964172, "learning_rate": 6.431095406360424e-05, "loss": 0.5592, "step": 197 },
    { "epoch": 0.6875, "grad_norm": 0.6668769717216492, "learning_rate": 6.360424028268551e-05, "loss": 0.563, "step": 198 },
    { "epoch": 0.6909722222222222, "grad_norm": 0.620083212852478, "learning_rate": 6.289752650176679e-05, "loss": 0.4906, "step": 199 },
    { "epoch": 0.6944444444444444, "grad_norm": 0.6372251510620117, "learning_rate": 6.219081272084806e-05, "loss": 0.5754, "step": 200 },
    { "epoch": 0.6979166666666666, "grad_norm": 0.602571964263916, "learning_rate": 6.148409893992934e-05, "loss": 0.5631, "step": 201 },
    { "epoch": 0.7013888888888888, "grad_norm": 0.610701322555542, "learning_rate": 6.07773851590106e-05, "loss": 0.6417, "step": 202 },
    { "epoch": 0.7048611111111112, "grad_norm": 0.6244217753410339, "learning_rate": 6.007067137809188e-05, "loss": 0.6194, "step": 203 },
    { "epoch": 0.7083333333333334, "grad_norm": 0.6154200434684753, "learning_rate": 5.936395759717315e-05, "loss": 0.5724, "step": 204 },
    { "epoch": 0.7118055555555556, "grad_norm": 0.5975745916366577, "learning_rate": 5.8657243816254415e-05, "loss": 0.6117, "step": 205 },
    { "epoch": 0.7152777777777778, "grad_norm": 0.6204216480255127, "learning_rate": 5.7950530035335696e-05, "loss": 0.6153, "step": 206 },
    { "epoch": 0.71875, "grad_norm": 0.5886074900627136, "learning_rate": 5.724381625441696e-05, "loss": 0.4968, "step": 207 },
    { "epoch": 0.7222222222222222, "grad_norm": 0.5940162539482117, "learning_rate": 5.653710247349824e-05, "loss": 0.6544, "step": 208 },
    { "epoch": 0.7256944444444444, "grad_norm": 0.596792459487915, "learning_rate": 5.5830388692579505e-05, "loss": 0.5685, "step": 209 },
    { "epoch": 0.7291666666666666, "grad_norm": 0.5985198020935059, "learning_rate": 5.5123674911660786e-05, "loss": 0.5795, "step": 210 },
    { "epoch": 0.7326388888888888, "grad_norm": 0.5998839735984802, "learning_rate": 5.441696113074205e-05, "loss": 0.5229, "step": 211 },
    { "epoch": 0.7361111111111112, "grad_norm": 0.5900275707244873, "learning_rate": 5.371024734982333e-05, "loss": 0.5882, "step": 212 },
    { "epoch": 0.7395833333333334, "grad_norm": 0.5826575756072998, "learning_rate": 5.3003533568904595e-05, "loss": 0.4709, "step": 213 },
    { "epoch": 0.7430555555555556, "grad_norm": 0.5909337997436523, "learning_rate": 5.2296819787985876e-05, "loss": 0.5528, "step": 214 },
    { "epoch": 0.7465277777777778, "grad_norm": 0.5588683485984802, "learning_rate": 5.1590106007067144e-05, "loss": 0.5373, "step": 215 },
    { "epoch": 0.75, "grad_norm": 0.5555421710014343, "learning_rate": 5.0883392226148405e-05, "loss": 0.5779, "step": 216 },
    { "epoch": 0.7534722222222222, "grad_norm": 0.5925477743148804, "learning_rate": 5.0176678445229686e-05, "loss": 0.5896, "step": 217 },
    { "epoch": 0.7569444444444444, "grad_norm": 0.6183862090110779, "learning_rate": 4.946996466431096e-05, "loss": 0.6245, "step": 218 },
    { "epoch": 0.7604166666666666, "grad_norm": 0.5807591080665588, "learning_rate": 4.8763250883392234e-05, "loss": 0.5509, "step": 219 },
    { "epoch": 0.7638888888888888, "grad_norm": 0.5723608136177063, "learning_rate": 4.8056537102473495e-05, "loss": 0.5927, "step": 220 },
    { "epoch": 0.7673611111111112, "grad_norm": 0.5748648643493652, "learning_rate": 4.734982332155477e-05, "loss": 0.5009, "step": 221 },
    { "epoch": 0.7708333333333334, "grad_norm": 0.5595970749855042, "learning_rate": 4.664310954063604e-05, "loss": 0.5225, "step": 222 },
    { "epoch": 0.7743055555555556, "grad_norm": 0.5699341893196106, "learning_rate": 4.593639575971732e-05, "loss": 0.5945, "step": 223 },
    { "epoch": 0.7777777777777778, "grad_norm": 0.5965125560760498, "learning_rate": 4.5229681978798585e-05, "loss": 0.6039, "step": 224 },
    { "epoch": 0.78125, "grad_norm": 0.601673424243927, "learning_rate": 4.452296819787986e-05, "loss": 0.6116, "step": 225 },
    { "epoch": 0.7847222222222222, "grad_norm": 0.6311600804328918, "learning_rate": 4.381625441696113e-05, "loss": 0.6356, "step": 226 },
    { "epoch": 0.7881944444444444, "grad_norm": 0.6146146059036255, "learning_rate": 4.310954063604241e-05, "loss": 0.5925, "step": 227 },
    { "epoch": 0.7916666666666666, "grad_norm": 0.5608177781105042, "learning_rate": 4.240282685512368e-05, "loss": 0.4546, "step": 228 },
    { "epoch": 0.7951388888888888, "grad_norm": 0.6077985167503357, "learning_rate": 4.169611307420495e-05, "loss": 0.6111, "step": 229 },
    { "epoch": 0.7986111111111112, "grad_norm": 0.5868244171142578, "learning_rate": 4.0989399293286223e-05, "loss": 0.5945, "step": 230 },
    { "epoch": 0.8020833333333334, "grad_norm": 0.587386965751648, "learning_rate": 4.028268551236749e-05, "loss": 0.5294, "step": 231 },
    { "epoch": 0.8055555555555556, "grad_norm": 0.5953108668327332, "learning_rate": 3.9575971731448765e-05, "loss": 0.5621, "step": 232 },
    { "epoch": 0.8090277777777778, "grad_norm": 0.6050392985343933, "learning_rate": 3.886925795053003e-05, "loss": 0.5374, "step": 233 },
    { "epoch": 0.8125, "grad_norm": 0.6083176732063293, "learning_rate": 3.816254416961131e-05, "loss": 0.5843, "step": 234 },
    { "epoch": 0.8159722222222222, "grad_norm": 0.5946184992790222, "learning_rate": 3.745583038869258e-05, "loss": 0.5049, "step": 235 },
    { "epoch": 0.8194444444444444, "grad_norm": 0.5910969972610474, "learning_rate": 3.6749116607773855e-05, "loss": 0.5749, "step": 236 },
    { "epoch": 0.8229166666666666, "grad_norm": 0.5982712507247925, "learning_rate": 3.604240282685513e-05, "loss": 0.5882, "step": 237 },
    { "epoch": 0.8263888888888888, "grad_norm": 0.6084917783737183, "learning_rate": 3.53356890459364e-05, "loss": 0.5169, "step": 238 },
    { "epoch": 0.8298611111111112, "grad_norm": 0.6236574649810791, "learning_rate": 3.462897526501767e-05, "loss": 0.6085, "step": 239 },
    { "epoch": 0.8333333333333334, "grad_norm": 0.592654824256897, "learning_rate": 3.3922261484098945e-05, "loss": 0.5525, "step": 240 },
    { "epoch": 0.8368055555555556, "grad_norm": 0.6017850041389465, "learning_rate": 3.321554770318021e-05, "loss": 0.636, "step": 241 },
    { "epoch": 0.8402777777777778, "grad_norm": 0.5793926119804382, "learning_rate": 3.250883392226148e-05, "loss": 0.6003, "step": 242 },
    { "epoch": 0.84375, "grad_norm": 0.5898053050041199, "learning_rate": 3.1802120141342755e-05, "loss": 0.6925, "step": 243 },
    { "epoch": 0.8472222222222222, "grad_norm": 0.6041194796562195, "learning_rate": 3.109540636042403e-05, "loss": 0.5509, "step": 244 },
    { "epoch": 0.8506944444444444, "grad_norm": 0.5889320373535156, "learning_rate": 3.03886925795053e-05, "loss": 0.5377, "step": 245 },
    { "epoch": 0.8541666666666666, "grad_norm": 0.65882408618927, "learning_rate": 2.9681978798586574e-05, "loss": 0.6909, "step": 246 },
    { "epoch": 0.8576388888888888, "grad_norm": 0.5652550458908081, "learning_rate": 2.8975265017667848e-05, "loss": 0.6553, "step": 247 },
    { "epoch": 0.8611111111111112, "grad_norm": 0.6159343123435974, "learning_rate": 2.826855123674912e-05, "loss": 0.4816, "step": 248 },
    { "epoch": 0.8645833333333334, "grad_norm": 0.5917626023292542, "learning_rate": 2.7561837455830393e-05, "loss": 0.6042, "step": 249 },
    { "epoch": 0.8680555555555556, "grad_norm": 0.5773218870162964, "learning_rate": 2.6855123674911664e-05, "loss": 0.5723, "step": 250 },
    { "epoch": 0.8715277777777778, "grad_norm": 0.5936452150344849, "learning_rate": 2.6148409893992938e-05, "loss": 0.6071, "step": 251 },
    { "epoch": 0.875, "grad_norm": 0.6094207763671875, "learning_rate": 2.5441696113074202e-05, "loss": 0.5406, "step": 252 },
    { "epoch": 0.8784722222222222, "grad_norm": 0.6131781935691833, "learning_rate": 2.473498233215548e-05, "loss": 0.562, "step": 253 },
    { "epoch": 0.8819444444444444, "grad_norm": 0.5903745889663696, "learning_rate": 2.4028268551236747e-05, "loss": 0.5841, "step": 254 },
    { "epoch": 0.8854166666666666, "grad_norm": 0.5778252482414246, "learning_rate": 2.332155477031802e-05, "loss": 0.6071, "step": 255 },
    { "epoch": 0.8888888888888888, "grad_norm": 0.6057181358337402, "learning_rate": 2.2614840989399292e-05, "loss": 0.5794, "step": 256 },
    { "epoch": 0.8923611111111112, "grad_norm": 0.5691949129104614, "learning_rate": 2.1908127208480567e-05, "loss": 0.5634, "step": 257 },
    { "epoch": 0.8958333333333334, "grad_norm": 0.6154379844665527, "learning_rate": 2.120141342756184e-05, "loss": 0.5389, "step": 258 },
    { "epoch": 0.8993055555555556, "grad_norm": 0.5671730637550354, "learning_rate": 2.0494699646643112e-05, "loss": 0.6045, "step": 259 },
    { "epoch": 0.9027777777777778, "grad_norm": 0.6344681978225708, "learning_rate": 1.9787985865724383e-05, "loss": 0.4922, "step": 260 },
    { "epoch": 0.90625, "grad_norm": 0.584071934223175, "learning_rate": 1.9081272084805653e-05, "loss": 0.5283, "step": 261 },
    { "epoch": 0.9097222222222222, "grad_norm": 0.5744527578353882, "learning_rate": 1.8374558303886928e-05, "loss": 0.51, "step": 262 },
    { "epoch": 0.9131944444444444, "grad_norm": 0.5983773469924927, "learning_rate": 1.76678445229682e-05, "loss": 0.619, "step": 263 },
    { "epoch": 0.9166666666666666, "grad_norm": 0.5973845720291138, "learning_rate": 1.6961130742049473e-05, "loss": 0.5131, "step": 264 },
    { "epoch": 0.9201388888888888, "grad_norm": 0.6193533539772034, "learning_rate": 1.625441696113074e-05, "loss": 0.577, "step": 265 },
    { "epoch": 0.9236111111111112, "grad_norm": 0.6047850251197815, "learning_rate": 1.5547703180212014e-05, "loss": 0.6277, "step": 266 },
    { "epoch": 0.9270833333333334, "grad_norm": 0.6007250547409058, "learning_rate": 1.4840989399293287e-05, "loss": 0.6026, "step": 267 },
    { "epoch": 0.9305555555555556, "grad_norm": 0.5738986730575562, "learning_rate": 1.413427561837456e-05, "loss": 0.5842, "step": 268 },
    { "epoch": 0.9340277777777778, "grad_norm": 0.5671294927597046, "learning_rate": 1.3427561837455832e-05, "loss": 0.4875, "step": 269 },
    { "epoch": 0.9375, "grad_norm": 0.5837607383728027, "learning_rate": 1.2720848056537101e-05, "loss": 0.6032, "step": 270 },
    { "epoch": 0.9409722222222222, "grad_norm": 0.6210164427757263, "learning_rate": 1.2014134275618374e-05, "loss": 0.6143, "step": 271 },
    { "epoch": 0.9444444444444444, "grad_norm": 0.5613874197006226, "learning_rate": 1.1307420494699646e-05, "loss": 0.5193, "step": 272 },
    { "epoch": 0.9479166666666666, "grad_norm": 0.5788601636886597, "learning_rate": 1.060070671378092e-05, "loss": 0.5993, "step": 273 },
    { "epoch": 0.9513888888888888, "grad_norm": 0.5956071615219116, "learning_rate": 9.893992932862191e-06, "loss": 0.6244, "step": 274 },
    { "epoch": 0.9548611111111112, "grad_norm": 0.5741413235664368, "learning_rate": 9.187279151943464e-06, "loss": 0.5912, "step": 275 },
    { "epoch": 0.9583333333333334, "grad_norm": 0.5746176242828369, "learning_rate": 8.480565371024736e-06, "loss": 0.4959, "step": 276 },
    { "epoch": 0.9618055555555556, "grad_norm": 0.6048396825790405, "learning_rate": 7.773851590106007e-06, "loss": 0.5358, "step": 277 },
    { "epoch": 0.9652777777777778, "grad_norm": 0.5789562463760376, "learning_rate": 7.06713780918728e-06, "loss": 0.526, "step": 278 },
    { "epoch": 0.96875, "grad_norm": 0.623224139213562, "learning_rate": 6.3604240282685506e-06, "loss": 0.6296, "step": 279 },
    { "epoch": 0.9722222222222222, "grad_norm": 0.5628058910369873, "learning_rate": 5.653710247349823e-06, "loss": 0.519, "step": 280 },
    { "epoch": 0.9756944444444444, "grad_norm": 0.574334442615509, "learning_rate": 4.946996466431096e-06, "loss": 0.5241, "step": 281 },
    { "epoch": 0.9791666666666666, "grad_norm": 0.6212002038955688, "learning_rate": 4.240282685512368e-06, "loss": 0.6307, "step": 282 },
    { "epoch": 0.9826388888888888, "grad_norm": 0.6209303140640259, "learning_rate": 3.53356890459364e-06, "loss": 0.5419, "step": 283 },
    { "epoch": 0.9861111111111112, "grad_norm": 0.6056732535362244, "learning_rate": 2.8268551236749116e-06, "loss": 0.6091, "step": 284 },
    { "epoch": 0.9895833333333334, "grad_norm": 0.632924497127533, "learning_rate": 2.120141342756184e-06, "loss": 0.6159, "step": 285 },
    { "epoch": 0.9930555555555556, "grad_norm": 0.6058059334754944, "learning_rate": 1.4134275618374558e-06, "loss": 0.5805, "step": 286 },
    { "epoch": 0.9965277777777778, "grad_norm": 0.6084165573120117, "learning_rate": 7.067137809187279e-07, "loss": 0.6208, "step": 287 },
    { "epoch": 1.0, "grad_norm": 0.6049368977546692, "learning_rate": 0.0, "loss": 0.4819, "step": 288 }
  ],
  "logging_steps": 1,
  "max_steps": 288,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.5964978416533504e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}