{
  "best_metric": 0.07540635019540787,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 1.2573221757322175,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008368200836820083,
      "grad_norm": 3.4848103523254395,
      "learning_rate": 1e-05,
      "loss": 0.7472,
      "step": 1
    },
    {
      "epoch": 0.008368200836820083,
      "eval_loss": 1.5766565799713135,
      "eval_runtime": 4.0802,
      "eval_samples_per_second": 49.507,
      "eval_steps_per_second": 12.499,
      "step": 1
    },
    {
      "epoch": 0.016736401673640166,
      "grad_norm": 3.88921856880188,
      "learning_rate": 2e-05,
      "loss": 0.863,
      "step": 2
    },
    {
      "epoch": 0.02510460251046025,
      "grad_norm": 4.598084449768066,
      "learning_rate": 3e-05,
      "loss": 1.0088,
      "step": 3
    },
    {
      "epoch": 0.03347280334728033,
      "grad_norm": 3.6586599349975586,
      "learning_rate": 4e-05,
      "loss": 0.9831,
      "step": 4
    },
    {
      "epoch": 0.04184100418410042,
      "grad_norm": 3.121110677719116,
      "learning_rate": 5e-05,
      "loss": 0.9342,
      "step": 5
    },
    {
      "epoch": 0.0502092050209205,
      "grad_norm": 2.6722185611724854,
      "learning_rate": 6e-05,
      "loss": 0.8293,
      "step": 6
    },
    {
      "epoch": 0.058577405857740586,
      "grad_norm": 2.4001119136810303,
      "learning_rate": 7e-05,
      "loss": 0.6924,
      "step": 7
    },
    {
      "epoch": 0.06694560669456066,
      "grad_norm": 3.029315710067749,
      "learning_rate": 8e-05,
      "loss": 0.5698,
      "step": 8
    },
    {
      "epoch": 0.07531380753138076,
      "grad_norm": 2.597606658935547,
      "learning_rate": 9e-05,
      "loss": 0.4675,
      "step": 9
    },
    {
      "epoch": 0.08368200836820083,
      "grad_norm": 1.8661812543869019,
      "learning_rate": 0.0001,
      "loss": 0.3722,
      "step": 10
    },
    {
      "epoch": 0.09205020920502092,
      "grad_norm": 1.7599635124206543,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.2951,
      "step": 11
    },
    {
      "epoch": 0.100418410041841,
      "grad_norm": 1.6175488233566284,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.285,
      "step": 12
    },
    {
      "epoch": 0.1087866108786611,
      "grad_norm": 1.6192901134490967,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.2916,
      "step": 13
    },
    {
      "epoch": 0.11715481171548117,
      "grad_norm": 2.2023255825042725,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.2656,
      "step": 14
    },
    {
      "epoch": 0.12552301255230125,
      "grad_norm": 1.2332242727279663,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.2047,
      "step": 15
    },
    {
      "epoch": 0.13389121338912133,
      "grad_norm": 1.2950152158737183,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.2132,
      "step": 16
    },
    {
      "epoch": 0.14225941422594143,
      "grad_norm": 1.1822935342788696,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.1827,
      "step": 17
    },
    {
      "epoch": 0.1506276150627615,
      "grad_norm": 0.8956453204154968,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.1601,
      "step": 18
    },
    {
      "epoch": 0.1589958158995816,
      "grad_norm": 1.1256321668624878,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.2129,
      "step": 19
    },
    {
      "epoch": 0.16736401673640167,
      "grad_norm": 1.0068782567977905,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.1641,
      "step": 20
    },
    {
      "epoch": 0.17573221757322174,
      "grad_norm": 1.2121520042419434,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.2133,
      "step": 21
    },
    {
      "epoch": 0.18410041841004185,
      "grad_norm": 1.0212056636810303,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.1245,
      "step": 22
    },
    {
      "epoch": 0.19246861924686193,
      "grad_norm": 1.3977688550949097,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.1618,
      "step": 23
    },
    {
      "epoch": 0.200836820083682,
      "grad_norm": 1.0460073947906494,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.1903,
      "step": 24
    },
    {
      "epoch": 0.20920502092050208,
      "grad_norm": 1.0836344957351685,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.1523,
      "step": 25
    },
    {
      "epoch": 0.2175732217573222,
      "grad_norm": 0.9304505586624146,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.1322,
      "step": 26
    },
    {
      "epoch": 0.22594142259414227,
      "grad_norm": 1.1587389707565308,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.1123,
      "step": 27
    },
    {
      "epoch": 0.23430962343096234,
      "grad_norm": 1.9223076105117798,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.1,
      "step": 28
    },
    {
      "epoch": 0.24267782426778242,
      "grad_norm": 0.7539680600166321,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.0602,
      "step": 29
    },
    {
      "epoch": 0.2510460251046025,
      "grad_norm": 4.452033519744873,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.4788,
      "step": 30
    },
    {
      "epoch": 0.2594142259414226,
      "grad_norm": 3.088981866836548,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.4023,
      "step": 31
    },
    {
      "epoch": 0.26778242677824265,
      "grad_norm": 2.3259027004241943,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.3357,
      "step": 32
    },
    {
      "epoch": 0.27615062761506276,
      "grad_norm": 1.3535948991775513,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.2125,
      "step": 33
    },
    {
      "epoch": 0.28451882845188287,
      "grad_norm": 0.6757339239120483,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.1745,
      "step": 34
    },
    {
      "epoch": 0.2928870292887029,
      "grad_norm": 0.997547447681427,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.1698,
      "step": 35
    },
    {
      "epoch": 0.301255230125523,
      "grad_norm": 1.2034856081008911,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.1886,
      "step": 36
    },
    {
      "epoch": 0.30962343096234307,
      "grad_norm": 1.122100591659546,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.1912,
      "step": 37
    },
    {
      "epoch": 0.3179916317991632,
      "grad_norm": 0.8844034075737,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.1782,
      "step": 38
    },
    {
      "epoch": 0.3263598326359833,
      "grad_norm": 0.4449402093887329,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.1225,
      "step": 39
    },
    {
      "epoch": 0.33472803347280333,
      "grad_norm": 0.5567941069602966,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.1362,
      "step": 40
    },
    {
      "epoch": 0.34309623430962344,
      "grad_norm": 0.6472607254981995,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.1481,
      "step": 41
    },
    {
      "epoch": 0.3514644351464435,
      "grad_norm": 0.5753950476646423,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.1365,
      "step": 42
    },
    {
      "epoch": 0.3598326359832636,
      "grad_norm": 0.5642210841178894,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.1053,
      "step": 43
    },
    {
      "epoch": 0.3682008368200837,
      "grad_norm": 0.4673701524734497,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.1113,
      "step": 44
    },
    {
      "epoch": 0.37656903765690375,
      "grad_norm": 0.8079649806022644,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.1491,
      "step": 45
    },
    {
      "epoch": 0.38493723849372385,
      "grad_norm": 0.8153290152549744,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.1367,
      "step": 46
    },
    {
      "epoch": 0.39330543933054396,
      "grad_norm": 0.6437317728996277,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.0991,
      "step": 47
    },
    {
      "epoch": 0.401673640167364,
      "grad_norm": 0.6529302000999451,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.1035,
      "step": 48
    },
    {
      "epoch": 0.4100418410041841,
      "grad_norm": 0.5692593455314636,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.0992,
      "step": 49
    },
    {
      "epoch": 0.41841004184100417,
      "grad_norm": 0.5359004735946655,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.1008,
      "step": 50
    },
    {
      "epoch": 0.41841004184100417,
      "eval_loss": 0.15166662633419037,
      "eval_runtime": 4.0723,
      "eval_samples_per_second": 49.604,
      "eval_steps_per_second": 12.524,
      "step": 50
    },
    {
      "epoch": 0.42677824267782427,
      "grad_norm": 0.5589534640312195,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.11,
      "step": 51
    },
    {
      "epoch": 0.4351464435146444,
      "grad_norm": 0.5620235204696655,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.0946,
      "step": 52
    },
    {
      "epoch": 0.4435146443514644,
      "grad_norm": 0.8351897597312927,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.1539,
      "step": 53
    },
    {
      "epoch": 0.45188284518828453,
      "grad_norm": 0.6207558512687683,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.1107,
      "step": 54
    },
    {
      "epoch": 0.4602510460251046,
      "grad_norm": 0.7445648312568665,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.1152,
      "step": 55
    },
    {
      "epoch": 0.4686192468619247,
      "grad_norm": 0.628424346446991,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.0807,
      "step": 56
    },
    {
      "epoch": 0.4769874476987448,
      "grad_norm": 0.5635355114936829,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.0555,
      "step": 57
    },
    {
      "epoch": 0.48535564853556484,
      "grad_norm": 1.0648820400238037,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.0634,
      "step": 58
    },
    {
      "epoch": 0.49372384937238495,
      "grad_norm": 1.8602968454360962,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.2547,
      "step": 59
    },
    {
      "epoch": 0.502092050209205,
      "grad_norm": 1.5479875802993774,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.3128,
      "step": 60
    },
    {
      "epoch": 0.5104602510460251,
      "grad_norm": 1.3398643732070923,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.2635,
      "step": 61
    },
    {
      "epoch": 0.5188284518828452,
      "grad_norm": 0.9705513119697571,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.2057,
      "step": 62
    },
    {
      "epoch": 0.5271966527196653,
      "grad_norm": 0.7675544619560242,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.178,
      "step": 63
    },
    {
      "epoch": 0.5355648535564853,
      "grad_norm": 0.7129813432693481,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.1655,
      "step": 64
    },
    {
      "epoch": 0.5439330543933054,
      "grad_norm": 0.7832943201065063,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.1606,
      "step": 65
    },
    {
      "epoch": 0.5523012552301255,
      "grad_norm": 0.7569060325622559,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.143,
      "step": 66
    },
    {
      "epoch": 0.5606694560669456,
      "grad_norm": 0.7445584535598755,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.128,
      "step": 67
    },
    {
      "epoch": 0.5690376569037657,
      "grad_norm": 0.5912233591079712,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.1289,
      "step": 68
    },
    {
      "epoch": 0.5774058577405857,
      "grad_norm": 0.5939972996711731,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.1251,
      "step": 69
    },
    {
      "epoch": 0.5857740585774058,
      "grad_norm": 0.5252910852432251,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.1211,
      "step": 70
    },
    {
      "epoch": 0.5941422594142259,
      "grad_norm": 0.5812236666679382,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.1478,
      "step": 71
    },
    {
      "epoch": 0.602510460251046,
      "grad_norm": 0.46988946199417114,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.1213,
      "step": 72
    },
    {
      "epoch": 0.6108786610878661,
      "grad_norm": 0.4680638611316681,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.1172,
      "step": 73
    },
    {
      "epoch": 0.6192468619246861,
      "grad_norm": 0.4192774295806885,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.1102,
      "step": 74
    },
    {
      "epoch": 0.6276150627615062,
      "grad_norm": 0.4939725995063782,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.112,
      "step": 75
    },
    {
      "epoch": 0.6359832635983264,
      "grad_norm": 0.3755834102630615,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.0967,
      "step": 76
    },
    {
      "epoch": 0.6443514644351465,
      "grad_norm": 0.4578147530555725,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.092,
      "step": 77
    },
    {
      "epoch": 0.6527196652719666,
      "grad_norm": 0.34847137331962585,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.0872,
      "step": 78
    },
    {
      "epoch": 0.6610878661087866,
      "grad_norm": 0.6222680807113647,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.1088,
      "step": 79
    },
    {
      "epoch": 0.6694560669456067,
      "grad_norm": 0.3839572072029114,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.0883,
      "step": 80
    },
    {
      "epoch": 0.6778242677824268,
      "grad_norm": 0.6254891157150269,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.0723,
      "step": 81
    },
    {
      "epoch": 0.6861924686192469,
      "grad_norm": 0.4347880780696869,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.0798,
      "step": 82
    },
    {
      "epoch": 0.694560669456067,
      "grad_norm": 0.5865311622619629,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.0801,
      "step": 83
    },
    {
      "epoch": 0.702928870292887,
      "grad_norm": 0.3609165549278259,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.056,
      "step": 84
    },
    {
      "epoch": 0.7112970711297071,
      "grad_norm": 0.5713534951210022,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.0781,
      "step": 85
    },
    {
      "epoch": 0.7196652719665272,
      "grad_norm": 0.8238276243209839,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.0878,
      "step": 86
    },
    {
      "epoch": 0.7280334728033473,
      "grad_norm": 0.7100816369056702,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.0299,
      "step": 87
    },
    {
      "epoch": 0.7364016736401674,
      "grad_norm": 1.6119959354400635,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.1945,
      "step": 88
    },
    {
      "epoch": 0.7447698744769874,
      "grad_norm": 1.3309483528137207,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.2027,
      "step": 89
    },
    {
      "epoch": 0.7531380753138075,
      "grad_norm": 1.1444945335388184,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.1986,
      "step": 90
    },
    {
      "epoch": 0.7615062761506276,
      "grad_norm": 0.9420828223228455,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.1783,
      "step": 91
    },
    {
      "epoch": 0.7698744769874477,
      "grad_norm": 0.7960387468338013,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.1642,
      "step": 92
    },
    {
      "epoch": 0.7782426778242678,
      "grad_norm": 0.6485269069671631,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.1292,
      "step": 93
    },
    {
      "epoch": 0.7866108786610879,
      "grad_norm": 0.7513681650161743,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.1737,
      "step": 94
    },
    {
      "epoch": 0.7949790794979079,
      "grad_norm": 0.6703801155090332,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.1357,
      "step": 95
    },
    {
      "epoch": 0.803347280334728,
      "grad_norm": 0.7546550035476685,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.1346,
      "step": 96
    },
    {
      "epoch": 0.8117154811715481,
      "grad_norm": 0.5935238003730774,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.1048,
      "step": 97
    },
    {
      "epoch": 0.8200836820083682,
      "grad_norm": 0.4116928279399872,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.0998,
      "step": 98
    },
    {
      "epoch": 0.8284518828451883,
      "grad_norm": 0.5409568548202515,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.1126,
      "step": 99
    },
    {
      "epoch": 0.8368200836820083,
      "grad_norm": 0.5066820979118347,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.1014,
      "step": 100
    },
    {
      "epoch": 0.8368200836820083,
      "eval_loss": 0.10415530204772949,
      "eval_runtime": 4.076,
      "eval_samples_per_second": 49.558,
      "eval_steps_per_second": 12.512,
      "step": 100
    },
    {
      "epoch": 0.8451882845188284,
      "grad_norm": 0.40647611021995544,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.1071,
      "step": 101
    },
    {
      "epoch": 0.8535564853556485,
      "grad_norm": 0.3912857472896576,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.0873,
      "step": 102
    },
    {
      "epoch": 0.8619246861924686,
      "grad_norm": 0.4201953411102295,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.0937,
      "step": 103
    },
    {
      "epoch": 0.8702928870292888,
      "grad_norm": 0.45583081245422363,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.0906,
      "step": 104
    },
    {
      "epoch": 0.8786610878661087,
      "grad_norm": 0.39570245146751404,
      "learning_rate": 5e-05,
      "loss": 0.0873,
      "step": 105
    },
    {
      "epoch": 0.8870292887029289,
      "grad_norm": 0.45384737849235535,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.1021,
      "step": 106
    },
    {
      "epoch": 0.895397489539749,
      "grad_norm": 0.40736356377601624,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.0887,
      "step": 107
    },
    {
      "epoch": 0.9037656903765691,
      "grad_norm": 0.42815500497817993,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.0755,
      "step": 108
    },
    {
      "epoch": 0.9121338912133892,
      "grad_norm": 0.3796560764312744,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.0783,
      "step": 109
    },
    {
      "epoch": 0.9205020920502092,
      "grad_norm": 0.43120044469833374,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.079,
      "step": 110
    },
    {
      "epoch": 0.9288702928870293,
      "grad_norm": 0.4474979341030121,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.0907,
      "step": 111
    },
    {
      "epoch": 0.9372384937238494,
      "grad_norm": 0.3411693274974823,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.0518,
      "step": 112
    },
    {
      "epoch": 0.9456066945606695,
      "grad_norm": 0.430654913187027,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.0606,
      "step": 113
    },
    {
      "epoch": 0.9539748953974896,
      "grad_norm": 0.4269810616970062,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.0623,
      "step": 114
    },
    {
      "epoch": 0.9623430962343096,
      "grad_norm": 0.4438549876213074,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.0568,
      "step": 115
    },
    {
      "epoch": 0.9707112970711297,
      "grad_norm": 0.9744530916213989,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.0582,
      "step": 116
    },
    {
      "epoch": 0.9790794979079498,
      "grad_norm": 0.8297134041786194,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.1539,
      "step": 117
    },
    {
      "epoch": 0.9874476987447699,
      "grad_norm": 0.795654833316803,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.1596,
      "step": 118
    },
    {
      "epoch": 0.99581589958159,
      "grad_norm": 0.4995171129703522,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.0761,
      "step": 119
    },
    {
      "epoch": 1.006276150627615,
      "grad_norm": 0.9617952704429626,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.138,
      "step": 120
    },
    {
      "epoch": 1.0146443514644352,
      "grad_norm": 0.5126711130142212,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.1132,
      "step": 121
    },
    {
      "epoch": 1.0230125523012552,
      "grad_norm": 0.4848877191543579,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.1071,
      "step": 122
    },
    {
      "epoch": 1.0313807531380754,
      "grad_norm": 0.44112974405288696,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.1004,
      "step": 123
    },
    {
      "epoch": 1.0397489539748954,
      "grad_norm": 0.5203883647918701,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.1179,
      "step": 124
    },
    {
      "epoch": 1.0481171548117154,
      "grad_norm": 0.4299384653568268,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.1128,
      "step": 125
    },
    {
      "epoch": 1.0564853556485356,
      "grad_norm": 0.36720457673072815,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.0803,
      "step": 126
    },
    {
      "epoch": 1.0648535564853556,
      "grad_norm": 0.38658344745635986,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.0782,
      "step": 127
    },
    {
      "epoch": 1.0732217573221758,
      "grad_norm": 0.42113763093948364,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.0871,
      "step": 128
    },
    {
      "epoch": 1.0815899581589958,
      "grad_norm": 0.31596583127975464,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.0754,
      "step": 129
    },
    {
      "epoch": 1.0899581589958158,
      "grad_norm": 0.4931642711162567,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.083,
      "step": 130
    },
    {
      "epoch": 1.098326359832636,
      "grad_norm": 0.5247243046760559,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.0955,
      "step": 131
    },
    {
      "epoch": 1.106694560669456,
      "grad_norm": 0.5747186541557312,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.1038,
      "step": 132
    },
    {
      "epoch": 1.1150627615062763,
      "grad_norm": 0.48280155658721924,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.0817,
      "step": 133
    },
    {
      "epoch": 1.1234309623430963,
      "grad_norm": 0.40240225195884705,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.0714,
      "step": 134
    },
    {
      "epoch": 1.1317991631799162,
      "grad_norm": 0.5216189026832581,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.0755,
      "step": 135
    },
    {
      "epoch": 1.1401673640167365,
      "grad_norm": 0.4143962860107422,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.0726,
      "step": 136
    },
    {
      "epoch": 1.1485355648535565,
      "grad_norm": 0.33933359384536743,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.057,
      "step": 137
    },
    {
      "epoch": 1.1569037656903767,
      "grad_norm": 0.44673094153404236,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.0569,
      "step": 138
    },
    {
      "epoch": 1.1652719665271967,
      "grad_norm": 0.5124291181564331,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.0813,
      "step": 139
    },
    {
      "epoch": 1.1736401673640167,
      "grad_norm": 0.5399123430252075,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.0778,
      "step": 140
    },
    {
      "epoch": 1.1820083682008369,
      "grad_norm": 0.3727053999900818,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.0579,
      "step": 141
    },
    {
      "epoch": 1.1903765690376569,
      "grad_norm": 0.4652441740036011,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.0662,
      "step": 142
    },
    {
      "epoch": 1.198744769874477,
      "grad_norm": 0.47085118293762207,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.0554,
      "step": 143
    },
    {
      "epoch": 1.207112970711297,
      "grad_norm": 0.5209120512008667,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.0774,
      "step": 144
    },
    {
      "epoch": 1.215481171548117,
      "grad_norm": 0.4766678214073181,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.0639,
      "step": 145
    },
    {
      "epoch": 1.2238493723849373,
      "grad_norm": 0.4801768362522125,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.0537,
      "step": 146
    },
    {
      "epoch": 1.2322175732217573,
      "grad_norm": 0.5006198883056641,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.0554,
      "step": 147
    },
    {
      "epoch": 1.2405857740585775,
      "grad_norm": 0.25908923149108887,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.0186,
      "step": 148
    },
    {
      "epoch": 1.2489539748953975,
      "grad_norm": 0.3488486409187317,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.0464,
      "step": 149
    },
    {
      "epoch": 1.2573221757322175,
      "grad_norm": 0.339759886264801,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.0755,
      "step": 150
    },
    {
      "epoch": 1.2573221757322175,
      "eval_loss": 0.07540635019540787,
      "eval_runtime": 4.0734,
      "eval_samples_per_second": 49.59,
      "eval_steps_per_second": 12.52,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.19972177870848e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}