|
{
  "best_metric": 0.6717210412025452,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.026522853859075237,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00017681902572716824,
      "grad_norm": 31.273752212524414,
      "learning_rate": 1e-05,
      "loss": 5.7904,
      "step": 1
    },
    {
      "epoch": 0.00017681902572716824,
      "eval_loss": 1.5057493448257446,
      "eval_runtime": 707.0176,
      "eval_samples_per_second": 13.472,
      "eval_steps_per_second": 3.369,
      "step": 1
    },
    {
      "epoch": 0.0003536380514543365,
      "grad_norm": 33.96555709838867,
      "learning_rate": 2e-05,
      "loss": 6.1229,
      "step": 2
    },
    {
      "epoch": 0.0005304570771815047,
      "grad_norm": 22.80571937561035,
      "learning_rate": 3e-05,
      "loss": 5.831,
      "step": 3
    },
    {
      "epoch": 0.000707276102908673,
      "grad_norm": 17.077383041381836,
      "learning_rate": 4e-05,
      "loss": 4.8649,
      "step": 4
    },
    {
      "epoch": 0.0008840951286358412,
      "grad_norm": 15.064048767089844,
      "learning_rate": 5e-05,
      "loss": 4.4974,
      "step": 5
    },
    {
      "epoch": 0.0010609141543630094,
      "grad_norm": 12.101446151733398,
      "learning_rate": 6e-05,
      "loss": 3.9707,
      "step": 6
    },
    {
      "epoch": 0.0012377331800901778,
      "grad_norm": 10.376118659973145,
      "learning_rate": 7e-05,
      "loss": 3.7705,
      "step": 7
    },
    {
      "epoch": 0.001414552205817346,
      "grad_norm": 10.354647636413574,
      "learning_rate": 8e-05,
      "loss": 3.6031,
      "step": 8
    },
    {
      "epoch": 0.001591371231544514,
      "grad_norm": 10.469047546386719,
      "learning_rate": 9e-05,
      "loss": 3.4053,
      "step": 9
    },
    {
      "epoch": 0.0017681902572716825,
      "grad_norm": 9.278939247131348,
      "learning_rate": 0.0001,
      "loss": 3.659,
      "step": 10
    },
    {
      "epoch": 0.0019450092829988506,
      "grad_norm": 15.399981498718262,
      "learning_rate": 9.999316524962345e-05,
      "loss": 3.6484,
      "step": 11
    },
    {
      "epoch": 0.002121828308726019,
      "grad_norm": 10.160966873168945,
      "learning_rate": 9.997266286704631e-05,
      "loss": 3.5971,
      "step": 12
    },
    {
      "epoch": 0.002298647334453187,
      "grad_norm": 7.888012886047363,
      "learning_rate": 9.993849845741524e-05,
      "loss": 3.4807,
      "step": 13
    },
    {
      "epoch": 0.0024754663601803555,
      "grad_norm": 7.219862937927246,
      "learning_rate": 9.989068136093873e-05,
      "loss": 3.3483,
      "step": 14
    },
    {
      "epoch": 0.0026522853859075237,
      "grad_norm": 6.999874114990234,
      "learning_rate": 9.98292246503335e-05,
      "loss": 3.27,
      "step": 15
    },
    {
      "epoch": 0.002829104411634692,
      "grad_norm": 6.7764692306518555,
      "learning_rate": 9.975414512725057e-05,
      "loss": 3.2381,
      "step": 16
    },
    {
      "epoch": 0.00300592343736186,
      "grad_norm": 7.229355812072754,
      "learning_rate": 9.966546331768191e-05,
      "loss": 3.1572,
      "step": 17
    },
    {
      "epoch": 0.003182742463089028,
      "grad_norm": 7.318708419799805,
      "learning_rate": 9.956320346634876e-05,
      "loss": 3.2629,
      "step": 18
    },
    {
      "epoch": 0.0033595614888161968,
      "grad_norm": 7.011357307434082,
      "learning_rate": 9.944739353007344e-05,
      "loss": 3.1216,
      "step": 19
    },
    {
      "epoch": 0.003536380514543365,
      "grad_norm": 6.351388454437256,
      "learning_rate": 9.931806517013612e-05,
      "loss": 3.1227,
      "step": 20
    },
    {
      "epoch": 0.003713199540270533,
      "grad_norm": 6.193704128265381,
      "learning_rate": 9.917525374361912e-05,
      "loss": 3.0576,
      "step": 21
    },
    {
      "epoch": 0.0038900185659977013,
      "grad_norm": 6.23767614364624,
      "learning_rate": 9.901899829374047e-05,
      "loss": 3.1666,
      "step": 22
    },
    {
      "epoch": 0.00406683759172487,
      "grad_norm": 5.724069118499756,
      "learning_rate": 9.884934153917997e-05,
      "loss": 3.0868,
      "step": 23
    },
    {
      "epoch": 0.004243656617452038,
      "grad_norm": 6.114058494567871,
      "learning_rate": 9.86663298624003e-05,
      "loss": 3.1426,
      "step": 24
    },
    {
      "epoch": 0.004420475643179206,
      "grad_norm": 6.249318599700928,
      "learning_rate": 9.847001329696653e-05,
      "loss": 3.1009,
      "step": 25
    },
    {
      "epoch": 0.004597294668906374,
      "grad_norm": 5.979328632354736,
      "learning_rate": 9.826044551386744e-05,
      "loss": 3.0182,
      "step": 26
    },
    {
      "epoch": 0.0047741136946335425,
      "grad_norm": 5.138391971588135,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.7047,
      "step": 27
    },
    {
      "epoch": 0.004950932720360711,
      "grad_norm": 5.40130090713501,
      "learning_rate": 9.780178907671789e-05,
      "loss": 3.1115,
      "step": 28
    },
    {
      "epoch": 0.005127751746087879,
      "grad_norm": 5.664342403411865,
      "learning_rate": 9.755282581475769e-05,
      "loss": 2.8735,
      "step": 29
    },
    {
      "epoch": 0.005304570771815047,
      "grad_norm": 5.740615367889404,
      "learning_rate": 9.729086208503174e-05,
      "loss": 3.1383,
      "step": 30
    },
    {
      "epoch": 0.005481389797542215,
      "grad_norm": 5.822990417480469,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.7514,
      "step": 31
    },
    {
      "epoch": 0.005658208823269384,
      "grad_norm": 6.541563034057617,
      "learning_rate": 9.672822322997305e-05,
      "loss": 2.896,
      "step": 32
    },
    {
      "epoch": 0.005835027848996552,
      "grad_norm": 5.708773612976074,
      "learning_rate": 9.642770192448536e-05,
      "loss": 2.9389,
      "step": 33
    },
    {
      "epoch": 0.00601184687472372,
      "grad_norm": 5.139838218688965,
      "learning_rate": 9.611448774886924e-05,
      "loss": 2.85,
      "step": 34
    },
    {
      "epoch": 0.006188665900450889,
      "grad_norm": 4.9747796058654785,
      "learning_rate": 9.578866633275288e-05,
      "loss": 2.9934,
      "step": 35
    },
    {
      "epoch": 0.006365484926178056,
      "grad_norm": 4.744929313659668,
      "learning_rate": 9.545032675245813e-05,
      "loss": 2.893,
      "step": 36
    },
    {
      "epoch": 0.006542303951905225,
      "grad_norm": 5.647818088531494,
      "learning_rate": 9.509956150664796e-05,
      "loss": 3.0005,
      "step": 37
    },
    {
      "epoch": 0.0067191229776323936,
      "grad_norm": 5.120396614074707,
      "learning_rate": 9.473646649103818e-05,
      "loss": 2.8626,
      "step": 38
    },
    {
      "epoch": 0.006895942003359561,
      "grad_norm": 5.247471809387207,
      "learning_rate": 9.43611409721806e-05,
      "loss": 2.9666,
      "step": 39
    },
    {
      "epoch": 0.00707276102908673,
      "grad_norm": 5.75712251663208,
      "learning_rate": 9.397368756032445e-05,
      "loss": 2.7657,
      "step": 40
    },
    {
      "epoch": 0.007249580054813898,
      "grad_norm": 5.175816535949707,
      "learning_rate": 9.357421218136386e-05,
      "loss": 2.9367,
      "step": 41
    },
    {
      "epoch": 0.007426399080541066,
      "grad_norm": 5.635525703430176,
      "learning_rate": 9.316282404787871e-05,
      "loss": 2.8576,
      "step": 42
    },
    {
      "epoch": 0.007603218106268235,
      "grad_norm": 4.829882621765137,
      "learning_rate": 9.273963562927695e-05,
      "loss": 2.6487,
      "step": 43
    },
    {
      "epoch": 0.0077800371319954025,
      "grad_norm": 4.661524295806885,
      "learning_rate": 9.230476262104677e-05,
      "loss": 2.7636,
      "step": 44
    },
    {
      "epoch": 0.007956856157722571,
      "grad_norm": 5.161839008331299,
      "learning_rate": 9.185832391312644e-05,
      "loss": 2.7386,
      "step": 45
    },
    {
      "epoch": 0.00813367518344974,
      "grad_norm": 5.641641616821289,
      "learning_rate": 9.140044155740101e-05,
      "loss": 2.6801,
      "step": 46
    },
    {
      "epoch": 0.008310494209176908,
      "grad_norm": 5.284437656402588,
      "learning_rate": 9.093124073433463e-05,
      "loss": 2.7854,
      "step": 47
    },
    {
      "epoch": 0.008487313234904075,
      "grad_norm": 4.893701076507568,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.6241,
      "step": 48
    },
    {
      "epoch": 0.008664132260631244,
      "grad_norm": 5.710663795471191,
      "learning_rate": 8.995939984474624e-05,
      "loss": 2.6486,
      "step": 49
    },
    {
      "epoch": 0.008840951286358412,
      "grad_norm": 5.808746337890625,
      "learning_rate": 8.945702546981969e-05,
      "loss": 2.8028,
      "step": 50
    },
    {
      "epoch": 0.008840951286358412,
      "eval_loss": 0.8329792022705078,
      "eval_runtime": 712.2685,
      "eval_samples_per_second": 13.373,
      "eval_steps_per_second": 3.344,
      "step": 50
    },
    {
      "epoch": 0.009017770312085581,
      "grad_norm": 11.850320816040039,
      "learning_rate": 8.894386393810563e-05,
      "loss": 4.0343,
      "step": 51
    },
    {
      "epoch": 0.009194589337812748,
      "grad_norm": 7.7840576171875,
      "learning_rate": 8.842005554284296e-05,
      "loss": 3.4929,
      "step": 52
    },
    {
      "epoch": 0.009371408363539916,
      "grad_norm": 4.552064895629883,
      "learning_rate": 8.788574348801675e-05,
      "loss": 3.1584,
      "step": 53
    },
    {
      "epoch": 0.009548227389267085,
      "grad_norm": 7.188632965087891,
      "learning_rate": 8.73410738492077e-05,
      "loss": 3.3249,
      "step": 54
    },
    {
      "epoch": 0.009725046414994254,
      "grad_norm": 5.568719863891602,
      "learning_rate": 8.678619553365659e-05,
      "loss": 3.0549,
      "step": 55
    },
    {
      "epoch": 0.009901865440721422,
      "grad_norm": 4.3227972984313965,
      "learning_rate": 8.622126023955446e-05,
      "loss": 3.1477,
      "step": 56
    },
    {
      "epoch": 0.010078684466448589,
      "grad_norm": 4.280993938446045,
      "learning_rate": 8.564642241456986e-05,
      "loss": 3.0837,
      "step": 57
    },
    {
      "epoch": 0.010255503492175758,
      "grad_norm": 4.789489269256592,
      "learning_rate": 8.506183921362443e-05,
      "loss": 3.0091,
      "step": 58
    },
    {
      "epoch": 0.010432322517902926,
      "grad_norm": 5.41645622253418,
      "learning_rate": 8.44676704559283e-05,
      "loss": 3.1189,
      "step": 59
    },
    {
      "epoch": 0.010609141543630095,
      "grad_norm": 4.8120269775390625,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.991,
      "step": 60
    },
    {
      "epoch": 0.010785960569357263,
      "grad_norm": 4.251246929168701,
      "learning_rate": 8.32512286056924e-05,
      "loss": 2.8324,
      "step": 61
    },
    {
      "epoch": 0.01096277959508443,
      "grad_norm": 4.403325080871582,
      "learning_rate": 8.262928807620843e-05,
      "loss": 3.0057,
      "step": 62
    },
    {
      "epoch": 0.011139598620811599,
      "grad_norm": 4.967185974121094,
      "learning_rate": 8.199842702516583e-05,
      "loss": 3.0389,
      "step": 63
    },
    {
      "epoch": 0.011316417646538767,
      "grad_norm": 4.585970401763916,
      "learning_rate": 8.135881792367686e-05,
      "loss": 2.8278,
      "step": 64
    },
    {
      "epoch": 0.011493236672265936,
      "grad_norm": 4.517243385314941,
      "learning_rate": 8.07106356344834e-05,
      "loss": 3.1275,
      "step": 65
    },
    {
      "epoch": 0.011670055697993105,
      "grad_norm": 4.599977970123291,
      "learning_rate": 8.005405736415126e-05,
      "loss": 2.8354,
      "step": 66
    },
    {
      "epoch": 0.011846874723720272,
      "grad_norm": 4.074687480926514,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.7571,
      "step": 67
    },
    {
      "epoch": 0.01202369374944744,
      "grad_norm": 4.451271057128906,
      "learning_rate": 7.871643313414718e-05,
      "loss": 2.9244,
      "step": 68
    },
    {
      "epoch": 0.012200512775174609,
      "grad_norm": 3.967043161392212,
      "learning_rate": 7.803575286758364e-05,
      "loss": 2.7281,
      "step": 69
    },
    {
      "epoch": 0.012377331800901777,
      "grad_norm": 4.431678295135498,
      "learning_rate": 7.734740790612136e-05,
      "loss": 2.8933,
      "step": 70
    },
    {
      "epoch": 0.012554150826628946,
      "grad_norm": 4.48666524887085,
      "learning_rate": 7.66515864363997e-05,
      "loss": 2.9408,
      "step": 71
    },
    {
      "epoch": 0.012730969852356113,
      "grad_norm": 4.583484172821045,
      "learning_rate": 7.594847868906076e-05,
      "loss": 2.8614,
      "step": 72
    },
    {
      "epoch": 0.012907788878083281,
      "grad_norm": 4.457082271575928,
      "learning_rate": 7.52382768867422e-05,
      "loss": 3.0324,
      "step": 73
    },
    {
      "epoch": 0.01308460790381045,
      "grad_norm": 4.137736797332764,
      "learning_rate": 7.452117519152542e-05,
      "loss": 2.8703,
      "step": 74
    },
    {
      "epoch": 0.013261426929537619,
      "grad_norm": 4.20756721496582,
      "learning_rate": 7.379736965185368e-05,
      "loss": 2.7438,
      "step": 75
    },
    {
      "epoch": 0.013438245955264787,
      "grad_norm": 4.50335168838501,
      "learning_rate": 7.30670581489344e-05,
      "loss": 2.836,
      "step": 76
    },
    {
      "epoch": 0.013615064980991954,
      "grad_norm": 3.9836575984954834,
      "learning_rate": 7.233044034264034e-05,
      "loss": 2.7682,
      "step": 77
    },
    {
      "epoch": 0.013791884006719123,
      "grad_norm": 4.124038219451904,
      "learning_rate": 7.158771761692464e-05,
      "loss": 2.9161,
      "step": 78
    },
    {
      "epoch": 0.013968703032446291,
      "grad_norm": 4.177953243255615,
      "learning_rate": 7.083909302476453e-05,
      "loss": 2.8599,
      "step": 79
    },
    {
      "epoch": 0.01414552205817346,
      "grad_norm": 4.370811939239502,
      "learning_rate": 7.008477123264848e-05,
      "loss": 2.9956,
      "step": 80
    },
    {
      "epoch": 0.014322341083900628,
      "grad_norm": 4.351916313171387,
      "learning_rate": 6.932495846462261e-05,
      "loss": 2.7608,
      "step": 81
    },
    {
      "epoch": 0.014499160109627795,
      "grad_norm": 4.02456521987915,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.8675,
      "step": 82
    },
    {
      "epoch": 0.014675979135354964,
      "grad_norm": 3.745039701461792,
      "learning_rate": 6.778969234612584e-05,
      "loss": 2.735,
      "step": 83
    },
    {
      "epoch": 0.014852798161082132,
      "grad_norm": 4.1861042976379395,
      "learning_rate": 6.701465872208216e-05,
      "loss": 2.7153,
      "step": 84
    },
    {
      "epoch": 0.015029617186809301,
      "grad_norm": 4.138434886932373,
      "learning_rate": 6.623497346023418e-05,
      "loss": 2.9203,
      "step": 85
    },
    {
      "epoch": 0.01520643621253647,
      "grad_norm": 4.018141746520996,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.6352,
      "step": 86
    },
    {
      "epoch": 0.015383255238263636,
      "grad_norm": 4.0129923820495605,
      "learning_rate": 6.466250186922325e-05,
      "loss": 2.7209,
      "step": 87
    },
    {
      "epoch": 0.015560074263990805,
      "grad_norm": 4.6002888679504395,
      "learning_rate": 6.387014543809223e-05,
      "loss": 2.7132,
      "step": 88
    },
    {
      "epoch": 0.015736893289717974,
      "grad_norm": 4.2588067054748535,
      "learning_rate": 6.307399704769099e-05,
      "loss": 2.4424,
      "step": 89
    },
    {
      "epoch": 0.015913712315445142,
      "grad_norm": 4.202187538146973,
      "learning_rate": 6.227427435703997e-05,
      "loss": 2.5436,
      "step": 90
    },
    {
      "epoch": 0.01609053134117231,
      "grad_norm": 4.206346035003662,
      "learning_rate": 6.147119600233758e-05,
      "loss": 2.5855,
      "step": 91
    },
    {
      "epoch": 0.01626735036689948,
      "grad_norm": 4.269227981567383,
      "learning_rate": 6.066498153718735e-05,
      "loss": 2.712,
      "step": 92
    },
    {
      "epoch": 0.016444169392626648,
      "grad_norm": 3.9102425575256348,
      "learning_rate": 5.985585137257401e-05,
      "loss": 2.4231,
      "step": 93
    },
    {
      "epoch": 0.016620988418353817,
      "grad_norm": 4.122182846069336,
      "learning_rate": 5.90440267166055e-05,
      "loss": 2.4536,
      "step": 94
    },
    {
      "epoch": 0.01679780744408098,
      "grad_norm": 4.083293437957764,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 2.6099,
      "step": 95
    },
    {
      "epoch": 0.01697462646980815,
      "grad_norm": 4.000704765319824,
      "learning_rate": 5.74131823855921e-05,
      "loss": 2.5202,
      "step": 96
    },
    {
      "epoch": 0.01715144549553532,
      "grad_norm": 4.2389092445373535,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 2.5216,
      "step": 97
    },
    {
      "epoch": 0.017328264521262488,
      "grad_norm": 4.1938605308532715,
      "learning_rate": 5.577423184847932e-05,
      "loss": 2.4519,
      "step": 98
    },
    {
      "epoch": 0.017505083546989656,
      "grad_norm": 4.141265392303467,
      "learning_rate": 5.495227651252315e-05,
      "loss": 2.425,
      "step": 99
    },
    {
      "epoch": 0.017681902572716825,
      "grad_norm": 4.398811340332031,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 2.416,
      "step": 100
    },
    {
      "epoch": 0.017681902572716825,
      "eval_loss": 0.7194002270698547,
      "eval_runtime": 712.028,
      "eval_samples_per_second": 13.377,
      "eval_steps_per_second": 3.345,
      "step": 100
    },
    {
      "epoch": 0.017858721598443993,
      "grad_norm": 7.377161026000977,
      "learning_rate": 5.330452921628497e-05,
      "loss": 3.2334,
      "step": 101
    },
    {
      "epoch": 0.018035540624171162,
      "grad_norm": 5.567739009857178,
      "learning_rate": 5.247918773366112e-05,
      "loss": 3.3289,
      "step": 102
    },
    {
      "epoch": 0.01821235964989833,
      "grad_norm": 4.169541358947754,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.9622,
      "step": 103
    },
    {
      "epoch": 0.018389178675625496,
      "grad_norm": 5.395923614501953,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 3.1449,
      "step": 104
    },
    {
      "epoch": 0.018565997701352664,
      "grad_norm": 4.531886577606201,
      "learning_rate": 5e-05,
      "loss": 3.1416,
      "step": 105
    },
    {
      "epoch": 0.018742816727079833,
      "grad_norm": 3.8139052391052246,
      "learning_rate": 4.917330276168208e-05,
      "loss": 2.9983,
      "step": 106
    },
    {
      "epoch": 0.018919635752807,
      "grad_norm": 3.7022151947021484,
      "learning_rate": 4.834683153413459e-05,
      "loss": 3.0179,
      "step": 107
    },
    {
      "epoch": 0.01909645477853417,
      "grad_norm": 3.809081792831421,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 2.8028,
      "step": 108
    },
    {
      "epoch": 0.01927327380426134,
      "grad_norm": 3.4583377838134766,
      "learning_rate": 4.669547078371504e-05,
      "loss": 2.7208,
      "step": 109
    },
    {
      "epoch": 0.019450092829988507,
      "grad_norm": 3.455575466156006,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 2.8423,
      "step": 110
    },
    {
      "epoch": 0.019626911855715676,
      "grad_norm": 3.6886868476867676,
      "learning_rate": 4.504772348747687e-05,
      "loss": 2.6708,
      "step": 111
    },
    {
      "epoch": 0.019803730881442844,
      "grad_norm": 4.032835960388184,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 2.9931,
      "step": 112
    },
    {
      "epoch": 0.019980549907170013,
      "grad_norm": 3.7966928482055664,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 2.8558,
      "step": 113
    },
    {
      "epoch": 0.020157368932897178,
      "grad_norm": 3.60139799118042,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 2.4373,
      "step": 114
    },
    {
      "epoch": 0.020334187958624347,
      "grad_norm": 3.8890609741210938,
      "learning_rate": 4.17702704859633e-05,
      "loss": 2.758,
      "step": 115
    },
    {
      "epoch": 0.020511006984351515,
      "grad_norm": 3.8674120903015137,
      "learning_rate": 4.095597328339452e-05,
      "loss": 2.8288,
      "step": 116
    },
    {
      "epoch": 0.020687826010078684,
      "grad_norm": 3.7245192527770996,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 2.7111,
      "step": 117
    },
    {
      "epoch": 0.020864645035805852,
      "grad_norm": 3.8925111293792725,
      "learning_rate": 3.933501846281267e-05,
      "loss": 2.782,
      "step": 118
    },
    {
      "epoch": 0.02104146406153302,
      "grad_norm": 3.7165839672088623,
      "learning_rate": 3.852880399766243e-05,
      "loss": 2.7444,
      "step": 119
    },
    {
      "epoch": 0.02121828308726019,
      "grad_norm": 4.107248783111572,
      "learning_rate": 3.772572564296005e-05,
      "loss": 2.7172,
      "step": 120
    },
    {
      "epoch": 0.021395102112987358,
      "grad_norm": 3.8568592071533203,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 2.5638,
      "step": 121
    },
    {
      "epoch": 0.021571921138714527,
      "grad_norm": 3.638155460357666,
      "learning_rate": 3.612985456190778e-05,
      "loss": 2.6479,
      "step": 122
    },
    {
      "epoch": 0.021748740164441695,
      "grad_norm": 3.860167980194092,
      "learning_rate": 3.533749813077677e-05,
      "loss": 2.6039,
      "step": 123
    },
    {
      "epoch": 0.02192555919016886,
      "grad_norm": 3.792959690093994,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 2.6498,
      "step": 124
    },
    {
      "epoch": 0.02210237821589603,
      "grad_norm": 4.04556941986084,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 2.632,
      "step": 125
    },
    {
      "epoch": 0.022279197241623198,
      "grad_norm": 6.900913238525391,
      "learning_rate": 3.298534127791785e-05,
      "loss": 2.5513,
      "step": 126
    },
    {
      "epoch": 0.022456016267350366,
      "grad_norm": 3.745027780532837,
      "learning_rate": 3.221030765387417e-05,
      "loss": 2.6285,
      "step": 127
    },
    {
      "epoch": 0.022632835293077535,
      "grad_norm": 3.830739974975586,
      "learning_rate": 3.144013755408895e-05,
      "loss": 2.7284,
      "step": 128
    },
    {
      "epoch": 0.022809654318804704,
      "grad_norm": 3.877413511276245,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 2.5802,
      "step": 129
    },
    {
      "epoch": 0.022986473344531872,
      "grad_norm": 3.963332414627075,
      "learning_rate": 2.991522876735154e-05,
      "loss": 2.8791,
      "step": 130
    },
    {
      "epoch": 0.02316329237025904,
      "grad_norm": 3.840338706970215,
      "learning_rate": 2.916090697523549e-05,
      "loss": 2.4996,
      "step": 131
    },
    {
      "epoch": 0.02334011139598621,
      "grad_norm": 3.7065539360046387,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 2.5997,
      "step": 132
    },
    {
      "epoch": 0.023516930421713378,
      "grad_norm": 3.834458589553833,
      "learning_rate": 2.766955965735968e-05,
      "loss": 2.4962,
      "step": 133
    },
    {
      "epoch": 0.023693749447440543,
      "grad_norm": 3.911560297012329,
      "learning_rate": 2.693294185106562e-05,
      "loss": 2.5727,
      "step": 134
    },
    {
      "epoch": 0.02387056847316771,
      "grad_norm": 3.85789155960083,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 2.4992,
      "step": 135
    },
    {
      "epoch": 0.02404738749889488,
      "grad_norm": 4.070404529571533,
      "learning_rate": 2.547882480847461e-05,
      "loss": 2.7064,
      "step": 136
    },
    {
      "epoch": 0.02422420652462205,
      "grad_norm": 3.5851242542266846,
      "learning_rate": 2.476172311325783e-05,
      "loss": 2.449,
      "step": 137
    },
    {
      "epoch": 0.024401025550349217,
      "grad_norm": 3.727949857711792,
      "learning_rate": 2.405152131093926e-05,
      "loss": 2.6118,
      "step": 138
    },
    {
      "epoch": 0.024577844576076386,
      "grad_norm": 3.50991153717041,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 2.431,
      "step": 139
    },
    {
      "epoch": 0.024754663601803555,
      "grad_norm": 3.723494529724121,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 2.2647,
      "step": 140
    },
    {
      "epoch": 0.024931482627530723,
      "grad_norm": 3.584364175796509,
      "learning_rate": 2.196424713241637e-05,
      "loss": 2.4095,
      "step": 141
    },
    {
      "epoch": 0.025108301653257892,
      "grad_norm": 3.8995087146759033,
      "learning_rate": 2.128356686585282e-05,
      "loss": 2.5542,
      "step": 142
    },
    {
      "epoch": 0.02528512067898506,
      "grad_norm": 3.8994150161743164,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.3609,
      "step": 143
    },
    {
      "epoch": 0.025461939704712225,
      "grad_norm": 3.8739383220672607,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 2.4378,
      "step": 144
    },
    {
      "epoch": 0.025638758730439394,
      "grad_norm": 3.730926513671875,
      "learning_rate": 1.928936436551661e-05,
      "loss": 2.3718,
      "step": 145
    },
    {
      "epoch": 0.025815577756166563,
      "grad_norm": 4.219221591949463,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 2.5712,
      "step": 146
    },
    {
      "epoch": 0.02599239678189373,
      "grad_norm": 4.296256065368652,
      "learning_rate": 1.800157297483417e-05,
      "loss": 2.6027,
      "step": 147
    },
    {
      "epoch": 0.0261692158076209,
      "grad_norm": 3.8536908626556396,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 2.2772,
      "step": 148
    },
    {
      "epoch": 0.02634603483334807,
      "grad_norm": 3.92279314994812,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 2.2474,
      "step": 149
    },
    {
      "epoch": 0.026522853859075237,
      "grad_norm": 4.397715091705322,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 2.454,
      "step": 150
    },
    {
      "epoch": 0.026522853859075237,
      "eval_loss": 0.6717210412025452,
      "eval_runtime": 712.25,
      "eval_samples_per_second": 13.373,
      "eval_steps_per_second": 3.344,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.146499429400576e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}