{
  "best_metric": 2.4205310344696045,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.3009781790820166,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0030097817908201654,
      "grad_norm": 6.573893070220947,
      "learning_rate": 1e-05,
      "loss": 9.4986,
      "step": 1
    },
    {
      "epoch": 0.0030097817908201654,
      "eval_loss": 2.8468708992004395,
      "eval_runtime": 54.2783,
      "eval_samples_per_second": 10.317,
      "eval_steps_per_second": 2.579,
      "step": 1
    },
    {
      "epoch": 0.006019563581640331,
      "grad_norm": 7.830665111541748,
      "learning_rate": 2e-05,
      "loss": 9.5231,
      "step": 2
    },
    {
      "epoch": 0.009029345372460496,
      "grad_norm": 7.3165059089660645,
      "learning_rate": 3e-05,
      "loss": 8.9478,
      "step": 3
    },
    {
      "epoch": 0.012039127163280662,
      "grad_norm": 8.22419261932373,
      "learning_rate": 4e-05,
      "loss": 9.1723,
      "step": 4
    },
    {
      "epoch": 0.015048908954100828,
      "grad_norm": 7.707956314086914,
      "learning_rate": 5e-05,
      "loss": 9.4721,
      "step": 5
    },
    {
      "epoch": 0.01805869074492099,
      "grad_norm": 7.3733649253845215,
      "learning_rate": 6e-05,
      "loss": 9.1231,
      "step": 6
    },
    {
      "epoch": 0.021068472535741158,
      "grad_norm": 9.522957801818848,
      "learning_rate": 7e-05,
      "loss": 10.3674,
      "step": 7
    },
    {
      "epoch": 0.024078254326561323,
      "grad_norm": 7.464807987213135,
      "learning_rate": 8e-05,
      "loss": 8.7806,
      "step": 8
    },
    {
      "epoch": 0.02708803611738149,
      "grad_norm": 8.447348594665527,
      "learning_rate": 9e-05,
      "loss": 9.8534,
      "step": 9
    },
    {
      "epoch": 0.030097817908201655,
      "grad_norm": 9.508760452270508,
      "learning_rate": 0.0001,
      "loss": 10.1334,
      "step": 10
    },
    {
      "epoch": 0.03310759969902182,
      "grad_norm": 11.263378143310547,
      "learning_rate": 9.999316524962345e-05,
      "loss": 10.433,
      "step": 11
    },
    {
      "epoch": 0.03611738148984198,
      "grad_norm": 9.284879684448242,
      "learning_rate": 9.997266286704631e-05,
      "loss": 10.0197,
      "step": 12
    },
    {
      "epoch": 0.03912716328066215,
      "grad_norm": 8.84637451171875,
      "learning_rate": 9.993849845741524e-05,
      "loss": 9.5473,
      "step": 13
    },
    {
      "epoch": 0.042136945071482315,
      "grad_norm": 9.146723747253418,
      "learning_rate": 9.989068136093873e-05,
      "loss": 9.3743,
      "step": 14
    },
    {
      "epoch": 0.045146726862302484,
      "grad_norm": 8.942791938781738,
      "learning_rate": 9.98292246503335e-05,
      "loss": 9.7516,
      "step": 15
    },
    {
      "epoch": 0.04815650865312265,
      "grad_norm": 10.329294204711914,
      "learning_rate": 9.975414512725057e-05,
      "loss": 9.7756,
      "step": 16
    },
    {
      "epoch": 0.051166290443942816,
      "grad_norm": 8.851433753967285,
      "learning_rate": 9.966546331768191e-05,
      "loss": 9.7867,
      "step": 17
    },
    {
      "epoch": 0.05417607223476298,
      "grad_norm": 8.984696388244629,
      "learning_rate": 9.956320346634876e-05,
      "loss": 9.5581,
      "step": 18
    },
    {
      "epoch": 0.05718585402558315,
      "grad_norm": 9.131317138671875,
      "learning_rate": 9.944739353007344e-05,
      "loss": 9.5847,
      "step": 19
    },
    {
      "epoch": 0.06019563581640331,
      "grad_norm": 10.204080581665039,
      "learning_rate": 9.931806517013612e-05,
      "loss": 10.3269,
      "step": 20
    },
    {
      "epoch": 0.06320541760722348,
      "grad_norm": 8.885244369506836,
      "learning_rate": 9.917525374361912e-05,
      "loss": 9.7441,
      "step": 21
    },
    {
      "epoch": 0.06621519939804364,
      "grad_norm": 9.042747497558594,
      "learning_rate": 9.901899829374047e-05,
      "loss": 10.0208,
      "step": 22
    },
    {
      "epoch": 0.0692249811888638,
      "grad_norm": 9.553254127502441,
      "learning_rate": 9.884934153917997e-05,
      "loss": 10.0202,
      "step": 23
    },
    {
      "epoch": 0.07223476297968397,
      "grad_norm": 8.767007827758789,
      "learning_rate": 9.86663298624003e-05,
      "loss": 8.8363,
      "step": 24
    },
    {
      "epoch": 0.07524454477050414,
      "grad_norm": 9.454209327697754,
      "learning_rate": 9.847001329696653e-05,
      "loss": 9.7709,
      "step": 25
    },
    {
      "epoch": 0.0782543265613243,
      "grad_norm": 9.191224098205566,
      "learning_rate": 9.826044551386744e-05,
      "loss": 9.5502,
      "step": 26
    },
    {
      "epoch": 0.08126410835214447,
      "grad_norm": 9.49747085571289,
      "learning_rate": 9.803768380684242e-05,
      "loss": 9.6738,
      "step": 27
    },
    {
      "epoch": 0.08427389014296463,
      "grad_norm": 10.24111270904541,
      "learning_rate": 9.780178907671789e-05,
      "loss": 10.03,
      "step": 28
    },
    {
      "epoch": 0.0872836719337848,
      "grad_norm": 9.009526252746582,
      "learning_rate": 9.755282581475769e-05,
      "loss": 8.6197,
      "step": 29
    },
    {
      "epoch": 0.09029345372460497,
      "grad_norm": 11.295206069946289,
      "learning_rate": 9.729086208503174e-05,
      "loss": 10.0076,
      "step": 30
    },
    {
      "epoch": 0.09330323551542513,
      "grad_norm": 11.421455383300781,
      "learning_rate": 9.701596950580806e-05,
      "loss": 9.5754,
      "step": 31
    },
    {
      "epoch": 0.0963130173062453,
      "grad_norm": 10.831648826599121,
      "learning_rate": 9.672822322997305e-05,
      "loss": 9.439,
      "step": 32
    },
    {
      "epoch": 0.09932279909706546,
      "grad_norm": 12.72357177734375,
      "learning_rate": 9.642770192448536e-05,
      "loss": 10.3717,
      "step": 33
    },
    {
      "epoch": 0.10233258088788563,
      "grad_norm": 11.876553535461426,
      "learning_rate": 9.611448774886924e-05,
      "loss": 9.5917,
      "step": 34
    },
    {
      "epoch": 0.1053423626787058,
      "grad_norm": 11.42211627960205,
      "learning_rate": 9.578866633275288e-05,
      "loss": 10.0323,
      "step": 35
    },
    {
      "epoch": 0.10835214446952596,
      "grad_norm": 11.210999488830566,
      "learning_rate": 9.545032675245813e-05,
      "loss": 9.6294,
      "step": 36
    },
    {
      "epoch": 0.11136192626034612,
      "grad_norm": 10.617776870727539,
      "learning_rate": 9.509956150664796e-05,
      "loss": 10.1042,
      "step": 37
    },
    {
      "epoch": 0.1143717080511663,
      "grad_norm": 12.666061401367188,
      "learning_rate": 9.473646649103818e-05,
      "loss": 10.3523,
      "step": 38
    },
    {
      "epoch": 0.11738148984198646,
      "grad_norm": 11.441605567932129,
      "learning_rate": 9.43611409721806e-05,
      "loss": 9.6864,
      "step": 39
    },
    {
      "epoch": 0.12039127163280662,
      "grad_norm": 14.956219673156738,
      "learning_rate": 9.397368756032445e-05,
      "loss": 10.7549,
      "step": 40
    },
    {
      "epoch": 0.12340105342362678,
      "grad_norm": 13.108258247375488,
      "learning_rate": 9.357421218136386e-05,
      "loss": 10.5067,
      "step": 41
    },
    {
      "epoch": 0.12641083521444696,
      "grad_norm": 12.229085922241211,
      "learning_rate": 9.316282404787871e-05,
      "loss": 10.5228,
      "step": 42
    },
    {
      "epoch": 0.1294206170052671,
      "grad_norm": 13.60671615600586,
      "learning_rate": 9.273963562927695e-05,
      "loss": 11.0104,
      "step": 43
    },
    {
      "epoch": 0.13243039879608728,
      "grad_norm": 13.636900901794434,
      "learning_rate": 9.230476262104677e-05,
      "loss": 10.4467,
      "step": 44
    },
    {
      "epoch": 0.13544018058690746,
      "grad_norm": 11.208077430725098,
      "learning_rate": 9.185832391312644e-05,
      "loss": 9.3656,
      "step": 45
    },
    {
      "epoch": 0.1384499623777276,
      "grad_norm": 13.557292938232422,
      "learning_rate": 9.140044155740101e-05,
      "loss": 10.3251,
      "step": 46
    },
    {
      "epoch": 0.14145974416854779,
      "grad_norm": 13.766647338867188,
      "learning_rate": 9.093124073433463e-05,
      "loss": 9.9424,
      "step": 47
    },
    {
      "epoch": 0.14446952595936793,
      "grad_norm": 14.213854789733887,
      "learning_rate": 9.045084971874738e-05,
      "loss": 11.234,
      "step": 48
    },
    {
      "epoch": 0.1474793077501881,
      "grad_norm": 19.774276733398438,
      "learning_rate": 8.995939984474624e-05,
      "loss": 12.4225,
      "step": 49
    },
    {
      "epoch": 0.1504890895410083,
      "grad_norm": 20.662708282470703,
      "learning_rate": 8.945702546981969e-05,
      "loss": 12.5272,
      "step": 50
    },
    {
      "epoch": 0.1504890895410083,
      "eval_loss": 2.5100247859954834,
      "eval_runtime": 54.9415,
      "eval_samples_per_second": 10.193,
      "eval_steps_per_second": 2.548,
      "step": 50
    },
    {
      "epoch": 0.15349887133182843,
      "grad_norm": 6.058256149291992,
      "learning_rate": 8.894386393810563e-05,
      "loss": 8.2189,
      "step": 51
    },
    {
      "epoch": 0.1565086531226486,
      "grad_norm": 6.569075107574463,
      "learning_rate": 8.842005554284296e-05,
      "loss": 9.1637,
      "step": 52
    },
    {
      "epoch": 0.1595184349134688,
      "grad_norm": 5.583766937255859,
      "learning_rate": 8.788574348801675e-05,
      "loss": 8.7535,
      "step": 53
    },
    {
      "epoch": 0.16252821670428894,
      "grad_norm": 5.88881778717041,
      "learning_rate": 8.73410738492077e-05,
      "loss": 9.8206,
      "step": 54
    },
    {
      "epoch": 0.1655379984951091,
      "grad_norm": 5.4886651039123535,
      "learning_rate": 8.678619553365659e-05,
      "loss": 8.7837,
      "step": 55
    },
    {
      "epoch": 0.16854778028592926,
      "grad_norm": 5.4740705490112305,
      "learning_rate": 8.622126023955446e-05,
      "loss": 9.2256,
      "step": 56
    },
    {
      "epoch": 0.17155756207674944,
      "grad_norm": 5.433065891265869,
      "learning_rate": 8.564642241456986e-05,
      "loss": 9.2593,
      "step": 57
    },
    {
      "epoch": 0.1745673438675696,
      "grad_norm": 5.914703845977783,
      "learning_rate": 8.506183921362443e-05,
      "loss": 9.1884,
      "step": 58
    },
    {
      "epoch": 0.17757712565838976,
      "grad_norm": 6.139044284820557,
      "learning_rate": 8.44676704559283e-05,
      "loss": 9.8026,
      "step": 59
    },
    {
      "epoch": 0.18058690744920994,
      "grad_norm": 6.379807472229004,
      "learning_rate": 8.386407858128706e-05,
      "loss": 9.324,
      "step": 60
    },
    {
      "epoch": 0.1835966892400301,
      "grad_norm": 7.047702312469482,
      "learning_rate": 8.32512286056924e-05,
      "loss": 9.2618,
      "step": 61
    },
    {
      "epoch": 0.18660647103085026,
      "grad_norm": 6.448022365570068,
      "learning_rate": 8.262928807620843e-05,
      "loss": 9.5976,
      "step": 62
    },
    {
      "epoch": 0.18961625282167044,
      "grad_norm": 8.178146362304688,
      "learning_rate": 8.199842702516583e-05,
      "loss": 10.021,
      "step": 63
    },
    {
      "epoch": 0.1926260346124906,
      "grad_norm": 6.976129055023193,
      "learning_rate": 8.135881792367686e-05,
      "loss": 9.7405,
      "step": 64
    },
    {
      "epoch": 0.19563581640331076,
      "grad_norm": 7.0136308670043945,
      "learning_rate": 8.07106356344834e-05,
      "loss": 9.4789,
      "step": 65
    },
    {
      "epoch": 0.1986455981941309,
      "grad_norm": 7.680490970611572,
      "learning_rate": 8.005405736415126e-05,
      "loss": 9.5718,
      "step": 66
    },
    {
      "epoch": 0.2016553799849511,
      "grad_norm": 8.17990493774414,
      "learning_rate": 7.938926261462366e-05,
      "loss": 9.8405,
      "step": 67
    },
    {
      "epoch": 0.20466516177577126,
      "grad_norm": 8.38397216796875,
      "learning_rate": 7.871643313414718e-05,
      "loss": 10.1608,
      "step": 68
    },
    {
      "epoch": 0.2076749435665914,
      "grad_norm": 8.197789192199707,
      "learning_rate": 7.803575286758364e-05,
      "loss": 8.672,
      "step": 69
    },
    {
      "epoch": 0.2106847253574116,
      "grad_norm": 6.918241024017334,
      "learning_rate": 7.734740790612136e-05,
      "loss": 9.5279,
      "step": 70
    },
    {
      "epoch": 0.21369450714823177,
      "grad_norm": 6.7393035888671875,
      "learning_rate": 7.66515864363997e-05,
      "loss": 9.1222,
      "step": 71
    },
    {
      "epoch": 0.21670428893905191,
      "grad_norm": 8.266468048095703,
      "learning_rate": 7.594847868906076e-05,
      "loss": 9.943,
      "step": 72
    },
    {
      "epoch": 0.2197140707298721,
      "grad_norm": 7.681157112121582,
      "learning_rate": 7.52382768867422e-05,
      "loss": 9.2706,
      "step": 73
    },
    {
      "epoch": 0.22272385252069224,
      "grad_norm": 9.782959938049316,
      "learning_rate": 7.452117519152542e-05,
      "loss": 10.0855,
      "step": 74
    },
    {
      "epoch": 0.22573363431151242,
      "grad_norm": 8.54826545715332,
      "learning_rate": 7.379736965185368e-05,
      "loss": 9.7183,
      "step": 75
    },
    {
      "epoch": 0.2287434161023326,
      "grad_norm": 7.834469318389893,
      "learning_rate": 7.30670581489344e-05,
      "loss": 9.5791,
      "step": 76
    },
    {
      "epoch": 0.23175319789315274,
      "grad_norm": 8.019440650939941,
      "learning_rate": 7.233044034264034e-05,
      "loss": 9.8348,
      "step": 77
    },
    {
      "epoch": 0.23476297968397292,
      "grad_norm": 9.625268936157227,
      "learning_rate": 7.158771761692464e-05,
      "loss": 10.2138,
      "step": 78
    },
    {
      "epoch": 0.23777276147479307,
      "grad_norm": 8.725410461425781,
      "learning_rate": 7.083909302476453e-05,
      "loss": 9.9408,
      "step": 79
    },
    {
      "epoch": 0.24078254326561324,
      "grad_norm": 8.686917304992676,
      "learning_rate": 7.008477123264848e-05,
      "loss": 9.4381,
      "step": 80
    },
    {
      "epoch": 0.24379232505643342,
      "grad_norm": 9.45250415802002,
      "learning_rate": 6.932495846462261e-05,
      "loss": 9.5831,
      "step": 81
    },
    {
      "epoch": 0.24680210684725357,
      "grad_norm": 8.683648109436035,
      "learning_rate": 6.855986244591104e-05,
      "loss": 9.2012,
      "step": 82
    },
    {
      "epoch": 0.24981188863807374,
      "grad_norm": 10.793254852294922,
      "learning_rate": 6.778969234612584e-05,
      "loss": 10.2356,
      "step": 83
    },
    {
      "epoch": 0.2528216704288939,
      "grad_norm": 9.123403549194336,
      "learning_rate": 6.701465872208216e-05,
      "loss": 9.7882,
      "step": 84
    },
    {
      "epoch": 0.2558314522197141,
      "grad_norm": 10.348355293273926,
      "learning_rate": 6.623497346023418e-05,
      "loss": 9.885,
      "step": 85
    },
    {
      "epoch": 0.2588412340105342,
      "grad_norm": 10.56598949432373,
      "learning_rate": 6.545084971874738e-05,
      "loss": 10.1376,
      "step": 86
    },
    {
      "epoch": 0.2618510158013544,
      "grad_norm": 9.426460266113281,
      "learning_rate": 6.466250186922325e-05,
      "loss": 9.368,
      "step": 87
    },
    {
      "epoch": 0.26486079759217457,
      "grad_norm": 10.77817153930664,
      "learning_rate": 6.387014543809223e-05,
      "loss": 9.7117,
      "step": 88
    },
    {
      "epoch": 0.26787057938299474,
      "grad_norm": 11.538412094116211,
      "learning_rate": 6.307399704769099e-05,
      "loss": 10.1616,
      "step": 89
    },
    {
      "epoch": 0.2708803611738149,
      "grad_norm": 10.98781681060791,
      "learning_rate": 6.227427435703997e-05,
      "loss": 9.7931,
      "step": 90
    },
    {
      "epoch": 0.27389014296463504,
      "grad_norm": 10.231600761413574,
      "learning_rate": 6.147119600233758e-05,
      "loss": 9.1857,
      "step": 91
    },
    {
      "epoch": 0.2768999247554552,
      "grad_norm": 11.264632225036621,
      "learning_rate": 6.066498153718735e-05,
      "loss": 9.0738,
      "step": 92
    },
    {
      "epoch": 0.2799097065462754,
      "grad_norm": 12.202892303466797,
      "learning_rate": 5.985585137257401e-05,
      "loss": 9.6755,
      "step": 93
    },
    {
      "epoch": 0.28291948833709557,
      "grad_norm": 13.844965934753418,
      "learning_rate": 5.90440267166055e-05,
      "loss": 11.1556,
      "step": 94
    },
    {
      "epoch": 0.28592927012791575,
      "grad_norm": 12.187628746032715,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 9.2201,
      "step": 95
    },
    {
      "epoch": 0.28893905191873587,
      "grad_norm": 13.739523887634277,
      "learning_rate": 5.74131823855921e-05,
      "loss": 10.5167,
      "step": 96
    },
    {
      "epoch": 0.29194883370955604,
      "grad_norm": 12.928597450256348,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 9.7247,
      "step": 97
    },
    {
      "epoch": 0.2949586155003762,
      "grad_norm": 16.119169235229492,
      "learning_rate": 5.577423184847932e-05,
      "loss": 10.4071,
      "step": 98
    },
    {
      "epoch": 0.2979683972911964,
      "grad_norm": 15.795464515686035,
      "learning_rate": 5.495227651252315e-05,
      "loss": 10.2401,
      "step": 99
    },
    {
      "epoch": 0.3009781790820166,
      "grad_norm": 22.210914611816406,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 11.3174,
      "step": 100
    },
    {
      "epoch": 0.3009781790820166,
      "eval_loss": 2.4205310344696045,
      "eval_runtime": 54.9516,
      "eval_samples_per_second": 10.191,
      "eval_steps_per_second": 2.548,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4453079890775245e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}