{
  "best_metric": 3.0394115447998047,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.333889816360601,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00333889816360601,
      "grad_norm": 37.723533630371094,
      "learning_rate": 1e-05,
      "loss": 8.6363,
      "step": 1
    },
    {
      "epoch": 0.00333889816360601,
      "eval_loss": 3.710127830505371,
      "eval_runtime": 35.9461,
      "eval_samples_per_second": 14.049,
      "eval_steps_per_second": 3.533,
      "step": 1
    },
    {
      "epoch": 0.00667779632721202,
      "grad_norm": 44.79377365112305,
      "learning_rate": 2e-05,
      "loss": 10.4558,
      "step": 2
    },
    {
      "epoch": 0.01001669449081803,
      "grad_norm": 35.595638275146484,
      "learning_rate": 3e-05,
      "loss": 10.9784,
      "step": 3
    },
    {
      "epoch": 0.01335559265442404,
      "grad_norm": 23.04273796081543,
      "learning_rate": 4e-05,
      "loss": 10.3057,
      "step": 4
    },
    {
      "epoch": 0.01669449081803005,
      "grad_norm": 19.645402908325195,
      "learning_rate": 5e-05,
      "loss": 9.6469,
      "step": 5
    },
    {
      "epoch": 0.02003338898163606,
      "grad_norm": 14.549028396606445,
      "learning_rate": 6e-05,
      "loss": 9.3464,
      "step": 6
    },
    {
      "epoch": 0.02337228714524207,
      "grad_norm": 13.930132865905762,
      "learning_rate": 7e-05,
      "loss": 9.6147,
      "step": 7
    },
    {
      "epoch": 0.02671118530884808,
      "grad_norm": 13.57673168182373,
      "learning_rate": 8e-05,
      "loss": 9.0649,
      "step": 8
    },
    {
      "epoch": 0.03005008347245409,
      "grad_norm": 15.489851951599121,
      "learning_rate": 9e-05,
      "loss": 8.9347,
      "step": 9
    },
    {
      "epoch": 0.0333889816360601,
      "grad_norm": 15.896312713623047,
      "learning_rate": 0.0001,
      "loss": 9.8896,
      "step": 10
    },
    {
      "epoch": 0.03672787979966611,
      "grad_norm": 15.528593063354492,
      "learning_rate": 9.999316524962345e-05,
      "loss": 9.1073,
      "step": 11
    },
    {
      "epoch": 0.04006677796327212,
      "grad_norm": 15.032848358154297,
      "learning_rate": 9.997266286704631e-05,
      "loss": 9.2407,
      "step": 12
    },
    {
      "epoch": 0.04340567612687813,
      "grad_norm": 14.364212036132812,
      "learning_rate": 9.993849845741524e-05,
      "loss": 10.1868,
      "step": 13
    },
    {
      "epoch": 0.04674457429048414,
      "grad_norm": 14.188446998596191,
      "learning_rate": 9.989068136093873e-05,
      "loss": 8.5398,
      "step": 14
    },
    {
      "epoch": 0.05008347245409015,
      "grad_norm": 13.074345588684082,
      "learning_rate": 9.98292246503335e-05,
      "loss": 9.3816,
      "step": 15
    },
    {
      "epoch": 0.05342237061769616,
      "grad_norm": 14.517337799072266,
      "learning_rate": 9.975414512725057e-05,
      "loss": 9.052,
      "step": 16
    },
    {
      "epoch": 0.05676126878130217,
      "grad_norm": 15.009509086608887,
      "learning_rate": 9.966546331768191e-05,
      "loss": 10.1783,
      "step": 17
    },
    {
      "epoch": 0.06010016694490818,
      "grad_norm": 13.750170707702637,
      "learning_rate": 9.956320346634876e-05,
      "loss": 9.5757,
      "step": 18
    },
    {
      "epoch": 0.06343906510851419,
      "grad_norm": 14.414755821228027,
      "learning_rate": 9.944739353007344e-05,
      "loss": 9.667,
      "step": 19
    },
    {
      "epoch": 0.0667779632721202,
      "grad_norm": 14.613687515258789,
      "learning_rate": 9.931806517013612e-05,
      "loss": 9.9493,
      "step": 20
    },
    {
      "epoch": 0.07011686143572621,
      "grad_norm": 13.50129222869873,
      "learning_rate": 9.917525374361912e-05,
      "loss": 9.3863,
      "step": 21
    },
    {
      "epoch": 0.07345575959933222,
      "grad_norm": 13.425811767578125,
      "learning_rate": 9.901899829374047e-05,
      "loss": 10.0341,
      "step": 22
    },
    {
      "epoch": 0.07679465776293823,
      "grad_norm": 14.33968448638916,
      "learning_rate": 9.884934153917997e-05,
      "loss": 9.4556,
      "step": 23
    },
    {
      "epoch": 0.08013355592654424,
      "grad_norm": 14.002581596374512,
      "learning_rate": 9.86663298624003e-05,
      "loss": 10.3062,
      "step": 24
    },
    {
      "epoch": 0.08347245409015025,
      "grad_norm": 14.369874000549316,
      "learning_rate": 9.847001329696653e-05,
      "loss": 9.2341,
      "step": 25
    },
    {
      "epoch": 0.08681135225375626,
      "grad_norm": 12.856378555297852,
      "learning_rate": 9.826044551386744e-05,
      "loss": 10.157,
      "step": 26
    },
    {
      "epoch": 0.09015025041736227,
      "grad_norm": 14.03198528289795,
      "learning_rate": 9.803768380684242e-05,
      "loss": 9.5912,
      "step": 27
    },
    {
      "epoch": 0.09348914858096828,
      "grad_norm": 12.59056568145752,
      "learning_rate": 9.780178907671789e-05,
      "loss": 9.499,
      "step": 28
    },
    {
      "epoch": 0.09682804674457429,
      "grad_norm": 13.37918758392334,
      "learning_rate": 9.755282581475769e-05,
      "loss": 9.5738,
      "step": 29
    },
    {
      "epoch": 0.1001669449081803,
      "grad_norm": 15.977365493774414,
      "learning_rate": 9.729086208503174e-05,
      "loss": 10.7106,
      "step": 30
    },
    {
      "epoch": 0.10350584307178631,
      "grad_norm": 16.689184188842773,
      "learning_rate": 9.701596950580806e-05,
      "loss": 10.2359,
      "step": 31
    },
    {
      "epoch": 0.10684474123539232,
      "grad_norm": 13.517721176147461,
      "learning_rate": 9.672822322997305e-05,
      "loss": 9.8871,
      "step": 32
    },
    {
      "epoch": 0.11018363939899833,
      "grad_norm": 17.465274810791016,
      "learning_rate": 9.642770192448536e-05,
      "loss": 9.877,
      "step": 33
    },
    {
      "epoch": 0.11352253756260434,
      "grad_norm": 13.706908226013184,
      "learning_rate": 9.611448774886924e-05,
      "loss": 9.8173,
      "step": 34
    },
    {
      "epoch": 0.11686143572621036,
      "grad_norm": 14.503616333007812,
      "learning_rate": 9.578866633275288e-05,
      "loss": 10.431,
      "step": 35
    },
    {
      "epoch": 0.12020033388981637,
      "grad_norm": 14.698240280151367,
      "learning_rate": 9.545032675245813e-05,
      "loss": 9.8721,
      "step": 36
    },
    {
      "epoch": 0.12353923205342238,
      "grad_norm": 15.244645118713379,
      "learning_rate": 9.509956150664796e-05,
      "loss": 9.9001,
      "step": 37
    },
    {
      "epoch": 0.12687813021702837,
      "grad_norm": 18.27094841003418,
      "learning_rate": 9.473646649103818e-05,
      "loss": 10.8394,
      "step": 38
    },
    {
      "epoch": 0.1302170283806344,
      "grad_norm": 15.800652503967285,
      "learning_rate": 9.43611409721806e-05,
      "loss": 10.0216,
      "step": 39
    },
    {
      "epoch": 0.1335559265442404,
      "grad_norm": 14.698711395263672,
      "learning_rate": 9.397368756032445e-05,
      "loss": 10.146,
      "step": 40
    },
    {
      "epoch": 0.13689482470784642,
      "grad_norm": 16.719526290893555,
      "learning_rate": 9.357421218136386e-05,
      "loss": 10.8504,
      "step": 41
    },
    {
      "epoch": 0.14023372287145242,
      "grad_norm": 14.47635555267334,
      "learning_rate": 9.316282404787871e-05,
      "loss": 10.4568,
      "step": 42
    },
    {
      "epoch": 0.14357262103505844,
      "grad_norm": 18.760009765625,
      "learning_rate": 9.273963562927695e-05,
      "loss": 10.0805,
      "step": 43
    },
    {
      "epoch": 0.14691151919866444,
      "grad_norm": 17.59091567993164,
      "learning_rate": 9.230476262104677e-05,
      "loss": 10.6397,
      "step": 44
    },
    {
      "epoch": 0.15025041736227046,
      "grad_norm": 18.24905014038086,
      "learning_rate": 9.185832391312644e-05,
      "loss": 10.7017,
      "step": 45
    },
    {
      "epoch": 0.15358931552587646,
      "grad_norm": 20.4737491607666,
      "learning_rate": 9.140044155740101e-05,
      "loss": 9.1001,
      "step": 46
    },
    {
      "epoch": 0.15692821368948248,
      "grad_norm": 26.5585880279541,
      "learning_rate": 9.093124073433463e-05,
      "loss": 10.7494,
      "step": 47
    },
    {
      "epoch": 0.16026711185308848,
      "grad_norm": 170.9046630859375,
      "learning_rate": 9.045084971874738e-05,
      "loss": 8.1164,
      "step": 48
    },
    {
      "epoch": 0.1636060100166945,
      "grad_norm": 42.19401931762695,
      "learning_rate": 8.995939984474624e-05,
      "loss": 8.1099,
      "step": 49
    },
    {
      "epoch": 0.1669449081803005,
      "grad_norm": 59.42169189453125,
      "learning_rate": 8.945702546981969e-05,
      "loss": 10.5789,
      "step": 50
    },
    {
      "epoch": 0.1669449081803005,
      "eval_loss": 3.170802116394043,
      "eval_runtime": 36.7398,
      "eval_samples_per_second": 13.745,
      "eval_steps_per_second": 3.457,
      "step": 50
    },
    {
      "epoch": 0.17028380634390652,
      "grad_norm": 57.85205841064453,
      "learning_rate": 8.894386393810563e-05,
      "loss": 10.0838,
      "step": 51
    },
    {
      "epoch": 0.17362270450751252,
      "grad_norm": 24.928632736206055,
      "learning_rate": 8.842005554284296e-05,
      "loss": 10.0305,
      "step": 52
    },
    {
      "epoch": 0.17696160267111852,
      "grad_norm": 12.831768035888672,
      "learning_rate": 8.788574348801675e-05,
      "loss": 8.4044,
      "step": 53
    },
    {
      "epoch": 0.18030050083472454,
      "grad_norm": 11.783881187438965,
      "learning_rate": 8.73410738492077e-05,
      "loss": 8.8682,
      "step": 54
    },
    {
      "epoch": 0.18363939899833054,
      "grad_norm": 10.361265182495117,
      "learning_rate": 8.678619553365659e-05,
      "loss": 8.097,
      "step": 55
    },
    {
      "epoch": 0.18697829716193656,
      "grad_norm": 9.282995223999023,
      "learning_rate": 8.622126023955446e-05,
      "loss": 7.9845,
      "step": 56
    },
    {
      "epoch": 0.19031719532554256,
      "grad_norm": 9.993803977966309,
      "learning_rate": 8.564642241456986e-05,
      "loss": 8.0418,
      "step": 57
    },
    {
      "epoch": 0.19365609348914858,
      "grad_norm": 9.868743896484375,
      "learning_rate": 8.506183921362443e-05,
      "loss": 8.4737,
      "step": 58
    },
    {
      "epoch": 0.19699499165275458,
      "grad_norm": 9.891820907592773,
      "learning_rate": 8.44676704559283e-05,
      "loss": 8.5594,
      "step": 59
    },
    {
      "epoch": 0.2003338898163606,
      "grad_norm": 9.493250846862793,
      "learning_rate": 8.386407858128706e-05,
      "loss": 8.2122,
      "step": 60
    },
    {
      "epoch": 0.2036727879799666,
      "grad_norm": 10.277020454406738,
      "learning_rate": 8.32512286056924e-05,
      "loss": 8.6064,
      "step": 61
    },
    {
      "epoch": 0.20701168614357263,
      "grad_norm": 12.072765350341797,
      "learning_rate": 8.262928807620843e-05,
      "loss": 8.8124,
      "step": 62
    },
    {
      "epoch": 0.21035058430717862,
      "grad_norm": 11.113184928894043,
      "learning_rate": 8.199842702516583e-05,
      "loss": 8.8311,
      "step": 63
    },
    {
      "epoch": 0.21368948247078465,
      "grad_norm": 10.37546157836914,
      "learning_rate": 8.135881792367686e-05,
      "loss": 8.9539,
      "step": 64
    },
    {
      "epoch": 0.21702838063439064,
      "grad_norm": 11.003464698791504,
      "learning_rate": 8.07106356344834e-05,
      "loss": 8.4274,
      "step": 65
    },
    {
      "epoch": 0.22036727879799667,
      "grad_norm": 11.640214920043945,
      "learning_rate": 8.005405736415126e-05,
      "loss": 8.732,
      "step": 66
    },
    {
      "epoch": 0.22370617696160267,
      "grad_norm": 10.23813533782959,
      "learning_rate": 7.938926261462366e-05,
      "loss": 8.4723,
      "step": 67
    },
    {
      "epoch": 0.2270450751252087,
      "grad_norm": 10.505694389343262,
      "learning_rate": 7.871643313414718e-05,
      "loss": 8.5907,
      "step": 68
    },
    {
      "epoch": 0.2303839732888147,
      "grad_norm": 11.966251373291016,
      "learning_rate": 7.803575286758364e-05,
      "loss": 9.4347,
      "step": 69
    },
    {
      "epoch": 0.2337228714524207,
      "grad_norm": 11.130023002624512,
      "learning_rate": 7.734740790612136e-05,
      "loss": 9.242,
      "step": 70
    },
    {
      "epoch": 0.2370617696160267,
      "grad_norm": 11.233088493347168,
      "learning_rate": 7.66515864363997e-05,
      "loss": 9.2261,
      "step": 71
    },
    {
      "epoch": 0.24040066777963273,
      "grad_norm": 10.693445205688477,
      "learning_rate": 7.594847868906076e-05,
      "loss": 8.998,
      "step": 72
    },
    {
      "epoch": 0.24373956594323873,
      "grad_norm": 10.547587394714355,
      "learning_rate": 7.52382768867422e-05,
      "loss": 9.3157,
      "step": 73
    },
    {
      "epoch": 0.24707846410684475,
      "grad_norm": 10.857179641723633,
      "learning_rate": 7.452117519152542e-05,
      "loss": 9.4256,
      "step": 74
    },
    {
      "epoch": 0.25041736227045075,
      "grad_norm": 11.358246803283691,
      "learning_rate": 7.379736965185368e-05,
      "loss": 9.5806,
      "step": 75
    },
    {
      "epoch": 0.25375626043405675,
      "grad_norm": 11.498934745788574,
      "learning_rate": 7.30670581489344e-05,
      "loss": 9.7731,
      "step": 76
    },
    {
      "epoch": 0.2570951585976628,
      "grad_norm": 11.578938484191895,
      "learning_rate": 7.233044034264034e-05,
      "loss": 9.9566,
      "step": 77
    },
    {
      "epoch": 0.2604340567612688,
      "grad_norm": 11.857230186462402,
      "learning_rate": 7.158771761692464e-05,
      "loss": 8.9907,
      "step": 78
    },
    {
      "epoch": 0.2637729549248748,
      "grad_norm": 11.511113166809082,
      "learning_rate": 7.083909302476453e-05,
      "loss": 8.8168,
      "step": 79
    },
    {
      "epoch": 0.2671118530884808,
      "grad_norm": 12.616565704345703,
      "learning_rate": 7.008477123264848e-05,
      "loss": 9.119,
      "step": 80
    },
    {
      "epoch": 0.2704507512520868,
      "grad_norm": 11.596989631652832,
      "learning_rate": 6.932495846462261e-05,
      "loss": 8.8922,
      "step": 81
    },
    {
      "epoch": 0.27378964941569284,
      "grad_norm": 12.636907577514648,
      "learning_rate": 6.855986244591104e-05,
      "loss": 9.1289,
      "step": 82
    },
    {
      "epoch": 0.27712854757929883,
      "grad_norm": 12.50519847869873,
      "learning_rate": 6.778969234612584e-05,
      "loss": 8.7335,
      "step": 83
    },
    {
      "epoch": 0.28046744574290483,
      "grad_norm": 12.061328887939453,
      "learning_rate": 6.701465872208216e-05,
      "loss": 9.1102,
      "step": 84
    },
    {
      "epoch": 0.2838063439065108,
      "grad_norm": 17.33370018005371,
      "learning_rate": 6.623497346023418e-05,
      "loss": 9.3422,
      "step": 85
    },
    {
      "epoch": 0.2871452420701169,
      "grad_norm": 13.716808319091797,
      "learning_rate": 6.545084971874738e-05,
      "loss": 9.0279,
      "step": 86
    },
    {
      "epoch": 0.2904841402337229,
      "grad_norm": 15.019532203674316,
      "learning_rate": 6.466250186922325e-05,
      "loss": 9.8689,
      "step": 87
    },
    {
      "epoch": 0.2938230383973289,
      "grad_norm": 14.468006134033203,
      "learning_rate": 6.387014543809223e-05,
      "loss": 10.0441,
      "step": 88
    },
    {
      "epoch": 0.29716193656093487,
      "grad_norm": 13.162923812866211,
      "learning_rate": 6.307399704769099e-05,
      "loss": 9.4476,
      "step": 89
    },
    {
      "epoch": 0.3005008347245409,
      "grad_norm": 14.644915580749512,
      "learning_rate": 6.227427435703997e-05,
      "loss": 9.3327,
      "step": 90
    },
    {
      "epoch": 0.3038397328881469,
      "grad_norm": 13.767836570739746,
      "learning_rate": 6.147119600233758e-05,
      "loss": 9.4936,
      "step": 91
    },
    {
      "epoch": 0.3071786310517529,
      "grad_norm": 14.879925727844238,
      "learning_rate": 6.066498153718735e-05,
      "loss": 9.5901,
      "step": 92
    },
    {
      "epoch": 0.3105175292153589,
      "grad_norm": 16.64482307434082,
      "learning_rate": 5.985585137257401e-05,
      "loss": 10.1544,
      "step": 93
    },
    {
      "epoch": 0.31385642737896496,
      "grad_norm": 15.542655944824219,
      "learning_rate": 5.90440267166055e-05,
      "loss": 9.7929,
      "step": 94
    },
    {
      "epoch": 0.31719532554257096,
      "grad_norm": 17.215084075927734,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 10.8473,
      "step": 95
    },
    {
      "epoch": 0.32053422370617696,
      "grad_norm": 16.660884857177734,
      "learning_rate": 5.74131823855921e-05,
      "loss": 10.3807,
      "step": 96
    },
    {
      "epoch": 0.32387312186978295,
      "grad_norm": 20.6027774810791,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 10.0095,
      "step": 97
    },
    {
      "epoch": 0.327212020033389,
      "grad_norm": 26.255468368530273,
      "learning_rate": 5.577423184847932e-05,
      "loss": 9.1675,
      "step": 98
    },
    {
      "epoch": 0.330550918196995,
      "grad_norm": 26.772062301635742,
      "learning_rate": 5.495227651252315e-05,
      "loss": 7.9304,
      "step": 99
    },
    {
      "epoch": 0.333889816360601,
      "grad_norm": 45.09166717529297,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 10.911,
      "step": 100
    },
    {
      "epoch": 0.333889816360601,
      "eval_loss": 3.0394115447998047,
      "eval_runtime": 36.7677,
      "eval_samples_per_second": 13.735,
      "eval_steps_per_second": 3.454,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.431616484278272e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}