{
  "best_metric": 0.9684666991233826,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.9060022650056625,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004530011325028313,
      "grad_norm": 2.294863700866699,
      "learning_rate": 1e-05,
      "loss": 1.6809,
      "step": 1
    },
    {
      "epoch": 0.004530011325028313,
      "eval_loss": 3.339136838912964,
      "eval_runtime": 27.6289,
      "eval_samples_per_second": 13.464,
      "eval_steps_per_second": 3.366,
      "step": 1
    },
    {
      "epoch": 0.009060022650056626,
      "grad_norm": 2.830218553543091,
      "learning_rate": 2e-05,
      "loss": 1.956,
      "step": 2
    },
    {
      "epoch": 0.013590033975084938,
      "grad_norm": 2.898862361907959,
      "learning_rate": 3e-05,
      "loss": 2.2194,
      "step": 3
    },
    {
      "epoch": 0.01812004530011325,
      "grad_norm": 7.4336161613464355,
      "learning_rate": 4e-05,
      "loss": 2.1395,
      "step": 4
    },
    {
      "epoch": 0.022650056625141562,
      "grad_norm": 4.23063850402832,
      "learning_rate": 5e-05,
      "loss": 2.1405,
      "step": 5
    },
    {
      "epoch": 0.027180067950169876,
      "grad_norm": 4.290370464324951,
      "learning_rate": 6e-05,
      "loss": 1.8719,
      "step": 6
    },
    {
      "epoch": 0.031710079275198186,
      "grad_norm": 3.541994333267212,
      "learning_rate": 7e-05,
      "loss": 1.6124,
      "step": 7
    },
    {
      "epoch": 0.0362400906002265,
      "grad_norm": 2.3610470294952393,
      "learning_rate": 8e-05,
      "loss": 1.5442,
      "step": 8
    },
    {
      "epoch": 0.04077010192525481,
      "grad_norm": 2.551922082901001,
      "learning_rate": 9e-05,
      "loss": 1.2651,
      "step": 9
    },
    {
      "epoch": 0.045300113250283124,
      "grad_norm": 2.418034315109253,
      "learning_rate": 0.0001,
      "loss": 1.3057,
      "step": 10
    },
    {
      "epoch": 0.04983012457531144,
      "grad_norm": 2.066025733947754,
      "learning_rate": 9.999316524962345e-05,
      "loss": 1.3253,
      "step": 11
    },
    {
      "epoch": 0.05436013590033975,
      "grad_norm": 1.9381566047668457,
      "learning_rate": 9.997266286704631e-05,
      "loss": 1.4766,
      "step": 12
    },
    {
      "epoch": 0.05889014722536806,
      "grad_norm": 2.2231814861297607,
      "learning_rate": 9.993849845741524e-05,
      "loss": 1.3767,
      "step": 13
    },
    {
      "epoch": 0.06342015855039637,
      "grad_norm": 1.644042730331421,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.0681,
      "step": 14
    },
    {
      "epoch": 0.06795016987542468,
      "grad_norm": 1.984635353088379,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.2662,
      "step": 15
    },
    {
      "epoch": 0.072480181200453,
      "grad_norm": 1.8118503093719482,
      "learning_rate": 9.975414512725057e-05,
      "loss": 1.3361,
      "step": 16
    },
    {
      "epoch": 0.07701019252548132,
      "grad_norm": 1.784030795097351,
      "learning_rate": 9.966546331768191e-05,
      "loss": 1.2805,
      "step": 17
    },
    {
      "epoch": 0.08154020385050963,
      "grad_norm": 1.6014776229858398,
      "learning_rate": 9.956320346634876e-05,
      "loss": 1.2645,
      "step": 18
    },
    {
      "epoch": 0.08607021517553794,
      "grad_norm": 1.550774335861206,
      "learning_rate": 9.944739353007344e-05,
      "loss": 1.2934,
      "step": 19
    },
    {
      "epoch": 0.09060022650056625,
      "grad_norm": 1.9732279777526855,
      "learning_rate": 9.931806517013612e-05,
      "loss": 1.3981,
      "step": 20
    },
    {
      "epoch": 0.09513023782559456,
      "grad_norm": 1.6280652284622192,
      "learning_rate": 9.917525374361912e-05,
      "loss": 1.4538,
      "step": 21
    },
    {
      "epoch": 0.09966024915062288,
      "grad_norm": 1.388110637664795,
      "learning_rate": 9.901899829374047e-05,
      "loss": 1.0999,
      "step": 22
    },
    {
      "epoch": 0.10419026047565119,
      "grad_norm": 1.5452589988708496,
      "learning_rate": 9.884934153917997e-05,
      "loss": 1.3083,
      "step": 23
    },
    {
      "epoch": 0.1087202718006795,
      "grad_norm": 1.63185715675354,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.1674,
      "step": 24
    },
    {
      "epoch": 0.11325028312570781,
      "grad_norm": 1.6084994077682495,
      "learning_rate": 9.847001329696653e-05,
      "loss": 1.2719,
      "step": 25
    },
    {
      "epoch": 0.11778029445073612,
      "grad_norm": 1.5374693870544434,
      "learning_rate": 9.826044551386744e-05,
      "loss": 1.1589,
      "step": 26
    },
    {
      "epoch": 0.12231030577576443,
      "grad_norm": 1.6639900207519531,
      "learning_rate": 9.803768380684242e-05,
      "loss": 1.3361,
      "step": 27
    },
    {
      "epoch": 0.12684031710079274,
      "grad_norm": 1.5302115678787231,
      "learning_rate": 9.780178907671789e-05,
      "loss": 1.2753,
      "step": 28
    },
    {
      "epoch": 0.13137032842582105,
      "grad_norm": 1.5136266946792603,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.3348,
      "step": 29
    },
    {
      "epoch": 0.13590033975084936,
      "grad_norm": 1.5961568355560303,
      "learning_rate": 9.729086208503174e-05,
      "loss": 1.2247,
      "step": 30
    },
    {
      "epoch": 0.1404303510758777,
      "grad_norm": 1.5466892719268799,
      "learning_rate": 9.701596950580806e-05,
      "loss": 1.2773,
      "step": 31
    },
    {
      "epoch": 0.144960362400906,
      "grad_norm": 1.490113377571106,
      "learning_rate": 9.672822322997305e-05,
      "loss": 1.2295,
      "step": 32
    },
    {
      "epoch": 0.14949037372593432,
      "grad_norm": 1.5653648376464844,
      "learning_rate": 9.642770192448536e-05,
      "loss": 1.3314,
      "step": 33
    },
    {
      "epoch": 0.15402038505096263,
      "grad_norm": 1.8402352333068848,
      "learning_rate": 9.611448774886924e-05,
      "loss": 1.3435,
      "step": 34
    },
    {
      "epoch": 0.15855039637599094,
      "grad_norm": 1.6522904634475708,
      "learning_rate": 9.578866633275288e-05,
      "loss": 1.2712,
      "step": 35
    },
    {
      "epoch": 0.16308040770101925,
      "grad_norm": 1.8196598291397095,
      "learning_rate": 9.545032675245813e-05,
      "loss": 1.3977,
      "step": 36
    },
    {
      "epoch": 0.16761041902604756,
      "grad_norm": 1.951027274131775,
      "learning_rate": 9.509956150664796e-05,
      "loss": 1.4954,
      "step": 37
    },
    {
      "epoch": 0.17214043035107587,
      "grad_norm": 1.9112436771392822,
      "learning_rate": 9.473646649103818e-05,
      "loss": 1.3942,
      "step": 38
    },
    {
      "epoch": 0.17667044167610418,
      "grad_norm": 2.1712679862976074,
      "learning_rate": 9.43611409721806e-05,
      "loss": 1.4415,
      "step": 39
    },
    {
      "epoch": 0.1812004530011325,
      "grad_norm": 1.8498501777648926,
      "learning_rate": 9.397368756032445e-05,
      "loss": 1.3505,
      "step": 40
    },
    {
      "epoch": 0.1857304643261608,
      "grad_norm": 2.059781074523926,
      "learning_rate": 9.357421218136386e-05,
      "loss": 1.3482,
      "step": 41
    },
    {
      "epoch": 0.19026047565118911,
      "grad_norm": 2.0546348094940186,
      "learning_rate": 9.316282404787871e-05,
      "loss": 1.3824,
      "step": 42
    },
    {
      "epoch": 0.19479048697621745,
      "grad_norm": 2.621016263961792,
      "learning_rate": 9.273963562927695e-05,
      "loss": 1.606,
      "step": 43
    },
    {
      "epoch": 0.19932049830124576,
      "grad_norm": 2.086237668991089,
      "learning_rate": 9.230476262104677e-05,
      "loss": 1.2631,
      "step": 44
    },
    {
      "epoch": 0.20385050962627407,
      "grad_norm": 2.4675724506378174,
      "learning_rate": 9.185832391312644e-05,
      "loss": 1.5235,
      "step": 45
    },
    {
      "epoch": 0.20838052095130238,
      "grad_norm": 2.6800007820129395,
      "learning_rate": 9.140044155740101e-05,
      "loss": 1.517,
      "step": 46
    },
    {
      "epoch": 0.2129105322763307,
      "grad_norm": 2.2700512409210205,
      "learning_rate": 9.093124073433463e-05,
      "loss": 1.3666,
      "step": 47
    },
    {
      "epoch": 0.217440543601359,
      "grad_norm": 2.7804107666015625,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.4142,
      "step": 48
    },
    {
      "epoch": 0.22197055492638731,
      "grad_norm": 3.5445613861083984,
      "learning_rate": 8.995939984474624e-05,
      "loss": 1.6274,
      "step": 49
    },
    {
      "epoch": 0.22650056625141562,
      "grad_norm": 2.5566651821136475,
      "learning_rate": 8.945702546981969e-05,
      "loss": 1.3407,
      "step": 50
    },
    {
      "epoch": 0.22650056625141562,
      "eval_loss": 1.4198116064071655,
      "eval_runtime": 28.0668,
      "eval_samples_per_second": 13.254,
      "eval_steps_per_second": 3.314,
      "step": 50
    },
    {
      "epoch": 0.23103057757644394,
      "grad_norm": 3.488363742828369,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.9693,
      "step": 51
    },
    {
      "epoch": 0.23556058890147225,
      "grad_norm": 2.687886953353882,
      "learning_rate": 8.842005554284296e-05,
      "loss": 1.1449,
      "step": 52
    },
    {
      "epoch": 0.24009060022650056,
      "grad_norm": 2.1666171550750732,
      "learning_rate": 8.788574348801675e-05,
      "loss": 1.3024,
      "step": 53
    },
    {
      "epoch": 0.24462061155152887,
      "grad_norm": 1.742993712425232,
      "learning_rate": 8.73410738492077e-05,
      "loss": 1.1097,
      "step": 54
    },
    {
      "epoch": 0.2491506228765572,
      "grad_norm": 1.1420605182647705,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.9356,
      "step": 55
    },
    {
      "epoch": 0.2536806342015855,
      "grad_norm": 1.286728024482727,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.8034,
      "step": 56
    },
    {
      "epoch": 0.2582106455266138,
      "grad_norm": 1.2109533548355103,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.982,
      "step": 57
    },
    {
      "epoch": 0.2627406568516421,
      "grad_norm": 1.1387308835983276,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.8655,
      "step": 58
    },
    {
      "epoch": 0.26727066817667045,
      "grad_norm": 1.2849539518356323,
      "learning_rate": 8.44676704559283e-05,
      "loss": 1.1622,
      "step": 59
    },
    {
      "epoch": 0.2718006795016987,
      "grad_norm": 1.1417158842086792,
      "learning_rate": 8.386407858128706e-05,
      "loss": 1.125,
      "step": 60
    },
    {
      "epoch": 0.27633069082672707,
      "grad_norm": 1.0794636011123657,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.9444,
      "step": 61
    },
    {
      "epoch": 0.2808607021517554,
      "grad_norm": 1.2429821491241455,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.9038,
      "step": 62
    },
    {
      "epoch": 0.2853907134767837,
      "grad_norm": 1.1989555358886719,
      "learning_rate": 8.199842702516583e-05,
      "loss": 1.0491,
      "step": 63
    },
    {
      "epoch": 0.289920724801812,
      "grad_norm": 1.1882569789886475,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.0463,
      "step": 64
    },
    {
      "epoch": 0.2944507361268403,
      "grad_norm": 1.1703907251358032,
      "learning_rate": 8.07106356344834e-05,
      "loss": 1.0251,
      "step": 65
    },
    {
      "epoch": 0.29898074745186864,
      "grad_norm": 1.2039575576782227,
      "learning_rate": 8.005405736415126e-05,
      "loss": 1.0778,
      "step": 66
    },
    {
      "epoch": 0.3035107587768969,
      "grad_norm": 1.465887427330017,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.2316,
      "step": 67
    },
    {
      "epoch": 0.30804077010192527,
      "grad_norm": 1.305050015449524,
      "learning_rate": 7.871643313414718e-05,
      "loss": 1.0009,
      "step": 68
    },
    {
      "epoch": 0.31257078142695355,
      "grad_norm": 1.4737871885299683,
      "learning_rate": 7.803575286758364e-05,
      "loss": 1.0594,
      "step": 69
    },
    {
      "epoch": 0.3171007927519819,
      "grad_norm": 1.253055214881897,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.9838,
      "step": 70
    },
    {
      "epoch": 0.32163080407701017,
      "grad_norm": 1.260217547416687,
      "learning_rate": 7.66515864363997e-05,
      "loss": 1.0468,
      "step": 71
    },
    {
      "epoch": 0.3261608154020385,
      "grad_norm": 1.305269718170166,
      "learning_rate": 7.594847868906076e-05,
      "loss": 1.1493,
      "step": 72
    },
    {
      "epoch": 0.33069082672706684,
      "grad_norm": 1.295682668685913,
      "learning_rate": 7.52382768867422e-05,
      "loss": 1.1228,
      "step": 73
    },
    {
      "epoch": 0.3352208380520951,
      "grad_norm": 1.2946406602859497,
      "learning_rate": 7.452117519152542e-05,
      "loss": 1.0911,
      "step": 74
    },
    {
      "epoch": 0.33975084937712347,
      "grad_norm": 1.3201980590820312,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.9902,
      "step": 75
    },
    {
      "epoch": 0.34428086070215175,
      "grad_norm": 1.3449866771697998,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.7981,
      "step": 76
    },
    {
      "epoch": 0.3488108720271801,
      "grad_norm": 1.356520652770996,
      "learning_rate": 7.233044034264034e-05,
      "loss": 1.1687,
      "step": 77
    },
    {
      "epoch": 0.35334088335220837,
      "grad_norm": 1.3135828971862793,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.9721,
      "step": 78
    },
    {
      "epoch": 0.3578708946772367,
      "grad_norm": 1.6086395978927612,
      "learning_rate": 7.083909302476453e-05,
      "loss": 1.3614,
      "step": 79
    },
    {
      "epoch": 0.362400906002265,
      "grad_norm": 1.6636041402816772,
      "learning_rate": 7.008477123264848e-05,
      "loss": 1.3424,
      "step": 80
    },
    {
      "epoch": 0.3669309173272933,
      "grad_norm": 1.725728988647461,
      "learning_rate": 6.932495846462261e-05,
      "loss": 1.3588,
      "step": 81
    },
    {
      "epoch": 0.3714609286523216,
      "grad_norm": 1.5616434812545776,
      "learning_rate": 6.855986244591104e-05,
      "loss": 1.0019,
      "step": 82
    },
    {
      "epoch": 0.37599093997734995,
      "grad_norm": 1.633862853050232,
      "learning_rate": 6.778969234612584e-05,
      "loss": 1.2381,
      "step": 83
    },
    {
      "epoch": 0.38052095130237823,
      "grad_norm": 1.6802394390106201,
      "learning_rate": 6.701465872208216e-05,
      "loss": 1.451,
      "step": 84
    },
    {
      "epoch": 0.38505096262740657,
      "grad_norm": 1.623841643333435,
      "learning_rate": 6.623497346023418e-05,
      "loss": 1.1578,
      "step": 85
    },
    {
      "epoch": 0.3895809739524349,
      "grad_norm": 1.532440185546875,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.2683,
      "step": 86
    },
    {
      "epoch": 0.3941109852774632,
      "grad_norm": 1.7631417512893677,
      "learning_rate": 6.466250186922325e-05,
      "loss": 1.2269,
      "step": 87
    },
    {
      "epoch": 0.3986409966024915,
      "grad_norm": 1.90073561668396,
      "learning_rate": 6.387014543809223e-05,
      "loss": 1.2522,
      "step": 88
    },
    {
      "epoch": 0.4031710079275198,
      "grad_norm": 1.7768625020980835,
      "learning_rate": 6.307399704769099e-05,
      "loss": 1.2126,
      "step": 89
    },
    {
      "epoch": 0.40770101925254815,
      "grad_norm": 1.7604447603225708,
      "learning_rate": 6.227427435703997e-05,
      "loss": 1.0491,
      "step": 90
    },
    {
      "epoch": 0.41223103057757643,
      "grad_norm": 1.8100131750106812,
      "learning_rate": 6.147119600233758e-05,
      "loss": 1.1563,
      "step": 91
    },
    {
      "epoch": 0.41676104190260477,
      "grad_norm": 1.7785227298736572,
      "learning_rate": 6.066498153718735e-05,
      "loss": 1.2355,
      "step": 92
    },
    {
      "epoch": 0.42129105322763305,
      "grad_norm": 2.1177375316619873,
      "learning_rate": 5.985585137257401e-05,
      "loss": 1.3305,
      "step": 93
    },
    {
      "epoch": 0.4258210645526614,
      "grad_norm": 2.3690457344055176,
      "learning_rate": 5.90440267166055e-05,
      "loss": 1.5447,
      "step": 94
    },
    {
      "epoch": 0.43035107587768967,
      "grad_norm": 1.9034374952316284,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 1.1644,
      "step": 95
    },
    {
      "epoch": 0.434881087202718,
      "grad_norm": 2.056325912475586,
      "learning_rate": 5.74131823855921e-05,
      "loss": 1.3085,
      "step": 96
    },
    {
      "epoch": 0.43941109852774635,
      "grad_norm": 2.021763324737549,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 1.3537,
      "step": 97
    },
    {
      "epoch": 0.44394110985277463,
      "grad_norm": 2.4095985889434814,
      "learning_rate": 5.577423184847932e-05,
      "loss": 1.4288,
      "step": 98
    },
    {
      "epoch": 0.44847112117780297,
      "grad_norm": 2.138651132583618,
      "learning_rate": 5.495227651252315e-05,
      "loss": 1.2902,
      "step": 99
    },
    {
      "epoch": 0.45300113250283125,
      "grad_norm": 2.890157461166382,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 1.3736,
      "step": 100
    },
    {
      "epoch": 0.45300113250283125,
      "eval_loss": 1.2719783782958984,
      "eval_runtime": 28.1062,
      "eval_samples_per_second": 13.235,
      "eval_steps_per_second": 3.309,
      "step": 100
    },
    {
      "epoch": 0.4575311438278596,
      "grad_norm": 2.714219570159912,
      "learning_rate": 5.330452921628497e-05,
      "loss": 1.0719,
      "step": 101
    },
    {
      "epoch": 0.46206115515288787,
      "grad_norm": 1.833893060684204,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.8059,
      "step": 102
    },
    {
      "epoch": 0.4665911664779162,
      "grad_norm": 1.7362415790557861,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.8914,
      "step": 103
    },
    {
      "epoch": 0.4711211778029445,
      "grad_norm": 1.587952971458435,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 1.0055,
      "step": 104
    },
    {
      "epoch": 0.47565118912797283,
      "grad_norm": 1.382871389389038,
      "learning_rate": 5e-05,
      "loss": 0.853,
      "step": 105
    },
    {
      "epoch": 0.4801812004530011,
      "grad_norm": 1.427045226097107,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.9621,
      "step": 106
    },
    {
      "epoch": 0.48471121177802945,
      "grad_norm": 1.2589244842529297,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.8243,
      "step": 107
    },
    {
      "epoch": 0.48924122310305773,
      "grad_norm": 1.1731473207473755,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.8862,
      "step": 108
    },
    {
      "epoch": 0.49377123442808607,
      "grad_norm": 1.2672301530838013,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.8736,
      "step": 109
    },
    {
      "epoch": 0.4983012457531144,
      "grad_norm": 1.1130244731903076,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.8778,
      "step": 110
    },
    {
      "epoch": 0.5028312570781427,
      "grad_norm": 1.2738019227981567,
      "learning_rate": 4.504772348747687e-05,
      "loss": 1.0309,
      "step": 111
    },
    {
      "epoch": 0.507361268403171,
      "grad_norm": 1.1945719718933105,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.9288,
      "step": 112
    },
    {
      "epoch": 0.5118912797281994,
      "grad_norm": 1.153830885887146,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.997,
      "step": 113
    },
    {
      "epoch": 0.5164212910532276,
      "grad_norm": 1.128495693206787,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.862,
      "step": 114
    },
    {
      "epoch": 0.5209513023782559,
      "grad_norm": 1.2062228918075562,
      "learning_rate": 4.17702704859633e-05,
      "loss": 1.0172,
      "step": 115
    },
    {
      "epoch": 0.5254813137032842,
      "grad_norm": 1.1854007244110107,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.9473,
      "step": 116
    },
    {
      "epoch": 0.5300113250283126,
      "grad_norm": 1.105261206626892,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.8747,
      "step": 117
    },
    {
      "epoch": 0.5345413363533409,
      "grad_norm": 1.1203992366790771,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.7881,
      "step": 118
    },
    {
      "epoch": 0.5390713476783692,
      "grad_norm": 1.1720339059829712,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.9116,
      "step": 119
    },
    {
      "epoch": 0.5436013590033975,
      "grad_norm": 1.220534086227417,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.7269,
      "step": 120
    },
    {
      "epoch": 0.5481313703284258,
      "grad_norm": 1.1943268775939941,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.9006,
      "step": 121
    },
    {
      "epoch": 0.5526613816534541,
      "grad_norm": 1.1528831720352173,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.8354,
      "step": 122
    },
    {
      "epoch": 0.5571913929784824,
      "grad_norm": 1.2273133993148804,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.8162,
      "step": 123
    },
    {
      "epoch": 0.5617214043035108,
      "grad_norm": 1.2099953889846802,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.8402,
      "step": 124
    },
    {
      "epoch": 0.5662514156285391,
      "grad_norm": 1.1575597524642944,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.9024,
      "step": 125
    },
    {
      "epoch": 0.5707814269535674,
      "grad_norm": 1.4714939594268799,
      "learning_rate": 3.298534127791785e-05,
      "loss": 1.0202,
      "step": 126
    },
    {
      "epoch": 0.5753114382785957,
      "grad_norm": 1.4315725564956665,
      "learning_rate": 3.221030765387417e-05,
      "loss": 1.0502,
      "step": 127
    },
    {
      "epoch": 0.579841449603624,
      "grad_norm": 1.5543477535247803,
      "learning_rate": 3.144013755408895e-05,
      "loss": 1.1694,
      "step": 128
    },
    {
      "epoch": 0.5843714609286523,
      "grad_norm": 1.3127039670944214,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.9434,
      "step": 129
    },
    {
      "epoch": 0.5889014722536806,
      "grad_norm": 1.568535327911377,
      "learning_rate": 2.991522876735154e-05,
      "loss": 1.1988,
      "step": 130
    },
    {
      "epoch": 0.5934314835787089,
      "grad_norm": 1.353140950202942,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.8726,
      "step": 131
    },
    {
      "epoch": 0.5979614949037373,
      "grad_norm": 1.4939686059951782,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 1.2155,
      "step": 132
    },
    {
      "epoch": 0.6024915062287656,
      "grad_norm": 1.489733099937439,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.923,
      "step": 133
    },
    {
      "epoch": 0.6070215175537939,
      "grad_norm": 1.6046398878097534,
      "learning_rate": 2.693294185106562e-05,
      "loss": 1.1015,
      "step": 134
    },
    {
      "epoch": 0.6115515288788222,
      "grad_norm": 1.578744888305664,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 1.2103,
      "step": 135
    },
    {
      "epoch": 0.6160815402038505,
      "grad_norm": 1.431128740310669,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.8943,
      "step": 136
    },
    {
      "epoch": 0.6206115515288788,
      "grad_norm": 1.9715696573257446,
      "learning_rate": 2.476172311325783e-05,
      "loss": 1.3989,
      "step": 137
    },
    {
      "epoch": 0.6251415628539071,
      "grad_norm": 1.8589072227478027,
      "learning_rate": 2.405152131093926e-05,
      "loss": 1.5329,
      "step": 138
    },
    {
      "epoch": 0.6296715741789355,
      "grad_norm": 1.8302171230316162,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 1.1766,
      "step": 139
    },
    {
      "epoch": 0.6342015855039638,
      "grad_norm": 2.0135345458984375,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 1.4761,
      "step": 140
    },
    {
      "epoch": 0.638731596828992,
      "grad_norm": 1.8871492147445679,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.9438,
      "step": 141
    },
    {
      "epoch": 0.6432616081540203,
      "grad_norm": 2.027357578277588,
      "learning_rate": 2.128356686585282e-05,
      "loss": 1.1982,
      "step": 142
    },
    {
      "epoch": 0.6477916194790487,
      "grad_norm": 2.037635564804077,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.1497,
      "step": 143
    },
    {
      "epoch": 0.652321630804077,
      "grad_norm": 1.9788384437561035,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 1.3363,
      "step": 144
    },
    {
      "epoch": 0.6568516421291053,
      "grad_norm": 1.8388274908065796,
      "learning_rate": 1.928936436551661e-05,
      "loss": 1.2891,
      "step": 145
    },
    {
      "epoch": 0.6613816534541337,
      "grad_norm": 1.6475542783737183,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 1.0235,
      "step": 146
    },
    {
      "epoch": 0.665911664779162,
      "grad_norm": 2.038667917251587,
      "learning_rate": 1.800157297483417e-05,
      "loss": 1.1535,
      "step": 147
    },
    {
      "epoch": 0.6704416761041903,
      "grad_norm": 2.430046796798706,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 1.3292,
      "step": 148
    },
    {
      "epoch": 0.6749716874292185,
      "grad_norm": 2.0234296321868896,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 1.3348,
      "step": 149
    },
    {
      "epoch": 0.6795016987542469,
      "grad_norm": 2.2200522422790527,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 1.3524,
      "step": 150
    },
    {
      "epoch": 0.6795016987542469,
      "eval_loss": 1.0218840837478638,
      "eval_runtime": 28.1145,
      "eval_samples_per_second": 13.232,
      "eval_steps_per_second": 3.308,
      "step": 150
    },
    {
      "epoch": 0.6840317100792752,
      "grad_norm": 1.3126825094223022,
      "learning_rate": 1.553232954407171e-05,
      "loss": 0.8409,
      "step": 151
    },
    {
      "epoch": 0.6885617214043035,
      "grad_norm": 1.3274028301239014,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.9251,
      "step": 152
    },
    {
      "epoch": 0.6930917327293318,
      "grad_norm": 1.5611610412597656,
      "learning_rate": 1.435357758543015e-05,
      "loss": 0.9293,
      "step": 153
    },
    {
      "epoch": 0.6976217440543602,
      "grad_norm": 1.5657310485839844,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 1.0203,
      "step": 154
    },
    {
      "epoch": 0.7021517553793885,
      "grad_norm": 1.5585463047027588,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 0.9957,
      "step": 155
    },
    {
      "epoch": 0.7066817667044167,
      "grad_norm": 1.3991925716400146,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 1.0315,
      "step": 156
    },
    {
      "epoch": 0.711211778029445,
      "grad_norm": 1.1712048053741455,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.8214,
      "step": 157
    },
    {
      "epoch": 0.7157417893544734,
      "grad_norm": 1.1740484237670898,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.808,
      "step": 158
    },
    {
      "epoch": 0.7202718006795017,
      "grad_norm": 1.0895429849624634,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.7608,
      "step": 159
    },
    {
      "epoch": 0.72480181200453,
      "grad_norm": 1.1632596254348755,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.9649,
      "step": 160
    },
    {
      "epoch": 0.7293318233295584,
      "grad_norm": 1.1991957426071167,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.8542,
      "step": 161
    },
    {
      "epoch": 0.7338618346545867,
      "grad_norm": 1.2483800649642944,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.7725,
      "step": 162
    },
    {
      "epoch": 0.7383918459796149,
      "grad_norm": 1.2302029132843018,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.8337,
      "step": 163
    },
    {
      "epoch": 0.7429218573046432,
      "grad_norm": 1.3348345756530762,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.9638,
      "step": 164
    },
    {
      "epoch": 0.7474518686296716,
      "grad_norm": 1.2184895277023315,
      "learning_rate": 8.141676086873572e-06,
      "loss": 1.1054,
      "step": 165
    },
    {
      "epoch": 0.7519818799546999,
      "grad_norm": 1.2723814249038696,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.9325,
      "step": 166
    },
    {
      "epoch": 0.7565118912797282,
      "grad_norm": 1.2215139865875244,
      "learning_rate": 7.260364370723044e-06,
      "loss": 1.0185,
      "step": 167
    },
    {
      "epoch": 0.7610419026047565,
      "grad_norm": 1.2399837970733643,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.9689,
      "step": 168
    },
    {
      "epoch": 0.7655719139297849,
      "grad_norm": 1.1482295989990234,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.6897,
      "step": 169
    },
    {
      "epoch": 0.7701019252548131,
      "grad_norm": 1.279404640197754,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.8622,
      "step": 170
    },
    {
      "epoch": 0.7746319365798414,
      "grad_norm": 1.1450666189193726,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.7551,
      "step": 171
    },
    {
      "epoch": 0.7791619479048698,
      "grad_norm": 1.2472995519638062,
      "learning_rate": 5.263533508961827e-06,
      "loss": 1.1059,
      "step": 172
    },
    {
      "epoch": 0.7836919592298981,
      "grad_norm": 1.2653703689575195,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.8404,
      "step": 173
    },
    {
      "epoch": 0.7882219705549264,
      "grad_norm": 1.1370460987091064,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.7427,
      "step": 174
    },
    {
      "epoch": 0.7927519818799547,
      "grad_norm": 1.285154104232788,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.9865,
      "step": 175
    },
    {
      "epoch": 0.797281993204983,
      "grad_norm": 1.4484916925430298,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.9975,
      "step": 176
    },
    {
      "epoch": 0.8018120045300113,
      "grad_norm": 1.3047209978103638,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.9076,
      "step": 177
    },
    {
      "epoch": 0.8063420158550396,
      "grad_norm": 1.373551607131958,
      "learning_rate": 3.271776770026963e-06,
      "loss": 1.0021,
      "step": 178
    },
    {
      "epoch": 0.8108720271800679,
      "grad_norm": 1.3371232748031616,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.9812,
      "step": 179
    },
    {
      "epoch": 0.8154020385050963,
      "grad_norm": 1.2296382188796997,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.7736,
      "step": 180
    },
    {
      "epoch": 0.8199320498301246,
      "grad_norm": 1.3647637367248535,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.9978,
      "step": 181
    },
    {
      "epoch": 0.8244620611551529,
      "grad_norm": 1.5607551336288452,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 1.0547,
      "step": 182
    },
    {
      "epoch": 0.8289920724801813,
      "grad_norm": 1.6037743091583252,
      "learning_rate": 1.962316193157593e-06,
      "loss": 1.0871,
      "step": 183
    },
    {
      "epoch": 0.8335220838052095,
      "grad_norm": 1.5129165649414062,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 1.1008,
      "step": 184
    },
    {
      "epoch": 0.8380520951302378,
      "grad_norm": 1.5168718099594116,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.995,
      "step": 185
    },
    {
      "epoch": 0.8425821064552661,
      "grad_norm": 1.5665178298950195,
      "learning_rate": 1.333670137599713e-06,
      "loss": 1.0104,
      "step": 186
    },
    {
      "epoch": 0.8471121177802945,
      "grad_norm": 1.6103092432022095,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 1.0282,
      "step": 187
    },
    {
      "epoch": 0.8516421291053228,
      "grad_norm": 1.7237502336502075,
      "learning_rate": 9.810017062595322e-07,
      "loss": 1.0711,
      "step": 188
    },
    {
      "epoch": 0.8561721404303511,
      "grad_norm": 1.5927950143814087,
      "learning_rate": 8.247462563808817e-07,
      "loss": 1.0247,
      "step": 189
    },
    {
      "epoch": 0.8607021517553793,
      "grad_norm": 1.8922560214996338,
      "learning_rate": 6.819348298638839e-07,
      "loss": 1.1249,
      "step": 190
    },
    {
      "epoch": 0.8652321630804077,
      "grad_norm": 1.80942964553833,
      "learning_rate": 5.526064699265753e-07,
      "loss": 1.184,
      "step": 191
    },
    {
      "epoch": 0.869762174405436,
      "grad_norm": 1.6846965551376343,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.9553,
      "step": 192
    },
    {
      "epoch": 0.8742921857304643,
      "grad_norm": 2.1317317485809326,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 1.3231,
      "step": 193
    },
    {
      "epoch": 0.8788221970554927,
      "grad_norm": 2.310786008834839,
      "learning_rate": 2.458548727494292e-07,
      "loss": 1.1771,
      "step": 194
    },
    {
      "epoch": 0.883352208380521,
      "grad_norm": 2.5074479579925537,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 1.5084,
      "step": 195
    },
    {
      "epoch": 0.8878822197055493,
      "grad_norm": 1.8117400407791138,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 1.1475,
      "step": 196
    },
    {
      "epoch": 0.8924122310305775,
      "grad_norm": 1.9792882204055786,
      "learning_rate": 6.150154258476315e-08,
      "loss": 1.2302,
      "step": 197
    },
    {
      "epoch": 0.8969422423556059,
      "grad_norm": 2.21106219291687,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 1.3104,
      "step": 198
    },
    {
      "epoch": 0.9014722536806342,
      "grad_norm": 2.0842549800872803,
      "learning_rate": 6.834750376549792e-09,
      "loss": 1.1088,
      "step": 199
    },
    {
      "epoch": 0.9060022650056625,
      "grad_norm": 2.232832431793213,
      "learning_rate": 0.0,
      "loss": 1.0498,
      "step": 200
    },
    {
      "epoch": 0.9060022650056625,
      "eval_loss": 0.9684666991233826,
      "eval_runtime": 28.1146,
      "eval_samples_per_second": 13.232,
      "eval_steps_per_second": 3.308,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.017027157491712e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}