{
  "best_metric": 1.3433974981307983,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 1.7359307359307359,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017316017316017316,
      "grad_norm": 0.3772251009941101,
      "learning_rate": 1e-05,
      "loss": 1.0925,
      "step": 1
    },
    {
      "epoch": 0.017316017316017316,
      "eval_loss": 1.5536484718322754,
      "eval_runtime": 4.5189,
      "eval_samples_per_second": 21.465,
      "eval_steps_per_second": 5.532,
      "step": 1
    },
    {
      "epoch": 0.03463203463203463,
      "grad_norm": 0.5300730466842651,
      "learning_rate": 2e-05,
      "loss": 1.3261,
      "step": 2
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 0.552025556564331,
      "learning_rate": 3e-05,
      "loss": 1.3591,
      "step": 3
    },
    {
      "epoch": 0.06926406926406926,
      "grad_norm": 0.6423289775848389,
      "learning_rate": 4e-05,
      "loss": 1.3797,
      "step": 4
    },
    {
      "epoch": 0.08658008658008658,
      "grad_norm": 0.5506105422973633,
      "learning_rate": 5e-05,
      "loss": 1.4092,
      "step": 5
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 0.5402395129203796,
      "learning_rate": 6e-05,
      "loss": 1.3346,
      "step": 6
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 0.5220448970794678,
      "learning_rate": 7e-05,
      "loss": 1.3439,
      "step": 7
    },
    {
      "epoch": 0.13852813852813853,
      "grad_norm": 0.5838501453399658,
      "learning_rate": 8e-05,
      "loss": 1.5076,
      "step": 8
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 0.5467644333839417,
      "learning_rate": 9e-05,
      "loss": 1.3178,
      "step": 9
    },
    {
      "epoch": 0.17316017316017315,
      "grad_norm": 0.6578097939491272,
      "learning_rate": 0.0001,
      "loss": 1.4318,
      "step": 10
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 0.7482398152351379,
      "learning_rate": 9.999071352056675e-05,
      "loss": 1.4785,
      "step": 11
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 0.9860633015632629,
      "learning_rate": 9.9962857531815e-05,
      "loss": 1.5532,
      "step": 12
    },
    {
      "epoch": 0.22510822510822512,
      "grad_norm": 1.0673537254333496,
      "learning_rate": 9.99164423811074e-05,
      "loss": 1.5532,
      "step": 13
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 2.2215752601623535,
      "learning_rate": 9.985148530977767e-05,
      "loss": 1.7363,
      "step": 14
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.5617103576660156,
      "learning_rate": 9.976801044672608e-05,
      "loss": 1.3188,
      "step": 15
    },
    {
      "epoch": 0.27705627705627706,
      "grad_norm": 0.4882018566131592,
      "learning_rate": 9.966604879945659e-05,
      "loss": 1.2916,
      "step": 16
    },
    {
      "epoch": 0.2943722943722944,
      "grad_norm": 0.46946704387664795,
      "learning_rate": 9.954563824255878e-05,
      "loss": 1.2448,
      "step": 17
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 0.46930521726608276,
      "learning_rate": 9.940682350363912e-05,
      "loss": 1.293,
      "step": 18
    },
    {
      "epoch": 0.329004329004329,
      "grad_norm": 0.45845523476600647,
      "learning_rate": 9.924965614670629e-05,
      "loss": 1.2965,
      "step": 19
    },
    {
      "epoch": 0.3463203463203463,
      "grad_norm": 0.4072847366333008,
      "learning_rate": 9.907419455301741e-05,
      "loss": 1.2336,
      "step": 20
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.4443903863430023,
      "learning_rate": 9.888050389939172e-05,
      "loss": 1.3432,
      "step": 21
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 0.4643585681915283,
      "learning_rate": 9.866865613400008e-05,
      "loss": 1.2724,
      "step": 22
    },
    {
      "epoch": 0.39826839826839827,
      "grad_norm": 0.4974319636821747,
      "learning_rate": 9.843872994963911e-05,
      "loss": 1.3752,
      "step": 23
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 0.6184501051902771,
      "learning_rate": 9.819081075450014e-05,
      "loss": 1.6313,
      "step": 24
    },
    {
      "epoch": 0.4329004329004329,
      "grad_norm": 0.72726970911026,
      "learning_rate": 9.792499064044342e-05,
      "loss": 1.4269,
      "step": 25
    },
    {
      "epoch": 0.45021645021645024,
      "grad_norm": 0.7648056149482727,
      "learning_rate": 9.764136834878986e-05,
      "loss": 1.3242,
      "step": 26
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 1.0451995134353638,
      "learning_rate": 9.734004923364257e-05,
      "loss": 1.6421,
      "step": 27
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 1.6422125101089478,
      "learning_rate": 9.702114522275216e-05,
      "loss": 1.5802,
      "step": 28
    },
    {
      "epoch": 0.5021645021645021,
      "grad_norm": 0.30660855770111084,
      "learning_rate": 9.66847747759402e-05,
      "loss": 1.2923,
      "step": 29
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.33507832884788513,
      "learning_rate": 9.63310628410961e-05,
      "loss": 1.3251,
      "step": 30
    },
    {
      "epoch": 0.5367965367965368,
      "grad_norm": 0.3787473142147064,
      "learning_rate": 9.596014080776423e-05,
      "loss": 1.1753,
      "step": 31
    },
    {
      "epoch": 0.5541125541125541,
      "grad_norm": 0.3825725018978119,
      "learning_rate": 9.557214645833792e-05,
      "loss": 1.2146,
      "step": 32
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.3974904417991638,
      "learning_rate": 9.516722391687902e-05,
      "loss": 1.2126,
      "step": 33
    },
    {
      "epoch": 0.5887445887445888,
      "grad_norm": 0.4273703396320343,
      "learning_rate": 9.474552359558166e-05,
      "loss": 1.2017,
      "step": 34
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.44417670369148254,
      "learning_rate": 9.43072021389003e-05,
      "loss": 1.3947,
      "step": 35
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 0.4899435341358185,
      "learning_rate": 9.38524223653626e-05,
      "loss": 1.3964,
      "step": 36
    },
    {
      "epoch": 0.6406926406926406,
      "grad_norm": 0.49582576751708984,
      "learning_rate": 9.338135320708911e-05,
      "loss": 1.201,
      "step": 37
    },
    {
      "epoch": 0.658008658008658,
      "grad_norm": 0.618787944316864,
      "learning_rate": 9.289416964704185e-05,
      "loss": 1.3723,
      "step": 38
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 0.6022177934646606,
      "learning_rate": 9.239105265402525e-05,
      "loss": 1.2341,
      "step": 39
    },
    {
      "epoch": 0.6926406926406926,
      "grad_norm": 0.7559783458709717,
      "learning_rate": 9.187218911546362e-05,
      "loss": 1.5585,
      "step": 40
    },
    {
      "epoch": 0.70995670995671,
      "grad_norm": 0.9588459730148315,
      "learning_rate": 9.133777176798013e-05,
      "loss": 1.535,
      "step": 41
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 1.7360554933547974,
      "learning_rate": 9.078799912580304e-05,
      "loss": 1.668,
      "step": 42
    },
    {
      "epoch": 0.7445887445887446,
      "grad_norm": 0.2811432182788849,
      "learning_rate": 9.022307540702576e-05,
      "loss": 1.1934,
      "step": 43
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 0.31521645188331604,
      "learning_rate": 8.964321045774807e-05,
      "loss": 1.1976,
      "step": 44
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.3455949127674103,
      "learning_rate": 8.904861967412703e-05,
      "loss": 1.316,
      "step": 45
    },
    {
      "epoch": 0.7965367965367965,
      "grad_norm": 0.3525890111923218,
      "learning_rate": 8.843952392236594e-05,
      "loss": 1.2997,
      "step": 46
    },
    {
      "epoch": 0.8138528138528138,
      "grad_norm": 0.37616753578186035,
      "learning_rate": 8.781614945667169e-05,
      "loss": 1.254,
      "step": 47
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 0.4022103250026703,
      "learning_rate": 8.717872783521047e-05,
      "loss": 1.3993,
      "step": 48
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.42644044756889343,
      "learning_rate": 8.65274958340934e-05,
      "loss": 1.245,
      "step": 49
    },
    {
      "epoch": 0.8658008658008658,
      "grad_norm": 0.5010473132133484,
      "learning_rate": 8.586269535942385e-05,
      "loss": 1.2409,
      "step": 50
    },
    {
      "epoch": 0.8658008658008658,
      "eval_loss": 1.3599679470062256,
      "eval_runtime": 4.6022,
      "eval_samples_per_second": 21.077,
      "eval_steps_per_second": 5.432,
      "step": 50
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 0.5181626081466675,
      "learning_rate": 8.518457335743926e-05,
      "loss": 1.4489,
      "step": 51
    },
    {
      "epoch": 0.9004329004329005,
      "grad_norm": 0.5752385854721069,
      "learning_rate": 8.449338172278059e-05,
      "loss": 1.5145,
      "step": 52
    },
    {
      "epoch": 0.9177489177489178,
      "grad_norm": 0.65139240026474,
      "learning_rate": 8.378937720492384e-05,
      "loss": 1.4028,
      "step": 53
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 0.8399358987808228,
      "learning_rate": 8.307282131280804e-05,
      "loss": 1.5549,
      "step": 54
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.9761013388633728,
      "learning_rate": 8.23439802176954e-05,
      "loss": 1.5483,
      "step": 55
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 1.5134563446044922,
      "learning_rate": 8.160312465429952e-05,
      "loss": 1.4882,
      "step": 56
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 0.37874630093574524,
      "learning_rate": 8.085052982021847e-05,
      "loss": 1.2874,
      "step": 57
    },
    {
      "epoch": 1.0086580086580086,
      "grad_norm": 0.8670079708099365,
      "learning_rate": 8.008647527371023e-05,
      "loss": 1.9227,
      "step": 58
    },
    {
      "epoch": 1.025974025974026,
      "grad_norm": 0.28728023171424866,
      "learning_rate": 7.931124482984802e-05,
      "loss": 1.2465,
      "step": 59
    },
    {
      "epoch": 1.0432900432900434,
      "grad_norm": 0.2963547110557556,
      "learning_rate": 7.85251264550948e-05,
      "loss": 1.1765,
      "step": 60
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 0.3141789138317108,
      "learning_rate": 7.772841216033533e-05,
      "loss": 1.1228,
      "step": 61
    },
    {
      "epoch": 1.077922077922078,
      "grad_norm": 0.32667580246925354,
      "learning_rate": 7.692139789240611e-05,
      "loss": 1.1514,
      "step": 62
    },
    {
      "epoch": 1.0952380952380953,
      "grad_norm": 0.3543757200241089,
      "learning_rate": 7.610438342416319e-05,
      "loss": 1.0842,
      "step": 63
    },
    {
      "epoch": 1.1125541125541125,
      "grad_norm": 0.367701917886734,
      "learning_rate": 7.527767224312883e-05,
      "loss": 1.1141,
      "step": 64
    },
    {
      "epoch": 1.12987012987013,
      "grad_norm": 0.4170190989971161,
      "learning_rate": 7.44415714387582e-05,
      "loss": 1.3028,
      "step": 65
    },
    {
      "epoch": 1.1471861471861473,
      "grad_norm": 0.44387149810791016,
      "learning_rate": 7.359639158836828e-05,
      "loss": 1.1789,
      "step": 66
    },
    {
      "epoch": 1.1645021645021645,
      "grad_norm": 0.46756917238235474,
      "learning_rate": 7.274244664177097e-05,
      "loss": 1.2253,
      "step": 67
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.5271068215370178,
      "learning_rate": 7.188005380465364e-05,
      "loss": 1.0931,
      "step": 68
    },
    {
      "epoch": 1.199134199134199,
      "grad_norm": 0.5228838920593262,
      "learning_rate": 7.10095334207501e-05,
      "loss": 0.9346,
      "step": 69
    },
    {
      "epoch": 1.2164502164502164,
      "grad_norm": 0.7063124775886536,
      "learning_rate": 7.013120885284598e-05,
      "loss": 1.1747,
      "step": 70
    },
    {
      "epoch": 1.2337662337662338,
      "grad_norm": 1.0697877407073975,
      "learning_rate": 6.924540636266272e-05,
      "loss": 1.2499,
      "step": 71
    },
    {
      "epoch": 1.251082251082251,
      "grad_norm": 0.7471345067024231,
      "learning_rate": 6.835245498966461e-05,
      "loss": 0.8715,
      "step": 72
    },
    {
      "epoch": 1.2683982683982684,
      "grad_norm": 0.3865669071674347,
      "learning_rate": 6.745268642883404e-05,
      "loss": 1.2091,
      "step": 73
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.3613302409648895,
      "learning_rate": 6.654643490746042e-05,
      "loss": 1.1159,
      "step": 74
    },
    {
      "epoch": 1.303030303030303,
      "grad_norm": 0.38617178797721863,
      "learning_rate": 6.563403706098833e-05,
      "loss": 1.1253,
      "step": 75
    },
    {
      "epoch": 1.3203463203463204,
      "grad_norm": 0.4121956527233124,
      "learning_rate": 6.471583180797121e-05,
      "loss": 1.1525,
      "step": 76
    },
    {
      "epoch": 1.3376623376623376,
      "grad_norm": 0.42778268456459045,
      "learning_rate": 6.379216022417696e-05,
      "loss": 1.1418,
      "step": 77
    },
    {
      "epoch": 1.354978354978355,
      "grad_norm": 0.47594478726387024,
      "learning_rate": 6.286336541589224e-05,
      "loss": 1.2353,
      "step": 78
    },
    {
      "epoch": 1.3722943722943723,
      "grad_norm": 0.46286624670028687,
      "learning_rate": 6.192979239247243e-05,
      "loss": 1.1018,
      "step": 79
    },
    {
      "epoch": 1.3896103896103895,
      "grad_norm": 0.4925539195537567,
      "learning_rate": 6.0991787938184784e-05,
      "loss": 1.0574,
      "step": 80
    },
    {
      "epoch": 1.406926406926407,
      "grad_norm": 0.5687847137451172,
      "learning_rate": 6.004970048339226e-05,
      "loss": 1.2203,
      "step": 81
    },
    {
      "epoch": 1.4242424242424243,
      "grad_norm": 0.6070426106452942,
      "learning_rate": 5.910387997512573e-05,
      "loss": 1.2845,
      "step": 82
    },
    {
      "epoch": 1.4415584415584415,
      "grad_norm": 0.6566668152809143,
      "learning_rate": 5.8154677747093134e-05,
      "loss": 1.1189,
      "step": 83
    },
    {
      "epoch": 1.4588744588744589,
      "grad_norm": 0.8260652422904968,
      "learning_rate": 5.7202446389173223e-05,
      "loss": 1.1718,
      "step": 84
    },
    {
      "epoch": 1.4761904761904763,
      "grad_norm": 1.1566524505615234,
      "learning_rate": 5.624753961644281e-05,
      "loss": 0.8529,
      "step": 85
    },
    {
      "epoch": 1.4935064935064934,
      "grad_norm": 1.3062056303024292,
      "learning_rate": 5.5290312137786146e-05,
      "loss": 0.9415,
      "step": 86
    },
    {
      "epoch": 1.5108225108225108,
      "grad_norm": 0.34991952776908875,
      "learning_rate": 5.433111952413495e-05,
      "loss": 1.213,
      "step": 87
    },
    {
      "epoch": 1.5281385281385282,
      "grad_norm": 0.3678584098815918,
      "learning_rate": 5.33703180763884e-05,
      "loss": 1.1127,
      "step": 88
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.3730531334877014,
      "learning_rate": 5.240826469306187e-05,
      "loss": 1.1213,
      "step": 89
    },
    {
      "epoch": 1.5627705627705628,
      "grad_norm": 0.385711669921875,
      "learning_rate": 5.144531673771363e-05,
      "loss": 1.0606,
      "step": 90
    },
    {
      "epoch": 1.5800865800865802,
      "grad_norm": 0.4201517701148987,
      "learning_rate": 5.048183190619904e-05,
      "loss": 1.1155,
      "step": 91
    },
    {
      "epoch": 1.5974025974025974,
      "grad_norm": 0.43522825837135315,
      "learning_rate": 4.951816809380097e-05,
      "loss": 1.1036,
      "step": 92
    },
    {
      "epoch": 1.6147186147186146,
      "grad_norm": 0.46107161045074463,
      "learning_rate": 4.855468326228638e-05,
      "loss": 1.0929,
      "step": 93
    },
    {
      "epoch": 1.6320346320346322,
      "grad_norm": 0.5166659355163574,
      "learning_rate": 4.759173530693814e-05,
      "loss": 1.1499,
      "step": 94
    },
    {
      "epoch": 1.6493506493506493,
      "grad_norm": 0.5847046375274658,
      "learning_rate": 4.6629681923611603e-05,
      "loss": 1.2884,
      "step": 95
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.6265720725059509,
      "learning_rate": 4.566888047586507e-05,
      "loss": 1.2245,
      "step": 96
    },
    {
      "epoch": 1.6839826839826841,
      "grad_norm": 0.7204932570457458,
      "learning_rate": 4.4709687862213866e-05,
      "loss": 1.0221,
      "step": 97
    },
    {
      "epoch": 1.7012987012987013,
      "grad_norm": 0.8253524899482727,
      "learning_rate": 4.3752460383557195e-05,
      "loss": 0.915,
      "step": 98
    },
    {
      "epoch": 1.7186147186147185,
      "grad_norm": 1.2134804725646973,
      "learning_rate": 4.27975536108268e-05,
      "loss": 1.0184,
      "step": 99
    },
    {
      "epoch": 1.7359307359307359,
      "grad_norm": 0.7511556148529053,
      "learning_rate": 4.1845322252906864e-05,
      "loss": 0.8647,
      "step": 100
    },
    {
      "epoch": 1.7359307359307359,
      "eval_loss": 1.3433974981307983,
      "eval_runtime": 4.5988,
      "eval_samples_per_second": 21.092,
      "eval_steps_per_second": 5.436,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 173,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.933753135058125e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}