{
  "best_metric": 2.0041327476501465,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.01856406924397828,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 9.28203462198914e-05, "grad_norm": 0.5333740711212158, "learning_rate": 1.004e-05, "loss": 2.123, "step": 1 },
    { "epoch": 9.28203462198914e-05, "eval_loss": 2.1500566005706787, "eval_runtime": 212.426, "eval_samples_per_second": 21.358, "eval_steps_per_second": 5.343, "step": 1 },
    { "epoch": 0.0001856406924397828, "grad_norm": 0.5310633182525635, "learning_rate": 2.008e-05, "loss": 2.1533, "step": 2 },
    { "epoch": 0.0002784610386596742, "grad_norm": 0.570530116558075, "learning_rate": 3.012e-05, "loss": 1.9656, "step": 3 },
    { "epoch": 0.0003712813848795656, "grad_norm": 0.5866032242774963, "learning_rate": 4.016e-05, "loss": 1.9126, "step": 4 },
    { "epoch": 0.000464101731099457, "grad_norm": 0.5609645247459412, "learning_rate": 5.02e-05, "loss": 1.9268, "step": 5 },
    { "epoch": 0.0005569220773193484, "grad_norm": 0.5599470138549805, "learning_rate": 6.024e-05, "loss": 2.1493, "step": 6 },
    { "epoch": 0.0006497424235392398, "grad_norm": 0.5012790560722351, "learning_rate": 7.028e-05, "loss": 1.9697, "step": 7 },
    { "epoch": 0.0007425627697591312, "grad_norm": 0.5099210739135742, "learning_rate": 8.032e-05, "loss": 2.1438, "step": 8 },
    { "epoch": 0.0008353831159790226, "grad_norm": 0.5398349165916443, "learning_rate": 9.036000000000001e-05, "loss": 1.9605, "step": 9 },
    { "epoch": 0.000928203462198914, "grad_norm": 0.5319710969924927, "learning_rate": 0.0001004, "loss": 2.274, "step": 10 },
    { "epoch": 0.0010210238084188054, "grad_norm": 0.5919522047042847, "learning_rate": 9.987157894736842e-05, "loss": 2.1037, "step": 11 },
    { "epoch": 0.0011138441546386967, "grad_norm": 0.5564264059066772, "learning_rate": 9.934315789473684e-05, "loss": 2.1668, "step": 12 },
    { "epoch": 0.0012066645008585882, "grad_norm": 0.5003729462623596, "learning_rate": 9.881473684210525e-05, "loss": 1.8983, "step": 13 },
    { "epoch": 0.0012994848470784797, "grad_norm": 0.5218623876571655, "learning_rate": 9.828631578947369e-05, "loss": 2.063, "step": 14 },
    { "epoch": 0.001392305193298371, "grad_norm": 0.5020257830619812, "learning_rate": 9.77578947368421e-05, "loss": 2.119, "step": 15 },
    { "epoch": 0.0014851255395182624, "grad_norm": 0.510440468788147, "learning_rate": 9.722947368421052e-05, "loss": 2.0224, "step": 16 },
    { "epoch": 0.0015779458857381537, "grad_norm": 0.5378854274749756, "learning_rate": 9.670105263157895e-05, "loss": 2.0703, "step": 17 },
    { "epoch": 0.0016707662319580452, "grad_norm": 0.5330931544303894, "learning_rate": 9.617263157894737e-05, "loss": 1.9227, "step": 18 },
    { "epoch": 0.0017635865781779367, "grad_norm": 0.4982943534851074, "learning_rate": 9.564421052631579e-05, "loss": 1.7878, "step": 19 },
    { "epoch": 0.001856406924397828, "grad_norm": 0.5345494747161865, "learning_rate": 9.511578947368421e-05, "loss": 2.0192, "step": 20 },
    { "epoch": 0.0019492272706177194, "grad_norm": 0.5428488254547119, "learning_rate": 9.458736842105264e-05, "loss": 1.8952, "step": 21 },
    { "epoch": 0.002042047616837611, "grad_norm": 0.4932510256767273, "learning_rate": 9.405894736842106e-05, "loss": 2.0396, "step": 22 },
    { "epoch": 0.002134867963057502, "grad_norm": 0.5346501469612122, "learning_rate": 9.353052631578947e-05, "loss": 2.06, "step": 23 },
    { "epoch": 0.0022276883092773934, "grad_norm": 0.5654396414756775, "learning_rate": 9.300210526315789e-05, "loss": 2.1686, "step": 24 },
    { "epoch": 0.002320508655497285, "grad_norm": 0.5074037313461304, "learning_rate": 9.247368421052631e-05, "loss": 2.0209, "step": 25 },
    { "epoch": 0.0024133290017171764, "grad_norm": 0.5299178957939148, "learning_rate": 9.194526315789473e-05, "loss": 2.0733, "step": 26 },
    { "epoch": 0.0025061493479370677, "grad_norm": 0.4903567135334015, "learning_rate": 9.141684210526316e-05, "loss": 1.8781, "step": 27 },
    { "epoch": 0.0025989696941569594, "grad_norm": 0.5466001629829407, "learning_rate": 9.088842105263158e-05, "loss": 2.2091, "step": 28 },
    { "epoch": 0.0026917900403768506, "grad_norm": 0.5639941692352295, "learning_rate": 9.036000000000001e-05, "loss": 2.0417, "step": 29 },
    { "epoch": 0.002784610386596742, "grad_norm": 0.547460675239563, "learning_rate": 8.983157894736843e-05, "loss": 2.3328, "step": 30 },
    { "epoch": 0.0028774307328166336, "grad_norm": 0.5575900077819824, "learning_rate": 8.930315789473684e-05, "loss": 1.9555, "step": 31 },
    { "epoch": 0.002970251079036525, "grad_norm": 0.5688737034797668, "learning_rate": 8.877473684210526e-05, "loss": 2.2341, "step": 32 },
    { "epoch": 0.003063071425256416, "grad_norm": 0.5598611235618591, "learning_rate": 8.824631578947368e-05, "loss": 2.0329, "step": 33 },
    { "epoch": 0.0031558917714763074, "grad_norm": 0.5636963248252869, "learning_rate": 8.771789473684211e-05, "loss": 2.1278, "step": 34 },
    { "epoch": 0.003248712117696199, "grad_norm": 0.5724871754646301, "learning_rate": 8.718947368421053e-05, "loss": 2.1822, "step": 35 },
    { "epoch": 0.0033415324639160904, "grad_norm": 0.5909259915351868, "learning_rate": 8.666105263157895e-05, "loss": 2.1601, "step": 36 },
    { "epoch": 0.0034343528101359816, "grad_norm": 0.5581693053245544, "learning_rate": 8.613263157894737e-05, "loss": 2.1731, "step": 37 },
    { "epoch": 0.0035271731563558733, "grad_norm": 0.5789634585380554, "learning_rate": 8.560421052631578e-05, "loss": 2.0267, "step": 38 },
    { "epoch": 0.0036199935025757646, "grad_norm": 0.5522585511207581, "learning_rate": 8.50757894736842e-05, "loss": 2.149, "step": 39 },
    { "epoch": 0.003712813848795656, "grad_norm": 0.5890622735023499, "learning_rate": 8.454736842105263e-05, "loss": 2.3245, "step": 40 },
    { "epoch": 0.0038056341950155476, "grad_norm": 0.6173768043518066, "learning_rate": 8.401894736842106e-05, "loss": 2.4872, "step": 41 },
    { "epoch": 0.003898454541235439, "grad_norm": 0.5873492360115051, "learning_rate": 8.349052631578948e-05, "loss": 1.9492, "step": 42 },
    { "epoch": 0.00399127488745533, "grad_norm": 0.6083503365516663, "learning_rate": 8.29621052631579e-05, "loss": 2.2825, "step": 43 },
    { "epoch": 0.004084095233675222, "grad_norm": 0.5901498794555664, "learning_rate": 8.243368421052632e-05, "loss": 2.0717, "step": 44 },
    { "epoch": 0.004176915579895113, "grad_norm": 0.627431333065033, "learning_rate": 8.190526315789474e-05, "loss": 2.2892, "step": 45 },
    { "epoch": 0.004269735926115004, "grad_norm": 0.5825062394142151, "learning_rate": 8.137684210526315e-05, "loss": 2.1311, "step": 46 },
    { "epoch": 0.004362556272334896, "grad_norm": 0.600988507270813, "learning_rate": 8.084842105263157e-05, "loss": 2.1318, "step": 47 },
    { "epoch": 0.004455376618554787, "grad_norm": 0.660859227180481, "learning_rate": 8.032e-05, "loss": 2.1271, "step": 48 },
    { "epoch": 0.0045481969647746786, "grad_norm": 0.6793040633201599, "learning_rate": 7.979157894736842e-05, "loss": 2.3411, "step": 49 },
    { "epoch": 0.00464101731099457, "grad_norm": 0.6588550806045532, "learning_rate": 7.926315789473684e-05, "loss": 2.3991, "step": 50 },
    { "epoch": 0.00464101731099457, "eval_loss": 2.0389838218688965, "eval_runtime": 212.2309, "eval_samples_per_second": 21.378, "eval_steps_per_second": 5.348, "step": 50 },
    { "epoch": 0.004733837657214461, "grad_norm": 0.4591948688030243, "learning_rate": 7.873473684210526e-05, "loss": 2.2252, "step": 51 },
    { "epoch": 0.004826658003434353, "grad_norm": 0.44009777903556824, "learning_rate": 7.820631578947369e-05, "loss": 2.0146, "step": 52 },
    { "epoch": 0.0049194783496542445, "grad_norm": 0.45978692173957825, "learning_rate": 7.76778947368421e-05, "loss": 1.861, "step": 53 },
    { "epoch": 0.005012298695874135, "grad_norm": 0.4349885582923889, "learning_rate": 7.714947368421052e-05, "loss": 1.7471, "step": 54 },
    { "epoch": 0.005105119042094027, "grad_norm": 0.4415923058986664, "learning_rate": 7.662105263157896e-05, "loss": 1.9122, "step": 55 },
    { "epoch": 0.005197939388313919, "grad_norm": 0.43709808588027954, "learning_rate": 7.609263157894737e-05, "loss": 1.8768, "step": 56 },
    { "epoch": 0.0052907597345338096, "grad_norm": 0.42503705620765686, "learning_rate": 7.556421052631579e-05, "loss": 1.8632, "step": 57 },
    { "epoch": 0.005383580080753701, "grad_norm": 0.4333106279373169, "learning_rate": 7.503578947368421e-05, "loss": 1.998, "step": 58 },
    { "epoch": 0.005476400426973593, "grad_norm": 0.44236087799072266, "learning_rate": 7.450736842105263e-05, "loss": 1.903, "step": 59 },
    { "epoch": 0.005569220773193484, "grad_norm": 0.42541614174842834, "learning_rate": 7.397894736842105e-05, "loss": 1.7882, "step": 60 },
    { "epoch": 0.0056620411194133755, "grad_norm": 0.43183577060699463, "learning_rate": 7.345052631578948e-05, "loss": 2.0504, "step": 61 },
    { "epoch": 0.005754861465633267, "grad_norm": 0.4386312961578369, "learning_rate": 7.29221052631579e-05, "loss": 1.8203, "step": 62 },
    { "epoch": 0.005847681811853158, "grad_norm": 0.46675723791122437, "learning_rate": 7.239368421052631e-05, "loss": 1.8889, "step": 63 },
    { "epoch": 0.00594050215807305, "grad_norm": 0.43209633231163025, "learning_rate": 7.186526315789474e-05, "loss": 1.7206, "step": 64 },
    { "epoch": 0.006033322504292941, "grad_norm": 0.46005451679229736, "learning_rate": 7.133684210526316e-05, "loss": 2.0714, "step": 65 },
    { "epoch": 0.006126142850512832, "grad_norm": 0.43874305486679077, "learning_rate": 7.080842105263158e-05, "loss": 1.9384, "step": 66 },
    { "epoch": 0.006218963196732724, "grad_norm": 0.44712334871292114, "learning_rate": 7.028e-05, "loss": 2.0105, "step": 67 },
    { "epoch": 0.006311783542952615, "grad_norm": 0.47745460271835327, "learning_rate": 6.975157894736843e-05, "loss": 1.9495, "step": 68 },
    { "epoch": 0.0064046038891725065, "grad_norm": 0.49662894010543823, "learning_rate": 6.922315789473685e-05, "loss": 2.2379, "step": 69 },
    { "epoch": 0.006497424235392398, "grad_norm": 0.5316717028617859, "learning_rate": 6.869473684210527e-05, "loss": 2.1759, "step": 70 },
    { "epoch": 0.006590244581612289, "grad_norm": 0.4947868585586548, "learning_rate": 6.816631578947368e-05, "loss": 2.0152, "step": 71 },
    { "epoch": 0.006683064927832181, "grad_norm": 0.47361937165260315, "learning_rate": 6.76378947368421e-05, "loss": 1.7754, "step": 72 },
    { "epoch": 0.006775885274052072, "grad_norm": 0.5122577548027039, "learning_rate": 6.710947368421052e-05, "loss": 2.2441, "step": 73 },
    { "epoch": 0.006868705620271963, "grad_norm": 0.4984202980995178, "learning_rate": 6.658105263157894e-05, "loss": 1.9172, "step": 74 },
    { "epoch": 0.006961525966491855, "grad_norm": 0.4806360900402069, "learning_rate": 6.605263157894737e-05, "loss": 2.0002, "step": 75 },
    { "epoch": 0.007054346312711747, "grad_norm": 0.4529125392436981, "learning_rate": 6.55242105263158e-05, "loss": 1.8867, "step": 76 },
    { "epoch": 0.0071471666589316375, "grad_norm": 0.4892098307609558, "learning_rate": 6.499578947368422e-05, "loss": 2.0669, "step": 77 },
    { "epoch": 0.007239987005151529, "grad_norm": 0.5047986507415771, "learning_rate": 6.446736842105264e-05, "loss": 2.0941, "step": 78 },
    { "epoch": 0.007332807351371421, "grad_norm": 0.4794524312019348, "learning_rate": 6.393894736842105e-05, "loss": 1.8961, "step": 79 },
    { "epoch": 0.007425627697591312, "grad_norm": 0.4937351644039154, "learning_rate": 6.341052631578947e-05, "loss": 1.9912, "step": 80 },
    { "epoch": 0.007518448043811203, "grad_norm": 0.4996980130672455, "learning_rate": 6.288210526315789e-05, "loss": 2.0763, "step": 81 },
    { "epoch": 0.007611268390031095, "grad_norm": 0.5066346526145935, "learning_rate": 6.235368421052632e-05, "loss": 2.0239, "step": 82 },
    { "epoch": 0.007704088736250986, "grad_norm": 0.5465410947799683, "learning_rate": 6.182526315789474e-05, "loss": 1.965, "step": 83 },
    { "epoch": 0.007796909082470878, "grad_norm": 0.49924957752227783, "learning_rate": 6.129684210526316e-05, "loss": 1.7469, "step": 84 },
    { "epoch": 0.007889729428690768, "grad_norm": 0.5318244695663452, "learning_rate": 6.076842105263158e-05, "loss": 1.9024, "step": 85 },
    { "epoch": 0.00798254977491066, "grad_norm": 0.5169056057929993, "learning_rate": 6.024e-05, "loss": 1.9984, "step": 86 },
    { "epoch": 0.008075370121130552, "grad_norm": 0.5733655095100403, "learning_rate": 5.971157894736842e-05, "loss": 2.1394, "step": 87 },
    { "epoch": 0.008168190467350444, "grad_norm": 0.5268761515617371, "learning_rate": 5.9183157894736835e-05, "loss": 1.9004, "step": 88 },
    { "epoch": 0.008261010813570335, "grad_norm": 0.5536757111549377, "learning_rate": 5.8654736842105267e-05, "loss": 2.0655, "step": 89 },
    { "epoch": 0.008353831159790225, "grad_norm": 0.5382729172706604, "learning_rate": 5.8126315789473684e-05, "loss": 1.7562, "step": 90 },
    { "epoch": 0.008446651506010117, "grad_norm": 0.5523245930671692, "learning_rate": 5.759789473684211e-05, "loss": 2.1592, "step": 91 },
    { "epoch": 0.008539471852230009, "grad_norm": 0.6106486320495605, "learning_rate": 5.706947368421053e-05, "loss": 2.39, "step": 92 },
    { "epoch": 0.0086322921984499, "grad_norm": 0.6065232753753662, "learning_rate": 5.6541052631578945e-05, "loss": 2.1217, "step": 93 },
    { "epoch": 0.008725112544669792, "grad_norm": 0.57742840051651, "learning_rate": 5.601263157894736e-05, "loss": 1.9459, "step": 94 },
    { "epoch": 0.008817932890889684, "grad_norm": 0.6233941316604614, "learning_rate": 5.5484210526315794e-05, "loss": 2.1776, "step": 95 },
    { "epoch": 0.008910753237109574, "grad_norm": 0.624778687953949, "learning_rate": 5.495578947368421e-05, "loss": 1.9676, "step": 96 },
    { "epoch": 0.009003573583329465, "grad_norm": 0.5832610130310059, "learning_rate": 5.442736842105264e-05, "loss": 2.0999, "step": 97 },
    { "epoch": 0.009096393929549357, "grad_norm": 0.6290329694747925, "learning_rate": 5.3898947368421055e-05, "loss": 2.1532, "step": 98 },
    { "epoch": 0.009189214275769249, "grad_norm": 0.6240950226783752, "learning_rate": 5.337052631578947e-05, "loss": 2.1017, "step": 99 },
    { "epoch": 0.00928203462198914, "grad_norm": 0.6797816753387451, "learning_rate": 5.284210526315789e-05, "loss": 1.9699, "step": 100 },
    { "epoch": 0.00928203462198914, "eval_loss": 2.0185234546661377, "eval_runtime": 212.0665, "eval_samples_per_second": 21.394, "eval_steps_per_second": 5.352, "step": 100 },
    { "epoch": 0.009374854968209032, "grad_norm": 0.4157350957393646, "learning_rate": 5.231368421052631e-05, "loss": 1.9556, "step": 101 },
    { "epoch": 0.009467675314428922, "grad_norm": 0.433269739151001, "learning_rate": 5.178526315789474e-05, "loss": 1.7989, "step": 102 },
    { "epoch": 0.009560495660648814, "grad_norm": 0.43151503801345825, "learning_rate": 5.1256842105263165e-05, "loss": 1.8229, "step": 103 },
    { "epoch": 0.009653316006868706, "grad_norm": 0.44042569398880005, "learning_rate": 5.072842105263158e-05, "loss": 1.7344, "step": 104 },
    { "epoch": 0.009746136353088597, "grad_norm": 0.4146162271499634, "learning_rate": 5.02e-05, "loss": 1.9032, "step": 105 },
    { "epoch": 0.009838956699308489, "grad_norm": 0.4360559582710266, "learning_rate": 4.967157894736842e-05, "loss": 2.0677, "step": 106 },
    { "epoch": 0.00993177704552838, "grad_norm": 0.4013524651527405, "learning_rate": 4.914315789473684e-05, "loss": 1.9427, "step": 107 },
    { "epoch": 0.01002459739174827, "grad_norm": 0.44252482056617737, "learning_rate": 4.861473684210526e-05, "loss": 1.9804, "step": 108 },
    { "epoch": 0.010117417737968162, "grad_norm": 0.41882866621017456, "learning_rate": 4.8086315789473686e-05, "loss": 1.799, "step": 109 },
    { "epoch": 0.010210238084188054, "grad_norm": 0.43453121185302734, "learning_rate": 4.7557894736842104e-05, "loss": 2.0016, "step": 110 },
    { "epoch": 0.010303058430407946, "grad_norm": 0.49610668420791626, "learning_rate": 4.702947368421053e-05, "loss": 2.0242, "step": 111 },
    { "epoch": 0.010395878776627837, "grad_norm": 0.42180904746055603, "learning_rate": 4.6501052631578946e-05, "loss": 1.8779, "step": 112 },
    { "epoch": 0.010488699122847727, "grad_norm": 0.4709084928035736, "learning_rate": 4.5972631578947364e-05, "loss": 1.8693, "step": 113 },
    { "epoch": 0.010581519469067619, "grad_norm": 0.4451385736465454, "learning_rate": 4.544421052631579e-05, "loss": 1.8203, "step": 114 },
    { "epoch": 0.01067433981528751, "grad_norm": 0.4565402865409851, "learning_rate": 4.4915789473684213e-05, "loss": 1.897, "step": 115 },
    { "epoch": 0.010767160161507403, "grad_norm": 0.4602057635784149, "learning_rate": 4.438736842105263e-05, "loss": 1.9684, "step": 116 },
    { "epoch": 0.010859980507727294, "grad_norm": 0.4286463260650635, "learning_rate": 4.3858947368421056e-05, "loss": 1.8134, "step": 117 },
    { "epoch": 0.010952800853947186, "grad_norm": 0.4532035291194916, "learning_rate": 4.3330526315789474e-05, "loss": 1.9204, "step": 118 },
    { "epoch": 0.011045621200167076, "grad_norm": 0.49677908420562744, "learning_rate": 4.280210526315789e-05, "loss": 2.0517, "step": 119 },
    { "epoch": 0.011138441546386968, "grad_norm": 0.4839816391468048, "learning_rate": 4.2273684210526317e-05, "loss": 2.0162, "step": 120 },
    { "epoch": 0.01123126189260686, "grad_norm": 0.4714973568916321, "learning_rate": 4.174526315789474e-05, "loss": 1.9792, "step": 121 },
    { "epoch": 0.011324082238826751, "grad_norm": 0.48217833042144775, "learning_rate": 4.121684210526316e-05, "loss": 2.1381, "step": 122 },
    { "epoch": 0.011416902585046643, "grad_norm": 0.45825913548469543, "learning_rate": 4.068842105263158e-05, "loss": 1.9006, "step": 123 },
    { "epoch": 0.011509722931266534, "grad_norm": 0.5154534578323364, "learning_rate": 4.016e-05, "loss": 2.0451, "step": 124 },
    { "epoch": 0.011602543277486424, "grad_norm": 0.49753981828689575, "learning_rate": 3.963157894736842e-05, "loss": 1.9921, "step": 125 },
    { "epoch": 0.011695363623706316, "grad_norm": 0.48420360684394836, "learning_rate": 3.9103157894736844e-05, "loss": 1.8454, "step": 126 },
    { "epoch": 0.011788183969926208, "grad_norm": 0.5063680410385132, "learning_rate": 3.857473684210526e-05, "loss": 1.8294, "step": 127 },
    { "epoch": 0.0118810043161461, "grad_norm": 0.4992259740829468, "learning_rate": 3.804631578947369e-05, "loss": 1.9348, "step": 128 },
    { "epoch": 0.011973824662365991, "grad_norm": 0.508219301700592, "learning_rate": 3.7517894736842105e-05, "loss": 2.0251, "step": 129 },
    { "epoch": 0.012066645008585883, "grad_norm": 0.4896531105041504, "learning_rate": 3.698947368421052e-05, "loss": 1.7667, "step": 130 },
    { "epoch": 0.012159465354805773, "grad_norm": 0.5085623264312744, "learning_rate": 3.646105263157895e-05, "loss": 1.8525, "step": 131 },
    { "epoch": 0.012252285701025665, "grad_norm": 0.49702784419059753, "learning_rate": 3.593263157894737e-05, "loss": 1.9041, "step": 132 },
    { "epoch": 0.012345106047245556, "grad_norm": 0.5175393223762512, "learning_rate": 3.540421052631579e-05, "loss": 2.1606, "step": 133 },
    { "epoch": 0.012437926393465448, "grad_norm": 0.5096806287765503, "learning_rate": 3.4875789473684215e-05, "loss": 1.8375, "step": 134 },
    { "epoch": 0.01253074673968534, "grad_norm": 0.5583460927009583, "learning_rate": 3.434736842105263e-05, "loss": 2.2475, "step": 135 },
    { "epoch": 0.01262356708590523, "grad_norm": 0.5094785690307617, "learning_rate": 3.381894736842105e-05, "loss": 2.019, "step": 136 },
    { "epoch": 0.012716387432125121, "grad_norm": 0.554124116897583, "learning_rate": 3.329052631578947e-05, "loss": 2.0397, "step": 137 },
    { "epoch": 0.012809207778345013, "grad_norm": 0.5521702170372009, "learning_rate": 3.27621052631579e-05, "loss": 2.067, "step": 138 },
    { "epoch": 0.012902028124564905, "grad_norm": 0.5431244373321533, "learning_rate": 3.223368421052632e-05, "loss": 1.9676, "step": 139 },
    { "epoch": 0.012994848470784796, "grad_norm": 0.5575247406959534, "learning_rate": 3.1705263157894736e-05, "loss": 1.7978, "step": 140 },
    { "epoch": 0.013087668817004688, "grad_norm": 0.5787823796272278, "learning_rate": 3.117684210526316e-05, "loss": 2.1306, "step": 141 },
    { "epoch": 0.013180489163224578, "grad_norm": 0.5557217001914978, "learning_rate": 3.064842105263158e-05, "loss": 2.0982, "step": 142 },
    { "epoch": 0.01327330950944447, "grad_norm": 0.5702773928642273, "learning_rate": 3.012e-05, "loss": 1.9572, "step": 143 },
    { "epoch": 0.013366129855664361, "grad_norm": 0.5814855098724365, "learning_rate": 2.9591578947368418e-05, "loss": 1.9171, "step": 144 },
    { "epoch": 0.013458950201884253, "grad_norm": 0.5961145162582397, "learning_rate": 2.9063157894736842e-05, "loss": 2.06, "step": 145 },
    { "epoch": 0.013551770548104145, "grad_norm": 0.5634588599205017, "learning_rate": 2.8534736842105264e-05, "loss": 2.0661, "step": 146 },
    { "epoch": 0.013644590894324037, "grad_norm": 0.6884035468101501, "learning_rate": 2.800631578947368e-05, "loss": 1.9846, "step": 147 },
    { "epoch": 0.013737411240543927, "grad_norm": 0.592460572719574, "learning_rate": 2.7477894736842106e-05, "loss": 2.0338, "step": 148 },
    { "epoch": 0.013830231586763818, "grad_norm": 0.598613440990448, "learning_rate": 2.6949473684210527e-05, "loss": 2.1441, "step": 149 },
    { "epoch": 0.01392305193298371, "grad_norm": 0.6344769597053528, "learning_rate": 2.6421052631578945e-05, "loss": 2.3666, "step": 150 },
    { "epoch": 0.01392305193298371, "eval_loss": 2.0085387229919434, "eval_runtime": 211.7337, "eval_samples_per_second": 21.428, "eval_steps_per_second": 5.361, "step": 150 },
    { "epoch": 0.014015872279203602, "grad_norm": 0.4076049029827118, "learning_rate": 2.589263157894737e-05, "loss": 2.2603, "step": 151 },
    { "epoch": 0.014108692625423493, "grad_norm": 0.4122617244720459, "learning_rate": 2.536421052631579e-05, "loss": 2.0841, "step": 152 },
    { "epoch": 0.014201512971643385, "grad_norm": 0.4217035472393036, "learning_rate": 2.483578947368421e-05, "loss": 2.0217, "step": 153 },
    { "epoch": 0.014294333317863275, "grad_norm": 0.4271675646305084, "learning_rate": 2.430736842105263e-05, "loss": 1.7876, "step": 154 },
    { "epoch": 0.014387153664083167, "grad_norm": 0.4109157621860504, "learning_rate": 2.3778947368421052e-05, "loss": 1.8487, "step": 155 },
    { "epoch": 0.014479974010303058, "grad_norm": 0.42467454075813293, "learning_rate": 2.3250526315789473e-05, "loss": 1.8176, "step": 156 },
    { "epoch": 0.01457279435652295, "grad_norm": 0.43306881189346313, "learning_rate": 2.2722105263157894e-05, "loss": 2.0096, "step": 157 },
    { "epoch": 0.014665614702742842, "grad_norm": 0.4301247298717499, "learning_rate": 2.2193684210526316e-05, "loss": 1.7983, "step": 158 },
    { "epoch": 0.014758435048962733, "grad_norm": 0.43399932980537415, "learning_rate": 2.1665263157894737e-05, "loss": 2.138, "step": 159 },
    { "epoch": 0.014851255395182623, "grad_norm": 0.4387844204902649, "learning_rate": 2.1136842105263158e-05, "loss": 1.6287, "step": 160 },
    { "epoch": 0.014944075741402515, "grad_norm": 0.4472537636756897, "learning_rate": 2.060842105263158e-05, "loss": 2.1351, "step": 161 },
    { "epoch": 0.015036896087622407, "grad_norm": 0.45660072565078735, "learning_rate": 2.008e-05, "loss": 2.0469, "step": 162 },
    { "epoch": 0.015129716433842299, "grad_norm": 0.7435435056686401, "learning_rate": 1.9551578947368422e-05, "loss": 1.919, "step": 163 },
    { "epoch": 0.01522253678006219, "grad_norm": 0.4554401934146881, "learning_rate": 1.9023157894736843e-05, "loss": 1.951, "step": 164 },
    { "epoch": 0.01531535712628208, "grad_norm": 0.4483166038990021, "learning_rate": 1.849473684210526e-05, "loss": 2.0071, "step": 165 },
    { "epoch": 0.015408177472501972, "grad_norm": 0.4810929596424103, "learning_rate": 1.7966315789473686e-05, "loss": 2.0367, "step": 166 },
    { "epoch": 0.015500997818721864, "grad_norm": 0.4459546208381653, "learning_rate": 1.7437894736842107e-05, "loss": 1.8893, "step": 167 },
    { "epoch": 0.015593818164941755, "grad_norm": 0.4915485084056854, "learning_rate": 1.6909473684210525e-05, "loss": 1.9784, "step": 168 },
    { "epoch": 0.015686638511161647, "grad_norm": 0.4772583544254303, "learning_rate": 1.638105263157895e-05, "loss": 1.9509, "step": 169 },
    { "epoch": 0.015779458857381537, "grad_norm": 0.4593046307563782, "learning_rate": 1.5852631578947368e-05, "loss": 1.8422, "step": 170 },
    { "epoch": 0.01587227920360143, "grad_norm": 0.4797092080116272, "learning_rate": 1.532421052631579e-05, "loss": 1.8923, "step": 171 },
    { "epoch": 0.01596509954982132, "grad_norm": 0.4819195866584778, "learning_rate": 1.4795789473684209e-05, "loss": 1.9566, "step": 172 },
    { "epoch": 0.016057919896041214, "grad_norm": 0.5063287615776062, "learning_rate": 1.4267368421052632e-05, "loss": 2.1537, "step": 173 },
    { "epoch": 0.016150740242261104, "grad_norm": 0.47720491886138916, "learning_rate": 1.3738947368421053e-05, "loss": 2.0131, "step": 174 },
    { "epoch": 0.016243560588480994, "grad_norm": 0.4763629138469696, "learning_rate": 1.3210526315789473e-05, "loss": 1.9978, "step": 175 },
    { "epoch": 0.016336380934700887, "grad_norm": 0.4971455931663513, "learning_rate": 1.2682105263157896e-05, "loss": 1.9961, "step": 176 },
    { "epoch": 0.016429201280920777, "grad_norm": 0.5000568628311157, "learning_rate": 1.2153684210526315e-05, "loss": 1.9357, "step": 177 },
    { "epoch": 0.01652202162714067, "grad_norm": 0.5184934139251709, "learning_rate": 1.1625263157894737e-05, "loss": 1.9728, "step": 178 },
    { "epoch": 0.01661484197336056, "grad_norm": 0.5139727592468262, "learning_rate": 1.1096842105263158e-05, "loss": 1.9561, "step": 179 },
    { "epoch": 0.01670766231958045, "grad_norm": 0.49820300936698914, "learning_rate": 1.0568421052631579e-05, "loss": 1.9569, "step": 180 },
    { "epoch": 0.016800482665800344, "grad_norm": 0.5008655190467834, "learning_rate": 1.004e-05, "loss": 2.0881, "step": 181 },
    { "epoch": 0.016893303012020234, "grad_norm": 0.5131986737251282, "learning_rate": 9.511578947368422e-06, "loss": 2.066, "step": 182 },
    { "epoch": 0.016986123358240127, "grad_norm": 0.5675308108329773, "learning_rate": 8.983157894736843e-06, "loss": 1.9731, "step": 183 },
    { "epoch": 0.017078943704460017, "grad_norm": 0.5388391017913818, "learning_rate": 8.454736842105263e-06, "loss": 2.0149, "step": 184 },
    { "epoch": 0.01717176405067991, "grad_norm": 0.5240312814712524, "learning_rate": 7.926315789473684e-06, "loss": 1.8937, "step": 185 },
    { "epoch": 0.0172645843968998, "grad_norm": 0.5105009078979492, "learning_rate": 7.397894736842104e-06, "loss": 2.041, "step": 186 },
    { "epoch": 0.01735740474311969, "grad_norm": 0.5370680689811707, "learning_rate": 6.8694736842105265e-06, "loss": 2.0445, "step": 187 },
    { "epoch": 0.017450225089339584, "grad_norm": 0.5596001744270325, "learning_rate": 6.341052631578948e-06, "loss": 2.1666, "step": 188 },
    { "epoch": 0.017543045435559474, "grad_norm": 0.5792972445487976, "learning_rate": 5.812631578947368e-06, "loss": 2.0378, "step": 189 },
    { "epoch": 0.017635865781779368, "grad_norm": 0.5508564114570618, "learning_rate": 5.2842105263157896e-06, "loss": 1.968, "step": 190 },
    { "epoch": 0.017728686127999257, "grad_norm": 0.5679344534873962, "learning_rate": 4.755789473684211e-06, "loss": 2.0277, "step": 191 },
    { "epoch": 0.017821506474219147, "grad_norm": 0.529935359954834, "learning_rate": 4.227368421052631e-06, "loss": 2.0955, "step": 192 },
    { "epoch": 0.01791432682043904, "grad_norm": 0.6319153904914856, "learning_rate": 3.698947368421052e-06, "loss": 2.0734, "step": 193 },
    { "epoch": 0.01800714716665893, "grad_norm": 0.5862286686897278, "learning_rate": 3.170526315789474e-06, "loss": 2.1698, "step": 194 },
    { "epoch": 0.018099967512878824, "grad_norm": 0.5513949990272522, "learning_rate": 2.6421052631578948e-06, "loss": 2.0404, "step": 195 },
    { "epoch": 0.018192787859098714, "grad_norm": 0.5874564051628113, "learning_rate": 2.1136842105263157e-06, "loss": 2.0934, "step": 196 },
    { "epoch": 0.018285608205318604, "grad_norm": 0.6707521677017212, "learning_rate": 1.585263157894737e-06, "loss": 2.2616, "step": 197 },
    { "epoch": 0.018378428551538498, "grad_norm": 0.6378986239433289, "learning_rate": 1.0568421052631578e-06, "loss": 2.0425, "step": 198 },
    { "epoch": 0.018471248897758388, "grad_norm": 0.7162299156188965, "learning_rate": 5.284210526315789e-07, "loss": 2.3415, "step": 199 },
    { "epoch": 0.01856406924397828, "grad_norm": 0.6265064477920532, "learning_rate": 0.0, "loss": 2.1299, "step": 200 },
    { "epoch": 0.01856406924397828, "eval_loss": 2.0041327476501465, "eval_runtime": 212.1368, "eval_samples_per_second": 21.387, "eval_steps_per_second": 5.35, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.472983354834944e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}