{
  "best_metric": 1.1331982612609863,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.03266372693124286,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00016331863465621427,
      "grad_norm": 1.6851118803024292,
      "learning_rate": 1.002e-05,
      "loss": 1.0081,
      "step": 1
    },
    {
      "epoch": 0.00016331863465621427,
      "eval_loss": 1.9000940322875977,
      "eval_runtime": 344.5542,
      "eval_samples_per_second": 7.482,
      "eval_steps_per_second": 1.872,
      "step": 1
    },
    {
      "epoch": 0.00032663726931242854,
      "grad_norm": 2.2044219970703125,
      "learning_rate": 2.004e-05,
      "loss": 1.0249,
      "step": 2
    },
    {
      "epoch": 0.0004899559039686428,
      "grad_norm": 2.0698764324188232,
      "learning_rate": 3.0059999999999997e-05,
      "loss": 1.086,
      "step": 3
    },
    {
      "epoch": 0.0006532745386248571,
      "grad_norm": 2.0256564617156982,
      "learning_rate": 4.008e-05,
      "loss": 1.2085,
      "step": 4
    },
    {
      "epoch": 0.0008165931732810714,
      "grad_norm": 1.3296934366226196,
      "learning_rate": 5.01e-05,
      "loss": 0.9578,
      "step": 5
    },
    {
      "epoch": 0.0009799118079372856,
      "grad_norm": 1.1940107345581055,
      "learning_rate": 6.0119999999999994e-05,
      "loss": 1.0005,
      "step": 6
    },
    {
      "epoch": 0.0011432304425935,
      "grad_norm": 1.0048428773880005,
      "learning_rate": 7.013999999999999e-05,
      "loss": 1.0273,
      "step": 7
    },
    {
      "epoch": 0.0013065490772497142,
      "grad_norm": 0.9456784129142761,
      "learning_rate": 8.016e-05,
      "loss": 1.0385,
      "step": 8
    },
    {
      "epoch": 0.0014698677119059284,
      "grad_norm": 0.99244225025177,
      "learning_rate": 9.018e-05,
      "loss": 0.9732,
      "step": 9
    },
    {
      "epoch": 0.0016331863465621427,
      "grad_norm": 0.9175916314125061,
      "learning_rate": 0.0001002,
      "loss": 1.0126,
      "step": 10
    },
    {
      "epoch": 0.001796504981218357,
      "grad_norm": 0.9574170112609863,
      "learning_rate": 9.967263157894736e-05,
      "loss": 0.8922,
      "step": 11
    },
    {
      "epoch": 0.0019598236158745713,
      "grad_norm": 1.8157280683517456,
      "learning_rate": 9.914526315789473e-05,
      "loss": 0.9965,
      "step": 12
    },
    {
      "epoch": 0.0021231422505307855,
      "grad_norm": 0.8389182090759277,
      "learning_rate": 9.861789473684209e-05,
      "loss": 1.0657,
      "step": 13
    },
    {
      "epoch": 0.002286460885187,
      "grad_norm": 0.920594334602356,
      "learning_rate": 9.809052631578947e-05,
      "loss": 1.0575,
      "step": 14
    },
    {
      "epoch": 0.002449779519843214,
      "grad_norm": 0.9322181940078735,
      "learning_rate": 9.756315789473684e-05,
      "loss": 1.1032,
      "step": 15
    },
    {
      "epoch": 0.0026130981544994283,
      "grad_norm": 0.8384012579917908,
      "learning_rate": 9.70357894736842e-05,
      "loss": 1.0087,
      "step": 16
    },
    {
      "epoch": 0.0027764167891556426,
      "grad_norm": 0.8646233677864075,
      "learning_rate": 9.650842105263158e-05,
      "loss": 1.0182,
      "step": 17
    },
    {
      "epoch": 0.002939735423811857,
      "grad_norm": 1.0053048133850098,
      "learning_rate": 9.598105263157895e-05,
      "loss": 0.9359,
      "step": 18
    },
    {
      "epoch": 0.003103054058468071,
      "grad_norm": 0.8383069634437561,
      "learning_rate": 9.545368421052631e-05,
      "loss": 0.9231,
      "step": 19
    },
    {
      "epoch": 0.0032663726931242854,
      "grad_norm": 1.0205601453781128,
      "learning_rate": 9.492631578947368e-05,
      "loss": 1.0473,
      "step": 20
    },
    {
      "epoch": 0.0034296913277804997,
      "grad_norm": 1.1337229013442993,
      "learning_rate": 9.439894736842106e-05,
      "loss": 1.01,
      "step": 21
    },
    {
      "epoch": 0.003593009962436714,
      "grad_norm": 1.0072332620620728,
      "learning_rate": 9.387157894736842e-05,
      "loss": 1.094,
      "step": 22
    },
    {
      "epoch": 0.0037563285970929282,
      "grad_norm": 1.0758895874023438,
      "learning_rate": 9.334421052631579e-05,
      "loss": 1.2812,
      "step": 23
    },
    {
      "epoch": 0.0039196472317491425,
      "grad_norm": 1.0436909198760986,
      "learning_rate": 9.281684210526315e-05,
      "loss": 0.9926,
      "step": 24
    },
    {
      "epoch": 0.004082965866405357,
      "grad_norm": 1.1592437028884888,
      "learning_rate": 9.228947368421052e-05,
      "loss": 1.2151,
      "step": 25
    },
    {
      "epoch": 0.004246284501061571,
      "grad_norm": 1.1979281902313232,
      "learning_rate": 9.176210526315788e-05,
      "loss": 1.1807,
      "step": 26
    },
    {
      "epoch": 0.004409603135717786,
      "grad_norm": 1.2203060388565063,
      "learning_rate": 9.123473684210526e-05,
      "loss": 0.8416,
      "step": 27
    },
    {
      "epoch": 0.004572921770374,
      "grad_norm": 1.426841139793396,
      "learning_rate": 9.070736842105263e-05,
      "loss": 1.0486,
      "step": 28
    },
    {
      "epoch": 0.004736240405030214,
      "grad_norm": 3.254582643508911,
      "learning_rate": 9.018e-05,
      "loss": 1.1406,
      "step": 29
    },
    {
      "epoch": 0.004899559039686428,
      "grad_norm": 1.349916696548462,
      "learning_rate": 8.965263157894736e-05,
      "loss": 0.9975,
      "step": 30
    },
    {
      "epoch": 0.005062877674342643,
      "grad_norm": 1.5444021224975586,
      "learning_rate": 8.912526315789474e-05,
      "loss": 1.2578,
      "step": 31
    },
    {
      "epoch": 0.005226196308998857,
      "grad_norm": 1.5829365253448486,
      "learning_rate": 8.85978947368421e-05,
      "loss": 1.016,
      "step": 32
    },
    {
      "epoch": 0.005389514943655071,
      "grad_norm": 1.857584834098816,
      "learning_rate": 8.807052631578947e-05,
      "loss": 0.9408,
      "step": 33
    },
    {
      "epoch": 0.005552833578311285,
      "grad_norm": 1.739058256149292,
      "learning_rate": 8.754315789473685e-05,
      "loss": 1.055,
      "step": 34
    },
    {
      "epoch": 0.0057161522129675,
      "grad_norm": 2.821774482727051,
      "learning_rate": 8.701578947368421e-05,
      "loss": 1.3456,
      "step": 35
    },
    {
      "epoch": 0.005879470847623714,
      "grad_norm": 2.193387508392334,
      "learning_rate": 8.648842105263158e-05,
      "loss": 1.2526,
      "step": 36
    },
    {
      "epoch": 0.0060427894822799285,
      "grad_norm": 2.5872952938079834,
      "learning_rate": 8.596105263157894e-05,
      "loss": 1.4246,
      "step": 37
    },
    {
      "epoch": 0.006206108116936142,
      "grad_norm": 2.5345852375030518,
      "learning_rate": 8.543368421052631e-05,
      "loss": 1.2144,
      "step": 38
    },
    {
      "epoch": 0.006369426751592357,
      "grad_norm": 3.2303738594055176,
      "learning_rate": 8.490631578947368e-05,
      "loss": 1.0199,
      "step": 39
    },
    {
      "epoch": 0.006532745386248571,
      "grad_norm": 4.121665000915527,
      "learning_rate": 8.437894736842104e-05,
      "loss": 1.6419,
      "step": 40
    },
    {
      "epoch": 0.0066960640209047856,
      "grad_norm": 4.449281692504883,
      "learning_rate": 8.385157894736842e-05,
      "loss": 1.7489,
      "step": 41
    },
    {
      "epoch": 0.006859382655560999,
      "grad_norm": 3.9495317935943604,
      "learning_rate": 8.332421052631579e-05,
      "loss": 1.5623,
      "step": 42
    },
    {
      "epoch": 0.007022701290217214,
      "grad_norm": 2.803144931793213,
      "learning_rate": 8.279684210526315e-05,
      "loss": 1.1089,
      "step": 43
    },
    {
      "epoch": 0.007186019924873428,
      "grad_norm": 4.1833038330078125,
      "learning_rate": 8.226947368421053e-05,
      "loss": 1.6404,
      "step": 44
    },
    {
      "epoch": 0.007349338559529643,
      "grad_norm": 4.866119861602783,
      "learning_rate": 8.17421052631579e-05,
      "loss": 1.314,
      "step": 45
    },
    {
      "epoch": 0.0075126571941858565,
      "grad_norm": 4.199403285980225,
      "learning_rate": 8.121473684210526e-05,
      "loss": 1.661,
      "step": 46
    },
    {
      "epoch": 0.007675975828842071,
      "grad_norm": 3.925476551055908,
      "learning_rate": 8.068736842105263e-05,
      "loss": 1.4702,
      "step": 47
    },
    {
      "epoch": 0.007839294463498285,
      "grad_norm": 4.335568428039551,
      "learning_rate": 8.016e-05,
      "loss": 2.0748,
      "step": 48
    },
    {
      "epoch": 0.008002613098154499,
      "grad_norm": 6.924542427062988,
      "learning_rate": 7.963263157894737e-05,
      "loss": 2.0374,
      "step": 49
    },
    {
      "epoch": 0.008165931732810714,
      "grad_norm": 13.009639739990234,
      "learning_rate": 7.910526315789474e-05,
      "loss": 3.4481,
      "step": 50
    },
    {
      "epoch": 0.008165931732810714,
      "eval_loss": 1.2892735004425049,
      "eval_runtime": 344.1613,
      "eval_samples_per_second": 7.491,
      "eval_steps_per_second": 1.874,
      "step": 50
    },
    {
      "epoch": 0.008329250367466928,
      "grad_norm": 1.8733032941818237,
      "learning_rate": 7.85778947368421e-05,
      "loss": 0.91,
      "step": 51
    },
    {
      "epoch": 0.008492569002123142,
      "grad_norm": 1.351426124572754,
      "learning_rate": 7.805052631578947e-05,
      "loss": 1.0789,
      "step": 52
    },
    {
      "epoch": 0.008655887636779356,
      "grad_norm": 0.9720818400382996,
      "learning_rate": 7.752315789473683e-05,
      "loss": 0.9113,
      "step": 53
    },
    {
      "epoch": 0.008819206271435572,
      "grad_norm": 0.7470458745956421,
      "learning_rate": 7.69957894736842e-05,
      "loss": 0.917,
      "step": 54
    },
    {
      "epoch": 0.008982524906091785,
      "grad_norm": 0.6289156079292297,
      "learning_rate": 7.646842105263158e-05,
      "loss": 0.8513,
      "step": 55
    },
    {
      "epoch": 0.009145843540748,
      "grad_norm": 0.6980412006378174,
      "learning_rate": 7.594105263157894e-05,
      "loss": 1.036,
      "step": 56
    },
    {
      "epoch": 0.009309162175404213,
      "grad_norm": 0.6612934470176697,
      "learning_rate": 7.541368421052631e-05,
      "loss": 0.9251,
      "step": 57
    },
    {
      "epoch": 0.009472480810060429,
      "grad_norm": 0.6878615021705627,
      "learning_rate": 7.488631578947369e-05,
      "loss": 0.8502,
      "step": 58
    },
    {
      "epoch": 0.009635799444716642,
      "grad_norm": 0.7292014360427856,
      "learning_rate": 7.435894736842105e-05,
      "loss": 0.9259,
      "step": 59
    },
    {
      "epoch": 0.009799118079372856,
      "grad_norm": 0.7414498329162598,
      "learning_rate": 7.383157894736842e-05,
      "loss": 0.9209,
      "step": 60
    },
    {
      "epoch": 0.00996243671402907,
      "grad_norm": 0.7968541383743286,
      "learning_rate": 7.33042105263158e-05,
      "loss": 1.006,
      "step": 61
    },
    {
      "epoch": 0.010125755348685286,
      "grad_norm": 0.7785654067993164,
      "learning_rate": 7.277684210526316e-05,
      "loss": 0.998,
      "step": 62
    },
    {
      "epoch": 0.0102890739833415,
      "grad_norm": 0.7336849570274353,
      "learning_rate": 7.224947368421053e-05,
      "loss": 0.9043,
      "step": 63
    },
    {
      "epoch": 0.010452392617997713,
      "grad_norm": 0.6646738648414612,
      "learning_rate": 7.172210526315789e-05,
      "loss": 0.8958,
      "step": 64
    },
    {
      "epoch": 0.010615711252653927,
      "grad_norm": 0.8088564872741699,
      "learning_rate": 7.119473684210526e-05,
      "loss": 1.0874,
      "step": 65
    },
    {
      "epoch": 0.010779029887310143,
      "grad_norm": 0.7672606706619263,
      "learning_rate": 7.066736842105262e-05,
      "loss": 0.9482,
      "step": 66
    },
    {
      "epoch": 0.010942348521966357,
      "grad_norm": 0.9228386878967285,
      "learning_rate": 7.013999999999999e-05,
      "loss": 1.1435,
      "step": 67
    },
    {
      "epoch": 0.01110566715662257,
      "grad_norm": 0.8074588179588318,
      "learning_rate": 6.961263157894737e-05,
      "loss": 1.1595,
      "step": 68
    },
    {
      "epoch": 0.011268985791278784,
      "grad_norm": 0.7998431324958801,
      "learning_rate": 6.908526315789473e-05,
      "loss": 0.9535,
      "step": 69
    },
    {
      "epoch": 0.011432304425935,
      "grad_norm": 0.8504302501678467,
      "learning_rate": 6.85578947368421e-05,
      "loss": 1.0123,
      "step": 70
    },
    {
      "epoch": 0.011595623060591214,
      "grad_norm": 1.089838981628418,
      "learning_rate": 6.803052631578946e-05,
      "loss": 1.0045,
      "step": 71
    },
    {
      "epoch": 0.011758941695247428,
      "grad_norm": 0.89869225025177,
      "learning_rate": 6.750315789473684e-05,
      "loss": 0.7452,
      "step": 72
    },
    {
      "epoch": 0.011922260329903641,
      "grad_norm": 0.943939745426178,
      "learning_rate": 6.697578947368421e-05,
      "loss": 0.9891,
      "step": 73
    },
    {
      "epoch": 0.012085578964559857,
      "grad_norm": 0.9160717725753784,
      "learning_rate": 6.644842105263157e-05,
      "loss": 0.9342,
      "step": 74
    },
    {
      "epoch": 0.01224889759921607,
      "grad_norm": 1.082808256149292,
      "learning_rate": 6.592105263157895e-05,
      "loss": 0.8208,
      "step": 75
    },
    {
      "epoch": 0.012412216233872285,
      "grad_norm": 1.2106088399887085,
      "learning_rate": 6.539368421052632e-05,
      "loss": 1.2033,
      "step": 76
    },
    {
      "epoch": 0.012575534868528498,
      "grad_norm": 1.1063079833984375,
      "learning_rate": 6.486631578947368e-05,
      "loss": 0.9361,
      "step": 77
    },
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 1.1758522987365723,
      "learning_rate": 6.433894736842105e-05,
      "loss": 0.8596,
      "step": 78
    },
    {
      "epoch": 0.012902172137840928,
      "grad_norm": 1.644689679145813,
      "learning_rate": 6.381157894736842e-05,
      "loss": 0.9575,
      "step": 79
    },
    {
      "epoch": 0.013065490772497142,
      "grad_norm": 1.4819023609161377,
      "learning_rate": 6.328421052631578e-05,
      "loss": 1.0894,
      "step": 80
    },
    {
      "epoch": 0.013228809407153356,
      "grad_norm": 1.3677387237548828,
      "learning_rate": 6.275684210526315e-05,
      "loss": 1.1177,
      "step": 81
    },
    {
      "epoch": 0.013392128041809571,
      "grad_norm": 2.352775812149048,
      "learning_rate": 6.222947368421053e-05,
      "loss": 1.5418,
      "step": 82
    },
    {
      "epoch": 0.013555446676465785,
      "grad_norm": 3.2826321125030518,
      "learning_rate": 6.170210526315789e-05,
      "loss": 1.056,
      "step": 83
    },
    {
      "epoch": 0.013718765311121999,
      "grad_norm": 1.8273366689682007,
      "learning_rate": 6.117473684210526e-05,
      "loss": 1.4655,
      "step": 84
    },
    {
      "epoch": 0.013882083945778213,
      "grad_norm": 3.2411186695098877,
      "learning_rate": 6.064736842105263e-05,
      "loss": 1.4192,
      "step": 85
    },
    {
      "epoch": 0.014045402580434428,
      "grad_norm": 1.9905811548233032,
      "learning_rate": 6.0119999999999994e-05,
      "loss": 1.3103,
      "step": 86
    },
    {
      "epoch": 0.014208721215090642,
      "grad_norm": 1.930762529373169,
      "learning_rate": 5.959263157894736e-05,
      "loss": 0.9912,
      "step": 87
    },
    {
      "epoch": 0.014372039849746856,
      "grad_norm": 2.365586280822754,
      "learning_rate": 5.906526315789473e-05,
      "loss": 1.1073,
      "step": 88
    },
    {
      "epoch": 0.01453535848440307,
      "grad_norm": 2.24143648147583,
      "learning_rate": 5.853789473684211e-05,
      "loss": 1.1891,
      "step": 89
    },
    {
      "epoch": 0.014698677119059285,
      "grad_norm": 2.7449162006378174,
      "learning_rate": 5.8010526315789476e-05,
      "loss": 1.3695,
      "step": 90
    },
    {
      "epoch": 0.014861995753715499,
      "grad_norm": 3.0811843872070312,
      "learning_rate": 5.748315789473684e-05,
      "loss": 0.7728,
      "step": 91
    },
    {
      "epoch": 0.015025314388371713,
      "grad_norm": 2.499147891998291,
      "learning_rate": 5.6955789473684207e-05,
      "loss": 1.2178,
      "step": 92
    },
    {
      "epoch": 0.015188633023027927,
      "grad_norm": 3.266294240951538,
      "learning_rate": 5.642842105263157e-05,
      "loss": 1.3692,
      "step": 93
    },
    {
      "epoch": 0.015351951657684142,
      "grad_norm": 3.7990870475769043,
      "learning_rate": 5.5901052631578944e-05,
      "loss": 1.5496,
      "step": 94
    },
    {
      "epoch": 0.015515270292340356,
      "grad_norm": 2.9418234825134277,
      "learning_rate": 5.5373684210526316e-05,
      "loss": 1.4258,
      "step": 95
    },
    {
      "epoch": 0.01567858892699657,
      "grad_norm": 3.900726795196533,
      "learning_rate": 5.484631578947369e-05,
      "loss": 1.4664,
      "step": 96
    },
    {
      "epoch": 0.015841907561652786,
      "grad_norm": 3.98392391204834,
      "learning_rate": 5.4318947368421054e-05,
      "loss": 2.07,
      "step": 97
    },
    {
      "epoch": 0.016005226196308998,
      "grad_norm": 4.655868053436279,
      "learning_rate": 5.379157894736842e-05,
      "loss": 1.5402,
      "step": 98
    },
    {
      "epoch": 0.016168544830965213,
      "grad_norm": 5.109676837921143,
      "learning_rate": 5.3264210526315785e-05,
      "loss": 1.723,
      "step": 99
    },
    {
      "epoch": 0.01633186346562143,
      "grad_norm": 7.739476680755615,
      "learning_rate": 5.273684210526315e-05,
      "loss": 2.4745,
      "step": 100
    },
    {
      "epoch": 0.01633186346562143,
      "eval_loss": 1.202304482460022,
      "eval_runtime": 344.5693,
      "eval_samples_per_second": 7.482,
      "eval_steps_per_second": 1.872,
      "step": 100
    },
    {
      "epoch": 0.01649518210027764,
      "grad_norm": 0.7410888671875,
      "learning_rate": 5.220947368421052e-05,
      "loss": 0.7999,
      "step": 101
    },
    {
      "epoch": 0.016658500734933857,
      "grad_norm": 0.7798357605934143,
      "learning_rate": 5.1682105263157895e-05,
      "loss": 0.932,
      "step": 102
    },
    {
      "epoch": 0.01682181936959007,
      "grad_norm": 0.7108080983161926,
      "learning_rate": 5.115473684210527e-05,
      "loss": 0.9403,
      "step": 103
    },
    {
      "epoch": 0.016985138004246284,
      "grad_norm": 0.7301442623138428,
      "learning_rate": 5.062736842105263e-05,
      "loss": 0.8655,
      "step": 104
    },
    {
      "epoch": 0.0171484566389025,
      "grad_norm": 0.6835742592811584,
      "learning_rate": 5.01e-05,
      "loss": 0.8765,
      "step": 105
    },
    {
      "epoch": 0.017311775273558712,
      "grad_norm": 0.6745396256446838,
      "learning_rate": 4.9572631578947363e-05,
      "loss": 0.9458,
      "step": 106
    },
    {
      "epoch": 0.017475093908214927,
      "grad_norm": 0.6688428521156311,
      "learning_rate": 4.9045263157894736e-05,
      "loss": 0.8976,
      "step": 107
    },
    {
      "epoch": 0.017638412542871143,
      "grad_norm": 0.6025101542472839,
      "learning_rate": 4.85178947368421e-05,
      "loss": 1.007,
      "step": 108
    },
    {
      "epoch": 0.017801731177527355,
      "grad_norm": 0.5355660319328308,
      "learning_rate": 4.799052631578947e-05,
      "loss": 0.893,
      "step": 109
    },
    {
      "epoch": 0.01796504981218357,
      "grad_norm": 0.6081918478012085,
      "learning_rate": 4.746315789473684e-05,
      "loss": 1.0356,
      "step": 110
    },
    {
      "epoch": 0.018128368446839783,
      "grad_norm": 0.6432203650474548,
      "learning_rate": 4.693578947368421e-05,
      "loss": 1.0259,
      "step": 111
    },
    {
      "epoch": 0.018291687081496,
      "grad_norm": 0.6371332406997681,
      "learning_rate": 4.6408421052631576e-05,
      "loss": 0.9714,
      "step": 112
    },
    {
      "epoch": 0.018455005716152214,
      "grad_norm": 0.632469117641449,
      "learning_rate": 4.588105263157894e-05,
      "loss": 0.7899,
      "step": 113
    },
    {
      "epoch": 0.018618324350808426,
      "grad_norm": 0.6865615248680115,
      "learning_rate": 4.5353684210526314e-05,
      "loss": 1.0136,
      "step": 114
    },
    {
      "epoch": 0.01878164298546464,
      "grad_norm": 0.735899031162262,
      "learning_rate": 4.482631578947368e-05,
      "loss": 0.9631,
      "step": 115
    },
    {
      "epoch": 0.018944961620120857,
      "grad_norm": 0.7938647270202637,
      "learning_rate": 4.429894736842105e-05,
      "loss": 1.0312,
      "step": 116
    },
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 0.7992541790008545,
      "learning_rate": 4.3771578947368424e-05,
      "loss": 1.1253,
      "step": 117
    },
    {
      "epoch": 0.019271598889433285,
      "grad_norm": 0.7731001377105713,
      "learning_rate": 4.324421052631579e-05,
      "loss": 1.1413,
      "step": 118
    },
    {
      "epoch": 0.019434917524089497,
      "grad_norm": 0.8240691423416138,
      "learning_rate": 4.2716842105263155e-05,
      "loss": 0.9722,
      "step": 119
    },
    {
      "epoch": 0.019598236158745713,
      "grad_norm": 0.7312250733375549,
      "learning_rate": 4.218947368421052e-05,
      "loss": 0.8119,
      "step": 120
    },
    {
      "epoch": 0.019761554793401928,
      "grad_norm": 0.8575920462608337,
      "learning_rate": 4.166210526315789e-05,
      "loss": 0.9616,
      "step": 121
    },
    {
      "epoch": 0.01992487342805814,
      "grad_norm": 0.8943201899528503,
      "learning_rate": 4.1134736842105265e-05,
      "loss": 0.9458,
      "step": 122
    },
    {
      "epoch": 0.020088192062714356,
      "grad_norm": 0.7860243916511536,
      "learning_rate": 4.060736842105263e-05,
      "loss": 1.006,
      "step": 123
    },
    {
      "epoch": 0.02025151069737057,
      "grad_norm": 0.9303942918777466,
      "learning_rate": 4.008e-05,
      "loss": 0.9787,
      "step": 124
    },
    {
      "epoch": 0.020414829332026783,
      "grad_norm": 1.0351006984710693,
      "learning_rate": 3.955263157894737e-05,
      "loss": 1.1954,
      "step": 125
    },
    {
      "epoch": 0.020578147966683,
      "grad_norm": 0.9931172728538513,
      "learning_rate": 3.9025263157894733e-05,
      "loss": 0.9572,
      "step": 126
    },
    {
      "epoch": 0.02074146660133921,
      "grad_norm": 1.0373263359069824,
      "learning_rate": 3.84978947368421e-05,
      "loss": 0.9803,
      "step": 127
    },
    {
      "epoch": 0.020904785235995427,
      "grad_norm": 1.018931269645691,
      "learning_rate": 3.797052631578947e-05,
      "loss": 0.69,
      "step": 128
    },
    {
      "epoch": 0.021068103870651642,
      "grad_norm": 1.324196696281433,
      "learning_rate": 3.744315789473684e-05,
      "loss": 1.0059,
      "step": 129
    },
    {
      "epoch": 0.021231422505307854,
      "grad_norm": 1.19536554813385,
      "learning_rate": 3.691578947368421e-05,
      "loss": 0.9141,
      "step": 130
    },
    {
      "epoch": 0.02139474113996407,
      "grad_norm": 1.2916719913482666,
      "learning_rate": 3.638842105263158e-05,
      "loss": 1.0181,
      "step": 131
    },
    {
      "epoch": 0.021558059774620286,
      "grad_norm": 1.313924789428711,
      "learning_rate": 3.5861052631578946e-05,
      "loss": 1.0114,
      "step": 132
    },
    {
      "epoch": 0.021721378409276498,
      "grad_norm": 1.6728382110595703,
      "learning_rate": 3.533368421052631e-05,
      "loss": 1.151,
      "step": 133
    },
    {
      "epoch": 0.021884697043932713,
      "grad_norm": 1.9813265800476074,
      "learning_rate": 3.4806315789473684e-05,
      "loss": 1.209,
      "step": 134
    },
    {
      "epoch": 0.022048015678588925,
      "grad_norm": 2.619920015335083,
      "learning_rate": 3.427894736842105e-05,
      "loss": 1.4013,
      "step": 135
    },
    {
      "epoch": 0.02221133431324514,
      "grad_norm": 3.543386936187744,
      "learning_rate": 3.375157894736842e-05,
      "loss": 1.2687,
      "step": 136
    },
    {
      "epoch": 0.022374652947901356,
      "grad_norm": 2.107569932937622,
      "learning_rate": 3.322421052631579e-05,
      "loss": 1.1479,
      "step": 137
    },
    {
      "epoch": 0.02253797158255757,
      "grad_norm": 2.548245429992676,
      "learning_rate": 3.269684210526316e-05,
      "loss": 1.1627,
      "step": 138
    },
    {
      "epoch": 0.022701290217213784,
      "grad_norm": 3.172882556915283,
      "learning_rate": 3.2169473684210525e-05,
      "loss": 1.4478,
      "step": 139
    },
    {
      "epoch": 0.02286460885187,
      "grad_norm": 3.1833114624023438,
      "learning_rate": 3.164210526315789e-05,
      "loss": 1.4387,
      "step": 140
    },
    {
      "epoch": 0.023027927486526212,
      "grad_norm": 3.0594396591186523,
      "learning_rate": 3.111473684210526e-05,
      "loss": 0.975,
      "step": 141
    },
    {
      "epoch": 0.023191246121182427,
      "grad_norm": 3.2839081287384033,
      "learning_rate": 3.058736842105263e-05,
      "loss": 1.3071,
      "step": 142
    },
    {
      "epoch": 0.02335456475583864,
      "grad_norm": 4.123104095458984,
      "learning_rate": 3.0059999999999997e-05,
      "loss": 1.6883,
      "step": 143
    },
    {
      "epoch": 0.023517883390494855,
      "grad_norm": 3.278013229370117,
      "learning_rate": 2.9532631578947366e-05,
      "loss": 1.5988,
      "step": 144
    },
    {
      "epoch": 0.02368120202515107,
      "grad_norm": 2.856154680252075,
      "learning_rate": 2.9005263157894738e-05,
      "loss": 0.9376,
      "step": 145
    },
    {
      "epoch": 0.023844520659807283,
      "grad_norm": 3.468996524810791,
      "learning_rate": 2.8477894736842103e-05,
      "loss": 1.8904,
      "step": 146
    },
    {
      "epoch": 0.0240078392944635,
      "grad_norm": 3.6459338665008545,
      "learning_rate": 2.7950526315789472e-05,
      "loss": 1.1922,
      "step": 147
    },
    {
      "epoch": 0.024171157929119714,
      "grad_norm": 4.6464948654174805,
      "learning_rate": 2.7423157894736844e-05,
      "loss": 2.2418,
      "step": 148
    },
    {
      "epoch": 0.024334476563775926,
      "grad_norm": 6.070025444030762,
      "learning_rate": 2.689578947368421e-05,
      "loss": 2.1127,
      "step": 149
    },
    {
      "epoch": 0.02449779519843214,
      "grad_norm": 11.722772598266602,
      "learning_rate": 2.6368421052631575e-05,
      "loss": 2.0128,
      "step": 150
    },
    {
      "epoch": 0.02449779519843214,
      "eval_loss": 1.1496460437774658,
      "eval_runtime": 344.5868,
      "eval_samples_per_second": 7.481,
      "eval_steps_per_second": 1.872,
      "step": 150
    },
    {
      "epoch": 0.024661113833088357,
      "grad_norm": 0.5295467972755432,
      "learning_rate": 2.5841052631578947e-05,
      "loss": 0.9102,
      "step": 151
    },
    {
      "epoch": 0.02482443246774457,
      "grad_norm": 0.42944419384002686,
      "learning_rate": 2.5313684210526316e-05,
      "loss": 0.7221,
      "step": 152
    },
    {
      "epoch": 0.024987751102400785,
      "grad_norm": 0.4469304084777832,
      "learning_rate": 2.4786315789473682e-05,
      "loss": 0.7679,
      "step": 153
    },
    {
      "epoch": 0.025151069737056997,
      "grad_norm": 0.6193202137947083,
      "learning_rate": 2.425894736842105e-05,
      "loss": 0.9083,
      "step": 154
    },
    {
      "epoch": 0.025314388371713212,
      "grad_norm": 0.5847591161727905,
      "learning_rate": 2.373157894736842e-05,
      "loss": 0.72,
      "step": 155
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 0.5906417965888977,
      "learning_rate": 2.3204210526315788e-05,
      "loss": 0.9138,
      "step": 156
    },
    {
      "epoch": 0.02564102564102564,
      "grad_norm": 0.4926919639110565,
      "learning_rate": 2.2676842105263157e-05,
      "loss": 0.9187,
      "step": 157
    },
    {
      "epoch": 0.025804344275681856,
      "grad_norm": 0.5422658920288086,
      "learning_rate": 2.2149473684210526e-05,
      "loss": 1.0084,
      "step": 158
    },
    {
      "epoch": 0.02596766291033807,
      "grad_norm": 0.5664920210838318,
      "learning_rate": 2.1622105263157895e-05,
      "loss": 0.9221,
      "step": 159
    },
    {
      "epoch": 0.026130981544994283,
      "grad_norm": 0.5593885779380798,
      "learning_rate": 2.109473684210526e-05,
      "loss": 0.8757,
      "step": 160
    },
    {
      "epoch": 0.0262943001796505,
      "grad_norm": 0.6178870797157288,
      "learning_rate": 2.0567368421052632e-05,
      "loss": 1.0018,
      "step": 161
    },
    {
      "epoch": 0.02645761881430671,
      "grad_norm": 0.5820048451423645,
      "learning_rate": 2.004e-05,
      "loss": 0.8186,
      "step": 162
    },
    {
      "epoch": 0.026620937448962927,
      "grad_norm": 0.606930673122406,
      "learning_rate": 1.9512631578947367e-05,
      "loss": 0.915,
      "step": 163
    },
    {
      "epoch": 0.026784256083619142,
      "grad_norm": 0.5813213586807251,
      "learning_rate": 1.8985263157894736e-05,
      "loss": 0.8943,
      "step": 164
    },
    {
      "epoch": 0.026947574718275354,
      "grad_norm": 0.7827402949333191,
      "learning_rate": 1.8457894736842104e-05,
      "loss": 0.6757,
      "step": 165
    },
    {
      "epoch": 0.02711089335293157,
      "grad_norm": 0.7083070874214172,
      "learning_rate": 1.7930526315789473e-05,
      "loss": 0.797,
      "step": 166
    },
    {
      "epoch": 0.027274211987587785,
      "grad_norm": 0.6508756875991821,
      "learning_rate": 1.7403157894736842e-05,
      "loss": 0.98,
      "step": 167
    },
    {
      "epoch": 0.027437530622243998,
      "grad_norm": 0.681760847568512,
      "learning_rate": 1.687578947368421e-05,
      "loss": 0.8936,
      "step": 168
    },
    {
      "epoch": 0.027600849256900213,
      "grad_norm": 0.7164104580879211,
      "learning_rate": 1.634842105263158e-05,
      "loss": 0.866,
      "step": 169
    },
    {
      "epoch": 0.027764167891556425,
      "grad_norm": 0.8126941919326782,
      "learning_rate": 1.5821052631578945e-05,
      "loss": 0.8797,
      "step": 170
    },
    {
      "epoch": 0.02792748652621264,
      "grad_norm": 0.7941915988922119,
      "learning_rate": 1.5293684210526314e-05,
      "loss": 1.0358,
      "step": 171
    },
    {
      "epoch": 0.028090805160868856,
      "grad_norm": 0.9106842875480652,
      "learning_rate": 1.4766315789473683e-05,
      "loss": 1.0669,
      "step": 172
    },
    {
      "epoch": 0.02825412379552507,
      "grad_norm": 0.9008748531341553,
      "learning_rate": 1.4238947368421052e-05,
      "loss": 1.0582,
      "step": 173
    },
    {
      "epoch": 0.028417442430181284,
      "grad_norm": 0.916495144367218,
      "learning_rate": 1.3711578947368422e-05,
      "loss": 0.7787,
      "step": 174
    },
    {
      "epoch": 0.0285807610648375,
      "grad_norm": 0.9486830234527588,
      "learning_rate": 1.3184210526315788e-05,
      "loss": 0.9648,
      "step": 175
    },
    {
      "epoch": 0.02874407969949371,
      "grad_norm": 0.975587785243988,
      "learning_rate": 1.2656842105263158e-05,
      "loss": 0.9214,
      "step": 176
    },
    {
      "epoch": 0.028907398334149927,
      "grad_norm": 1.2781733274459839,
      "learning_rate": 1.2129473684210525e-05,
      "loss": 1.2543,
      "step": 177
    },
    {
      "epoch": 0.02907071696880614,
      "grad_norm": 1.1123965978622437,
      "learning_rate": 1.1602105263157894e-05,
      "loss": 0.9457,
      "step": 178
    },
    {
      "epoch": 0.029234035603462355,
      "grad_norm": 1.0323660373687744,
      "learning_rate": 1.1074736842105263e-05,
      "loss": 0.8505,
      "step": 179
    },
    {
      "epoch": 0.02939735423811857,
      "grad_norm": 1.3954451084136963,
      "learning_rate": 1.054736842105263e-05,
      "loss": 1.1639,
      "step": 180
    },
    {
      "epoch": 0.029560672872774783,
      "grad_norm": 1.4453262090682983,
      "learning_rate": 1.002e-05,
      "loss": 1.1046,
      "step": 181
    },
    {
      "epoch": 0.029723991507430998,
      "grad_norm": 1.371714472770691,
      "learning_rate": 9.492631578947368e-06,
      "loss": 1.1104,
      "step": 182
    },
    {
      "epoch": 0.029887310142087214,
      "grad_norm": 1.7723652124404907,
      "learning_rate": 8.965263157894737e-06,
      "loss": 1.1258,
      "step": 183
    },
    {
      "epoch": 0.030050628776743426,
      "grad_norm": 1.6344342231750488,
      "learning_rate": 8.437894736842105e-06,
      "loss": 1.1028,
      "step": 184
    },
    {
      "epoch": 0.03021394741139964,
      "grad_norm": 2.0994627475738525,
      "learning_rate": 7.910526315789473e-06,
      "loss": 1.0568,
      "step": 185
    },
    {
      "epoch": 0.030377266046055854,
      "grad_norm": 2.0363705158233643,
      "learning_rate": 7.383157894736841e-06,
      "loss": 1.5804,
      "step": 186
    },
    {
      "epoch": 0.03054058468071207,
      "grad_norm": 2.4229798316955566,
      "learning_rate": 6.855789473684211e-06,
      "loss": 1.1893,
      "step": 187
    },
    {
      "epoch": 0.030703903315368285,
      "grad_norm": 3.015899181365967,
      "learning_rate": 6.328421052631579e-06,
      "loss": 1.2766,
      "step": 188
    },
    {
      "epoch": 0.030867221950024497,
      "grad_norm": 2.2767412662506104,
      "learning_rate": 5.801052631578947e-06,
      "loss": 1.22,
      "step": 189
    },
    {
      "epoch": 0.031030540584680712,
      "grad_norm": 3.660109758377075,
      "learning_rate": 5.273684210526315e-06,
      "loss": 1.2292,
      "step": 190
    },
    {
      "epoch": 0.031193859219336928,
      "grad_norm": 4.029131889343262,
      "learning_rate": 4.746315789473684e-06,
      "loss": 1.7373,
      "step": 191
    },
    {
      "epoch": 0.03135717785399314,
      "grad_norm": 3.336766242980957,
      "learning_rate": 4.218947368421053e-06,
      "loss": 1.44,
      "step": 192
    },
    {
      "epoch": 0.03152049648864935,
      "grad_norm": 3.1067514419555664,
      "learning_rate": 3.6915789473684207e-06,
      "loss": 1.512,
      "step": 193
    },
    {
      "epoch": 0.03168381512330557,
      "grad_norm": 3.971158981323242,
      "learning_rate": 3.1642105263157895e-06,
      "loss": 1.5966,
      "step": 194
    },
    {
      "epoch": 0.03184713375796178,
      "grad_norm": 2.507552146911621,
      "learning_rate": 2.6368421052631575e-06,
      "loss": 1.0217,
      "step": 195
    },
    {
      "epoch": 0.032010452392617995,
      "grad_norm": 3.831817865371704,
      "learning_rate": 2.1094736842105264e-06,
      "loss": 0.9842,
      "step": 196
    },
    {
      "epoch": 0.032173771027274214,
      "grad_norm": 5.005215167999268,
      "learning_rate": 1.5821052631578948e-06,
      "loss": 2.8562,
      "step": 197
    },
    {
      "epoch": 0.03233708966193043,
      "grad_norm": 4.721521377563477,
      "learning_rate": 1.0547368421052632e-06,
      "loss": 2.4518,
      "step": 198
    },
    {
      "epoch": 0.03250040829658664,
      "grad_norm": 5.0478363037109375,
      "learning_rate": 5.273684210526316e-07,
      "loss": 2.0353,
      "step": 199
    },
    {
      "epoch": 0.03266372693124286,
      "grad_norm": 6.368497371673584,
      "learning_rate": 0.0,
      "loss": 1.6937,
      "step": 200
    },
    {
      "epoch": 0.03266372693124286,
      "eval_loss": 1.1331982612609863,
      "eval_runtime": 344.2422,
      "eval_samples_per_second": 7.489,
      "eval_steps_per_second": 1.874,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.65769979075625e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}