|
{ |
|
"best_metric": 0.5795559883117676, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.2828854314002829, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0014144271570014145, |
|
"grad_norm": 30.358135223388672, |
|
"learning_rate": 1.0100000000000002e-05, |
|
"loss": 5.9636, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0014144271570014145, |
|
"eval_loss": 1.5057185888290405, |
|
"eval_runtime": 65.8108, |
|
"eval_samples_per_second": 144.733, |
|
"eval_steps_per_second": 4.528, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002828854314002829, |
|
"grad_norm": 31.44856071472168, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 6.0251, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.004243281471004243, |
|
"grad_norm": 22.16029930114746, |
|
"learning_rate": 3.0299999999999998e-05, |
|
"loss": 5.5536, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.005657708628005658, |
|
"grad_norm": 17.20639991760254, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 4.7391, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.007072135785007072, |
|
"grad_norm": 12.612313270568848, |
|
"learning_rate": 5.05e-05, |
|
"loss": 4.1681, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.008486562942008486, |
|
"grad_norm": 8.703054428100586, |
|
"learning_rate": 6.0599999999999996e-05, |
|
"loss": 3.637, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.009900990099009901, |
|
"grad_norm": 9.403382301330566, |
|
"learning_rate": 7.07e-05, |
|
"loss": 3.7323, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.011315417256011316, |
|
"grad_norm": 12.620731353759766, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 3.6018, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01272984441301273, |
|
"grad_norm": 8.057601928710938, |
|
"learning_rate": 9.09e-05, |
|
"loss": 3.4366, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.014144271570014143, |
|
"grad_norm": 4.481306552886963, |
|
"learning_rate": 0.000101, |
|
"loss": 3.2684, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.015558698727015558, |
|
"grad_norm": 4.987090110778809, |
|
"learning_rate": 0.00010046842105263158, |
|
"loss": 3.1242, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.016973125884016973, |
|
"grad_norm": 4.0390849113464355, |
|
"learning_rate": 9.993684210526315e-05, |
|
"loss": 2.8276, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.018387553041018388, |
|
"grad_norm": 4.3538432121276855, |
|
"learning_rate": 9.940526315789473e-05, |
|
"loss": 3.0122, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.019801980198019802, |
|
"grad_norm": 3.8758561611175537, |
|
"learning_rate": 9.887368421052632e-05, |
|
"loss": 3.1345, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.021216407355021217, |
|
"grad_norm": 4.682952880859375, |
|
"learning_rate": 9.83421052631579e-05, |
|
"loss": 3.0093, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02263083451202263, |
|
"grad_norm": 3.6436381340026855, |
|
"learning_rate": 9.781052631578948e-05, |
|
"loss": 2.8878, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.024045261669024046, |
|
"grad_norm": 3.1359572410583496, |
|
"learning_rate": 9.727894736842106e-05, |
|
"loss": 2.8174, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02545968882602546, |
|
"grad_norm": 3.475654363632202, |
|
"learning_rate": 9.674736842105263e-05, |
|
"loss": 2.6673, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.026874115983026876, |
|
"grad_norm": 3.38742733001709, |
|
"learning_rate": 9.621578947368421e-05, |
|
"loss": 2.8075, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.028288543140028287, |
|
"grad_norm": 6.014629364013672, |
|
"learning_rate": 9.568421052631578e-05, |
|
"loss": 3.0922, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0297029702970297, |
|
"grad_norm": 3.0035653114318848, |
|
"learning_rate": 9.515263157894737e-05, |
|
"loss": 2.9599, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.031117397454031116, |
|
"grad_norm": 3.9666907787323, |
|
"learning_rate": 9.462105263157895e-05, |
|
"loss": 2.8904, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03253182461103253, |
|
"grad_norm": 3.315082550048828, |
|
"learning_rate": 9.408947368421054e-05, |
|
"loss": 2.7768, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.033946251768033946, |
|
"grad_norm": 2.5618789196014404, |
|
"learning_rate": 9.355789473684211e-05, |
|
"loss": 2.6475, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03536067892503536, |
|
"grad_norm": 2.922630548477173, |
|
"learning_rate": 9.302631578947369e-05, |
|
"loss": 2.5942, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.036775106082036775, |
|
"grad_norm": 4.30806827545166, |
|
"learning_rate": 9.249473684210526e-05, |
|
"loss": 3.0147, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03818953323903819, |
|
"grad_norm": 2.8271915912628174, |
|
"learning_rate": 9.196315789473685e-05, |
|
"loss": 2.8583, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.039603960396039604, |
|
"grad_norm": 2.8441073894500732, |
|
"learning_rate": 9.143157894736843e-05, |
|
"loss": 2.7133, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04101838755304102, |
|
"grad_norm": 2.657393455505371, |
|
"learning_rate": 9.09e-05, |
|
"loss": 2.6705, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.042432814710042434, |
|
"grad_norm": 2.526547908782959, |
|
"learning_rate": 9.036842105263158e-05, |
|
"loss": 2.6752, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04384724186704385, |
|
"grad_norm": 2.5309224128723145, |
|
"learning_rate": 8.983684210526316e-05, |
|
"loss": 2.6006, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04526166902404526, |
|
"grad_norm": 3.246652841567993, |
|
"learning_rate": 8.930526315789474e-05, |
|
"loss": 2.926, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04667609618104668, |
|
"grad_norm": 2.7534706592559814, |
|
"learning_rate": 8.877368421052632e-05, |
|
"loss": 2.8862, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04809052333804809, |
|
"grad_norm": 2.599621295928955, |
|
"learning_rate": 8.82421052631579e-05, |
|
"loss": 2.7434, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04950495049504951, |
|
"grad_norm": 2.4461135864257812, |
|
"learning_rate": 8.771052631578948e-05, |
|
"loss": 2.6901, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05091937765205092, |
|
"grad_norm": 2.444023370742798, |
|
"learning_rate": 8.717894736842105e-05, |
|
"loss": 2.5766, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05233380480905234, |
|
"grad_norm": 2.3293182849884033, |
|
"learning_rate": 8.664736842105263e-05, |
|
"loss": 2.4633, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05374823196605375, |
|
"grad_norm": 2.4258995056152344, |
|
"learning_rate": 8.61157894736842e-05, |
|
"loss": 2.6781, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.055162659123055166, |
|
"grad_norm": 3.006838083267212, |
|
"learning_rate": 8.55842105263158e-05, |
|
"loss": 2.8319, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.056577086280056574, |
|
"grad_norm": 2.496774673461914, |
|
"learning_rate": 8.505263157894737e-05, |
|
"loss": 2.644, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05799151343705799, |
|
"grad_norm": 2.602163076400757, |
|
"learning_rate": 8.452105263157896e-05, |
|
"loss": 2.6905, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0594059405940594, |
|
"grad_norm": 2.52225661277771, |
|
"learning_rate": 8.398947368421053e-05, |
|
"loss": 2.5921, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.06082036775106082, |
|
"grad_norm": 2.3774173259735107, |
|
"learning_rate": 8.345789473684211e-05, |
|
"loss": 2.4794, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06223479490806223, |
|
"grad_norm": 2.1766254901885986, |
|
"learning_rate": 8.292631578947368e-05, |
|
"loss": 2.5138, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06364922206506365, |
|
"grad_norm": 2.6760802268981934, |
|
"learning_rate": 8.239473684210526e-05, |
|
"loss": 2.7873, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06506364922206506, |
|
"grad_norm": 2.3476643562316895, |
|
"learning_rate": 8.186315789473683e-05, |
|
"loss": 2.6749, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06647807637906648, |
|
"grad_norm": 2.3106589317321777, |
|
"learning_rate": 8.133157894736842e-05, |
|
"loss": 2.5956, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06789250353606789, |
|
"grad_norm": 2.198598861694336, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 2.5047, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06930693069306931, |
|
"grad_norm": 2.2239043712615967, |
|
"learning_rate": 8.026842105263159e-05, |
|
"loss": 2.5194, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.07072135785007072, |
|
"grad_norm": 2.4284136295318604, |
|
"learning_rate": 7.973684210526316e-05, |
|
"loss": 2.4291, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.07072135785007072, |
|
"eval_loss": 0.6465714573860168, |
|
"eval_runtime": 65.9647, |
|
"eval_samples_per_second": 144.395, |
|
"eval_steps_per_second": 4.518, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.07213578500707214, |
|
"grad_norm": 3.151646852493286, |
|
"learning_rate": 7.920526315789474e-05, |
|
"loss": 2.8639, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07355021216407355, |
|
"grad_norm": 2.3648862838745117, |
|
"learning_rate": 7.867368421052631e-05, |
|
"loss": 2.6662, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07496463932107496, |
|
"grad_norm": 2.239790439605713, |
|
"learning_rate": 7.814210526315789e-05, |
|
"loss": 2.5937, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07637906647807638, |
|
"grad_norm": 2.3133761882781982, |
|
"learning_rate": 7.761052631578946e-05, |
|
"loss": 2.6009, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07779349363507779, |
|
"grad_norm": 2.187490224838257, |
|
"learning_rate": 7.707894736842105e-05, |
|
"loss": 2.5353, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07920792079207921, |
|
"grad_norm": 2.121954917907715, |
|
"learning_rate": 7.654736842105264e-05, |
|
"loss": 2.3811, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.08062234794908062, |
|
"grad_norm": 2.474841356277466, |
|
"learning_rate": 7.601578947368422e-05, |
|
"loss": 2.7355, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.08203677510608204, |
|
"grad_norm": 2.4026870727539062, |
|
"learning_rate": 7.548421052631579e-05, |
|
"loss": 2.6286, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.08345120226308345, |
|
"grad_norm": 1.9473828077316284, |
|
"learning_rate": 7.495263157894737e-05, |
|
"loss": 2.5476, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.08486562942008487, |
|
"grad_norm": 2.153718948364258, |
|
"learning_rate": 7.442105263157894e-05, |
|
"loss": 2.5455, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.08628005657708628, |
|
"grad_norm": 2.1734304428100586, |
|
"learning_rate": 7.388947368421053e-05, |
|
"loss": 2.4486, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0876944837340877, |
|
"grad_norm": 2.2392075061798096, |
|
"learning_rate": 7.335789473684211e-05, |
|
"loss": 2.4886, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0891089108910891, |
|
"grad_norm": 1.850045084953308, |
|
"learning_rate": 7.282631578947368e-05, |
|
"loss": 2.5359, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.09052333804809053, |
|
"grad_norm": 2.3224589824676514, |
|
"learning_rate": 7.229473684210527e-05, |
|
"loss": 2.6619, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.09193776520509193, |
|
"grad_norm": 1.9989107847213745, |
|
"learning_rate": 7.176315789473685e-05, |
|
"loss": 2.5735, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.09335219236209336, |
|
"grad_norm": 1.9589483737945557, |
|
"learning_rate": 7.123157894736842e-05, |
|
"loss": 2.531, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.09476661951909476, |
|
"grad_norm": 1.8521004915237427, |
|
"learning_rate": 7.07e-05, |
|
"loss": 2.4682, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.09618104667609619, |
|
"grad_norm": 2.097626209259033, |
|
"learning_rate": 7.016842105263159e-05, |
|
"loss": 2.4427, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.09759547383309759, |
|
"grad_norm": 2.1736693382263184, |
|
"learning_rate": 6.963684210526316e-05, |
|
"loss": 2.4669, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09900990099009901, |
|
"grad_norm": 2.0476653575897217, |
|
"learning_rate": 6.910526315789474e-05, |
|
"loss": 2.6927, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.10042432814710042, |
|
"grad_norm": 2.14760160446167, |
|
"learning_rate": 6.857368421052631e-05, |
|
"loss": 2.5764, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.10183875530410184, |
|
"grad_norm": 2.0928895473480225, |
|
"learning_rate": 6.80421052631579e-05, |
|
"loss": 2.5387, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.10325318246110325, |
|
"grad_norm": 1.9230157136917114, |
|
"learning_rate": 6.751052631578948e-05, |
|
"loss": 2.4688, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.10466760961810467, |
|
"grad_norm": 1.8409297466278076, |
|
"learning_rate": 6.697894736842105e-05, |
|
"loss": 2.4534, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.10608203677510608, |
|
"grad_norm": 2.028137445449829, |
|
"learning_rate": 6.644736842105264e-05, |
|
"loss": 2.3257, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1074964639321075, |
|
"grad_norm": 2.186375856399536, |
|
"learning_rate": 6.591578947368422e-05, |
|
"loss": 2.6981, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.10891089108910891, |
|
"grad_norm": 1.9577515125274658, |
|
"learning_rate": 6.538421052631579e-05, |
|
"loss": 2.6189, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.11032531824611033, |
|
"grad_norm": 1.9610555171966553, |
|
"learning_rate": 6.485263157894737e-05, |
|
"loss": 2.609, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.11173974540311174, |
|
"grad_norm": 1.799399733543396, |
|
"learning_rate": 6.432105263157894e-05, |
|
"loss": 2.439, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.11315417256011315, |
|
"grad_norm": 2.2436952590942383, |
|
"learning_rate": 6.378947368421053e-05, |
|
"loss": 2.4132, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.11456859971711457, |
|
"grad_norm": 2.1052162647247314, |
|
"learning_rate": 6.32578947368421e-05, |
|
"loss": 2.3395, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.11598302687411598, |
|
"grad_norm": 2.0289740562438965, |
|
"learning_rate": 6.27263157894737e-05, |
|
"loss": 2.6342, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1173974540311174, |
|
"grad_norm": 2.1439921855926514, |
|
"learning_rate": 6.219473684210527e-05, |
|
"loss": 2.6534, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1188118811881188, |
|
"grad_norm": 1.9859956502914429, |
|
"learning_rate": 6.166315789473685e-05, |
|
"loss": 2.5772, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.12022630834512023, |
|
"grad_norm": 1.8177697658538818, |
|
"learning_rate": 6.113157894736842e-05, |
|
"loss": 2.3984, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.12164073550212164, |
|
"grad_norm": 1.8563177585601807, |
|
"learning_rate": 6.0599999999999996e-05, |
|
"loss": 2.4031, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.12305516265912306, |
|
"grad_norm": 2.219247341156006, |
|
"learning_rate": 6.006842105263158e-05, |
|
"loss": 2.3255, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.12446958981612447, |
|
"grad_norm": 1.8544104099273682, |
|
"learning_rate": 5.953684210526315e-05, |
|
"loss": 2.5209, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.12588401697312587, |
|
"grad_norm": 2.0143113136291504, |
|
"learning_rate": 5.900526315789474e-05, |
|
"loss": 2.6355, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.1272984441301273, |
|
"grad_norm": 1.896228551864624, |
|
"learning_rate": 5.847368421052632e-05, |
|
"loss": 2.5526, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.12871287128712872, |
|
"grad_norm": 1.8661564588546753, |
|
"learning_rate": 5.79421052631579e-05, |
|
"loss": 2.511, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.13012729844413012, |
|
"grad_norm": 1.8194445371627808, |
|
"learning_rate": 5.7410526315789475e-05, |
|
"loss": 2.4344, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.13154172560113153, |
|
"grad_norm": 1.8762489557266235, |
|
"learning_rate": 5.687894736842105e-05, |
|
"loss": 2.3408, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.13295615275813297, |
|
"grad_norm": 1.7948472499847412, |
|
"learning_rate": 5.6347368421052625e-05, |
|
"loss": 2.4347, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.13437057991513437, |
|
"grad_norm": 1.8099737167358398, |
|
"learning_rate": 5.5815789473684214e-05, |
|
"loss": 2.6085, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.13578500707213578, |
|
"grad_norm": 1.8026000261306763, |
|
"learning_rate": 5.5284210526315796e-05, |
|
"loss": 2.5406, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1371994342291372, |
|
"grad_norm": 1.7136831283569336, |
|
"learning_rate": 5.475263157894737e-05, |
|
"loss": 2.4505, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.13861386138613863, |
|
"grad_norm": 1.7045419216156006, |
|
"learning_rate": 5.422105263157895e-05, |
|
"loss": 2.3824, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.14002828854314003, |
|
"grad_norm": 1.850805640220642, |
|
"learning_rate": 5.368947368421053e-05, |
|
"loss": 2.3497, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.14144271570014144, |
|
"grad_norm": 1.9787073135375977, |
|
"learning_rate": 5.3157894736842104e-05, |
|
"loss": 2.2438, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14144271570014144, |
|
"eval_loss": 0.6103463172912598, |
|
"eval_runtime": 65.9764, |
|
"eval_samples_per_second": 144.37, |
|
"eval_steps_per_second": 4.517, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14285714285714285, |
|
"grad_norm": 1.9267340898513794, |
|
"learning_rate": 5.262631578947368e-05, |
|
"loss": 2.6755, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.14427157001414428, |
|
"grad_norm": 1.8436909914016724, |
|
"learning_rate": 5.209473684210527e-05, |
|
"loss": 2.5422, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1456859971711457, |
|
"grad_norm": 1.7414659261703491, |
|
"learning_rate": 5.1563157894736844e-05, |
|
"loss": 2.4531, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1471004243281471, |
|
"grad_norm": 1.595393180847168, |
|
"learning_rate": 5.1031578947368426e-05, |
|
"loss": 2.4005, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1485148514851485, |
|
"grad_norm": 1.85604989528656, |
|
"learning_rate": 5.05e-05, |
|
"loss": 2.3848, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.14992927864214992, |
|
"grad_norm": 2.049961805343628, |
|
"learning_rate": 4.9968421052631576e-05, |
|
"loss": 2.2966, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.15134370579915135, |
|
"grad_norm": 1.8361124992370605, |
|
"learning_rate": 4.943684210526316e-05, |
|
"loss": 2.5758, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.15275813295615276, |
|
"grad_norm": 1.8123300075531006, |
|
"learning_rate": 4.890526315789474e-05, |
|
"loss": 2.541, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.15417256011315417, |
|
"grad_norm": 1.7798748016357422, |
|
"learning_rate": 4.8373684210526316e-05, |
|
"loss": 2.461, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.15558698727015557, |
|
"grad_norm": 1.794299840927124, |
|
"learning_rate": 4.784210526315789e-05, |
|
"loss": 2.4377, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.157001414427157, |
|
"grad_norm": 1.764225959777832, |
|
"learning_rate": 4.731052631578947e-05, |
|
"loss": 2.4051, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.15841584158415842, |
|
"grad_norm": 1.998778223991394, |
|
"learning_rate": 4.6778947368421055e-05, |
|
"loss": 2.2702, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.15983026874115983, |
|
"grad_norm": 1.8289985656738281, |
|
"learning_rate": 4.624736842105263e-05, |
|
"loss": 2.4621, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.16124469589816123, |
|
"grad_norm": 1.7528291940689087, |
|
"learning_rate": 4.571578947368421e-05, |
|
"loss": 2.5883, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.16265912305516267, |
|
"grad_norm": 1.732502818107605, |
|
"learning_rate": 4.518421052631579e-05, |
|
"loss": 2.4774, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.16407355021216408, |
|
"grad_norm": 1.6828466653823853, |
|
"learning_rate": 4.465263157894737e-05, |
|
"loss": 2.3899, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.16548797736916548, |
|
"grad_norm": 1.5521636009216309, |
|
"learning_rate": 4.412105263157895e-05, |
|
"loss": 2.2525, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1669024045261669, |
|
"grad_norm": 1.7095965147018433, |
|
"learning_rate": 4.358947368421053e-05, |
|
"loss": 2.2436, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.16831683168316833, |
|
"grad_norm": 1.8195699453353882, |
|
"learning_rate": 4.30578947368421e-05, |
|
"loss": 2.3708, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.16973125884016974, |
|
"grad_norm": 1.6346312761306763, |
|
"learning_rate": 4.2526315789473685e-05, |
|
"loss": 2.5518, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.17114568599717114, |
|
"grad_norm": 1.5906932353973389, |
|
"learning_rate": 4.199473684210527e-05, |
|
"loss": 2.4394, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.17256011315417255, |
|
"grad_norm": 1.6136739253997803, |
|
"learning_rate": 4.146315789473684e-05, |
|
"loss": 2.3704, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.173974540311174, |
|
"grad_norm": 1.5603336095809937, |
|
"learning_rate": 4.093157894736842e-05, |
|
"loss": 2.3416, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1753889674681754, |
|
"grad_norm": 1.7088912725448608, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 2.2935, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.1768033946251768, |
|
"grad_norm": 1.952721357345581, |
|
"learning_rate": 3.986842105263158e-05, |
|
"loss": 2.1708, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1782178217821782, |
|
"grad_norm": 1.8259592056274414, |
|
"learning_rate": 3.933684210526316e-05, |
|
"loss": 2.644, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.17963224893917965, |
|
"grad_norm": 1.7780115604400635, |
|
"learning_rate": 3.880526315789473e-05, |
|
"loss": 2.4767, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.18104667609618105, |
|
"grad_norm": 1.6455316543579102, |
|
"learning_rate": 3.827368421052632e-05, |
|
"loss": 2.4048, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.18246110325318246, |
|
"grad_norm": 1.5901890993118286, |
|
"learning_rate": 3.7742105263157896e-05, |
|
"loss": 2.3776, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.18387553041018387, |
|
"grad_norm": 1.6352453231811523, |
|
"learning_rate": 3.721052631578947e-05, |
|
"loss": 2.3015, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.18528995756718528, |
|
"grad_norm": 1.8599220514297485, |
|
"learning_rate": 3.6678947368421054e-05, |
|
"loss": 2.1913, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1867043847241867, |
|
"grad_norm": 1.6458431482315063, |
|
"learning_rate": 3.6147368421052636e-05, |
|
"loss": 2.5229, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.18811881188118812, |
|
"grad_norm": 1.6390492916107178, |
|
"learning_rate": 3.561578947368421e-05, |
|
"loss": 2.4673, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.18953323903818953, |
|
"grad_norm": 1.6500605344772339, |
|
"learning_rate": 3.508421052631579e-05, |
|
"loss": 2.5057, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.19094766619519093, |
|
"grad_norm": 1.5461673736572266, |
|
"learning_rate": 3.455263157894737e-05, |
|
"loss": 2.4082, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.19236209335219237, |
|
"grad_norm": 1.6012736558914185, |
|
"learning_rate": 3.402105263157895e-05, |
|
"loss": 2.3262, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.19377652050919378, |
|
"grad_norm": 1.8380329608917236, |
|
"learning_rate": 3.3489473684210526e-05, |
|
"loss": 2.2267, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.19519094766619519, |
|
"grad_norm": 1.6812222003936768, |
|
"learning_rate": 3.295789473684211e-05, |
|
"loss": 2.4013, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1966053748231966, |
|
"grad_norm": 1.6972684860229492, |
|
"learning_rate": 3.242631578947368e-05, |
|
"loss": 2.5284, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.19801980198019803, |
|
"grad_norm": 1.5738369226455688, |
|
"learning_rate": 3.1894736842105265e-05, |
|
"loss": 2.4468, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.19943422913719944, |
|
"grad_norm": 1.5377528667449951, |
|
"learning_rate": 3.136315789473685e-05, |
|
"loss": 2.4274, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.20084865629420084, |
|
"grad_norm": 1.4526879787445068, |
|
"learning_rate": 3.083157894736842e-05, |
|
"loss": 2.278, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.20226308345120225, |
|
"grad_norm": 1.6539933681488037, |
|
"learning_rate": 3.0299999999999998e-05, |
|
"loss": 2.2014, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2036775106082037, |
|
"grad_norm": 1.7789607048034668, |
|
"learning_rate": 2.9768421052631577e-05, |
|
"loss": 2.3219, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2050919377652051, |
|
"grad_norm": 1.6558810472488403, |
|
"learning_rate": 2.923684210526316e-05, |
|
"loss": 2.5565, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2065063649222065, |
|
"grad_norm": 1.5841859579086304, |
|
"learning_rate": 2.8705263157894737e-05, |
|
"loss": 2.4249, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2079207920792079, |
|
"grad_norm": 1.541245698928833, |
|
"learning_rate": 2.8173684210526313e-05, |
|
"loss": 2.3161, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.20933521923620935, |
|
"grad_norm": 1.540382981300354, |
|
"learning_rate": 2.7642105263157898e-05, |
|
"loss": 2.2704, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.21074964639321075, |
|
"grad_norm": 1.6713669300079346, |
|
"learning_rate": 2.7110526315789473e-05, |
|
"loss": 2.3984, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.21216407355021216, |
|
"grad_norm": 1.785141110420227, |
|
"learning_rate": 2.6578947368421052e-05, |
|
"loss": 2.1903, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.21216407355021216, |
|
"eval_loss": 0.5903738141059875, |
|
"eval_runtime": 65.8893, |
|
"eval_samples_per_second": 144.561, |
|
"eval_steps_per_second": 4.523, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.21357850070721357, |
|
"grad_norm": 1.8083994388580322, |
|
"learning_rate": 2.6047368421052634e-05, |
|
"loss": 2.655, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.214992927864215, |
|
"grad_norm": 1.7660170793533325, |
|
"learning_rate": 2.5515789473684213e-05, |
|
"loss": 2.4596, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2164073550212164, |
|
"grad_norm": 1.5700284242630005, |
|
"learning_rate": 2.4984210526315788e-05, |
|
"loss": 2.3994, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.21782178217821782, |
|
"grad_norm": 1.5441746711730957, |
|
"learning_rate": 2.445263157894737e-05, |
|
"loss": 2.3907, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.21923620933521923, |
|
"grad_norm": 1.6638027429580688, |
|
"learning_rate": 2.3921052631578946e-05, |
|
"loss": 2.3423, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.22065063649222066, |
|
"grad_norm": 1.771287441253662, |
|
"learning_rate": 2.3389473684210528e-05, |
|
"loss": 2.2275, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.22206506364922207, |
|
"grad_norm": 1.5801191329956055, |
|
"learning_rate": 2.2857894736842106e-05, |
|
"loss": 2.4069, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.22347949080622348, |
|
"grad_norm": 1.6368637084960938, |
|
"learning_rate": 2.2326315789473685e-05, |
|
"loss": 2.4982, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2248939179632249, |
|
"grad_norm": 1.5528136491775513, |
|
"learning_rate": 2.1794736842105264e-05, |
|
"loss": 2.3804, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2263083451202263, |
|
"grad_norm": 1.534663438796997, |
|
"learning_rate": 2.1263157894736842e-05, |
|
"loss": 2.3356, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.22772277227722773, |
|
"grad_norm": 1.5998404026031494, |
|
"learning_rate": 2.073157894736842e-05, |
|
"loss": 2.2787, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.22913719943422914, |
|
"grad_norm": 1.6879397630691528, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 2.2333, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.23055162659123055, |
|
"grad_norm": 1.523830533027649, |
|
"learning_rate": 1.966842105263158e-05, |
|
"loss": 2.3785, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.23196605374823195, |
|
"grad_norm": 1.5299345254898071, |
|
"learning_rate": 1.913684210526316e-05, |
|
"loss": 2.4672, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2333804809052334, |
|
"grad_norm": 1.5351752042770386, |
|
"learning_rate": 1.8605263157894736e-05, |
|
"loss": 2.3958, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2347949080622348, |
|
"grad_norm": 1.568233609199524, |
|
"learning_rate": 1.8073684210526318e-05, |
|
"loss": 2.3673, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2362093352192362, |
|
"grad_norm": 1.5371862649917603, |
|
"learning_rate": 1.7542105263157897e-05, |
|
"loss": 2.3114, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2376237623762376, |
|
"grad_norm": 1.5453903675079346, |
|
"learning_rate": 1.7010526315789475e-05, |
|
"loss": 2.2459, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.23903818953323905, |
|
"grad_norm": 1.5765697956085205, |
|
"learning_rate": 1.6478947368421054e-05, |
|
"loss": 2.299, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.24045261669024046, |
|
"grad_norm": 1.5953807830810547, |
|
"learning_rate": 1.5947368421052633e-05, |
|
"loss": 2.5046, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.24186704384724186, |
|
"grad_norm": 1.491113305091858, |
|
"learning_rate": 1.541578947368421e-05, |
|
"loss": 2.3534, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.24328147100424327, |
|
"grad_norm": 1.545294165611267, |
|
"learning_rate": 1.4884210526315788e-05, |
|
"loss": 2.3312, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.2446958981612447, |
|
"grad_norm": 1.5785633325576782, |
|
"learning_rate": 1.4352631578947369e-05, |
|
"loss": 2.3933, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.24611032531824611, |
|
"grad_norm": 1.5772044658660889, |
|
"learning_rate": 1.3821052631578949e-05, |
|
"loss": 2.2657, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.24752475247524752, |
|
"grad_norm": 1.7794053554534912, |
|
"learning_rate": 1.3289473684210526e-05, |
|
"loss": 2.1362, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.24893917963224893, |
|
"grad_norm": 1.560815453529358, |
|
"learning_rate": 1.2757894736842106e-05, |
|
"loss": 2.5596, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.25035360678925034, |
|
"grad_norm": 1.4746390581130981, |
|
"learning_rate": 1.2226315789473685e-05, |
|
"loss": 2.3835, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.25176803394625175, |
|
"grad_norm": 1.534509539604187, |
|
"learning_rate": 1.1694736842105264e-05, |
|
"loss": 2.3088, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.2531824611032532, |
|
"grad_norm": 1.4818270206451416, |
|
"learning_rate": 1.1163157894736842e-05, |
|
"loss": 2.3556, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.2545968882602546, |
|
"grad_norm": 1.5269659757614136, |
|
"learning_rate": 1.0631578947368421e-05, |
|
"loss": 2.2403, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.256011315417256, |
|
"grad_norm": 1.7317768335342407, |
|
"learning_rate": 1.0100000000000002e-05, |
|
"loss": 2.1797, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.25742574257425743, |
|
"grad_norm": 1.4866405725479126, |
|
"learning_rate": 9.56842105263158e-06, |
|
"loss": 2.4372, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.25884016973125884, |
|
"grad_norm": 1.50742769241333, |
|
"learning_rate": 9.036842105263159e-06, |
|
"loss": 2.3818, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.26025459688826025, |
|
"grad_norm": 1.4752280712127686, |
|
"learning_rate": 8.505263157894738e-06, |
|
"loss": 2.3667, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.26166902404526166, |
|
"grad_norm": 1.5261386632919312, |
|
"learning_rate": 7.973684210526316e-06, |
|
"loss": 2.3464, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.26308345120226306, |
|
"grad_norm": 1.5557565689086914, |
|
"learning_rate": 7.442105263157894e-06, |
|
"loss": 2.2537, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.26449787835926447, |
|
"grad_norm": 1.6685258150100708, |
|
"learning_rate": 6.9105263157894745e-06, |
|
"loss": 2.1934, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.26591230551626593, |
|
"grad_norm": 1.5335478782653809, |
|
"learning_rate": 6.378947368421053e-06, |
|
"loss": 2.3748, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.26732673267326734, |
|
"grad_norm": 1.5710328817367554, |
|
"learning_rate": 5.847368421052632e-06, |
|
"loss": 2.4465, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.26874115983026875, |
|
"grad_norm": 1.5486925840377808, |
|
"learning_rate": 5.315789473684211e-06, |
|
"loss": 2.3516, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.27015558698727016, |
|
"grad_norm": 1.4741291999816895, |
|
"learning_rate": 4.78421052631579e-06, |
|
"loss": 2.294, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.27157001414427157, |
|
"grad_norm": 1.4587738513946533, |
|
"learning_rate": 4.252631578947369e-06, |
|
"loss": 2.2937, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.272984441301273, |
|
"grad_norm": 1.5502698421478271, |
|
"learning_rate": 3.721052631578947e-06, |
|
"loss": 2.2016, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.2743988684582744, |
|
"grad_norm": 1.732304334640503, |
|
"learning_rate": 3.1894736842105266e-06, |
|
"loss": 2.3201, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.2758132956152758, |
|
"grad_norm": 1.5487397909164429, |
|
"learning_rate": 2.6578947368421053e-06, |
|
"loss": 2.4652, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.27722772277227725, |
|
"grad_norm": 1.5302364826202393, |
|
"learning_rate": 2.1263157894736844e-06, |
|
"loss": 2.4604, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.27864214992927866, |
|
"grad_norm": 1.4785501956939697, |
|
"learning_rate": 1.5947368421052633e-06, |
|
"loss": 2.3962, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.28005657708628007, |
|
"grad_norm": 1.4775469303131104, |
|
"learning_rate": 1.0631578947368422e-06, |
|
"loss": 2.3007, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.2814710042432815, |
|
"grad_norm": 1.5755748748779297, |
|
"learning_rate": 5.315789473684211e-07, |
|
"loss": 2.2026, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.2828854314002829, |
|
"grad_norm": 1.9510166645050049, |
|
"learning_rate": 0.0, |
|
"loss": 2.1684, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2828854314002829, |
|
"eval_loss": 0.5795559883117676, |
|
"eval_runtime": 66.046, |
|
"eval_samples_per_second": 144.218, |
|
"eval_steps_per_second": 4.512, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.2895993913606144e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
}