{
  "best_metric": 0.33371686935424805,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.32232070910556004,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016116035455278,
      "grad_norm": 0.3171309530735016,
      "learning_rate": 1.0100000000000002e-05,
      "loss": 1.0204,
      "step": 1
    },
    {
      "epoch": 0.0016116035455278,
      "eval_loss": 1.5084112882614136,
      "eval_runtime": 135.8281,
      "eval_samples_per_second": 61.534,
      "eval_steps_per_second": 1.929,
      "step": 1
    },
    {
      "epoch": 0.0032232070910556,
      "grad_norm": 0.4342203438282013,
      "learning_rate": 2.0200000000000003e-05,
      "loss": 1.1485,
      "step": 2
    },
    {
      "epoch": 0.004834810636583401,
      "grad_norm": 0.5146673321723938,
      "learning_rate": 3.0299999999999998e-05,
      "loss": 1.1971,
      "step": 3
    },
    {
      "epoch": 0.0064464141821112,
      "grad_norm": 0.7256948947906494,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 1.4328,
      "step": 4
    },
    {
      "epoch": 0.008058017727639,
      "grad_norm": 1.1304517984390259,
      "learning_rate": 5.05e-05,
      "loss": 1.596,
      "step": 5
    },
    {
      "epoch": 0.009669621273166801,
      "grad_norm": 1.6577770709991455,
      "learning_rate": 6.0599999999999996e-05,
      "loss": 1.845,
      "step": 6
    },
    {
      "epoch": 0.011281224818694601,
      "grad_norm": 0.36109769344329834,
      "learning_rate": 7.07e-05,
      "loss": 0.9896,
      "step": 7
    },
    {
      "epoch": 0.0128928283642224,
      "grad_norm": 0.34879496693611145,
      "learning_rate": 8.080000000000001e-05,
      "loss": 0.9208,
      "step": 8
    },
    {
      "epoch": 0.014504431909750202,
      "grad_norm": 0.4985645115375519,
      "learning_rate": 9.09e-05,
      "loss": 0.895,
      "step": 9
    },
    {
      "epoch": 0.016116035455278,
      "grad_norm": 0.45808470249176025,
      "learning_rate": 0.000101,
      "loss": 0.8154,
      "step": 10
    },
    {
      "epoch": 0.017727639000805803,
      "grad_norm": 0.3932338058948517,
      "learning_rate": 0.00010046842105263158,
      "loss": 0.8742,
      "step": 11
    },
    {
      "epoch": 0.019339242546333603,
      "grad_norm": 0.45358890295028687,
      "learning_rate": 9.993684210526315e-05,
      "loss": 0.9433,
      "step": 12
    },
    {
      "epoch": 0.020950846091861403,
      "grad_norm": 0.29831695556640625,
      "learning_rate": 9.940526315789473e-05,
      "loss": 0.8187,
      "step": 13
    },
    {
      "epoch": 0.022562449637389202,
      "grad_norm": 0.22101442515850067,
      "learning_rate": 9.887368421052632e-05,
      "loss": 0.8009,
      "step": 14
    },
    {
      "epoch": 0.024174053182917002,
      "grad_norm": 0.27133405208587646,
      "learning_rate": 9.83421052631579e-05,
      "loss": 0.8114,
      "step": 15
    },
    {
      "epoch": 0.0257856567284448,
      "grad_norm": 0.23551404476165771,
      "learning_rate": 9.781052631578948e-05,
      "loss": 0.7651,
      "step": 16
    },
    {
      "epoch": 0.0273972602739726,
      "grad_norm": 0.3073457181453705,
      "learning_rate": 9.727894736842106e-05,
      "loss": 0.8419,
      "step": 17
    },
    {
      "epoch": 0.029008863819500404,
      "grad_norm": 0.3708623945713043,
      "learning_rate": 9.674736842105263e-05,
      "loss": 0.8201,
      "step": 18
    },
    {
      "epoch": 0.030620467365028204,
      "grad_norm": 0.22654138505458832,
      "learning_rate": 9.621578947368421e-05,
      "loss": 0.7523,
      "step": 19
    },
    {
      "epoch": 0.032232070910556,
      "grad_norm": 0.15454810857772827,
      "learning_rate": 9.568421052631578e-05,
      "loss": 0.7827,
      "step": 20
    },
    {
      "epoch": 0.03384367445608381,
      "grad_norm": 0.17125999927520752,
      "learning_rate": 9.515263157894737e-05,
      "loss": 0.7706,
      "step": 21
    },
    {
      "epoch": 0.035455278001611606,
      "grad_norm": 0.19242525100708008,
      "learning_rate": 9.462105263157895e-05,
      "loss": 0.7639,
      "step": 22
    },
    {
      "epoch": 0.037066881547139406,
      "grad_norm": 0.24774906039237976,
      "learning_rate": 9.408947368421054e-05,
      "loss": 0.7952,
      "step": 23
    },
    {
      "epoch": 0.038678485092667206,
      "grad_norm": 0.25956159830093384,
      "learning_rate": 9.355789473684211e-05,
      "loss": 0.7721,
      "step": 24
    },
    {
      "epoch": 0.040290088638195005,
      "grad_norm": 0.4470839202404022,
      "learning_rate": 9.302631578947369e-05,
      "loss": 0.7735,
      "step": 25
    },
    {
      "epoch": 0.041901692183722805,
      "grad_norm": 0.12824320793151855,
      "learning_rate": 9.249473684210526e-05,
      "loss": 0.7376,
      "step": 26
    },
    {
      "epoch": 0.043513295729250605,
      "grad_norm": 0.17759135365486145,
      "learning_rate": 9.196315789473685e-05,
      "loss": 0.7544,
      "step": 27
    },
    {
      "epoch": 0.045124899274778404,
      "grad_norm": 0.2012881338596344,
      "learning_rate": 9.143157894736843e-05,
      "loss": 0.7582,
      "step": 28
    },
    {
      "epoch": 0.046736502820306204,
      "grad_norm": 0.22395461797714233,
      "learning_rate": 9.09e-05,
      "loss": 0.764,
      "step": 29
    },
    {
      "epoch": 0.048348106365834004,
      "grad_norm": 0.3060719072818756,
      "learning_rate": 9.036842105263158e-05,
      "loss": 0.7868,
      "step": 30
    },
    {
      "epoch": 0.0499597099113618,
      "grad_norm": 0.327498197555542,
      "learning_rate": 8.983684210526316e-05,
      "loss": 0.7615,
      "step": 31
    },
    {
      "epoch": 0.0515713134568896,
      "grad_norm": 0.20455282926559448,
      "learning_rate": 8.930526315789474e-05,
      "loss": 0.7787,
      "step": 32
    },
    {
      "epoch": 0.0531829170024174,
      "grad_norm": 0.16608926653862,
      "learning_rate": 8.877368421052632e-05,
      "loss": 0.7066,
      "step": 33
    },
    {
      "epoch": 0.0547945205479452,
      "grad_norm": 0.18552589416503906,
      "learning_rate": 8.82421052631579e-05,
      "loss": 0.7656,
      "step": 34
    },
    {
      "epoch": 0.05640612409347301,
      "grad_norm": 0.20650997757911682,
      "learning_rate": 8.771052631578948e-05,
      "loss": 0.6993,
      "step": 35
    },
    {
      "epoch": 0.05801772763900081,
      "grad_norm": 0.2687518298625946,
      "learning_rate": 8.717894736842105e-05,
      "loss": 0.6904,
      "step": 36
    },
    {
      "epoch": 0.05962933118452861,
      "grad_norm": 0.34716078639030457,
      "learning_rate": 8.664736842105263e-05,
      "loss": 0.7152,
      "step": 37
    },
    {
      "epoch": 0.06124093473005641,
      "grad_norm": 0.19605892896652222,
      "learning_rate": 8.61157894736842e-05,
      "loss": 0.6811,
      "step": 38
    },
    {
      "epoch": 0.06285253827558421,
      "grad_norm": 0.1753331869840622,
      "learning_rate": 8.55842105263158e-05,
      "loss": 0.713,
      "step": 39
    },
    {
      "epoch": 0.064464141821112,
      "grad_norm": 0.18865598738193512,
      "learning_rate": 8.505263157894737e-05,
      "loss": 0.7027,
      "step": 40
    },
    {
      "epoch": 0.06607574536663981,
      "grad_norm": 0.2409752905368805,
      "learning_rate": 8.452105263157896e-05,
      "loss": 0.7385,
      "step": 41
    },
    {
      "epoch": 0.06768734891216761,
      "grad_norm": 0.2615523934364319,
      "learning_rate": 8.398947368421053e-05,
      "loss": 0.6761,
      "step": 42
    },
    {
      "epoch": 0.0692989524576954,
      "grad_norm": 0.3525382876396179,
      "learning_rate": 8.345789473684211e-05,
      "loss": 0.7173,
      "step": 43
    },
    {
      "epoch": 0.07091055600322321,
      "grad_norm": 0.23152434825897217,
      "learning_rate": 8.292631578947368e-05,
      "loss": 0.6942,
      "step": 44
    },
    {
      "epoch": 0.072522159548751,
      "grad_norm": 0.19304005801677704,
      "learning_rate": 8.239473684210526e-05,
      "loss": 0.711,
      "step": 45
    },
    {
      "epoch": 0.07413376309427881,
      "grad_norm": 0.20851251482963562,
      "learning_rate": 8.186315789473683e-05,
      "loss": 0.6829,
      "step": 46
    },
    {
      "epoch": 0.0757453666398066,
      "grad_norm": 0.24184094369411469,
      "learning_rate": 8.133157894736842e-05,
      "loss": 0.7245,
      "step": 47
    },
    {
      "epoch": 0.07735697018533441,
      "grad_norm": 0.27612966299057007,
      "learning_rate": 8.080000000000001e-05,
      "loss": 0.6987,
      "step": 48
    },
    {
      "epoch": 0.0789685737308622,
      "grad_norm": 0.337848961353302,
      "learning_rate": 8.026842105263159e-05,
      "loss": 0.6726,
      "step": 49
    },
    {
      "epoch": 0.08058017727639001,
      "grad_norm": 0.52480149269104,
      "learning_rate": 7.973684210526316e-05,
      "loss": 0.6298,
      "step": 50
    },
    {
      "epoch": 0.08058017727639001,
      "eval_loss": 0.6599936485290527,
      "eval_runtime": 135.8696,
      "eval_samples_per_second": 61.515,
      "eval_steps_per_second": 1.928,
      "step": 50
    },
    {
      "epoch": 0.0821917808219178,
      "grad_norm": 0.19816854596138,
      "learning_rate": 7.920526315789474e-05,
      "loss": 0.7042,
      "step": 51
    },
    {
      "epoch": 0.08380338436744561,
      "grad_norm": 0.23714330792427063,
      "learning_rate": 7.867368421052631e-05,
      "loss": 0.6846,
      "step": 52
    },
    {
      "epoch": 0.0854149879129734,
      "grad_norm": 0.2528240382671356,
      "learning_rate": 7.814210526315789e-05,
      "loss": 0.668,
      "step": 53
    },
    {
      "epoch": 0.08702659145850121,
      "grad_norm": 0.29058927297592163,
      "learning_rate": 7.761052631578946e-05,
      "loss": 0.6611,
      "step": 54
    },
    {
      "epoch": 0.088638195004029,
      "grad_norm": 0.36324694752693176,
      "learning_rate": 7.707894736842105e-05,
      "loss": 0.6686,
      "step": 55
    },
    {
      "epoch": 0.09024979854955681,
      "grad_norm": 0.4872949719429016,
      "learning_rate": 7.654736842105264e-05,
      "loss": 0.6209,
      "step": 56
    },
    {
      "epoch": 0.09186140209508462,
      "grad_norm": 0.21689631044864655,
      "learning_rate": 7.601578947368422e-05,
      "loss": 0.6876,
      "step": 57
    },
    {
      "epoch": 0.09347300564061241,
      "grad_norm": 0.23922550678253174,
      "learning_rate": 7.548421052631579e-05,
      "loss": 0.7083,
      "step": 58
    },
    {
      "epoch": 0.09508460918614021,
      "grad_norm": 0.26386719942092896,
      "learning_rate": 7.495263157894737e-05,
      "loss": 0.6284,
      "step": 59
    },
    {
      "epoch": 0.09669621273166801,
      "grad_norm": 0.3023104667663574,
      "learning_rate": 7.442105263157894e-05,
      "loss": 0.5894,
      "step": 60
    },
    {
      "epoch": 0.09830781627719581,
      "grad_norm": 0.36962202191352844,
      "learning_rate": 7.388947368421053e-05,
      "loss": 0.6337,
      "step": 61
    },
    {
      "epoch": 0.0999194198227236,
      "grad_norm": 0.47718724608421326,
      "learning_rate": 7.335789473684211e-05,
      "loss": 0.5995,
      "step": 62
    },
    {
      "epoch": 0.10153102336825141,
      "grad_norm": 0.26407232880592346,
      "learning_rate": 7.282631578947368e-05,
      "loss": 0.6403,
      "step": 63
    },
    {
      "epoch": 0.1031426269137792,
      "grad_norm": 0.23950377106666565,
      "learning_rate": 7.229473684210527e-05,
      "loss": 0.6717,
      "step": 64
    },
    {
      "epoch": 0.10475423045930701,
      "grad_norm": 0.2847888767719269,
      "learning_rate": 7.176315789473685e-05,
      "loss": 0.6053,
      "step": 65
    },
    {
      "epoch": 0.1063658340048348,
      "grad_norm": 0.30500850081443787,
      "learning_rate": 7.123157894736842e-05,
      "loss": 0.6456,
      "step": 66
    },
    {
      "epoch": 0.10797743755036261,
      "grad_norm": 0.35994887351989746,
      "learning_rate": 7.07e-05,
      "loss": 0.5984,
      "step": 67
    },
    {
      "epoch": 0.1095890410958904,
      "grad_norm": 0.45406046509742737,
      "learning_rate": 7.016842105263159e-05,
      "loss": 0.5379,
      "step": 68
    },
    {
      "epoch": 0.11120064464141821,
      "grad_norm": 0.36862146854400635,
      "learning_rate": 6.963684210526316e-05,
      "loss": 0.6479,
      "step": 69
    },
    {
      "epoch": 0.11281224818694602,
      "grad_norm": 0.2420656830072403,
      "learning_rate": 6.910526315789474e-05,
      "loss": 0.6218,
      "step": 70
    },
    {
      "epoch": 0.11442385173247381,
      "grad_norm": 0.29916492104530334,
      "learning_rate": 6.857368421052631e-05,
      "loss": 0.6343,
      "step": 71
    },
    {
      "epoch": 0.11603545527800162,
      "grad_norm": 0.33885183930397034,
      "learning_rate": 6.80421052631579e-05,
      "loss": 0.6124,
      "step": 72
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.38673973083496094,
      "learning_rate": 6.751052631578948e-05,
      "loss": 0.5623,
      "step": 73
    },
    {
      "epoch": 0.11925866236905722,
      "grad_norm": 0.4571478068828583,
      "learning_rate": 6.697894736842105e-05,
      "loss": 0.5642,
      "step": 74
    },
    {
      "epoch": 0.12087026591458501,
      "grad_norm": 0.6491063237190247,
      "learning_rate": 6.644736842105264e-05,
      "loss": 0.4955,
      "step": 75
    },
    {
      "epoch": 0.12248186946011282,
      "grad_norm": 0.24757473170757294,
      "learning_rate": 6.591578947368422e-05,
      "loss": 0.6216,
      "step": 76
    },
    {
      "epoch": 0.12409347300564061,
      "grad_norm": 0.3110860288143158,
      "learning_rate": 6.538421052631579e-05,
      "loss": 0.6069,
      "step": 77
    },
    {
      "epoch": 0.12570507655116842,
      "grad_norm": 0.3447476923465729,
      "learning_rate": 6.485263157894737e-05,
      "loss": 0.5747,
      "step": 78
    },
    {
      "epoch": 0.12731668009669622,
      "grad_norm": 0.3997146785259247,
      "learning_rate": 6.432105263157894e-05,
      "loss": 0.5514,
      "step": 79
    },
    {
      "epoch": 0.128928283642224,
      "grad_norm": 0.4768727421760559,
      "learning_rate": 6.378947368421053e-05,
      "loss": 0.5112,
      "step": 80
    },
    {
      "epoch": 0.1305398871877518,
      "grad_norm": 0.6860184073448181,
      "learning_rate": 6.32578947368421e-05,
      "loss": 0.5494,
      "step": 81
    },
    {
      "epoch": 0.13215149073327961,
      "grad_norm": 0.28173205256462097,
      "learning_rate": 6.27263157894737e-05,
      "loss": 0.6339,
      "step": 82
    },
    {
      "epoch": 0.13376309427880742,
      "grad_norm": 0.30443888902664185,
      "learning_rate": 6.219473684210527e-05,
      "loss": 0.6196,
      "step": 83
    },
    {
      "epoch": 0.13537469782433523,
      "grad_norm": 0.3652026653289795,
      "learning_rate": 6.166315789473685e-05,
      "loss": 0.588,
      "step": 84
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 0.3979891240596771,
      "learning_rate": 6.113157894736842e-05,
      "loss": 0.546,
      "step": 85
    },
    {
      "epoch": 0.1385979049153908,
      "grad_norm": 0.4760202169418335,
      "learning_rate": 6.0599999999999996e-05,
      "loss": 0.5122,
      "step": 86
    },
    {
      "epoch": 0.14020950846091862,
      "grad_norm": 0.6287760138511658,
      "learning_rate": 6.006842105263158e-05,
      "loss": 0.4889,
      "step": 87
    },
    {
      "epoch": 0.14182111200644643,
      "grad_norm": 0.32981643080711365,
      "learning_rate": 5.953684210526315e-05,
      "loss": 0.5776,
      "step": 88
    },
    {
      "epoch": 0.1434327155519742,
      "grad_norm": 0.3730184733867645,
      "learning_rate": 5.900526315789474e-05,
      "loss": 0.6509,
      "step": 89
    },
    {
      "epoch": 0.145044319097502,
      "grad_norm": 0.3809673488140106,
      "learning_rate": 5.847368421052632e-05,
      "loss": 0.5744,
      "step": 90
    },
    {
      "epoch": 0.14665592264302982,
      "grad_norm": 0.411126047372818,
      "learning_rate": 5.79421052631579e-05,
      "loss": 0.5536,
      "step": 91
    },
    {
      "epoch": 0.14826752618855762,
      "grad_norm": 0.49867916107177734,
      "learning_rate": 5.7410526315789475e-05,
      "loss": 0.4925,
      "step": 92
    },
    {
      "epoch": 0.1498791297340854,
      "grad_norm": 0.593421995639801,
      "learning_rate": 5.687894736842105e-05,
      "loss": 0.4427,
      "step": 93
    },
    {
      "epoch": 0.1514907332796132,
      "grad_norm": 0.39515072107315063,
      "learning_rate": 5.6347368421052625e-05,
      "loss": 0.5233,
      "step": 94
    },
    {
      "epoch": 0.15310233682514102,
      "grad_norm": 0.33038094639778137,
      "learning_rate": 5.5815789473684214e-05,
      "loss": 0.5878,
      "step": 95
    },
    {
      "epoch": 0.15471394037066882,
      "grad_norm": 0.4073917865753174,
      "learning_rate": 5.5284210526315796e-05,
      "loss": 0.578,
      "step": 96
    },
    {
      "epoch": 0.1563255439161966,
      "grad_norm": 0.4969066083431244,
      "learning_rate": 5.475263157894737e-05,
      "loss": 0.5495,
      "step": 97
    },
    {
      "epoch": 0.1579371474617244,
      "grad_norm": 0.48956212401390076,
      "learning_rate": 5.422105263157895e-05,
      "loss": 0.4926,
      "step": 98
    },
    {
      "epoch": 0.15954875100725222,
      "grad_norm": 0.5708914995193481,
      "learning_rate": 5.368947368421053e-05,
      "loss": 0.4043,
      "step": 99
    },
    {
      "epoch": 0.16116035455278002,
      "grad_norm": 0.8494532108306885,
      "learning_rate": 5.3157894736842104e-05,
      "loss": 0.3979,
      "step": 100
    },
    {
      "epoch": 0.16116035455278002,
      "eval_loss": 0.4972478449344635,
      "eval_runtime": 134.4776,
      "eval_samples_per_second": 62.152,
      "eval_steps_per_second": 1.948,
      "step": 100
    },
    {
      "epoch": 0.16277195809830783,
      "grad_norm": 0.2953222393989563,
      "learning_rate": 5.262631578947368e-05,
      "loss": 0.5638,
      "step": 101
    },
    {
      "epoch": 0.1643835616438356,
      "grad_norm": 0.4076739549636841,
      "learning_rate": 5.209473684210527e-05,
      "loss": 0.5997,
      "step": 102
    },
    {
      "epoch": 0.16599516518936341,
      "grad_norm": 0.46156543493270874,
      "learning_rate": 5.1563157894736844e-05,
      "loss": 0.5238,
      "step": 103
    },
    {
      "epoch": 0.16760676873489122,
      "grad_norm": 0.5150144696235657,
      "learning_rate": 5.1031578947368426e-05,
      "loss": 0.4905,
      "step": 104
    },
    {
      "epoch": 0.16921837228041903,
      "grad_norm": 0.6315293312072754,
      "learning_rate": 5.05e-05,
      "loss": 0.4555,
      "step": 105
    },
    {
      "epoch": 0.1708299758259468,
      "grad_norm": 0.9211300015449524,
      "learning_rate": 4.9968421052631576e-05,
      "loss": 0.3999,
      "step": 106
    },
    {
      "epoch": 0.1724415793714746,
      "grad_norm": 0.36812564730644226,
      "learning_rate": 4.943684210526316e-05,
      "loss": 0.5902,
      "step": 107
    },
    {
      "epoch": 0.17405318291700242,
      "grad_norm": 0.38568469882011414,
      "learning_rate": 4.890526315789474e-05,
      "loss": 0.5742,
      "step": 108
    },
    {
      "epoch": 0.17566478646253023,
      "grad_norm": 0.4459795653820038,
      "learning_rate": 4.8373684210526316e-05,
      "loss": 0.5317,
      "step": 109
    },
    {
      "epoch": 0.177276390008058,
      "grad_norm": 0.5016929507255554,
      "learning_rate": 4.784210526315789e-05,
      "loss": 0.4765,
      "step": 110
    },
    {
      "epoch": 0.1788879935535858,
      "grad_norm": 0.6309992671012878,
      "learning_rate": 4.731052631578947e-05,
      "loss": 0.442,
      "step": 111
    },
    {
      "epoch": 0.18049959709911362,
      "grad_norm": 0.7072840332984924,
      "learning_rate": 4.6778947368421055e-05,
      "loss": 0.4028,
      "step": 112
    },
    {
      "epoch": 0.18211120064464142,
      "grad_norm": 0.38350537419319153,
      "learning_rate": 4.624736842105263e-05,
      "loss": 0.5352,
      "step": 113
    },
    {
      "epoch": 0.18372280419016923,
      "grad_norm": 0.4897186756134033,
      "learning_rate": 4.571578947368421e-05,
      "loss": 0.6138,
      "step": 114
    },
    {
      "epoch": 0.185334407735697,
      "grad_norm": 0.5892842411994934,
      "learning_rate": 4.518421052631579e-05,
      "loss": 0.5777,
      "step": 115
    },
    {
      "epoch": 0.18694601128122482,
      "grad_norm": 0.5452648401260376,
      "learning_rate": 4.465263157894737e-05,
      "loss": 0.475,
      "step": 116
    },
    {
      "epoch": 0.18855761482675262,
      "grad_norm": 0.6141505837440491,
      "learning_rate": 4.412105263157895e-05,
      "loss": 0.4451,
      "step": 117
    },
    {
      "epoch": 0.19016921837228043,
      "grad_norm": 0.6963237524032593,
      "learning_rate": 4.358947368421053e-05,
      "loss": 0.332,
      "step": 118
    },
    {
      "epoch": 0.1917808219178082,
      "grad_norm": 0.512565016746521,
      "learning_rate": 4.30578947368421e-05,
      "loss": 0.5548,
      "step": 119
    },
    {
      "epoch": 0.19339242546333602,
      "grad_norm": 0.39101463556289673,
      "learning_rate": 4.2526315789473685e-05,
      "loss": 0.5824,
      "step": 120
    },
    {
      "epoch": 0.19500402900886382,
      "grad_norm": 0.4487123489379883,
      "learning_rate": 4.199473684210527e-05,
      "loss": 0.5225,
      "step": 121
    },
    {
      "epoch": 0.19661563255439163,
      "grad_norm": 0.5278434753417969,
      "learning_rate": 4.146315789473684e-05,
      "loss": 0.4837,
      "step": 122
    },
    {
      "epoch": 0.1982272360999194,
      "grad_norm": 0.6028698086738586,
      "learning_rate": 4.093157894736842e-05,
      "loss": 0.4294,
      "step": 123
    },
    {
      "epoch": 0.1998388396454472,
      "grad_norm": 0.8378256559371948,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 0.379,
      "step": 124
    },
    {
      "epoch": 0.20145044319097502,
      "grad_norm": 1.1504991054534912,
      "learning_rate": 3.986842105263158e-05,
      "loss": 0.3705,
      "step": 125
    },
    {
      "epoch": 0.20306204673650283,
      "grad_norm": 0.3160516023635864,
      "learning_rate": 3.933684210526316e-05,
      "loss": 0.5496,
      "step": 126
    },
    {
      "epoch": 0.20467365028203063,
      "grad_norm": 0.43820229172706604,
      "learning_rate": 3.880526315789473e-05,
      "loss": 0.5012,
      "step": 127
    },
    {
      "epoch": 0.2062852538275584,
      "grad_norm": 0.4975963532924652,
      "learning_rate": 3.827368421052632e-05,
      "loss": 0.5217,
      "step": 128
    },
    {
      "epoch": 0.20789685737308622,
      "grad_norm": 0.5185565948486328,
      "learning_rate": 3.7742105263157896e-05,
      "loss": 0.428,
      "step": 129
    },
    {
      "epoch": 0.20950846091861403,
      "grad_norm": 0.6332120895385742,
      "learning_rate": 3.721052631578947e-05,
      "loss": 0.3511,
      "step": 130
    },
    {
      "epoch": 0.21112006446414183,
      "grad_norm": 0.7808757424354553,
      "learning_rate": 3.6678947368421054e-05,
      "loss": 0.2946,
      "step": 131
    },
    {
      "epoch": 0.2127316680096696,
      "grad_norm": 0.4457676112651825,
      "learning_rate": 3.6147368421052636e-05,
      "loss": 0.5695,
      "step": 132
    },
    {
      "epoch": 0.21434327155519742,
      "grad_norm": 0.5079824328422546,
      "learning_rate": 3.561578947368421e-05,
      "loss": 0.5236,
      "step": 133
    },
    {
      "epoch": 0.21595487510072522,
      "grad_norm": 0.5814209580421448,
      "learning_rate": 3.508421052631579e-05,
      "loss": 0.4928,
      "step": 134
    },
    {
      "epoch": 0.21756647864625303,
      "grad_norm": 0.5811838507652283,
      "learning_rate": 3.455263157894737e-05,
      "loss": 0.423,
      "step": 135
    },
    {
      "epoch": 0.2191780821917808,
      "grad_norm": 0.6463267207145691,
      "learning_rate": 3.402105263157895e-05,
      "loss": 0.3671,
      "step": 136
    },
    {
      "epoch": 0.22078968573730862,
      "grad_norm": 0.825130820274353,
      "learning_rate": 3.3489473684210526e-05,
      "loss": 0.2856,
      "step": 137
    },
    {
      "epoch": 0.22240128928283642,
      "grad_norm": 0.45928215980529785,
      "learning_rate": 3.295789473684211e-05,
      "loss": 0.5312,
      "step": 138
    },
    {
      "epoch": 0.22401289282836423,
      "grad_norm": 0.4779850244522095,
      "learning_rate": 3.242631578947368e-05,
      "loss": 0.5565,
      "step": 139
    },
    {
      "epoch": 0.22562449637389204,
      "grad_norm": 0.5173392295837402,
      "learning_rate": 3.1894736842105265e-05,
      "loss": 0.4862,
      "step": 140
    },
    {
      "epoch": 0.22723609991941982,
      "grad_norm": 0.6725038290023804,
      "learning_rate": 3.136315789473685e-05,
      "loss": 0.4738,
      "step": 141
    },
    {
      "epoch": 0.22884770346494762,
      "grad_norm": 0.6613323092460632,
      "learning_rate": 3.083157894736842e-05,
      "loss": 0.3635,
      "step": 142
    },
    {
      "epoch": 0.23045930701047543,
      "grad_norm": 0.726622998714447,
      "learning_rate": 3.0299999999999998e-05,
      "loss": 0.255,
      "step": 143
    },
    {
      "epoch": 0.23207091055600323,
      "grad_norm": 0.5666455626487732,
      "learning_rate": 2.9768421052631577e-05,
      "loss": 0.3874,
      "step": 144
    },
    {
      "epoch": 0.233682514101531,
      "grad_norm": 0.459031343460083,
      "learning_rate": 2.923684210526316e-05,
      "loss": 0.5113,
      "step": 145
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.5253135561943054,
      "learning_rate": 2.8705263157894737e-05,
      "loss": 0.4246,
      "step": 146
    },
    {
      "epoch": 0.23690572119258663,
      "grad_norm": 0.6632147431373596,
      "learning_rate": 2.8173684210526313e-05,
      "loss": 0.4323,
      "step": 147
    },
    {
      "epoch": 0.23851732473811443,
      "grad_norm": 0.749000072479248,
      "learning_rate": 2.7642105263157898e-05,
      "loss": 0.3744,
      "step": 148
    },
    {
      "epoch": 0.2401289282836422,
      "grad_norm": 0.7715749740600586,
      "learning_rate": 2.7110526315789473e-05,
      "loss": 0.257,
      "step": 149
    },
    {
      "epoch": 0.24174053182917002,
      "grad_norm": 1.1665058135986328,
      "learning_rate": 2.6578947368421052e-05,
      "loss": 0.2613,
      "step": 150
    },
    {
      "epoch": 0.24174053182917002,
      "eval_loss": 0.3877754807472229,
      "eval_runtime": 136.1771,
      "eval_samples_per_second": 61.376,
      "eval_steps_per_second": 1.924,
      "step": 150
    },
    {
      "epoch": 0.24335213537469783,
      "grad_norm": 0.40307724475860596,
      "learning_rate": 2.6047368421052634e-05,
      "loss": 0.5158,
      "step": 151
    },
    {
      "epoch": 0.24496373892022563,
      "grad_norm": 0.47922101616859436,
      "learning_rate": 2.5515789473684213e-05,
      "loss": 0.5136,
      "step": 152
    },
    {
      "epoch": 0.2465753424657534,
      "grad_norm": 0.5659621357917786,
      "learning_rate": 2.4984210526315788e-05,
      "loss": 0.3903,
      "step": 153
    },
    {
      "epoch": 0.24818694601128122,
      "grad_norm": 0.6030014753341675,
      "learning_rate": 2.445263157894737e-05,
      "loss": 0.3398,
      "step": 154
    },
    {
      "epoch": 0.24979854955680902,
      "grad_norm": 0.6882525086402893,
      "learning_rate": 2.3921052631578946e-05,
      "loss": 0.2735,
      "step": 155
    },
    {
      "epoch": 0.25141015310233683,
      "grad_norm": 0.9658460021018982,
      "learning_rate": 2.3389473684210528e-05,
      "loss": 0.2586,
      "step": 156
    },
    {
      "epoch": 0.25302175664786464,
      "grad_norm": 0.5281833410263062,
      "learning_rate": 2.2857894736842106e-05,
      "loss": 0.5193,
      "step": 157
    },
    {
      "epoch": 0.25463336019339244,
      "grad_norm": 0.6106149554252625,
      "learning_rate": 2.2326315789473685e-05,
      "loss": 0.4958,
      "step": 158
    },
    {
      "epoch": 0.25624496373892025,
      "grad_norm": 0.7677047252655029,
      "learning_rate": 2.1794736842105264e-05,
      "loss": 0.4363,
      "step": 159
    },
    {
      "epoch": 0.257856567284448,
      "grad_norm": 0.655020534992218,
      "learning_rate": 2.1263157894736842e-05,
      "loss": 0.3346,
      "step": 160
    },
    {
      "epoch": 0.2594681708299758,
      "grad_norm": 0.7973310351371765,
      "learning_rate": 2.073157894736842e-05,
      "loss": 0.3355,
      "step": 161
    },
    {
      "epoch": 0.2610797743755036,
      "grad_norm": 0.9603102207183838,
      "learning_rate": 2.0200000000000003e-05,
      "loss": 0.2597,
      "step": 162
    },
    {
      "epoch": 0.2626913779210314,
      "grad_norm": 0.5082424879074097,
      "learning_rate": 1.966842105263158e-05,
      "loss": 0.4856,
      "step": 163
    },
    {
      "epoch": 0.26430298146655923,
      "grad_norm": 0.47313550114631653,
      "learning_rate": 1.913684210526316e-05,
      "loss": 0.521,
      "step": 164
    },
    {
      "epoch": 0.26591458501208703,
      "grad_norm": 0.5528334379196167,
      "learning_rate": 1.8605263157894736e-05,
      "loss": 0.4273,
      "step": 165
    },
    {
      "epoch": 0.26752618855761484,
      "grad_norm": 0.6417384743690491,
      "learning_rate": 1.8073684210526318e-05,
      "loss": 0.377,
      "step": 166
    },
    {
      "epoch": 0.26913779210314265,
      "grad_norm": 0.7294735908508301,
      "learning_rate": 1.7542105263157897e-05,
      "loss": 0.3155,
      "step": 167
    },
    {
      "epoch": 0.27074939564867045,
      "grad_norm": 0.8516286611557007,
      "learning_rate": 1.7010526315789475e-05,
      "loss": 0.2491,
      "step": 168
    },
    {
      "epoch": 0.2723609991941982,
      "grad_norm": 0.5871580839157104,
      "learning_rate": 1.6478947368421054e-05,
      "loss": 0.408,
      "step": 169
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 0.5080535411834717,
      "learning_rate": 1.5947368421052633e-05,
      "loss": 0.5006,
      "step": 170
    },
    {
      "epoch": 0.2755842062852538,
      "grad_norm": 0.572222113609314,
      "learning_rate": 1.541578947368421e-05,
      "loss": 0.4072,
      "step": 171
    },
    {
      "epoch": 0.2771958098307816,
      "grad_norm": 0.6648744940757751,
      "learning_rate": 1.4884210526315788e-05,
      "loss": 0.3782,
      "step": 172
    },
    {
      "epoch": 0.27880741337630943,
      "grad_norm": 0.7872058153152466,
      "learning_rate": 1.4352631578947369e-05,
      "loss": 0.3245,
      "step": 173
    },
    {
      "epoch": 0.28041901692183724,
      "grad_norm": 0.8955470323562622,
      "learning_rate": 1.3821052631578949e-05,
      "loss": 0.2479,
      "step": 174
    },
    {
      "epoch": 0.28203062046736505,
      "grad_norm": 1.2318774461746216,
      "learning_rate": 1.3289473684210526e-05,
      "loss": 0.2326,
      "step": 175
    },
    {
      "epoch": 0.28364222401289285,
      "grad_norm": 0.4534154534339905,
      "learning_rate": 1.2757894736842106e-05,
      "loss": 0.4958,
      "step": 176
    },
    {
      "epoch": 0.2852538275584206,
      "grad_norm": 0.5369049906730652,
      "learning_rate": 1.2226315789473685e-05,
      "loss": 0.4478,
      "step": 177
    },
    {
      "epoch": 0.2868654311039484,
      "grad_norm": 0.6685469746589661,
      "learning_rate": 1.1694736842105264e-05,
      "loss": 0.3742,
      "step": 178
    },
    {
      "epoch": 0.2884770346494762,
      "grad_norm": 0.666946530342102,
      "learning_rate": 1.1163157894736842e-05,
      "loss": 0.3135,
      "step": 179
    },
    {
      "epoch": 0.290088638195004,
      "grad_norm": 0.7749336361885071,
      "learning_rate": 1.0631578947368421e-05,
      "loss": 0.2429,
      "step": 180
    },
    {
      "epoch": 0.29170024174053183,
      "grad_norm": 1.028951644897461,
      "learning_rate": 1.0100000000000002e-05,
      "loss": 0.2296,
      "step": 181
    },
    {
      "epoch": 0.29331184528605964,
      "grad_norm": 0.46223780512809753,
      "learning_rate": 9.56842105263158e-06,
      "loss": 0.4958,
      "step": 182
    },
    {
      "epoch": 0.29492344883158744,
      "grad_norm": 0.518415093421936,
      "learning_rate": 9.036842105263159e-06,
      "loss": 0.4638,
      "step": 183
    },
    {
      "epoch": 0.29653505237711525,
      "grad_norm": 0.6299134492874146,
      "learning_rate": 8.505263157894738e-06,
      "loss": 0.3941,
      "step": 184
    },
    {
      "epoch": 0.29814665592264306,
      "grad_norm": 0.6578340530395508,
      "learning_rate": 7.973684210526316e-06,
      "loss": 0.3176,
      "step": 185
    },
    {
      "epoch": 0.2997582594681708,
      "grad_norm": 0.8014324903488159,
      "learning_rate": 7.442105263157894e-06,
      "loss": 0.2383,
      "step": 186
    },
    {
      "epoch": 0.3013698630136986,
      "grad_norm": 0.9844911694526672,
      "learning_rate": 6.9105263157894745e-06,
      "loss": 0.219,
      "step": 187
    },
    {
      "epoch": 0.3029814665592264,
      "grad_norm": 0.5260629653930664,
      "learning_rate": 6.378947368421053e-06,
      "loss": 0.4447,
      "step": 188
    },
    {
      "epoch": 0.3045930701047542,
      "grad_norm": 0.5315889120101929,
      "learning_rate": 5.847368421052632e-06,
      "loss": 0.4569,
      "step": 189
    },
    {
      "epoch": 0.30620467365028203,
      "grad_norm": 0.6365461349487305,
      "learning_rate": 5.315789473684211e-06,
      "loss": 0.4326,
      "step": 190
    },
    {
      "epoch": 0.30781627719580984,
      "grad_norm": 0.658093273639679,
      "learning_rate": 4.78421052631579e-06,
      "loss": 0.33,
      "step": 191
    },
    {
      "epoch": 0.30942788074133765,
      "grad_norm": 0.7685239911079407,
      "learning_rate": 4.252631578947369e-06,
      "loss": 0.2842,
      "step": 192
    },
    {
      "epoch": 0.31103948428686545,
      "grad_norm": 0.8007004261016846,
      "learning_rate": 3.721052631578947e-06,
      "loss": 0.1996,
      "step": 193
    },
    {
      "epoch": 0.3126510878323932,
      "grad_norm": 0.6490435004234314,
      "learning_rate": 3.1894736842105266e-06,
      "loss": 0.3936,
      "step": 194
    },
    {
      "epoch": 0.314262691377921,
      "grad_norm": 0.5127969980239868,
      "learning_rate": 2.6578947368421053e-06,
      "loss": 0.4567,
      "step": 195
    },
    {
      "epoch": 0.3158742949234488,
      "grad_norm": 0.6253898739814758,
      "learning_rate": 2.1263157894736844e-06,
      "loss": 0.4243,
      "step": 196
    },
    {
      "epoch": 0.3174858984689766,
      "grad_norm": 0.6929444074630737,
      "learning_rate": 1.5947368421052633e-06,
      "loss": 0.3584,
      "step": 197
    },
    {
      "epoch": 0.31909750201450443,
      "grad_norm": 0.8625342845916748,
      "learning_rate": 1.0631578947368422e-06,
      "loss": 0.29,
      "step": 198
    },
    {
      "epoch": 0.32070910556003224,
      "grad_norm": 0.9054241180419922,
      "learning_rate": 5.315789473684211e-07,
      "loss": 0.2209,
      "step": 199
    },
    {
      "epoch": 0.32232070910556004,
      "grad_norm": 1.2763004302978516,
      "learning_rate": 0.0,
      "loss": 0.2273,
      "step": 200
    },
    {
      "epoch": 0.32232070910556004,
      "eval_loss": 0.33371686935424805,
      "eval_runtime": 137.2091,
      "eval_samples_per_second": 60.914,
      "eval_steps_per_second": 1.909,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.4879212787531776e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}