{
  "best_metric": 0.7186845541000366,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 2.150537634408602,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010752688172043012,
      "grad_norm": 4.536034107208252,
      "learning_rate": 7e-06,
      "loss": 8.1083,
      "step": 1
    },
    {
      "epoch": 0.010752688172043012,
      "eval_loss": 2.7238574028015137,
      "eval_runtime": 14.4588,
      "eval_samples_per_second": 10.858,
      "eval_steps_per_second": 2.766,
      "step": 1
    },
    {
      "epoch": 0.021505376344086023,
      "grad_norm": 5.978432655334473,
      "learning_rate": 1.4e-05,
      "loss": 10.3139,
      "step": 2
    },
    {
      "epoch": 0.03225806451612903,
      "grad_norm": 7.468502998352051,
      "learning_rate": 2.1e-05,
      "loss": 10.007,
      "step": 3
    },
    {
      "epoch": 0.043010752688172046,
      "grad_norm": 12.983536720275879,
      "learning_rate": 2.8e-05,
      "loss": 9.7253,
      "step": 4
    },
    {
      "epoch": 0.053763440860215055,
      "grad_norm": 13.686447143554688,
      "learning_rate": 3.5e-05,
      "loss": 9.548,
      "step": 5
    },
    {
      "epoch": 0.06451612903225806,
      "grad_norm": 9.852103233337402,
      "learning_rate": 4.2e-05,
      "loss": 9.0148,
      "step": 6
    },
    {
      "epoch": 0.07526881720430108,
      "grad_norm": 7.126467704772949,
      "learning_rate": 4.899999999999999e-05,
      "loss": 8.349,
      "step": 7
    },
    {
      "epoch": 0.08602150537634409,
      "grad_norm": 6.550177574157715,
      "learning_rate": 5.6e-05,
      "loss": 7.4971,
      "step": 8
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 10.200550079345703,
      "learning_rate": 6.3e-05,
      "loss": 5.8202,
      "step": 9
    },
    {
      "epoch": 0.10752688172043011,
      "grad_norm": 7.892305850982666,
      "learning_rate": 7e-05,
      "loss": 4.6753,
      "step": 10
    },
    {
      "epoch": 0.11827956989247312,
      "grad_norm": 19.061233520507812,
      "learning_rate": 6.999521567473641e-05,
      "loss": 5.6292,
      "step": 11
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 14.547859191894531,
      "learning_rate": 6.998086400693241e-05,
      "loss": 5.6758,
      "step": 12
    },
    {
      "epoch": 0.13978494623655913,
      "grad_norm": 9.280782699584961,
      "learning_rate": 6.995694892019065e-05,
      "loss": 4.7095,
      "step": 13
    },
    {
      "epoch": 0.15053763440860216,
      "grad_norm": 8.161127090454102,
      "learning_rate": 6.99234769526571e-05,
      "loss": 3.4122,
      "step": 14
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 34.015987396240234,
      "learning_rate": 6.988045725523343e-05,
      "loss": 4.0857,
      "step": 15
    },
    {
      "epoch": 0.17204301075268819,
      "grad_norm": 24.155447006225586,
      "learning_rate": 6.982790158907539e-05,
      "loss": 4.2336,
      "step": 16
    },
    {
      "epoch": 0.1827956989247312,
      "grad_norm": 5.08544397354126,
      "learning_rate": 6.976582432237733e-05,
      "loss": 4.5834,
      "step": 17
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 13.041632652282715,
      "learning_rate": 6.969424242644413e-05,
      "loss": 3.4418,
      "step": 18
    },
    {
      "epoch": 0.20430107526881722,
      "grad_norm": 5.957951545715332,
      "learning_rate": 6.961317547105138e-05,
      "loss": 3.8441,
      "step": 19
    },
    {
      "epoch": 0.21505376344086022,
      "grad_norm": 5.243581771850586,
      "learning_rate": 6.952264561909527e-05,
      "loss": 3.2856,
      "step": 20
    },
    {
      "epoch": 0.22580645161290322,
      "grad_norm": 7.811934471130371,
      "learning_rate": 6.942267762053337e-05,
      "loss": 4.6588,
      "step": 21
    },
    {
      "epoch": 0.23655913978494625,
      "grad_norm": 11.206618309020996,
      "learning_rate": 6.931329880561832e-05,
      "loss": 4.762,
      "step": 22
    },
    {
      "epoch": 0.24731182795698925,
      "grad_norm": 11.149868965148926,
      "learning_rate": 6.919453907742597e-05,
      "loss": 2.4062,
      "step": 23
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 11.476567268371582,
      "learning_rate": 6.90664309036802e-05,
      "loss": 9.3109,
      "step": 24
    },
    {
      "epoch": 0.26881720430107525,
      "grad_norm": 9.604092597961426,
      "learning_rate": 6.892900930787656e-05,
      "loss": 7.829,
      "step": 25
    },
    {
      "epoch": 0.27956989247311825,
      "grad_norm": 6.5685601234436035,
      "learning_rate": 6.87823118597072e-05,
      "loss": 6.88,
      "step": 26
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 9.324129104614258,
      "learning_rate": 6.862637866478969e-05,
      "loss": 7.8981,
      "step": 27
    },
    {
      "epoch": 0.3010752688172043,
      "grad_norm": 8.132095336914062,
      "learning_rate": 6.846125235370252e-05,
      "loss": 5.5197,
      "step": 28
    },
    {
      "epoch": 0.3118279569892473,
      "grad_norm": 5.731638431549072,
      "learning_rate": 6.828697807033038e-05,
      "loss": 6.1606,
      "step": 29
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 3.5606529712677,
      "learning_rate": 6.81036034595222e-05,
      "loss": 5.3749,
      "step": 30
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 3.248960256576538,
      "learning_rate": 6.791117865406564e-05,
      "loss": 5.605,
      "step": 31
    },
    {
      "epoch": 0.34408602150537637,
      "grad_norm": 2.7672736644744873,
      "learning_rate": 6.770975626098112e-05,
      "loss": 3.8032,
      "step": 32
    },
    {
      "epoch": 0.3548387096774194,
      "grad_norm": 2.312937021255493,
      "learning_rate": 6.749939134713974e-05,
      "loss": 2.5618,
      "step": 33
    },
    {
      "epoch": 0.3655913978494624,
      "grad_norm": 2.682342290878296,
      "learning_rate": 6.728014142420846e-05,
      "loss": 3.8125,
      "step": 34
    },
    {
      "epoch": 0.3763440860215054,
      "grad_norm": 3.345135450363159,
      "learning_rate": 6.7052066432927e-05,
      "loss": 3.4451,
      "step": 35
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 2.8873608112335205,
      "learning_rate": 6.681522872672069e-05,
      "loss": 3.7995,
      "step": 36
    },
    {
      "epoch": 0.3978494623655914,
      "grad_norm": 2.9758994579315186,
      "learning_rate": 6.656969305465356e-05,
      "loss": 1.916,
      "step": 37
    },
    {
      "epoch": 0.40860215053763443,
      "grad_norm": 2.9894485473632812,
      "learning_rate": 6.631552654372672e-05,
      "loss": 2.6401,
      "step": 38
    },
    {
      "epoch": 0.41935483870967744,
      "grad_norm": 2.8846404552459717,
      "learning_rate": 6.60527986805264e-05,
      "loss": 3.5161,
      "step": 39
    },
    {
      "epoch": 0.43010752688172044,
      "grad_norm": 2.933262348175049,
      "learning_rate": 6.578158129222711e-05,
      "loss": 3.2549,
      "step": 40
    },
    {
      "epoch": 0.44086021505376344,
      "grad_norm": 3.833869218826294,
      "learning_rate": 6.550194852695469e-05,
      "loss": 3.841,
      "step": 41
    },
    {
      "epoch": 0.45161290322580644,
      "grad_norm": 3.596083402633667,
      "learning_rate": 6.521397683351509e-05,
      "loss": 2.6962,
      "step": 42
    },
    {
      "epoch": 0.46236559139784944,
      "grad_norm": 6.494784355163574,
      "learning_rate": 6.491774494049386e-05,
      "loss": 3.7265,
      "step": 43
    },
    {
      "epoch": 0.4731182795698925,
      "grad_norm": 3.249906063079834,
      "learning_rate": 6.461333383473272e-05,
      "loss": 1.9618,
      "step": 44
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 6.374508380889893,
      "learning_rate": 6.430082673918849e-05,
      "loss": 3.3468,
      "step": 45
    },
    {
      "epoch": 0.4946236559139785,
      "grad_norm": 7.742399215698242,
      "learning_rate": 6.398030909018069e-05,
      "loss": 2.4064,
      "step": 46
    },
    {
      "epoch": 0.5053763440860215,
      "grad_norm": 5.071364879608154,
      "learning_rate": 6.365186851403423e-05,
      "loss": 6.4741,
      "step": 47
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 4.5256571769714355,
      "learning_rate": 6.331559480312315e-05,
      "loss": 6.4723,
      "step": 48
    },
    {
      "epoch": 0.5268817204301075,
      "grad_norm": 3.286426305770874,
      "learning_rate": 6.297157989132236e-05,
      "loss": 5.6636,
      "step": 49
    },
    {
      "epoch": 0.5376344086021505,
      "grad_norm": 2.9677677154541016,
      "learning_rate": 6.261991782887377e-05,
      "loss": 4.5262,
      "step": 50
    },
    {
      "epoch": 0.5376344086021505,
      "eval_loss": 1.031134843826294,
      "eval_runtime": 14.7506,
      "eval_samples_per_second": 10.644,
      "eval_steps_per_second": 2.712,
      "step": 50
    },
    {
      "epoch": 0.5483870967741935,
      "grad_norm": 3.2802512645721436,
      "learning_rate": 6.226070475667393e-05,
      "loss": 5.2625,
      "step": 51
    },
    {
      "epoch": 0.5591397849462365,
      "grad_norm": 3.9532055854797363,
      "learning_rate": 6.189403887999006e-05,
      "loss": 4.5323,
      "step": 52
    },
    {
      "epoch": 0.5698924731182796,
      "grad_norm": 4.0816731452941895,
      "learning_rate": 6.152002044161171e-05,
      "loss": 5.2472,
      "step": 53
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 3.2194178104400635,
      "learning_rate": 6.113875169444539e-05,
      "loss": 4.1174,
      "step": 54
    },
    {
      "epoch": 0.5913978494623656,
      "grad_norm": 3.1189627647399902,
      "learning_rate": 6.0750336873559605e-05,
      "loss": 3.5628,
      "step": 55
    },
    {
      "epoch": 0.6021505376344086,
      "grad_norm": 3.0379552841186523,
      "learning_rate": 6.035488216768811e-05,
      "loss": 2.5643,
      "step": 56
    },
    {
      "epoch": 0.6129032258064516,
      "grad_norm": 2.96771240234375,
      "learning_rate": 5.9952495690198894e-05,
      "loss": 3.8131,
      "step": 57
    },
    {
      "epoch": 0.6236559139784946,
      "grad_norm": 3.369340181350708,
      "learning_rate": 5.954328744953709e-05,
      "loss": 2.6112,
      "step": 58
    },
    {
      "epoch": 0.6344086021505376,
      "grad_norm": 2.662733793258667,
      "learning_rate": 5.91273693191498e-05,
      "loss": 2.678,
      "step": 59
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 2.3960416316986084,
      "learning_rate": 5.870485500690094e-05,
      "loss": 1.9448,
      "step": 60
    },
    {
      "epoch": 0.6559139784946236,
      "grad_norm": 3.0840823650360107,
      "learning_rate": 5.827586002398468e-05,
      "loss": 3.0983,
      "step": 61
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 4.117824077606201,
      "learning_rate": 5.784050165334589e-05,
      "loss": 2.1824,
      "step": 62
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 3.039947271347046,
      "learning_rate": 5.739889891761608e-05,
      "loss": 3.4342,
      "step": 63
    },
    {
      "epoch": 0.6881720430107527,
      "grad_norm": 3.3664510250091553,
      "learning_rate": 5.6951172546573794e-05,
      "loss": 2.7055,
      "step": 64
    },
    {
      "epoch": 0.6989247311827957,
      "grad_norm": 7.528428077697754,
      "learning_rate": 5.6497444944138376e-05,
      "loss": 3.2575,
      "step": 65
    },
    {
      "epoch": 0.7096774193548387,
      "grad_norm": 2.7763924598693848,
      "learning_rate": 5.603784015490587e-05,
      "loss": 2.7032,
      "step": 66
    },
    {
      "epoch": 0.7204301075268817,
      "grad_norm": 4.630373001098633,
      "learning_rate": 5.557248383023655e-05,
      "loss": 3.1262,
      "step": 67
    },
    {
      "epoch": 0.7311827956989247,
      "grad_norm": 4.628628730773926,
      "learning_rate": 5.510150319390302e-05,
      "loss": 2.4391,
      "step": 68
    },
    {
      "epoch": 0.7419354838709677,
      "grad_norm": 6.389301300048828,
      "learning_rate": 5.4625027007308546e-05,
      "loss": 1.6421,
      "step": 69
    },
    {
      "epoch": 0.7526881720430108,
      "grad_norm": 3.937209129333496,
      "learning_rate": 5.414318553428494e-05,
      "loss": 6.887,
      "step": 70
    },
    {
      "epoch": 0.7634408602150538,
      "grad_norm": 3.489776611328125,
      "learning_rate": 5.3656110505479776e-05,
      "loss": 5.776,
      "step": 71
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 3.345602035522461,
      "learning_rate": 5.316393508234253e-05,
      "loss": 5.4254,
      "step": 72
    },
    {
      "epoch": 0.7849462365591398,
      "grad_norm": 2.5286366939544678,
      "learning_rate": 5.266679382071953e-05,
      "loss": 5.4602,
      "step": 73
    },
    {
      "epoch": 0.7956989247311828,
      "grad_norm": 2.6356401443481445,
      "learning_rate": 5.216482263406778e-05,
      "loss": 4.1083,
      "step": 74
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 2.794057607650757,
      "learning_rate": 5.1658158756297576e-05,
      "loss": 4.0295,
      "step": 75
    },
    {
      "epoch": 0.8172043010752689,
      "grad_norm": 3.1053338050842285,
      "learning_rate": 5.114694070425407e-05,
      "loss": 4.2456,
      "step": 76
    },
    {
      "epoch": 0.8279569892473119,
      "grad_norm": 3.4362998008728027,
      "learning_rate": 5.063130823984823e-05,
      "loss": 4.0444,
      "step": 77
    },
    {
      "epoch": 0.8387096774193549,
      "grad_norm": 3.6933822631835938,
      "learning_rate": 5.011140233184724e-05,
      "loss": 3.3562,
      "step": 78
    },
    {
      "epoch": 0.8494623655913979,
      "grad_norm": 2.264709711074829,
      "learning_rate": 4.958736511733516e-05,
      "loss": 1.5203,
      "step": 79
    },
    {
      "epoch": 0.8602150537634409,
      "grad_norm": 3.0925395488739014,
      "learning_rate": 4.905933986285393e-05,
      "loss": 3.4232,
      "step": 80
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 3.5374860763549805,
      "learning_rate": 4.8527470925235824e-05,
      "loss": 2.9371,
      "step": 81
    },
    {
      "epoch": 0.8817204301075269,
      "grad_norm": 2.737464666366577,
      "learning_rate": 4.799190371213772e-05,
      "loss": 2.9893,
      "step": 82
    },
    {
      "epoch": 0.8924731182795699,
      "grad_norm": 2.4057750701904297,
      "learning_rate": 4.745278464228808e-05,
      "loss": 2.4454,
      "step": 83
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 3.7483322620391846,
      "learning_rate": 4.69102611054575e-05,
      "loss": 2.2346,
      "step": 84
    },
    {
      "epoch": 0.9139784946236559,
      "grad_norm": 3.1117513179779053,
      "learning_rate": 4.6364481422163926e-05,
      "loss": 2.3842,
      "step": 85
    },
    {
      "epoch": 0.9247311827956989,
      "grad_norm": 2.581960678100586,
      "learning_rate": 4.581559480312316e-05,
      "loss": 2.3513,
      "step": 86
    },
    {
      "epoch": 0.9354838709677419,
      "grad_norm": 2.8223772048950195,
      "learning_rate": 4.526375130845627e-05,
      "loss": 2.713,
      "step": 87
    },
    {
      "epoch": 0.946236559139785,
      "grad_norm": 3.449551582336426,
      "learning_rate": 4.4709101806664554e-05,
      "loss": 2.1045,
      "step": 88
    },
    {
      "epoch": 0.956989247311828,
      "grad_norm": 3.203859329223633,
      "learning_rate": 4.4151797933383685e-05,
      "loss": 2.2587,
      "step": 89
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 2.9277474880218506,
      "learning_rate": 4.359199204992797e-05,
      "loss": 2.6041,
      "step": 90
    },
    {
      "epoch": 0.978494623655914,
      "grad_norm": 3.5323002338409424,
      "learning_rate": 4.30298372016363e-05,
      "loss": 2.2093,
      "step": 91
    },
    {
      "epoch": 0.989247311827957,
      "grad_norm": 4.1092681884765625,
      "learning_rate": 4.246548707603114e-05,
      "loss": 1.9357,
      "step": 92
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.2726848125457764,
      "learning_rate": 4.1899095960801805e-05,
      "loss": 2.8715,
      "step": 93
    },
    {
      "epoch": 1.010752688172043,
      "grad_norm": 2.6090543270111084,
      "learning_rate": 4.133081870162385e-05,
      "loss": 6.4364,
      "step": 94
    },
    {
      "epoch": 1.021505376344086,
      "grad_norm": 3.0788755416870117,
      "learning_rate": 4.076081065982569e-05,
      "loss": 6.1323,
      "step": 95
    },
    {
      "epoch": 1.032258064516129,
      "grad_norm": 3.0953359603881836,
      "learning_rate": 4.018922766991447e-05,
      "loss": 5.4343,
      "step": 96
    },
    {
      "epoch": 1.043010752688172,
      "grad_norm": 2.4865167140960693,
      "learning_rate": 3.961622599697241e-05,
      "loss": 3.752,
      "step": 97
    },
    {
      "epoch": 1.053763440860215,
      "grad_norm": 3.014270305633545,
      "learning_rate": 3.9041962293935516e-05,
      "loss": 3.9712,
      "step": 98
    },
    {
      "epoch": 1.064516129032258,
      "grad_norm": 2.697925329208374,
      "learning_rate": 3.84665935587662e-05,
      "loss": 3.8601,
      "step": 99
    },
    {
      "epoch": 1.075268817204301,
      "grad_norm": 2.772761106491089,
      "learning_rate": 3.7890277091531636e-05,
      "loss": 3.554,
      "step": 100
    },
    {
      "epoch": 1.075268817204301,
      "eval_loss": 0.8277088403701782,
      "eval_runtime": 14.7467,
      "eval_samples_per_second": 10.646,
      "eval_steps_per_second": 2.712,
      "step": 100
    },
    {
      "epoch": 1.086021505376344,
      "grad_norm": 2.8855340480804443,
      "learning_rate": 3.7313170451399475e-05,
      "loss": 2.9625,
      "step": 101
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 3.348214864730835,
      "learning_rate": 3.673543141356278e-05,
      "loss": 3.571,
      "step": 102
    },
    {
      "epoch": 1.10752688172043,
      "grad_norm": 2.3443796634674072,
      "learning_rate": 3.6157217926105783e-05,
      "loss": 1.8009,
      "step": 103
    },
    {
      "epoch": 1.118279569892473,
      "grad_norm": 3.143157482147217,
      "learning_rate": 3.557868806682255e-05,
      "loss": 2.5616,
      "step": 104
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 2.678039073944092,
      "learning_rate": 3.5e-05,
      "loss": 1.6681,
      "step": 105
    },
    {
      "epoch": 1.139784946236559,
      "grad_norm": 3.7475228309631348,
      "learning_rate": 3.442131193317745e-05,
      "loss": 2.4192,
      "step": 106
    },
    {
      "epoch": 1.1505376344086022,
      "grad_norm": 2.9420273303985596,
      "learning_rate": 3.384278207389421e-05,
      "loss": 1.9677,
      "step": 107
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 3.134910821914673,
      "learning_rate": 3.3264568586437216e-05,
      "loss": 1.8615,
      "step": 108
    },
    {
      "epoch": 1.1720430107526882,
      "grad_norm": 4.73399543762207,
      "learning_rate": 3.268682954860052e-05,
      "loss": 2.2775,
      "step": 109
    },
    {
      "epoch": 1.1827956989247312,
      "grad_norm": 4.327884197235107,
      "learning_rate": 3.210972290846837e-05,
      "loss": 2.0575,
      "step": 110
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 4.249052047729492,
      "learning_rate": 3.15334064412338e-05,
      "loss": 1.999,
      "step": 111
    },
    {
      "epoch": 1.2043010752688172,
      "grad_norm": 4.234005928039551,
      "learning_rate": 3.0958037706064485e-05,
      "loss": 1.7021,
      "step": 112
    },
    {
      "epoch": 1.2150537634408602,
      "grad_norm": 3.8542721271514893,
      "learning_rate": 3.038377400302758e-05,
      "loss": 1.4706,
      "step": 113
    },
    {
      "epoch": 1.2258064516129032,
      "grad_norm": 4.0482892990112305,
      "learning_rate": 2.9810772330085524e-05,
      "loss": 2.0322,
      "step": 114
    },
    {
      "epoch": 1.2365591397849462,
      "grad_norm": 5.8383073806762695,
      "learning_rate": 2.9239189340174306e-05,
      "loss": 2.4762,
      "step": 115
    },
    {
      "epoch": 1.2473118279569892,
      "grad_norm": 4.042543888092041,
      "learning_rate": 2.8669181298376163e-05,
      "loss": 1.7183,
      "step": 116
    },
    {
      "epoch": 1.2580645161290323,
      "grad_norm": 2.37276291847229,
      "learning_rate": 2.8100904039198193e-05,
      "loss": 5.3169,
      "step": 117
    },
    {
      "epoch": 1.2688172043010753,
      "grad_norm": 3.0219969749450684,
      "learning_rate": 2.7534512923968863e-05,
      "loss": 5.8673,
      "step": 118
    },
    {
      "epoch": 1.2795698924731183,
      "grad_norm": 3.038010597229004,
      "learning_rate": 2.6970162798363695e-05,
      "loss": 5.1296,
      "step": 119
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.0977330207824707,
      "learning_rate": 2.640800795007203e-05,
      "loss": 3.7753,
      "step": 120
    },
    {
      "epoch": 1.3010752688172043,
      "grad_norm": 3.9190726280212402,
      "learning_rate": 2.5848202066616305e-05,
      "loss": 4.0651,
      "step": 121
    },
    {
      "epoch": 1.3118279569892473,
      "grad_norm": 2.9296584129333496,
      "learning_rate": 2.5290898193335446e-05,
      "loss": 3.2334,
      "step": 122
    },
    {
      "epoch": 1.3225806451612903,
      "grad_norm": 2.870746374130249,
      "learning_rate": 2.4736248691543736e-05,
      "loss": 3.5725,
      "step": 123
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 3.1865155696868896,
      "learning_rate": 2.4184405196876842e-05,
      "loss": 2.4533,
      "step": 124
    },
    {
      "epoch": 1.3440860215053765,
      "grad_norm": 2.683004379272461,
      "learning_rate": 2.363551857783608e-05,
      "loss": 2.8205,
      "step": 125
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 2.89284348487854,
      "learning_rate": 2.308973889454249e-05,
      "loss": 1.5345,
      "step": 126
    },
    {
      "epoch": 1.3655913978494625,
      "grad_norm": 2.9330499172210693,
      "learning_rate": 2.2547215357711918e-05,
      "loss": 3.3656,
      "step": 127
    },
    {
      "epoch": 1.3763440860215055,
      "grad_norm": 3.1823220252990723,
      "learning_rate": 2.2008096287862266e-05,
      "loss": 1.9697,
      "step": 128
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 3.1404953002929688,
      "learning_rate": 2.1472529074764177e-05,
      "loss": 1.8351,
      "step": 129
    },
    {
      "epoch": 1.3978494623655915,
      "grad_norm": 2.9065327644348145,
      "learning_rate": 2.0940660137146074e-05,
      "loss": 1.4858,
      "step": 130
    },
    {
      "epoch": 1.4086021505376345,
      "grad_norm": 2.9487435817718506,
      "learning_rate": 2.041263488266484e-05,
      "loss": 1.7826,
      "step": 131
    },
    {
      "epoch": 1.4193548387096775,
      "grad_norm": 3.2001399993896484,
      "learning_rate": 1.988859766815275e-05,
      "loss": 1.7777,
      "step": 132
    },
    {
      "epoch": 1.4301075268817205,
      "grad_norm": 3.180950403213501,
      "learning_rate": 1.9368691760151773e-05,
      "loss": 1.9536,
      "step": 133
    },
    {
      "epoch": 1.4408602150537635,
      "grad_norm": 3.969357490539551,
      "learning_rate": 1.885305929574593e-05,
      "loss": 1.6653,
      "step": 134
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 3.9064815044403076,
      "learning_rate": 1.8341841243702424e-05,
      "loss": 1.6647,
      "step": 135
    },
    {
      "epoch": 1.4623655913978495,
      "grad_norm": 3.6417884826660156,
      "learning_rate": 1.7835177365932225e-05,
      "loss": 1.8726,
      "step": 136
    },
    {
      "epoch": 1.4731182795698925,
      "grad_norm": 4.675624847412109,
      "learning_rate": 1.7333206179280478e-05,
      "loss": 1.9269,
      "step": 137
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 4.082760810852051,
      "learning_rate": 1.6836064917657478e-05,
      "loss": 1.4504,
      "step": 138
    },
    {
      "epoch": 1.4946236559139785,
      "grad_norm": 3.289503812789917,
      "learning_rate": 1.6343889494520224e-05,
      "loss": 0.7642,
      "step": 139
    },
    {
      "epoch": 1.5053763440860215,
      "grad_norm": 2.7232275009155273,
      "learning_rate": 1.5856814465715064e-05,
      "loss": 6.1022,
      "step": 140
    },
    {
      "epoch": 1.5161290322580645,
      "grad_norm": 3.177217960357666,
      "learning_rate": 1.5374972992691458e-05,
      "loss": 5.1127,
      "step": 141
    },
    {
      "epoch": 1.5268817204301075,
      "grad_norm": 2.949465751647949,
      "learning_rate": 1.4898496806096974e-05,
      "loss": 4.6851,
      "step": 142
    },
    {
      "epoch": 1.5376344086021505,
      "grad_norm": 3.0858075618743896,
      "learning_rate": 1.4427516169763444e-05,
      "loss": 4.389,
      "step": 143
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 3.117891788482666,
      "learning_rate": 1.396215984509412e-05,
      "loss": 4.1926,
      "step": 144
    },
    {
      "epoch": 1.5591397849462365,
      "grad_norm": 3.024880886077881,
      "learning_rate": 1.3502555055861625e-05,
      "loss": 3.3123,
      "step": 145
    },
    {
      "epoch": 1.5698924731182795,
      "grad_norm": 3.528703451156616,
      "learning_rate": 1.3048827453426203e-05,
      "loss": 4.0442,
      "step": 146
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 3.134192943572998,
      "learning_rate": 1.2601101082383917e-05,
      "loss": 2.9924,
      "step": 147
    },
    {
      "epoch": 1.5913978494623655,
      "grad_norm": 3.5123281478881836,
      "learning_rate": 1.2159498346654094e-05,
      "loss": 2.2857,
      "step": 148
    },
    {
      "epoch": 1.6021505376344085,
      "grad_norm": 2.7393951416015625,
      "learning_rate": 1.1724139976015306e-05,
      "loss": 1.1784,
      "step": 149
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 3.7451350688934326,
      "learning_rate": 1.1295144993099068e-05,
      "loss": 2.1274,
      "step": 150
    },
    {
      "epoch": 1.6129032258064515,
      "eval_loss": 0.7497462034225464,
      "eval_runtime": 14.7503,
      "eval_samples_per_second": 10.644,
      "eval_steps_per_second": 2.712,
      "step": 150
    },
    {
      "epoch": 1.6236559139784945,
      "grad_norm": 3.5955772399902344,
      "learning_rate": 1.0872630680850196e-05,
      "loss": 2.3566,
      "step": 151
    },
    {
      "epoch": 1.6344086021505375,
      "grad_norm": 3.7463667392730713,
      "learning_rate": 1.0456712550462898e-05,
      "loss": 1.9469,
      "step": 152
    },
    {
      "epoch": 1.6451612903225805,
      "grad_norm": 2.806169033050537,
      "learning_rate": 1.0047504309801104e-05,
      "loss": 2.1513,
      "step": 153
    },
    {
      "epoch": 1.6559139784946235,
      "grad_norm": 3.7219080924987793,
      "learning_rate": 9.645117832311886e-06,
      "loss": 2.5836,
      "step": 154
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 3.056955337524414,
      "learning_rate": 9.249663126440394e-06,
      "loss": 1.6478,
      "step": 155
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 3.247894525527954,
      "learning_rate": 8.861248305554624e-06,
      "loss": 1.455,
      "step": 156
    },
    {
      "epoch": 1.6881720430107527,
      "grad_norm": 3.3513553142547607,
      "learning_rate": 8.47997955838829e-06,
      "loss": 2.0281,
      "step": 157
    },
    {
      "epoch": 1.6989247311827957,
      "grad_norm": 3.108705759048462,
      "learning_rate": 8.10596112000994e-06,
      "loss": 1.5561,
      "step": 158
    },
    {
      "epoch": 1.7096774193548387,
      "grad_norm": 3.947622299194336,
      "learning_rate": 7.739295243326067e-06,
      "loss": 1.7348,
      "step": 159
    },
    {
      "epoch": 1.7204301075268817,
      "grad_norm": 4.6300530433654785,
      "learning_rate": 7.380082171126228e-06,
      "loss": 1.9787,
      "step": 160
    },
    {
      "epoch": 1.7311827956989247,
      "grad_norm": 4.57441520690918,
      "learning_rate": 7.028420108677635e-06,
      "loss": 1.4791,
      "step": 161
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 4.058063983917236,
      "learning_rate": 6.684405196876842e-06,
      "loss": 0.762,
      "step": 162
    },
    {
      "epoch": 1.7526881720430108,
      "grad_norm": 2.3837802410125732,
      "learning_rate": 6.3481314859657675e-06,
      "loss": 5.4107,
      "step": 163
    },
    {
      "epoch": 1.7634408602150538,
      "grad_norm": 3.205528974533081,
      "learning_rate": 6.019690909819298e-06,
      "loss": 5.3617,
      "step": 164
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 3.044926881790161,
      "learning_rate": 5.6991732608115e-06,
      "loss": 4.1357,
      "step": 165
    },
    {
      "epoch": 1.7849462365591398,
      "grad_norm": 3.4720242023468018,
      "learning_rate": 5.386666165267256e-06,
      "loss": 4.7002,
      "step": 166
    },
    {
      "epoch": 1.7956989247311828,
      "grad_norm": 3.4407799243927,
      "learning_rate": 5.08225505950613e-06,
      "loss": 3.6684,
      "step": 167
    },
    {
      "epoch": 1.8064516129032258,
      "grad_norm": 3.670388698577881,
      "learning_rate": 4.786023166484913e-06,
      "loss": 2.7388,
      "step": 168
    },
    {
      "epoch": 1.817204301075269,
      "grad_norm": 3.6209826469421387,
      "learning_rate": 4.498051473045291e-06,
      "loss": 3.9281,
      "step": 169
    },
    {
      "epoch": 1.827956989247312,
      "grad_norm": 3.7421019077301025,
      "learning_rate": 4.218418707772886e-06,
      "loss": 3.1656,
      "step": 170
    },
    {
      "epoch": 1.838709677419355,
      "grad_norm": 3.480229377746582,
      "learning_rate": 3.947201319473587e-06,
      "loss": 1.5602,
      "step": 171
    },
    {
      "epoch": 1.849462365591398,
      "grad_norm": 2.7893009185791016,
      "learning_rate": 3.684473456273278e-06,
      "loss": 1.3773,
      "step": 172
    },
    {
      "epoch": 1.860215053763441,
      "grad_norm": 3.345553398132324,
      "learning_rate": 3.4303069453464383e-06,
      "loss": 1.6992,
      "step": 173
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 4.188620567321777,
      "learning_rate": 3.184771273279312e-06,
      "loss": 2.5594,
      "step": 174
    },
    {
      "epoch": 1.881720430107527,
      "grad_norm": 3.4975194931030273,
      "learning_rate": 2.947933567072987e-06,
      "loss": 2.225,
      "step": 175
    },
    {
      "epoch": 1.89247311827957,
      "grad_norm": 3.273622989654541,
      "learning_rate": 2.719858575791534e-06,
      "loss": 1.1841,
      "step": 176
    },
    {
      "epoch": 1.903225806451613,
      "grad_norm": 3.025174617767334,
      "learning_rate": 2.500608652860256e-06,
      "loss": 1.6974,
      "step": 177
    },
    {
      "epoch": 1.913978494623656,
      "grad_norm": 2.607513427734375,
      "learning_rate": 2.2902437390188737e-06,
      "loss": 1.1319,
      "step": 178
    },
    {
      "epoch": 1.924731182795699,
      "grad_norm": 3.18086576461792,
      "learning_rate": 2.0888213459343587e-06,
      "loss": 1.8406,
      "step": 179
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 3.675136089324951,
      "learning_rate": 1.8963965404777875e-06,
      "loss": 2.3657,
      "step": 180
    },
    {
      "epoch": 1.946236559139785,
      "grad_norm": 3.96602725982666,
      "learning_rate": 1.7130219296696263e-06,
      "loss": 1.5121,
      "step": 181
    },
    {
      "epoch": 1.956989247311828,
      "grad_norm": 3.6097054481506348,
      "learning_rate": 1.5387476462974824e-06,
      "loss": 1.5113,
      "step": 182
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 3.757528781890869,
      "learning_rate": 1.3736213352103147e-06,
      "loss": 1.5258,
      "step": 183
    },
    {
      "epoch": 1.978494623655914,
      "grad_norm": 4.669776439666748,
      "learning_rate": 1.2176881402928002e-06,
      "loss": 1.3748,
      "step": 184
    },
    {
      "epoch": 1.989247311827957,
      "grad_norm": 3.6529650688171387,
      "learning_rate": 1.0709906921234367e-06,
      "loss": 0.7173,
      "step": 185
    },
    {
      "epoch": 2.0,
      "grad_norm": 3.2291080951690674,
      "learning_rate": 9.33569096319799e-07,
      "loss": 2.3399,
      "step": 186
    },
    {
      "epoch": 2.010752688172043,
      "grad_norm": 2.3444361686706543,
      "learning_rate": 8.054609225740255e-07,
      "loss": 5.6927,
      "step": 187
    },
    {
      "epoch": 2.021505376344086,
      "grad_norm": 2.7011375427246094,
      "learning_rate": 6.867011943816724e-07,
      "loss": 5.3949,
      "step": 188
    },
    {
      "epoch": 2.032258064516129,
      "grad_norm": 2.745737314224243,
      "learning_rate": 5.77322379466617e-07,
      "loss": 4.3841,
      "step": 189
    },
    {
      "epoch": 2.043010752688172,
      "grad_norm": 2.8492581844329834,
      "learning_rate": 4.773543809047186e-07,
      "loss": 3.7209,
      "step": 190
    },
    {
      "epoch": 2.053763440860215,
      "grad_norm": 3.298788547515869,
      "learning_rate": 3.868245289486027e-07,
      "loss": 4.1478,
      "step": 191
    },
    {
      "epoch": 2.064516129032258,
      "grad_norm": 2.956620931625366,
      "learning_rate": 3.0575757355586817e-07,
      "loss": 2.9301,
      "step": 192
    },
    {
      "epoch": 2.075268817204301,
      "grad_norm": 2.8849871158599854,
      "learning_rate": 2.3417567762266497e-07,
      "loss": 3.1787,
      "step": 193
    },
    {
      "epoch": 2.086021505376344,
      "grad_norm": 3.1618738174438477,
      "learning_rate": 1.7209841092460043e-07,
      "loss": 3.161,
      "step": 194
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 3.032891273498535,
      "learning_rate": 1.1954274476655534e-07,
      "loss": 2.5093,
      "step": 195
    },
    {
      "epoch": 2.10752688172043,
      "grad_norm": 2.315091133117676,
      "learning_rate": 7.652304734289127e-08,
      "loss": 1.0642,
      "step": 196
    },
    {
      "epoch": 2.118279569892473,
      "grad_norm": 2.705153226852417,
      "learning_rate": 4.30510798093342e-08,
      "loss": 2.0394,
      "step": 197
    },
    {
      "epoch": 2.129032258064516,
      "grad_norm": 2.7641355991363525,
      "learning_rate": 1.9135993067588284e-08,
      "loss": 1.127,
      "step": 198
    },
    {
      "epoch": 2.139784946236559,
      "grad_norm": 3.088827610015869,
      "learning_rate": 4.784325263584854e-09,
      "loss": 1.8748,
      "step": 199
    },
    {
      "epoch": 2.150537634408602,
      "grad_norm": 2.703360080718994,
      "learning_rate": 0.0,
      "loss": 1.2501,
      "step": 200
    },
    {
      "epoch": 2.150537634408602,
      "eval_loss": 0.7186845541000366,
      "eval_runtime": 14.749,
      "eval_samples_per_second": 10.645,
      "eval_steps_per_second": 2.712,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.659354182502318e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|