{
  "best_metric": 0.8318732976913452,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 3.0,
  "eval_steps": 50,
  "global_step": 114,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 35.45204162597656,
      "learning_rate": 1e-05,
      "loss": 5.5742,
      "step": 1
    },
    {
      "epoch": 0.02631578947368421,
      "eval_loss": 1.6754724979400635,
      "eval_runtime": 4.6261,
      "eval_samples_per_second": 13.835,
      "eval_steps_per_second": 3.459,
      "step": 1
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 44.67980194091797,
      "learning_rate": 2e-05,
      "loss": 6.0369,
      "step": 2
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 46.14945602416992,
      "learning_rate": 3e-05,
      "loss": 6.071,
      "step": 3
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 52.658287048339844,
      "learning_rate": 4e-05,
      "loss": 5.5399,
      "step": 4
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 39.682334899902344,
      "learning_rate": 5e-05,
      "loss": 5.1847,
      "step": 5
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 39.48842239379883,
      "learning_rate": 6e-05,
      "loss": 4.777,
      "step": 6
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 26.690750122070312,
      "learning_rate": 7e-05,
      "loss": 3.9412,
      "step": 7
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 33.90504455566406,
      "learning_rate": 8e-05,
      "loss": 4.0138,
      "step": 8
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 39.74334716796875,
      "learning_rate": 9e-05,
      "loss": 4.7881,
      "step": 9
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 36.84393310546875,
      "learning_rate": 0.0001,
      "loss": 4.6449,
      "step": 10
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 16.538103103637695,
      "learning_rate": 9.997718922447667e-05,
      "loss": 3.6918,
      "step": 11
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 14.892024040222168,
      "learning_rate": 9.990877771116589e-05,
      "loss": 4.0027,
      "step": 12
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 16.028579711914062,
      "learning_rate": 9.979482788085454e-05,
      "loss": 3.3142,
      "step": 13
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 19.43064308166504,
      "learning_rate": 9.96354437049027e-05,
      "loss": 3.4893,
      "step": 14
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 16.012020111083984,
      "learning_rate": 9.943077061037671e-05,
      "loss": 3.2708,
      "step": 15
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 19.247434616088867,
      "learning_rate": 9.918099534735718e-05,
      "loss": 3.5145,
      "step": 16
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 16.79051399230957,
      "learning_rate": 9.888634581854234e-05,
      "loss": 3.2073,
      "step": 17
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 30.969362258911133,
      "learning_rate": 9.85470908713026e-05,
      "loss": 3.6592,
      "step": 18
    },
    {
      "epoch": 0.5,
      "grad_norm": 83.82894134521484,
      "learning_rate": 9.816354005237583e-05,
      "loss": 3.4321,
      "step": 19
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 22.35731315612793,
      "learning_rate": 9.773604332542729e-05,
      "loss": 3.8417,
      "step": 20
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 16.125959396362305,
      "learning_rate": 9.726499075173201e-05,
      "loss": 3.4863,
      "step": 21
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 16.601049423217773,
      "learning_rate": 9.675081213427076e-05,
      "loss": 3.6673,
      "step": 22
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 14.341440200805664,
      "learning_rate": 9.619397662556435e-05,
      "loss": 3.7706,
      "step": 23
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 14.518145561218262,
      "learning_rate": 9.559499229960451e-05,
      "loss": 3.2883,
      "step": 24
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 15.053692817687988,
      "learning_rate": 9.495440568827129e-05,
      "loss": 3.3129,
      "step": 25
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 15.76560115814209,
      "learning_rate": 9.42728012826605e-05,
      "loss": 3.4472,
      "step": 26
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 21.681568145751953,
      "learning_rate": 9.355080099977578e-05,
      "loss": 4.0833,
      "step": 27
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 11.0687894821167,
      "learning_rate": 9.278906361507238e-05,
      "loss": 3.4282,
      "step": 28
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 11.090133666992188,
      "learning_rate": 9.19882841613699e-05,
      "loss": 3.2468,
      "step": 29
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 14.295247077941895,
      "learning_rate": 9.114919329468282e-05,
      "loss": 3.5552,
      "step": 30
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 12.010293006896973,
      "learning_rate": 9.02725566275473e-05,
      "loss": 3.2984,
      "step": 31
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 14.833451271057129,
      "learning_rate": 8.935917403045251e-05,
      "loss": 3.5882,
      "step": 32
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 16.68996810913086,
      "learning_rate": 8.840987890201403e-05,
      "loss": 3.8679,
      "step": 33
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 13.789620399475098,
      "learning_rate": 8.742553740855506e-05,
      "loss": 3.2592,
      "step": 34
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 13.425146102905273,
      "learning_rate": 8.640704769378942e-05,
      "loss": 3.4794,
      "step": 35
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 16.333017349243164,
      "learning_rate": 8.535533905932738e-05,
      "loss": 3.727,
      "step": 36
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 11.74190902709961,
      "learning_rate": 8.427137111675199e-05,
      "loss": 3.4931,
      "step": 37
    },
    {
      "epoch": 1.0,
      "grad_norm": 18.10176658630371,
      "learning_rate": 8.315613291203976e-05,
      "loss": 3.3415,
      "step": 38
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 8.549054145812988,
      "learning_rate": 8.201064202312441e-05,
      "loss": 3.0153,
      "step": 39
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 7.3096723556518555,
      "learning_rate": 8.083594363142717e-05,
      "loss": 1.9458,
      "step": 40
    },
    {
      "epoch": 1.0789473684210527,
      "grad_norm": 9.102021217346191,
      "learning_rate": 7.963310956820085e-05,
      "loss": 1.9039,
      "step": 41
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 8.0056734085083,
      "learning_rate": 7.840323733655778e-05,
      "loss": 1.704,
      "step": 42
    },
    {
      "epoch": 1.131578947368421,
      "grad_norm": 10.63204574584961,
      "learning_rate": 7.714744911007394e-05,
      "loss": 1.6481,
      "step": 43
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 10.899593353271484,
      "learning_rate": 7.586689070888284e-05,
      "loss": 1.4451,
      "step": 44
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 12.153818130493164,
      "learning_rate": 7.456273055419388e-05,
      "loss": 1.1306,
      "step": 45
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 12.974570274353027,
      "learning_rate": 7.323615860218843e-05,
      "loss": 0.9947,
      "step": 46
    },
    {
      "epoch": 1.236842105263158,
      "grad_norm": 19.212732315063477,
      "learning_rate": 7.188838525826702e-05,
      "loss": 1.2901,
      "step": 47
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 14.307405471801758,
      "learning_rate": 7.052064027263786e-05,
      "loss": 2.498,
      "step": 48
    },
    {
      "epoch": 1.2894736842105263,
      "grad_norm": 13.335125923156738,
      "learning_rate": 6.91341716182545e-05,
      "loss": 1.6804,
      "step": 49
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 9.839455604553223,
      "learning_rate": 6.773024435212678e-05,
      "loss": 1.6424,
      "step": 50
    },
    {
      "epoch": 1.3157894736842106,
      "eval_loss": 0.8318732976913452,
      "eval_runtime": 4.7479,
      "eval_samples_per_second": 13.48,
      "eval_steps_per_second": 3.37,
      "step": 50
    },
    {
      "epoch": 1.3421052631578947,
      "grad_norm": 9.968130111694336,
      "learning_rate": 6.631013946104347e-05,
      "loss": 1.777,
      "step": 51
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 11.425047874450684,
      "learning_rate": 6.487515269276016e-05,
      "loss": 1.4606,
      "step": 52
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 7.831254959106445,
      "learning_rate": 6.342659337371885e-05,
      "loss": 1.0979,
      "step": 53
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 9.724353790283203,
      "learning_rate": 6.19657832143779e-05,
      "loss": 1.29,
      "step": 54
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 9.942399024963379,
      "learning_rate": 6.049405510324238e-05,
      "loss": 1.07,
      "step": 55
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 15.583574295043945,
      "learning_rate": 5.90127518906953e-05,
      "loss": 1.4934,
      "step": 56
    },
    {
      "epoch": 1.5,
      "grad_norm": 11.082245826721191,
      "learning_rate": 5.752322516373916e-05,
      "loss": 2.705,
      "step": 57
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 9.11624526977539,
      "learning_rate": 5.602683401276615e-05,
      "loss": 1.8791,
      "step": 58
    },
    {
      "epoch": 1.5526315789473686,
      "grad_norm": 9.091814994812012,
      "learning_rate": 5.45249437914819e-05,
      "loss": 1.6425,
      "step": 59
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 8.767348289489746,
      "learning_rate": 5.3018924871114305e-05,
      "loss": 1.4837,
      "step": 60
    },
    {
      "epoch": 1.6052631578947367,
      "grad_norm": 9.580700874328613,
      "learning_rate": 5.151015139004445e-05,
      "loss": 1.289,
      "step": 61
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 11.071468353271484,
      "learning_rate": 5e-05,
      "loss": 1.2966,
      "step": 62
    },
    {
      "epoch": 1.6578947368421053,
      "grad_norm": 12.766911506652832,
      "learning_rate": 4.848984860995557e-05,
      "loss": 1.2353,
      "step": 63
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 10.578387260437012,
      "learning_rate": 4.6981075128885693e-05,
      "loss": 1.1841,
      "step": 64
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 13.818449974060059,
      "learning_rate": 4.547505620851811e-05,
      "loss": 1.1691,
      "step": 65
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 9.914470672607422,
      "learning_rate": 4.397316598723385e-05,
      "loss": 2.4346,
      "step": 66
    },
    {
      "epoch": 1.763157894736842,
      "grad_norm": 8.47325325012207,
      "learning_rate": 4.2476774836260845e-05,
      "loss": 1.592,
      "step": 67
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 8.820671081542969,
      "learning_rate": 4.0987248109304714e-05,
      "loss": 1.5771,
      "step": 68
    },
    {
      "epoch": 1.8157894736842106,
      "grad_norm": 9.258101463317871,
      "learning_rate": 3.950594489675763e-05,
      "loss": 1.2339,
      "step": 69
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 10.028499603271484,
      "learning_rate": 3.803421678562213e-05,
      "loss": 1.4592,
      "step": 70
    },
    {
      "epoch": 1.868421052631579,
      "grad_norm": 12.280841827392578,
      "learning_rate": 3.657340662628116e-05,
      "loss": 1.2383,
      "step": 71
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 9.48217487335205,
      "learning_rate": 3.512484730723986e-05,
      "loss": 1.1216,
      "step": 72
    },
    {
      "epoch": 1.9210526315789473,
      "grad_norm": 16.327556610107422,
      "learning_rate": 3.368986053895655e-05,
      "loss": 1.2376,
      "step": 73
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 13.08785343170166,
      "learning_rate": 3.226975564787322e-05,
      "loss": 1.2873,
      "step": 74
    },
    {
      "epoch": 1.973684210526316,
      "grad_norm": 9.7808198928833,
      "learning_rate": 3.086582838174551e-05,
      "loss": 1.889,
      "step": 75
    },
    {
      "epoch": 2.0,
      "grad_norm": 15.715737342834473,
      "learning_rate": 2.9479359727362173e-05,
      "loss": 1.2356,
      "step": 76
    },
    {
      "epoch": 2.026315789473684,
      "grad_norm": 5.554771900177002,
      "learning_rate": 2.811161474173297e-05,
      "loss": 1.2413,
      "step": 77
    },
    {
      "epoch": 2.0526315789473686,
      "grad_norm": 5.415524482727051,
      "learning_rate": 2.6763841397811573e-05,
      "loss": 0.7209,
      "step": 78
    },
    {
      "epoch": 2.0789473684210527,
      "grad_norm": 5.091140270233154,
      "learning_rate": 2.5437269445806145e-05,
      "loss": 0.534,
      "step": 79
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 6.28108024597168,
      "learning_rate": 2.4133109291117156e-05,
      "loss": 0.4744,
      "step": 80
    },
    {
      "epoch": 2.1315789473684212,
      "grad_norm": 7.070590019226074,
      "learning_rate": 2.2852550889926067e-05,
      "loss": 0.4707,
      "step": 81
    },
    {
      "epoch": 2.1578947368421053,
      "grad_norm": 6.633754253387451,
      "learning_rate": 2.1596762663442218e-05,
      "loss": 0.3843,
      "step": 82
    },
    {
      "epoch": 2.1842105263157894,
      "grad_norm": 5.0021843910217285,
      "learning_rate": 2.0366890431799167e-05,
      "loss": 0.2632,
      "step": 83
    },
    {
      "epoch": 2.2105263157894735,
      "grad_norm": 10.45669937133789,
      "learning_rate": 1.9164056368572846e-05,
      "loss": 0.4073,
      "step": 84
    },
    {
      "epoch": 2.236842105263158,
      "grad_norm": 7.161397933959961,
      "learning_rate": 1.7989357976875603e-05,
      "loss": 0.311,
      "step": 85
    },
    {
      "epoch": 2.263157894736842,
      "grad_norm": 10.630206108093262,
      "learning_rate": 1.684386708796025e-05,
      "loss": 1.4604,
      "step": 86
    },
    {
      "epoch": 2.2894736842105265,
      "grad_norm": 8.046797752380371,
      "learning_rate": 1.5728628883248007e-05,
      "loss": 0.5739,
      "step": 87
    },
    {
      "epoch": 2.3157894736842106,
      "grad_norm": 8.14920425415039,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.5026,
      "step": 88
    },
    {
      "epoch": 2.3421052631578947,
      "grad_norm": 6.329755783081055,
      "learning_rate": 1.3592952306210588e-05,
      "loss": 0.2924,
      "step": 89
    },
    {
      "epoch": 2.3684210526315788,
      "grad_norm": 8.487998962402344,
      "learning_rate": 1.257446259144494e-05,
      "loss": 0.4119,
      "step": 90
    },
    {
      "epoch": 2.3947368421052633,
      "grad_norm": 25.33626937866211,
      "learning_rate": 1.159012109798598e-05,
      "loss": 0.2595,
      "step": 91
    },
    {
      "epoch": 2.4210526315789473,
      "grad_norm": 7.648598670959473,
      "learning_rate": 1.0640825969547496e-05,
      "loss": 0.2641,
      "step": 92
    },
    {
      "epoch": 2.4473684210526314,
      "grad_norm": 12.669825553894043,
      "learning_rate": 9.7274433724527e-06,
      "loss": 0.3631,
      "step": 93
    },
    {
      "epoch": 2.473684210526316,
      "grad_norm": 12.002209663391113,
      "learning_rate": 8.850806705317183e-06,
      "loss": 0.4211,
      "step": 94
    },
    {
      "epoch": 2.5,
      "grad_norm": 9.969554901123047,
      "learning_rate": 8.011715838630107e-06,
      "loss": 1.0852,
      "step": 95
    },
    {
      "epoch": 2.526315789473684,
      "grad_norm": 7.537227630615234,
      "learning_rate": 7.21093638492763e-06,
      "loss": 0.5648,
      "step": 96
    },
    {
      "epoch": 2.5526315789473686,
      "grad_norm": 8.888556480407715,
      "learning_rate": 6.449199000224221e-06,
      "loss": 0.5441,
      "step": 97
    },
    {
      "epoch": 2.5789473684210527,
      "grad_norm": 8.26941967010498,
      "learning_rate": 5.727198717339511e-06,
      "loss": 0.5021,
      "step": 98
    },
    {
      "epoch": 2.6052631578947367,
      "grad_norm": 8.29023265838623,
      "learning_rate": 5.045594311728707e-06,
      "loss": 0.3533,
      "step": 99
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 9.796151161193848,
      "learning_rate": 4.405007700395497e-06,
      "loss": 0.4029,
      "step": 100
    },
    {
      "epoch": 2.6315789473684212,
      "eval_loss": 0.9618005752563477,
      "eval_runtime": 4.7519,
      "eval_samples_per_second": 13.468,
      "eval_steps_per_second": 3.367,
      "step": 100
    },
    {
      "epoch": 2.6578947368421053,
      "grad_norm": 8.112892150878906,
      "learning_rate": 3.8060233744356633e-06,
      "loss": 0.2688,
      "step": 101
    },
    {
      "epoch": 2.6842105263157894,
      "grad_norm": 9.736104965209961,
      "learning_rate": 3.249187865729264e-06,
      "loss": 0.4238,
      "step": 102
    },
    {
      "epoch": 2.7105263157894735,
      "grad_norm": 5.671924591064453,
      "learning_rate": 2.7350092482679836e-06,
      "loss": 0.1915,
      "step": 103
    },
    {
      "epoch": 2.736842105263158,
      "grad_norm": 8.172745704650879,
      "learning_rate": 2.2639566745727205e-06,
      "loss": 1.0132,
      "step": 104
    },
    {
      "epoch": 2.763157894736842,
      "grad_norm": 6.747289180755615,
      "learning_rate": 1.8364599476241862e-06,
      "loss": 0.5678,
      "step": 105
    },
    {
      "epoch": 2.7894736842105265,
      "grad_norm": 8.281822204589844,
      "learning_rate": 1.4529091286973995e-06,
      "loss": 0.4592,
      "step": 106
    },
    {
      "epoch": 2.8157894736842106,
      "grad_norm": 8.051541328430176,
      "learning_rate": 1.1136541814576573e-06,
      "loss": 0.3611,
      "step": 107
    },
    {
      "epoch": 2.8421052631578947,
      "grad_norm": 9.0069580078125,
      "learning_rate": 8.190046526428242e-07,
      "loss": 0.3382,
      "step": 108
    },
    {
      "epoch": 2.8684210526315788,
      "grad_norm": 5.615657329559326,
      "learning_rate": 5.692293896232936e-07,
      "loss": 0.233,
      "step": 109
    },
    {
      "epoch": 2.8947368421052633,
      "grad_norm": 8.225751876831055,
      "learning_rate": 3.6455629509730136e-07,
      "loss": 0.316,
      "step": 110
    },
    {
      "epoch": 2.9210526315789473,
      "grad_norm": 15.0908203125,
      "learning_rate": 2.0517211914545254e-07,
      "loss": 0.26,
      "step": 111
    },
    {
      "epoch": 2.9473684210526314,
      "grad_norm": 9.770281791687012,
      "learning_rate": 9.12222888341252e-08,
      "loss": 0.3276,
      "step": 112
    },
    {
      "epoch": 2.973684210526316,
      "grad_norm": 5.682199954986572,
      "learning_rate": 2.2810775523329773e-08,
      "loss": 0.3726,
      "step": 113
    },
    {
      "epoch": 3.0,
      "grad_norm": 10.495176315307617,
      "learning_rate": 0.0,
      "loss": 0.3121,
      "step": 114
    }
  ],
  "logging_steps": 1,
  "max_steps": 114,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6219486313408102e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}