{
  "best_metric": 0.8117169737815857,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.5154639175257731,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005154639175257732,
      "grad_norm": 0.35949328541755676,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.7545,
      "step": 1
    },
    {
      "epoch": 0.005154639175257732,
      "eval_loss": 1.1052348613739014,
      "eval_runtime": 30.1244,
      "eval_samples_per_second": 10.855,
      "eval_steps_per_second": 1.361,
      "step": 1
    },
    {
      "epoch": 0.010309278350515464,
      "grad_norm": 0.45567527413368225,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.7432,
      "step": 2
    },
    {
      "epoch": 0.015463917525773196,
      "grad_norm": 0.5274704694747925,
      "learning_rate": 1.5e-06,
      "loss": 0.8322,
      "step": 3
    },
    {
      "epoch": 0.020618556701030927,
      "grad_norm": 0.5776087045669556,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7875,
      "step": 4
    },
    {
      "epoch": 0.02577319587628866,
      "grad_norm": 0.6656889915466309,
      "learning_rate": 2.5e-06,
      "loss": 0.8479,
      "step": 5
    },
    {
      "epoch": 0.030927835051546393,
      "grad_norm": 0.6724861860275269,
      "learning_rate": 3e-06,
      "loss": 0.9279,
      "step": 6
    },
    {
      "epoch": 0.03608247422680412,
      "grad_norm": 0.785415530204773,
      "learning_rate": 3.5e-06,
      "loss": 0.8712,
      "step": 7
    },
    {
      "epoch": 0.041237113402061855,
      "grad_norm": 0.7006766200065613,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.7866,
      "step": 8
    },
    {
      "epoch": 0.04639175257731959,
      "grad_norm": 0.9407919645309448,
      "learning_rate": 4.5e-06,
      "loss": 0.8717,
      "step": 9
    },
    {
      "epoch": 0.05154639175257732,
      "grad_norm": 0.6719709038734436,
      "learning_rate": 5e-06,
      "loss": 0.8558,
      "step": 10
    },
    {
      "epoch": 0.05670103092783505,
      "grad_norm": 0.6967365145683289,
      "learning_rate": 4.99847706754774e-06,
      "loss": 0.8185,
      "step": 11
    },
    {
      "epoch": 0.061855670103092786,
      "grad_norm": 0.666429877281189,
      "learning_rate": 4.993910125649561e-06,
      "loss": 0.9746,
      "step": 12
    },
    {
      "epoch": 0.06701030927835051,
      "grad_norm": 0.687868595123291,
      "learning_rate": 4.986304738420684e-06,
      "loss": 0.8567,
      "step": 13
    },
    {
      "epoch": 0.07216494845360824,
      "grad_norm": 0.7461718916893005,
      "learning_rate": 4.975670171853926e-06,
      "loss": 0.8906,
      "step": 14
    },
    {
      "epoch": 0.07731958762886598,
      "grad_norm": 0.7825281023979187,
      "learning_rate": 4.962019382530521e-06,
      "loss": 0.8898,
      "step": 15
    },
    {
      "epoch": 0.08247422680412371,
      "grad_norm": 0.8114840984344482,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 0.9206,
      "step": 16
    },
    {
      "epoch": 0.08762886597938144,
      "grad_norm": 0.8150220513343811,
      "learning_rate": 4.925739315689991e-06,
      "loss": 0.8875,
      "step": 17
    },
    {
      "epoch": 0.09278350515463918,
      "grad_norm": 0.7087549567222595,
      "learning_rate": 4.903154239845798e-06,
      "loss": 0.9555,
      "step": 18
    },
    {
      "epoch": 0.0979381443298969,
      "grad_norm": 0.8564397692680359,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.9177,
      "step": 19
    },
    {
      "epoch": 0.10309278350515463,
      "grad_norm": 0.7236621975898743,
      "learning_rate": 4.849231551964771e-06,
      "loss": 0.8545,
      "step": 20
    },
    {
      "epoch": 0.10824742268041238,
      "grad_norm": 0.8788579106330872,
      "learning_rate": 4.817959636416969e-06,
      "loss": 0.9195,
      "step": 21
    },
    {
      "epoch": 0.1134020618556701,
      "grad_norm": 0.8592295050621033,
      "learning_rate": 4.783863644106502e-06,
      "loss": 0.9803,
      "step": 22
    },
    {
      "epoch": 0.11855670103092783,
      "grad_norm": 0.8085874915122986,
      "learning_rate": 4.746985115747918e-06,
      "loss": 0.9333,
      "step": 23
    },
    {
      "epoch": 0.12371134020618557,
      "grad_norm": 1.5814393758773804,
      "learning_rate": 4.707368982147318e-06,
      "loss": 0.9284,
      "step": 24
    },
    {
      "epoch": 0.12886597938144329,
      "grad_norm": 0.925155758857727,
      "learning_rate": 4.665063509461098e-06,
      "loss": 1.0185,
      "step": 25
    },
    {
      "epoch": 0.13402061855670103,
      "grad_norm": 1.0410577058792114,
      "learning_rate": 4.620120240391065e-06,
      "loss": 0.8878,
      "step": 26
    },
    {
      "epoch": 0.13917525773195877,
      "grad_norm": 1.2917616367340088,
      "learning_rate": 4.572593931387604e-06,
      "loss": 0.9616,
      "step": 27
    },
    {
      "epoch": 0.14432989690721648,
      "grad_norm": 0.9885799884796143,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.8762,
      "step": 28
    },
    {
      "epoch": 0.14948453608247422,
      "grad_norm": 1.0929198265075684,
      "learning_rate": 4.470026884016805e-06,
      "loss": 1.0202,
      "step": 29
    },
    {
      "epoch": 0.15463917525773196,
      "grad_norm": 1.0404047966003418,
      "learning_rate": 4.415111107797445e-06,
      "loss": 0.9885,
      "step": 30
    },
    {
      "epoch": 0.15979381443298968,
      "grad_norm": 1.0854883193969727,
      "learning_rate": 4.357862063693486e-06,
      "loss": 0.9228,
      "step": 31
    },
    {
      "epoch": 0.16494845360824742,
      "grad_norm": 1.257588267326355,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 0.9343,
      "step": 32
    },
    {
      "epoch": 0.17010309278350516,
      "grad_norm": 1.0686402320861816,
      "learning_rate": 4.236645926147493e-06,
      "loss": 0.9171,
      "step": 33
    },
    {
      "epoch": 0.17525773195876287,
      "grad_norm": 1.2666049003601074,
      "learning_rate": 4.172826515897146e-06,
      "loss": 0.9904,
      "step": 34
    },
    {
      "epoch": 0.18041237113402062,
      "grad_norm": 1.3324109315872192,
      "learning_rate": 4.106969024216348e-06,
      "loss": 1.0908,
      "step": 35
    },
    {
      "epoch": 0.18556701030927836,
      "grad_norm": 1.1834466457366943,
      "learning_rate": 4.039153688314146e-06,
      "loss": 0.905,
      "step": 36
    },
    {
      "epoch": 0.19072164948453607,
      "grad_norm": 1.5354310274124146,
      "learning_rate": 3.969463130731183e-06,
      "loss": 1.0866,
      "step": 37
    },
    {
      "epoch": 0.1958762886597938,
      "grad_norm": 1.3238235712051392,
      "learning_rate": 3.897982258676867e-06,
      "loss": 1.0469,
      "step": 38
    },
    {
      "epoch": 0.20103092783505155,
      "grad_norm": 1.776808261871338,
      "learning_rate": 3.824798160583012e-06,
      "loss": 1.1054,
      "step": 39
    },
    {
      "epoch": 0.20618556701030927,
      "grad_norm": 1.812841534614563,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9541,
      "step": 40
    },
    {
      "epoch": 0.211340206185567,
      "grad_norm": 1.7133516073226929,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 0.9837,
      "step": 41
    },
    {
      "epoch": 0.21649484536082475,
      "grad_norm": 2.0302529335021973,
      "learning_rate": 3.595927866972694e-06,
      "loss": 1.0123,
      "step": 42
    },
    {
      "epoch": 0.22164948453608246,
      "grad_norm": 1.8799409866333008,
      "learning_rate": 3.516841607689501e-06,
      "loss": 1.1035,
      "step": 43
    },
    {
      "epoch": 0.2268041237113402,
      "grad_norm": 2.289742946624756,
      "learning_rate": 3.436516483539781e-06,
      "loss": 1.1235,
      "step": 44
    },
    {
      "epoch": 0.23195876288659795,
      "grad_norm": 2.045342445373535,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 1.0909,
      "step": 45
    },
    {
      "epoch": 0.23711340206185566,
      "grad_norm": 2.1356890201568604,
      "learning_rate": 3.272542485937369e-06,
      "loss": 1.1785,
      "step": 46
    },
    {
      "epoch": 0.2422680412371134,
      "grad_norm": 2.7677886486053467,
      "learning_rate": 3.189093389542498e-06,
      "loss": 1.2987,
      "step": 47
    },
    {
      "epoch": 0.24742268041237114,
      "grad_norm": 3.38108491897583,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 1.2262,
      "step": 48
    },
    {
      "epoch": 0.25257731958762886,
      "grad_norm": 0.44239917397499084,
      "learning_rate": 3.019779227044398e-06,
      "loss": 0.7028,
      "step": 49
    },
    {
      "epoch": 0.25773195876288657,
      "grad_norm": 0.610641598701477,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 0.7112,
      "step": 50
    },
    {
      "epoch": 0.25773195876288657,
      "eval_loss": 0.852555513381958,
      "eval_runtime": 29.9863,
      "eval_samples_per_second": 10.905,
      "eval_steps_per_second": 1.367,
      "step": 50
    },
    {
      "epoch": 0.26288659793814434,
      "grad_norm": 0.7899359464645386,
      "learning_rate": 2.847932752400164e-06,
      "loss": 0.783,
      "step": 51
    },
    {
      "epoch": 0.26804123711340205,
      "grad_norm": 0.7177944779396057,
      "learning_rate": 2.761321158169134e-06,
      "loss": 0.7351,
      "step": 52
    },
    {
      "epoch": 0.27319587628865977,
      "grad_norm": 0.7365036010742188,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 0.6999,
      "step": 53
    },
    {
      "epoch": 0.27835051546391754,
      "grad_norm": 0.739840030670166,
      "learning_rate": 2.587248741756253e-06,
      "loss": 0.6864,
      "step": 54
    },
    {
      "epoch": 0.28350515463917525,
      "grad_norm": 0.7644721865653992,
      "learning_rate": 2.5e-06,
      "loss": 0.8022,
      "step": 55
    },
    {
      "epoch": 0.28865979381443296,
      "grad_norm": 0.7222386002540588,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 0.7589,
      "step": 56
    },
    {
      "epoch": 0.29381443298969073,
      "grad_norm": 0.6763759851455688,
      "learning_rate": 2.325608815639687e-06,
      "loss": 0.7278,
      "step": 57
    },
    {
      "epoch": 0.29896907216494845,
      "grad_norm": 0.849905252456665,
      "learning_rate": 2.238678841830867e-06,
      "loss": 0.8218,
      "step": 58
    },
    {
      "epoch": 0.30412371134020616,
      "grad_norm": 0.7614359259605408,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 0.7381,
      "step": 59
    },
    {
      "epoch": 0.30927835051546393,
      "grad_norm": 0.6714977025985718,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 0.6973,
      "step": 60
    },
    {
      "epoch": 0.31443298969072164,
      "grad_norm": 0.6945096254348755,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 0.7788,
      "step": 61
    },
    {
      "epoch": 0.31958762886597936,
      "grad_norm": 0.7656300663948059,
      "learning_rate": 1.895195261000831e-06,
      "loss": 0.7903,
      "step": 62
    },
    {
      "epoch": 0.3247422680412371,
      "grad_norm": 0.7429731488227844,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 0.8319,
      "step": 63
    },
    {
      "epoch": 0.32989690721649484,
      "grad_norm": 0.7177958488464355,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.7017,
      "step": 64
    },
    {
      "epoch": 0.33505154639175255,
      "grad_norm": 0.6787317991256714,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 0.7736,
      "step": 65
    },
    {
      "epoch": 0.3402061855670103,
      "grad_norm": 0.7011315226554871,
      "learning_rate": 1.56348351646022e-06,
      "loss": 0.7023,
      "step": 66
    },
    {
      "epoch": 0.34536082474226804,
      "grad_norm": 0.6774159669876099,
      "learning_rate": 1.4831583923105e-06,
      "loss": 0.7007,
      "step": 67
    },
    {
      "epoch": 0.35051546391752575,
      "grad_norm": 0.7961472868919373,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 0.7179,
      "step": 68
    },
    {
      "epoch": 0.3556701030927835,
      "grad_norm": 0.7200611233711243,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 0.8084,
      "step": 69
    },
    {
      "epoch": 0.36082474226804123,
      "grad_norm": 0.9225122332572937,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 0.8685,
      "step": 70
    },
    {
      "epoch": 0.36597938144329895,
      "grad_norm": 0.7724214792251587,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 0.7963,
      "step": 71
    },
    {
      "epoch": 0.3711340206185567,
      "grad_norm": 0.709747314453125,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 0.7898,
      "step": 72
    },
    {
      "epoch": 0.37628865979381443,
      "grad_norm": 0.8793331384658813,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.8988,
      "step": 73
    },
    {
      "epoch": 0.38144329896907214,
      "grad_norm": 0.7961267232894897,
      "learning_rate": 9.608463116858544e-07,
      "loss": 0.7907,
      "step": 74
    },
    {
      "epoch": 0.3865979381443299,
      "grad_norm": 0.8463252186775208,
      "learning_rate": 8.930309757836517e-07,
      "loss": 0.8535,
      "step": 75
    },
    {
      "epoch": 0.3917525773195876,
      "grad_norm": 0.9485563635826111,
      "learning_rate": 8.271734841028553e-07,
      "loss": 0.8272,
      "step": 76
    },
    {
      "epoch": 0.39690721649484534,
      "grad_norm": 0.9562703371047974,
      "learning_rate": 7.633540738525066e-07,
      "loss": 0.8414,
      "step": 77
    },
    {
      "epoch": 0.4020618556701031,
      "grad_norm": 0.9790942668914795,
      "learning_rate": 7.016504991533727e-07,
      "loss": 0.8585,
      "step": 78
    },
    {
      "epoch": 0.4072164948453608,
      "grad_norm": 0.9100154638290405,
      "learning_rate": 6.421379363065142e-07,
      "loss": 0.8219,
      "step": 79
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 0.9290817975997925,
      "learning_rate": 5.848888922025553e-07,
      "loss": 0.8056,
      "step": 80
    },
    {
      "epoch": 0.4175257731958763,
      "grad_norm": 1.1186609268188477,
      "learning_rate": 5.299731159831953e-07,
      "loss": 0.8711,
      "step": 81
    },
    {
      "epoch": 0.422680412371134,
      "grad_norm": 0.9763901233673096,
      "learning_rate": 4.774575140626317e-07,
      "loss": 0.8513,
      "step": 82
    },
    {
      "epoch": 0.42783505154639173,
      "grad_norm": 1.0024268627166748,
      "learning_rate": 4.27406068612396e-07,
      "loss": 0.8448,
      "step": 83
    },
    {
      "epoch": 0.4329896907216495,
      "grad_norm": 1.1477421522140503,
      "learning_rate": 3.798797596089351e-07,
      "loss": 0.9141,
      "step": 84
    },
    {
      "epoch": 0.4381443298969072,
      "grad_norm": 1.1041942834854126,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 0.83,
      "step": 85
    },
    {
      "epoch": 0.44329896907216493,
      "grad_norm": 1.0946459770202637,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 0.8241,
      "step": 86
    },
    {
      "epoch": 0.4484536082474227,
      "grad_norm": 1.2004777193069458,
      "learning_rate": 2.53014884252083e-07,
      "loss": 0.8111,
      "step": 87
    },
    {
      "epoch": 0.4536082474226804,
      "grad_norm": 1.2121765613555908,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 0.7956,
      "step": 88
    },
    {
      "epoch": 0.4587628865979381,
      "grad_norm": 1.216789722442627,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 0.8099,
      "step": 89
    },
    {
      "epoch": 0.4639175257731959,
      "grad_norm": 1.7433570623397827,
      "learning_rate": 1.507684480352292e-07,
      "loss": 0.8918,
      "step": 90
    },
    {
      "epoch": 0.4690721649484536,
      "grad_norm": 1.6635973453521729,
      "learning_rate": 1.223587092621162e-07,
      "loss": 0.8888,
      "step": 91
    },
    {
      "epoch": 0.4742268041237113,
      "grad_norm": 1.456506609916687,
      "learning_rate": 9.684576015420277e-08,
      "loss": 0.8422,
      "step": 92
    },
    {
      "epoch": 0.4793814432989691,
      "grad_norm": 1.6814242601394653,
      "learning_rate": 7.426068431000883e-08,
      "loss": 0.8215,
      "step": 93
    },
    {
      "epoch": 0.4845360824742268,
      "grad_norm": 1.8594515323638916,
      "learning_rate": 5.463099816548578e-08,
      "loss": 0.8137,
      "step": 94
    },
    {
      "epoch": 0.4896907216494845,
      "grad_norm": 2.3002114295959473,
      "learning_rate": 3.798061746947995e-08,
      "loss": 0.9984,
      "step": 95
    },
    {
      "epoch": 0.4948453608247423,
      "grad_norm": 3.094780445098877,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 1.1265,
      "step": 96
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.3856060802936554,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 0.7213,
      "step": 97
    },
    {
      "epoch": 0.5051546391752577,
      "grad_norm": 0.4723763167858124,
      "learning_rate": 6.089874350439507e-09,
      "loss": 0.686,
      "step": 98
    },
    {
      "epoch": 0.5103092783505154,
      "grad_norm": 1.0411378145217896,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 0.9629,
      "step": 99
    },
    {
      "epoch": 0.5154639175257731,
      "grad_norm": 0.4399566054344177,
      "learning_rate": 0.0,
      "loss": 0.6826,
      "step": 100
    },
    {
      "epoch": 0.5154639175257731,
      "eval_loss": 0.8117169737815857,
      "eval_runtime": 29.5641,
      "eval_samples_per_second": 11.061,
      "eval_steps_per_second": 1.387,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.719705479770276e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}