|
{ |
|
"best_metric": 0.565327525138855, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-350", |
|
"epoch": 0.04033303563711792, |
|
"eval_steps": 50, |
|
"global_step": 350, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00011523724467747976, |
|
"grad_norm": 0.7461973428726196, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6764, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00011523724467747976, |
|
"eval_loss": 0.9687025547027588, |
|
"eval_runtime": 545.4381, |
|
"eval_samples_per_second": 26.795, |
|
"eval_steps_per_second": 6.699, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0002304744893549595, |
|
"grad_norm": 0.9859816431999207, |
|
"learning_rate": 2e-05, |
|
"loss": 0.6287, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0003457117340324393, |
|
"grad_norm": 1.0724681615829468, |
|
"learning_rate": 3e-05, |
|
"loss": 0.7866, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.000460948978709919, |
|
"grad_norm": 1.011637806892395, |
|
"learning_rate": 4e-05, |
|
"loss": 0.7134, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0005761862233873988, |
|
"grad_norm": 0.818691611289978, |
|
"learning_rate": 5e-05, |
|
"loss": 0.7764, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0006914234680648786, |
|
"grad_norm": 0.7848404049873352, |
|
"learning_rate": 6e-05, |
|
"loss": 0.8435, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0008066607127423584, |
|
"grad_norm": 0.9838979244232178, |
|
"learning_rate": 7e-05, |
|
"loss": 0.7446, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.000921897957419838, |
|
"grad_norm": 1.3054728507995605, |
|
"learning_rate": 8e-05, |
|
"loss": 0.705, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0010371352020973178, |
|
"grad_norm": 1.2515887022018433, |
|
"learning_rate": 9e-05, |
|
"loss": 0.6567, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0011523724467747975, |
|
"grad_norm": 0.9352166652679443, |
|
"learning_rate": 0.0001, |
|
"loss": 0.6788, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0012676096914522774, |
|
"grad_norm": 0.8308036923408508, |
|
"learning_rate": 9.99983777858264e-05, |
|
"loss": 0.7393, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0013828469361297571, |
|
"grad_norm": 0.7662054896354675, |
|
"learning_rate": 9.999351124856874e-05, |
|
"loss": 0.7728, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0014980841808072368, |
|
"grad_norm": 0.9175456762313843, |
|
"learning_rate": 9.998540070400966e-05, |
|
"loss": 0.7545, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0016133214254847167, |
|
"grad_norm": 0.9114332795143127, |
|
"learning_rate": 9.997404667843075e-05, |
|
"loss": 0.8675, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0017285586701621964, |
|
"grad_norm": 0.8873140215873718, |
|
"learning_rate": 9.995944990857849e-05, |
|
"loss": 0.707, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.001843795914839676, |
|
"grad_norm": 0.848321259021759, |
|
"learning_rate": 9.994161134161634e-05, |
|
"loss": 0.7408, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0019590331595171558, |
|
"grad_norm": 0.9329268932342529, |
|
"learning_rate": 9.992053213506334e-05, |
|
"loss": 0.7772, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0020742704041946357, |
|
"grad_norm": 0.9922827482223511, |
|
"learning_rate": 9.989621365671902e-05, |
|
"loss": 0.7095, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0021895076488721156, |
|
"grad_norm": 1.0125304460525513, |
|
"learning_rate": 9.986865748457457e-05, |
|
"loss": 0.8506, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.002304744893549595, |
|
"grad_norm": 1.0258866548538208, |
|
"learning_rate": 9.983786540671051e-05, |
|
"loss": 0.8553, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.002419982138227075, |
|
"grad_norm": 1.1118699312210083, |
|
"learning_rate": 9.980383942118066e-05, |
|
"loss": 0.7704, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.002535219382904555, |
|
"grad_norm": 1.235855221748352, |
|
"learning_rate": 9.976658173588244e-05, |
|
"loss": 0.8872, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0026504566275820343, |
|
"grad_norm": 1.1165136098861694, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 0.8303, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0027656938722595142, |
|
"grad_norm": 1.1215505599975586, |
|
"learning_rate": 9.968238114591566e-05, |
|
"loss": 0.7298, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.002880931116936994, |
|
"grad_norm": 1.0933395624160767, |
|
"learning_rate": 9.96354437049027e-05, |
|
"loss": 0.7708, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0029961683616144736, |
|
"grad_norm": 1.3705003261566162, |
|
"learning_rate": 9.95852854910781e-05, |
|
"loss": 0.7554, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0031114056062919535, |
|
"grad_norm": 1.1887904405593872, |
|
"learning_rate": 9.953190975913647e-05, |
|
"loss": 0.6981, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0032266428509694334, |
|
"grad_norm": 1.286866545677185, |
|
"learning_rate": 9.947531997255256e-05, |
|
"loss": 0.7432, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.003341880095646913, |
|
"grad_norm": 1.2123883962631226, |
|
"learning_rate": 9.941551980335652e-05, |
|
"loss": 0.6754, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.003457117340324393, |
|
"grad_norm": 1.3810746669769287, |
|
"learning_rate": 9.935251313189564e-05, |
|
"loss": 0.7364, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0035723545850018727, |
|
"grad_norm": 1.2287622690200806, |
|
"learning_rate": 9.928630404658255e-05, |
|
"loss": 0.7362, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.003687591829679352, |
|
"grad_norm": 1.0121726989746094, |
|
"learning_rate": 9.921689684362989e-05, |
|
"loss": 0.7264, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.003802829074356832, |
|
"grad_norm": 1.083332896232605, |
|
"learning_rate": 9.914429602677162e-05, |
|
"loss": 0.6878, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0039180663190343116, |
|
"grad_norm": 0.9687482118606567, |
|
"learning_rate": 9.906850630697068e-05, |
|
"loss": 0.6582, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.004033303563711792, |
|
"grad_norm": 1.047886848449707, |
|
"learning_rate": 9.898953260211338e-05, |
|
"loss": 0.6259, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.004148540808389271, |
|
"grad_norm": 0.8944975137710571, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 0.6844, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.004263778053066751, |
|
"grad_norm": 0.9275309443473816, |
|
"learning_rate": 9.882205394146361e-05, |
|
"loss": 0.6108, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.004379015297744231, |
|
"grad_norm": 0.9693747162818909, |
|
"learning_rate": 9.87335598531214e-05, |
|
"loss": 0.6563, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.004494252542421711, |
|
"grad_norm": 1.1269587278366089, |
|
"learning_rate": 9.864190351391822e-05, |
|
"loss": 0.7205, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.00460948978709919, |
|
"grad_norm": 1.247700572013855, |
|
"learning_rate": 9.85470908713026e-05, |
|
"loss": 0.6948, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0047247270317766705, |
|
"grad_norm": 1.555729627609253, |
|
"learning_rate": 9.844912807753104e-05, |
|
"loss": 0.6225, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.00483996427645415, |
|
"grad_norm": 1.2079527378082275, |
|
"learning_rate": 9.834802148926882e-05, |
|
"loss": 0.6886, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.004955201521131629, |
|
"grad_norm": 1.253836750984192, |
|
"learning_rate": 9.824377766717759e-05, |
|
"loss": 0.8655, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.00507043876580911, |
|
"grad_norm": 1.0194092988967896, |
|
"learning_rate": 9.813640337548954e-05, |
|
"loss": 0.6824, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.005185676010486589, |
|
"grad_norm": 1.059622883796692, |
|
"learning_rate": 9.802590558156862e-05, |
|
"loss": 0.7002, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.005300913255164069, |
|
"grad_norm": 1.1622421741485596, |
|
"learning_rate": 9.791229145545831e-05, |
|
"loss": 0.6609, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.005416150499841549, |
|
"grad_norm": 1.3024383783340454, |
|
"learning_rate": 9.779556836941645e-05, |
|
"loss": 0.6287, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0055313877445190285, |
|
"grad_norm": 1.2196540832519531, |
|
"learning_rate": 9.767574389743682e-05, |
|
"loss": 0.5385, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.005646624989196508, |
|
"grad_norm": 1.4395736455917358, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.6211, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.005761862233873988, |
|
"grad_norm": 1.6215977668762207, |
|
"learning_rate": 9.742682209735727e-05, |
|
"loss": 0.6201, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.005761862233873988, |
|
"eval_loss": 0.7081581354141235, |
|
"eval_runtime": 546.452, |
|
"eval_samples_per_second": 26.745, |
|
"eval_steps_per_second": 6.687, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.005877099478551468, |
|
"grad_norm": 0.6722015142440796, |
|
"learning_rate": 9.729774092143627e-05, |
|
"loss": 0.5825, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.005992336723228947, |
|
"grad_norm": 0.6303715705871582, |
|
"learning_rate": 9.716559066288715e-05, |
|
"loss": 0.6244, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.006107573967906428, |
|
"grad_norm": 0.6753822565078735, |
|
"learning_rate": 9.703037989675087e-05, |
|
"loss": 0.7524, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.006222811212583907, |
|
"grad_norm": 0.571438729763031, |
|
"learning_rate": 9.689211739666023e-05, |
|
"loss": 0.5751, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0063380484572613865, |
|
"grad_norm": 0.6231772303581238, |
|
"learning_rate": 9.675081213427076e-05, |
|
"loss": 0.74, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.006453285701938867, |
|
"grad_norm": 0.5662903189659119, |
|
"learning_rate": 9.66064732786784e-05, |
|
"loss": 0.5992, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.006568522946616346, |
|
"grad_norm": 0.7178725600242615, |
|
"learning_rate": 9.645911019582467e-05, |
|
"loss": 0.7747, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.006683760191293826, |
|
"grad_norm": 0.6594935655593872, |
|
"learning_rate": 9.630873244788883e-05, |
|
"loss": 0.6761, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.006798997435971306, |
|
"grad_norm": 0.6650819778442383, |
|
"learning_rate": 9.615534979266745e-05, |
|
"loss": 0.8188, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.006914234680648786, |
|
"grad_norm": 0.6455956101417542, |
|
"learning_rate": 9.599897218294122e-05, |
|
"loss": 0.6711, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.007029471925326265, |
|
"grad_norm": 0.6445301175117493, |
|
"learning_rate": 9.583960976582913e-05, |
|
"loss": 0.7227, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.007144709170003745, |
|
"grad_norm": 1.0128474235534668, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 0.6944, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.007259946414681225, |
|
"grad_norm": 0.7492349147796631, |
|
"learning_rate": 9.551197206565173e-05, |
|
"loss": 0.7062, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.007375183659358704, |
|
"grad_norm": 0.7735145092010498, |
|
"learning_rate": 9.534371804252728e-05, |
|
"loss": 0.7259, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.007490420904036185, |
|
"grad_norm": 0.8398394584655762, |
|
"learning_rate": 9.517252173051911e-05, |
|
"loss": 0.6226, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.007605658148713664, |
|
"grad_norm": 0.8292278051376343, |
|
"learning_rate": 9.49983942383106e-05, |
|
"loss": 0.658, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.007720895393391144, |
|
"grad_norm": 0.7968212962150574, |
|
"learning_rate": 9.482134686478519e-05, |
|
"loss": 0.6969, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.007836132638068623, |
|
"grad_norm": 0.9180437326431274, |
|
"learning_rate": 9.464139109829321e-05, |
|
"loss": 0.7717, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.007951369882746103, |
|
"grad_norm": 0.9269341826438904, |
|
"learning_rate": 9.445853861590647e-05, |
|
"loss": 0.7169, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.008066607127423584, |
|
"grad_norm": 0.9348207712173462, |
|
"learning_rate": 9.42728012826605e-05, |
|
"loss": 0.6536, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.008181844372101062, |
|
"grad_norm": 1.1747376918792725, |
|
"learning_rate": 9.408419115078471e-05, |
|
"loss": 0.7902, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.008297081616778543, |
|
"grad_norm": 1.1941821575164795, |
|
"learning_rate": 9.389272045892024e-05, |
|
"loss": 0.8089, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.008412318861456023, |
|
"grad_norm": 1.107926368713379, |
|
"learning_rate": 9.36984016313259e-05, |
|
"loss": 0.6773, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.008527556106133502, |
|
"grad_norm": 1.135667324066162, |
|
"learning_rate": 9.350124727707197e-05, |
|
"loss": 0.7162, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.008642793350810982, |
|
"grad_norm": 1.1049367189407349, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 0.81, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.008758030595488462, |
|
"grad_norm": 1.4167182445526123, |
|
"learning_rate": 9.309848334400246e-05, |
|
"loss": 0.7369, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.008873267840165941, |
|
"grad_norm": 1.6046768426895142, |
|
"learning_rate": 9.289289989996133e-05, |
|
"loss": 0.7633, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.008988505084843421, |
|
"grad_norm": 1.4069336652755737, |
|
"learning_rate": 9.268453319711363e-05, |
|
"loss": 0.7567, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.009103742329520902, |
|
"grad_norm": 1.0434424877166748, |
|
"learning_rate": 9.247339675607605e-05, |
|
"loss": 0.5982, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.00921897957419838, |
|
"grad_norm": 1.1521142721176147, |
|
"learning_rate": 9.225950427718975e-05, |
|
"loss": 0.6323, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.00933421681887586, |
|
"grad_norm": 0.8314908742904663, |
|
"learning_rate": 9.204286963963111e-05, |
|
"loss": 0.6323, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.009449454063553341, |
|
"grad_norm": 0.8419124484062195, |
|
"learning_rate": 9.182350690051133e-05, |
|
"loss": 0.6278, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.00956469130823082, |
|
"grad_norm": 0.9470197558403015, |
|
"learning_rate": 9.160143029396422e-05, |
|
"loss": 0.6473, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0096799285529083, |
|
"grad_norm": 0.8800526857376099, |
|
"learning_rate": 9.13766542302225e-05, |
|
"loss": 0.5913, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.00979516579758578, |
|
"grad_norm": 0.9016923308372498, |
|
"learning_rate": 9.114919329468282e-05, |
|
"loss": 0.5914, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.009910403042263259, |
|
"grad_norm": 0.9425757527351379, |
|
"learning_rate": 9.091906224695935e-05, |
|
"loss": 0.6713, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.010025640286940739, |
|
"grad_norm": 1.0047729015350342, |
|
"learning_rate": 9.068627601992598e-05, |
|
"loss": 0.6046, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.01014087753161822, |
|
"grad_norm": 1.045275092124939, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.6654, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.010256114776295698, |
|
"grad_norm": 1.0444538593292236, |
|
"learning_rate": 9.021279861989885e-05, |
|
"loss": 0.6605, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.010371352020973178, |
|
"grad_norm": 1.0272176265716553, |
|
"learning_rate": 8.997213817017507e-05, |
|
"loss": 0.7068, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.010486589265650659, |
|
"grad_norm": 1.094701886177063, |
|
"learning_rate": 8.972888398568772e-05, |
|
"loss": 0.6086, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.010601826510328137, |
|
"grad_norm": 1.1884337663650513, |
|
"learning_rate": 8.948305185085225e-05, |
|
"loss": 0.6948, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.010717063755005618, |
|
"grad_norm": 1.0921660661697388, |
|
"learning_rate": 8.92346577173636e-05, |
|
"loss": 0.664, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.010832300999683098, |
|
"grad_norm": 1.253016710281372, |
|
"learning_rate": 8.898371770316111e-05, |
|
"loss": 0.6962, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.010947538244360577, |
|
"grad_norm": 1.1953667402267456, |
|
"learning_rate": 8.873024809138272e-05, |
|
"loss": 0.6155, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.011062775489038057, |
|
"grad_norm": 1.0429881811141968, |
|
"learning_rate": 8.847426532930831e-05, |
|
"loss": 0.5882, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.011178012733715537, |
|
"grad_norm": 1.3511476516723633, |
|
"learning_rate": 8.821578602729242e-05, |
|
"loss": 0.6557, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.011293249978393016, |
|
"grad_norm": 1.132889986038208, |
|
"learning_rate": 8.795482695768658e-05, |
|
"loss": 0.6294, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.011408487223070496, |
|
"grad_norm": 1.3862687349319458, |
|
"learning_rate": 8.769140505375085e-05, |
|
"loss": 0.6493, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.011523724467747977, |
|
"grad_norm": 1.5808641910552979, |
|
"learning_rate": 8.742553740855506e-05, |
|
"loss": 0.5728, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.011523724467747977, |
|
"eval_loss": 0.6656689643859863, |
|
"eval_runtime": 547.0519, |
|
"eval_samples_per_second": 26.716, |
|
"eval_steps_per_second": 6.679, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.011638961712425455, |
|
"grad_norm": 0.6208635568618774, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 0.5428, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.011754198957102936, |
|
"grad_norm": 0.6623407602310181, |
|
"learning_rate": 8.688653405904652e-05, |
|
"loss": 0.6547, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.011869436201780416, |
|
"grad_norm": 0.6228560209274292, |
|
"learning_rate": 8.661343332988869e-05, |
|
"loss": 0.586, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.011984673446457894, |
|
"grad_norm": 0.5910748243331909, |
|
"learning_rate": 8.633795680751116e-05, |
|
"loss": 0.6342, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.012099910691135375, |
|
"grad_norm": 0.6204082369804382, |
|
"learning_rate": 8.606012236719073e-05, |
|
"loss": 0.7092, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.012215147935812855, |
|
"grad_norm": 0.6246654391288757, |
|
"learning_rate": 8.577994803720606e-05, |
|
"loss": 0.8196, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.012330385180490334, |
|
"grad_norm": 0.5755138993263245, |
|
"learning_rate": 8.549745199766792e-05, |
|
"loss": 0.6955, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.012445622425167814, |
|
"grad_norm": 0.5705171227455139, |
|
"learning_rate": 8.521265257933948e-05, |
|
"loss": 0.6706, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.012560859669845294, |
|
"grad_norm": 0.6388005018234253, |
|
"learning_rate": 8.492556826244687e-05, |
|
"loss": 0.6809, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.012676096914522773, |
|
"grad_norm": 0.6977410316467285, |
|
"learning_rate": 8.463621767547998e-05, |
|
"loss": 0.7271, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.012791334159200253, |
|
"grad_norm": 0.6099151968955994, |
|
"learning_rate": 8.434461959398376e-05, |
|
"loss": 0.7123, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.012906571403877734, |
|
"grad_norm": 0.7898553609848022, |
|
"learning_rate": 8.405079293933986e-05, |
|
"loss": 0.7064, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.013021808648555212, |
|
"grad_norm": 0.7140493392944336, |
|
"learning_rate": 8.375475677753881e-05, |
|
"loss": 0.6898, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.013137045893232693, |
|
"grad_norm": 1.0986034870147705, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 0.754, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.013252283137910173, |
|
"grad_norm": 0.829111635684967, |
|
"learning_rate": 8.315613291203976e-05, |
|
"loss": 0.7179, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.013367520382587652, |
|
"grad_norm": 0.778800904750824, |
|
"learning_rate": 8.285358405218655e-05, |
|
"loss": 0.5743, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.013482757627265132, |
|
"grad_norm": 0.8650482296943665, |
|
"learning_rate": 8.25489033703452e-05, |
|
"loss": 0.7462, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.013597994871942612, |
|
"grad_norm": 0.8594391345977783, |
|
"learning_rate": 8.224211063680853e-05, |
|
"loss": 0.7918, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.01371323211662009, |
|
"grad_norm": 0.8802257180213928, |
|
"learning_rate": 8.19332257589174e-05, |
|
"loss": 0.7046, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.013828469361297571, |
|
"grad_norm": 0.9820117354393005, |
|
"learning_rate": 8.162226877976887e-05, |
|
"loss": 0.8009, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.013943706605975052, |
|
"grad_norm": 0.9804040193557739, |
|
"learning_rate": 8.130925987691569e-05, |
|
"loss": 0.7213, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.01405894385065253, |
|
"grad_norm": 0.9773142337799072, |
|
"learning_rate": 8.099421936105702e-05, |
|
"loss": 0.6519, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.01417418109533001, |
|
"grad_norm": 1.0842269659042358, |
|
"learning_rate": 8.067716767472045e-05, |
|
"loss": 0.653, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.01428941834000749, |
|
"grad_norm": 1.1096503734588623, |
|
"learning_rate": 8.035812539093557e-05, |
|
"loss": 0.7775, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.01440465558468497, |
|
"grad_norm": 1.3610727787017822, |
|
"learning_rate": 8.003711321189895e-05, |
|
"loss": 0.6345, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.01451989282936245, |
|
"grad_norm": 1.216774582862854, |
|
"learning_rate": 7.971415196763088e-05, |
|
"loss": 0.694, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.01463513007403993, |
|
"grad_norm": 1.4049333333969116, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.6201, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.014750367318717409, |
|
"grad_norm": 1.0875705480575562, |
|
"learning_rate": 7.906246623448183e-05, |
|
"loss": 0.6244, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.014865604563394889, |
|
"grad_norm": 1.0095276832580566, |
|
"learning_rate": 7.873378403255419e-05, |
|
"loss": 0.5977, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.01498084180807237, |
|
"grad_norm": 0.9484551548957825, |
|
"learning_rate": 7.840323733655778e-05, |
|
"loss": 0.6159, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.015096079052749848, |
|
"grad_norm": 0.8636478781700134, |
|
"learning_rate": 7.807084759519405e-05, |
|
"loss": 0.5963, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.015211316297427328, |
|
"grad_norm": 1.010664939880371, |
|
"learning_rate": 7.773663637675694e-05, |
|
"loss": 0.6228, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.015326553542104809, |
|
"grad_norm": 0.8741911053657532, |
|
"learning_rate": 7.740062536773352e-05, |
|
"loss": 0.4789, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.015441790786782287, |
|
"grad_norm": 0.9093407988548279, |
|
"learning_rate": 7.706283637139658e-05, |
|
"loss": 0.6464, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.015557028031459768, |
|
"grad_norm": 0.8352158665657043, |
|
"learning_rate": 7.672329130639005e-05, |
|
"loss": 0.6021, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.015672265276137246, |
|
"grad_norm": 0.9724483489990234, |
|
"learning_rate": 7.638201220530665e-05, |
|
"loss": 0.6069, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.015787502520814727, |
|
"grad_norm": 0.9229646921157837, |
|
"learning_rate": 7.603902121325813e-05, |
|
"loss": 0.6661, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.015902739765492207, |
|
"grad_norm": 0.894641637802124, |
|
"learning_rate": 7.569434058643844e-05, |
|
"loss": 0.4953, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.016017977010169687, |
|
"grad_norm": 0.9383701682090759, |
|
"learning_rate": 7.534799269067953e-05, |
|
"loss": 0.6165, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.016133214254847168, |
|
"grad_norm": 1.0712255239486694, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.5982, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.016248451499524648, |
|
"grad_norm": 1.1666333675384521, |
|
"learning_rate": 7.465038509514688e-05, |
|
"loss": 0.6132, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.016363688744202125, |
|
"grad_norm": 1.1935205459594727, |
|
"learning_rate": 7.42991706621303e-05, |
|
"loss": 0.6188, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.016478925988879605, |
|
"grad_norm": 1.1012414693832397, |
|
"learning_rate": 7.394637949075154e-05, |
|
"loss": 0.6483, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.016594163233557085, |
|
"grad_norm": 1.2057948112487793, |
|
"learning_rate": 7.35920344731241e-05, |
|
"loss": 0.5955, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.016709400478234566, |
|
"grad_norm": 1.210904836654663, |
|
"learning_rate": 7.323615860218843e-05, |
|
"loss": 0.6094, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.016824637722912046, |
|
"grad_norm": 1.1792796850204468, |
|
"learning_rate": 7.287877497021978e-05, |
|
"loss": 0.5172, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.016939874967589526, |
|
"grad_norm": 1.3119630813598633, |
|
"learning_rate": 7.251990676732984e-05, |
|
"loss": 0.5774, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.017055112212267003, |
|
"grad_norm": 1.3735464811325073, |
|
"learning_rate": 7.215957727996207e-05, |
|
"loss": 0.5789, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.017170349456944484, |
|
"grad_norm": 1.3045659065246582, |
|
"learning_rate": 7.179780988938051e-05, |
|
"loss": 0.6074, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.017285586701621964, |
|
"grad_norm": 1.5275754928588867, |
|
"learning_rate": 7.143462807015271e-05, |
|
"loss": 0.4802, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.017285586701621964, |
|
"eval_loss": 0.6325949430465698, |
|
"eval_runtime": 546.5133, |
|
"eval_samples_per_second": 26.742, |
|
"eval_steps_per_second": 6.686, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.017400823946299444, |
|
"grad_norm": 0.6129029393196106, |
|
"learning_rate": 7.107005538862646e-05, |
|
"loss": 0.5457, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.017516061190976925, |
|
"grad_norm": 0.6865366101264954, |
|
"learning_rate": 7.07041155014006e-05, |
|
"loss": 0.6037, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.017631298435654405, |
|
"grad_norm": 0.6349140405654907, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 0.6006, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.017746535680331882, |
|
"grad_norm": 0.6397451162338257, |
|
"learning_rate": 6.996822917828477e-05, |
|
"loss": 0.6904, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.017861772925009362, |
|
"grad_norm": 0.5073702335357666, |
|
"learning_rate": 6.959833049300377e-05, |
|
"loss": 0.5711, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.017977010169686843, |
|
"grad_norm": 0.5888249278068542, |
|
"learning_rate": 6.922716010014255e-05, |
|
"loss": 0.7033, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.018092247414364323, |
|
"grad_norm": 0.5774878263473511, |
|
"learning_rate": 6.885474208441603e-05, |
|
"loss": 0.6928, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.018207484659041803, |
|
"grad_norm": 0.6262368559837341, |
|
"learning_rate": 6.848110061149556e-05, |
|
"loss": 0.7691, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.018322721903719284, |
|
"grad_norm": 0.6418235301971436, |
|
"learning_rate": 6.810625992644085e-05, |
|
"loss": 0.6263, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.01843795914839676, |
|
"grad_norm": 0.6563417911529541, |
|
"learning_rate": 6.773024435212678e-05, |
|
"loss": 0.692, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01855319639307424, |
|
"grad_norm": 0.654928982257843, |
|
"learning_rate": 6.735307828766515e-05, |
|
"loss": 0.6949, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.01866843363775172, |
|
"grad_norm": 0.7155853509902954, |
|
"learning_rate": 6.697478620682137e-05, |
|
"loss": 0.6591, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.0187836708824292, |
|
"grad_norm": 0.7285525798797607, |
|
"learning_rate": 6.659539265642643e-05, |
|
"loss": 0.7245, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.018898908127106682, |
|
"grad_norm": 0.7615992426872253, |
|
"learning_rate": 6.621492225478414e-05, |
|
"loss": 0.5962, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.019014145371784162, |
|
"grad_norm": 0.7541478872299194, |
|
"learning_rate": 6.583339969007363e-05, |
|
"loss": 0.6226, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.01912938261646164, |
|
"grad_norm": 1.3453112840652466, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.7385, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.01924461986113912, |
|
"grad_norm": 0.8395344614982605, |
|
"learning_rate": 6.506729716392481e-05, |
|
"loss": 0.7463, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.0193598571058166, |
|
"grad_norm": 0.8182184100151062, |
|
"learning_rate": 6.468276691378155e-05, |
|
"loss": 0.643, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.01947509435049408, |
|
"grad_norm": 1.2326037883758545, |
|
"learning_rate": 6.429728391993446e-05, |
|
"loss": 0.6738, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.01959033159517156, |
|
"grad_norm": 0.9027764797210693, |
|
"learning_rate": 6.391087319582264e-05, |
|
"loss": 0.721, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01970556883984904, |
|
"grad_norm": 0.955884575843811, |
|
"learning_rate": 6.35235598150842e-05, |
|
"loss": 0.65, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.019820806084526518, |
|
"grad_norm": 0.9886214137077332, |
|
"learning_rate": 6.313536890992935e-05, |
|
"loss": 0.7256, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.019936043329203998, |
|
"grad_norm": 0.9550946354866028, |
|
"learning_rate": 6.274632566950967e-05, |
|
"loss": 0.7296, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.020051280573881478, |
|
"grad_norm": 1.1125203371047974, |
|
"learning_rate": 6.235645533828349e-05, |
|
"loss": 0.6658, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.02016651781855896, |
|
"grad_norm": 1.145430326461792, |
|
"learning_rate": 6.19657832143779e-05, |
|
"loss": 0.6213, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.02028175506323644, |
|
"grad_norm": 1.138908863067627, |
|
"learning_rate": 6.157433464794716e-05, |
|
"loss": 0.6373, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.02039699230791392, |
|
"grad_norm": 1.2511849403381348, |
|
"learning_rate": 6.118213503952779e-05, |
|
"loss": 0.6691, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.020512229552591396, |
|
"grad_norm": 1.3821207284927368, |
|
"learning_rate": 6.078920983839031e-05, |
|
"loss": 0.7503, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.020627466797268876, |
|
"grad_norm": 1.0743690729141235, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 0.6563, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.020742704041946357, |
|
"grad_norm": 0.9574199318885803, |
|
"learning_rate": 6.0001284688802226e-05, |
|
"loss": 0.613, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.020857941286623837, |
|
"grad_norm": 0.9797923564910889, |
|
"learning_rate": 5.960633586768543e-05, |
|
"loss": 0.4921, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.020973178531301317, |
|
"grad_norm": 1.0329078435897827, |
|
"learning_rate": 5.921076370520058e-05, |
|
"loss": 0.5424, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.021088415775978798, |
|
"grad_norm": 1.081237554550171, |
|
"learning_rate": 5.8814593869458455e-05, |
|
"loss": 0.593, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.021203653020656275, |
|
"grad_norm": 0.9744309782981873, |
|
"learning_rate": 5.841785206735192e-05, |
|
"loss": 0.6247, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.021318890265333755, |
|
"grad_norm": 0.9277073740959167, |
|
"learning_rate": 5.8020564042888015e-05, |
|
"loss": 0.5272, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.021434127510011235, |
|
"grad_norm": 0.9203082323074341, |
|
"learning_rate": 5.762275557551727e-05, |
|
"loss": 0.5604, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.021549364754688716, |
|
"grad_norm": 1.002752661705017, |
|
"learning_rate": 5.7224452478461064e-05, |
|
"loss": 0.6614, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.021664601999366196, |
|
"grad_norm": 1.1859053373336792, |
|
"learning_rate": 5.682568059703659e-05, |
|
"loss": 0.6258, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.021779839244043676, |
|
"grad_norm": 0.9486787915229797, |
|
"learning_rate": 5.642646580697973e-05, |
|
"loss": 0.5797, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.021895076488721153, |
|
"grad_norm": 1.088105320930481, |
|
"learning_rate": 5.602683401276615e-05, |
|
"loss": 0.5081, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.022010313733398634, |
|
"grad_norm": 1.0794477462768555, |
|
"learning_rate": 5.562681114593028e-05, |
|
"loss": 0.6171, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.022125550978076114, |
|
"grad_norm": 1.1164370775222778, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 0.604, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.022240788222753594, |
|
"grad_norm": 1.020864725112915, |
|
"learning_rate": 5.482569604572576e-05, |
|
"loss": 0.5079, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.022356025467431075, |
|
"grad_norm": 0.9936183094978333, |
|
"learning_rate": 5.442465579556793e-05, |
|
"loss": 0.5903, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.022471262712108555, |
|
"grad_norm": 1.3103758096694946, |
|
"learning_rate": 5.402332843583631e-05, |
|
"loss": 0.6436, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.022586499956786032, |
|
"grad_norm": 1.2461060285568237, |
|
"learning_rate": 5.3621740008088126e-05, |
|
"loss": 0.5836, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.022701737201463512, |
|
"grad_norm": 1.5536116361618042, |
|
"learning_rate": 5.321991657082097e-05, |
|
"loss": 0.5672, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.022816974446140992, |
|
"grad_norm": 1.3643299341201782, |
|
"learning_rate": 5.281788419778187e-05, |
|
"loss": 0.4866, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.022932211690818473, |
|
"grad_norm": 1.5207157135009766, |
|
"learning_rate": 5.2415668976275355e-05, |
|
"loss": 0.4737, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.023047448935495953, |
|
"grad_norm": 1.5689412355422974, |
|
"learning_rate": 5.201329700547076e-05, |
|
"loss": 0.4415, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.023047448935495953, |
|
"eval_loss": 0.608009397983551, |
|
"eval_runtime": 545.5348, |
|
"eval_samples_per_second": 26.79, |
|
"eval_steps_per_second": 6.698, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.023162686180173434, |
|
"grad_norm": 0.5872024893760681, |
|
"learning_rate": 5.161079439470866e-05, |
|
"loss": 0.6496, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.02327792342485091, |
|
"grad_norm": 0.5686631798744202, |
|
"learning_rate": 5.1208187261806615e-05, |
|
"loss": 0.5287, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.02339316066952839, |
|
"grad_norm": 0.5892530083656311, |
|
"learning_rate": 5.080550173136457e-05, |
|
"loss": 0.6187, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.02350839791420587, |
|
"grad_norm": 0.754984438419342, |
|
"learning_rate": 5.0402763933069496e-05, |
|
"loss": 0.5892, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.02362363515888335, |
|
"grad_norm": 0.5711228847503662, |
|
"learning_rate": 5e-05, |
|
"loss": 0.4912, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.02373887240356083, |
|
"grad_norm": 0.8867496252059937, |
|
"learning_rate": 4.9597236066930516e-05, |
|
"loss": 0.7261, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.023854109648238312, |
|
"grad_norm": 0.6213980317115784, |
|
"learning_rate": 4.919449826863544e-05, |
|
"loss": 0.5705, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.02396934689291579, |
|
"grad_norm": 0.6462709903717041, |
|
"learning_rate": 4.87918127381934e-05, |
|
"loss": 0.7501, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.02408458413759327, |
|
"grad_norm": 0.6636618375778198, |
|
"learning_rate": 4.8389205605291365e-05, |
|
"loss": 0.6899, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.02419982138227075, |
|
"grad_norm": 0.5810405611991882, |
|
"learning_rate": 4.798670299452926e-05, |
|
"loss": 0.5939, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.02431505862694823, |
|
"grad_norm": 0.6413638591766357, |
|
"learning_rate": 4.758433102372466e-05, |
|
"loss": 0.5566, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.02443029587162571, |
|
"grad_norm": 0.708579957485199, |
|
"learning_rate": 4.7182115802218126e-05, |
|
"loss": 0.6541, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.02454553311630319, |
|
"grad_norm": 0.648512065410614, |
|
"learning_rate": 4.678008342917903e-05, |
|
"loss": 0.626, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.024660770360980667, |
|
"grad_norm": 0.6940747499465942, |
|
"learning_rate": 4.6378259991911886e-05, |
|
"loss": 0.6072, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.024776007605658148, |
|
"grad_norm": 0.6784055829048157, |
|
"learning_rate": 4.597667156416371e-05, |
|
"loss": 0.6203, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.024891244850335628, |
|
"grad_norm": 0.8279077410697937, |
|
"learning_rate": 4.5575344204432084e-05, |
|
"loss": 0.6603, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.02500648209501311, |
|
"grad_norm": 1.2922221422195435, |
|
"learning_rate": 4.5174303954274244e-05, |
|
"loss": 0.7691, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.02512171933969059, |
|
"grad_norm": 0.8495535850524902, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 0.7552, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.02523695658436807, |
|
"grad_norm": 0.8626475930213928, |
|
"learning_rate": 4.437318885406973e-05, |
|
"loss": 0.7286, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.025352193829045546, |
|
"grad_norm": 0.8560473918914795, |
|
"learning_rate": 4.397316598723385e-05, |
|
"loss": 0.6934, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.025467431073723026, |
|
"grad_norm": 0.8979941606521606, |
|
"learning_rate": 4.3573534193020274e-05, |
|
"loss": 0.7408, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.025582668318400507, |
|
"grad_norm": 0.8926584720611572, |
|
"learning_rate": 4.317431940296343e-05, |
|
"loss": 0.6998, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.025697905563077987, |
|
"grad_norm": 1.0287340879440308, |
|
"learning_rate": 4.277554752153895e-05, |
|
"loss": 0.6588, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.025813142807755467, |
|
"grad_norm": 1.906563401222229, |
|
"learning_rate": 4.237724442448273e-05, |
|
"loss": 0.6557, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.025928380052432948, |
|
"grad_norm": 1.0948593616485596, |
|
"learning_rate": 4.197943595711198e-05, |
|
"loss": 0.5973, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.026043617297110425, |
|
"grad_norm": 1.3332748413085938, |
|
"learning_rate": 4.1582147932648074e-05, |
|
"loss": 0.6465, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.026158854541787905, |
|
"grad_norm": 1.3926104307174683, |
|
"learning_rate": 4.118540613054156e-05, |
|
"loss": 0.6133, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.026274091786465385, |
|
"grad_norm": 1.373544454574585, |
|
"learning_rate": 4.078923629479943e-05, |
|
"loss": 0.6587, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.026389329031142866, |
|
"grad_norm": 1.4265940189361572, |
|
"learning_rate": 4.039366413231458e-05, |
|
"loss": 0.6224, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.026504566275820346, |
|
"grad_norm": 1.0143760442733765, |
|
"learning_rate": 3.9998715311197785e-05, |
|
"loss": 0.5808, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.026619803520497826, |
|
"grad_norm": 0.9024608731269836, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 0.4964, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.026735040765175303, |
|
"grad_norm": 1.010740876197815, |
|
"learning_rate": 3.92107901616097e-05, |
|
"loss": 0.6209, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.026850278009852784, |
|
"grad_norm": 0.9866460561752319, |
|
"learning_rate": 3.8817864960472236e-05, |
|
"loss": 0.6106, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.026965515254530264, |
|
"grad_norm": 0.9405643343925476, |
|
"learning_rate": 3.842566535205286e-05, |
|
"loss": 0.5751, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.027080752499207744, |
|
"grad_norm": 0.9639310240745544, |
|
"learning_rate": 3.803421678562213e-05, |
|
"loss": 0.5764, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.027195989743885225, |
|
"grad_norm": 0.9636418223381042, |
|
"learning_rate": 3.764354466171652e-05, |
|
"loss": 0.4725, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.027311226988562705, |
|
"grad_norm": 0.9855490922927856, |
|
"learning_rate": 3.725367433049033e-05, |
|
"loss": 0.5208, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.02742646423324018, |
|
"grad_norm": 1.095041275024414, |
|
"learning_rate": 3.6864631090070655e-05, |
|
"loss": 0.5492, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.027541701477917662, |
|
"grad_norm": 0.9551463723182678, |
|
"learning_rate": 3.6476440184915815e-05, |
|
"loss": 0.4841, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.027656938722595142, |
|
"grad_norm": 0.9834778308868408, |
|
"learning_rate": 3.608912680417737e-05, |
|
"loss": 0.4802, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.027772175967272623, |
|
"grad_norm": 1.0290888547897339, |
|
"learning_rate": 3.570271608006555e-05, |
|
"loss": 0.4678, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.027887413211950103, |
|
"grad_norm": 1.1884846687316895, |
|
"learning_rate": 3.531723308621847e-05, |
|
"loss": 0.6124, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.028002650456627583, |
|
"grad_norm": 1.1768547296524048, |
|
"learning_rate": 3.493270283607522e-05, |
|
"loss": 0.5453, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.02811788770130506, |
|
"grad_norm": 1.2450060844421387, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.5197, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.02823312494598254, |
|
"grad_norm": 1.1013474464416504, |
|
"learning_rate": 3.4166600309926387e-05, |
|
"loss": 0.4784, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.02834836219066002, |
|
"grad_norm": 1.2103960514068604, |
|
"learning_rate": 3.3785077745215873e-05, |
|
"loss": 0.5579, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.0284635994353375, |
|
"grad_norm": 1.3629238605499268, |
|
"learning_rate": 3.340460734357359e-05, |
|
"loss": 0.521, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.02857883668001498, |
|
"grad_norm": 1.3528265953063965, |
|
"learning_rate": 3.3025213793178646e-05, |
|
"loss": 0.4699, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.028694073924692462, |
|
"grad_norm": 1.7364858388900757, |
|
"learning_rate": 3.264692171233485e-05, |
|
"loss": 0.5086, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.02880931116936994, |
|
"grad_norm": 1.2796268463134766, |
|
"learning_rate": 3.226975564787322e-05, |
|
"loss": 0.4677, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.02880931116936994, |
|
"eval_loss": 0.5864402651786804, |
|
"eval_runtime": 546.8732, |
|
"eval_samples_per_second": 26.725, |
|
"eval_steps_per_second": 6.682, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.02892454841404742, |
|
"grad_norm": 0.49436643719673157, |
|
"learning_rate": 3.189374007355917e-05, |
|
"loss": 0.5239, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.0290397856587249, |
|
"grad_norm": 0.5220190286636353, |
|
"learning_rate": 3.151889938850445e-05, |
|
"loss": 0.6093, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.02915502290340238, |
|
"grad_norm": 0.596616268157959, |
|
"learning_rate": 3.114525791558398e-05, |
|
"loss": 0.591, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.02927026014807986, |
|
"grad_norm": 0.6420341730117798, |
|
"learning_rate": 3.0772839899857464e-05, |
|
"loss": 0.6168, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.02938549739275734, |
|
"grad_norm": 0.591904878616333, |
|
"learning_rate": 3.0401669506996256e-05, |
|
"loss": 0.5331, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.029500734637434817, |
|
"grad_norm": 0.6355295777320862, |
|
"learning_rate": 3.003177082171523e-05, |
|
"loss": 0.7249, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.029615971882112298, |
|
"grad_norm": 0.6406742930412292, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 0.592, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.029731209126789778, |
|
"grad_norm": 0.6385841369628906, |
|
"learning_rate": 2.9295884498599414e-05, |
|
"loss": 0.6364, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.02984644637146726, |
|
"grad_norm": 0.692854642868042, |
|
"learning_rate": 2.8929944611373554e-05, |
|
"loss": 0.7044, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.02996168361614474, |
|
"grad_norm": 0.690500795841217, |
|
"learning_rate": 2.8565371929847284e-05, |
|
"loss": 0.6743, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.03007692086082222, |
|
"grad_norm": 0.7187701463699341, |
|
"learning_rate": 2.8202190110619493e-05, |
|
"loss": 0.5568, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.030192158105499696, |
|
"grad_norm": 0.6858519315719604, |
|
"learning_rate": 2.784042272003794e-05, |
|
"loss": 0.5427, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.030307395350177176, |
|
"grad_norm": 0.728316605091095, |
|
"learning_rate": 2.7480093232670158e-05, |
|
"loss": 0.6825, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.030422632594854657, |
|
"grad_norm": 0.7076228260993958, |
|
"learning_rate": 2.712122502978024e-05, |
|
"loss": 0.7327, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.030537869839532137, |
|
"grad_norm": 0.9509554505348206, |
|
"learning_rate": 2.6763841397811573e-05, |
|
"loss": 0.5248, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.030653107084209617, |
|
"grad_norm": 0.733241617679596, |
|
"learning_rate": 2.64079655268759e-05, |
|
"loss": 0.5209, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.030768344328887098, |
|
"grad_norm": 0.8173394799232483, |
|
"learning_rate": 2.605362050924848e-05, |
|
"loss": 0.5888, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.030883581573564575, |
|
"grad_norm": 0.7932162284851074, |
|
"learning_rate": 2.57008293378697e-05, |
|
"loss": 0.7154, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.030998818818242055, |
|
"grad_norm": 0.8659783601760864, |
|
"learning_rate": 2.534961490485313e-05, |
|
"loss": 0.5691, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.031114056062919535, |
|
"grad_norm": 0.8032422661781311, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.5285, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.031229293307597016, |
|
"grad_norm": 0.9526371359825134, |
|
"learning_rate": 2.4652007309320498e-05, |
|
"loss": 0.6952, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.03134453055227449, |
|
"grad_norm": 0.933838427066803, |
|
"learning_rate": 2.430565941356157e-05, |
|
"loss": 0.6856, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.031459767796951976, |
|
"grad_norm": 1.056208848953247, |
|
"learning_rate": 2.3960978786741877e-05, |
|
"loss": 0.6652, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.03157500504162945, |
|
"grad_norm": 0.9815629720687866, |
|
"learning_rate": 2.361798779469336e-05, |
|
"loss": 0.5508, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.03169024228630694, |
|
"grad_norm": 0.9763931035995483, |
|
"learning_rate": 2.3276708693609943e-05, |
|
"loss": 0.5162, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.031805479530984414, |
|
"grad_norm": 2.0040676593780518, |
|
"learning_rate": 2.2937163628603435e-05, |
|
"loss": 0.7866, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.03192071677566189, |
|
"grad_norm": 1.1686058044433594, |
|
"learning_rate": 2.259937463226651e-05, |
|
"loss": 0.5514, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.032035954020339374, |
|
"grad_norm": 1.249847412109375, |
|
"learning_rate": 2.2263363623243054e-05, |
|
"loss": 0.6229, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.03215119126501685, |
|
"grad_norm": 1.0815333127975464, |
|
"learning_rate": 2.192915240480596e-05, |
|
"loss": 0.5628, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.032266428509694335, |
|
"grad_norm": 0.8114826083183289, |
|
"learning_rate": 2.1596762663442218e-05, |
|
"loss": 0.5534, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.03238166575437181, |
|
"grad_norm": 0.8990956544876099, |
|
"learning_rate": 2.1266215967445824e-05, |
|
"loss": 0.6225, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.032496902999049296, |
|
"grad_norm": 0.9672009944915771, |
|
"learning_rate": 2.0937533765518187e-05, |
|
"loss": 0.5707, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.03261214024372677, |
|
"grad_norm": 0.9714505076408386, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.5974, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.03272737748840425, |
|
"grad_norm": 1.074284315109253, |
|
"learning_rate": 2.0285848032369137e-05, |
|
"loss": 0.6089, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.03284261473308173, |
|
"grad_norm": 0.9929256439208984, |
|
"learning_rate": 1.996288678810105e-05, |
|
"loss": 0.4241, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.03295785197775921, |
|
"grad_norm": 0.9622356295585632, |
|
"learning_rate": 1.9641874609064443e-05, |
|
"loss": 0.4916, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.033073089222436694, |
|
"grad_norm": 1.044762372970581, |
|
"learning_rate": 1.932283232527956e-05, |
|
"loss": 0.6892, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.03318832646711417, |
|
"grad_norm": 0.9555506706237793, |
|
"learning_rate": 1.9005780638942982e-05, |
|
"loss": 0.5201, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.03330356371179165, |
|
"grad_norm": 1.053078055381775, |
|
"learning_rate": 1.8690740123084316e-05, |
|
"loss": 0.6296, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.03341880095646913, |
|
"grad_norm": 1.1646440029144287, |
|
"learning_rate": 1.837773122023114e-05, |
|
"loss": 0.5341, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.03353403820114661, |
|
"grad_norm": 1.0825483798980713, |
|
"learning_rate": 1.8066774241082612e-05, |
|
"loss": 0.4564, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.03364927544582409, |
|
"grad_norm": 1.0500282049179077, |
|
"learning_rate": 1.7757889363191483e-05, |
|
"loss": 0.4674, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.03376451269050157, |
|
"grad_norm": 1.2521641254425049, |
|
"learning_rate": 1.745109662965481e-05, |
|
"loss": 0.543, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.03387974993517905, |
|
"grad_norm": 1.1742342710494995, |
|
"learning_rate": 1.714641594781347e-05, |
|
"loss": 0.537, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.03399498717985653, |
|
"grad_norm": 1.0398088693618774, |
|
"learning_rate": 1.684386708796025e-05, |
|
"loss": 0.4332, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.03411022442453401, |
|
"grad_norm": 1.2116755247116089, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.4955, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.03422546166921149, |
|
"grad_norm": 1.2733010053634644, |
|
"learning_rate": 1.62452432224612e-05, |
|
"loss": 0.5968, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.03434069891388897, |
|
"grad_norm": 1.2947372198104858, |
|
"learning_rate": 1.5949207060660138e-05, |
|
"loss": 0.4119, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.03445593615856645, |
|
"grad_norm": 1.4762217998504639, |
|
"learning_rate": 1.5655380406016235e-05, |
|
"loss": 0.4468, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.03457117340324393, |
|
"grad_norm": 1.39065420627594, |
|
"learning_rate": 1.536378232452003e-05, |
|
"loss": 0.3888, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.03457117340324393, |
|
"eval_loss": 0.5709081292152405, |
|
"eval_runtime": 545.5086, |
|
"eval_samples_per_second": 26.792, |
|
"eval_steps_per_second": 6.698, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.034686410647921405, |
|
"grad_norm": 0.44936424493789673, |
|
"learning_rate": 1.5074431737553157e-05, |
|
"loss": 0.5426, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.03480164789259889, |
|
"grad_norm": 0.5097118616104126, |
|
"learning_rate": 1.4787347420660541e-05, |
|
"loss": 0.4921, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.034916885137276366, |
|
"grad_norm": 0.5721942782402039, |
|
"learning_rate": 1.4502548002332088e-05, |
|
"loss": 0.7171, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.03503212238195385, |
|
"grad_norm": 0.5862175226211548, |
|
"learning_rate": 1.422005196279395e-05, |
|
"loss": 0.5825, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.035147359626631326, |
|
"grad_norm": 0.610176682472229, |
|
"learning_rate": 1.3939877632809278e-05, |
|
"loss": 0.5593, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.03526259687130881, |
|
"grad_norm": 0.6718563437461853, |
|
"learning_rate": 1.3662043192488849e-05, |
|
"loss": 0.6023, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.03537783411598629, |
|
"grad_norm": 0.7071716785430908, |
|
"learning_rate": 1.338656667011134e-05, |
|
"loss": 0.7905, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.035493071360663764, |
|
"grad_norm": 0.6894174814224243, |
|
"learning_rate": 1.3113465940953495e-05, |
|
"loss": 0.6028, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.03560830860534125, |
|
"grad_norm": 0.6270958781242371, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.5373, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.035723545850018724, |
|
"grad_norm": 0.7721391916275024, |
|
"learning_rate": 1.257446259144494e-05, |
|
"loss": 0.5485, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.03583878309469621, |
|
"grad_norm": 0.7311829328536987, |
|
"learning_rate": 1.2308594946249163e-05, |
|
"loss": 0.6111, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.035954020339373685, |
|
"grad_norm": 0.7510440349578857, |
|
"learning_rate": 1.204517304231343e-05, |
|
"loss": 0.5879, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.03606925758405116, |
|
"grad_norm": 0.8949467539787292, |
|
"learning_rate": 1.178421397270758e-05, |
|
"loss": 0.7171, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.036184494828728646, |
|
"grad_norm": 0.7741400003433228, |
|
"learning_rate": 1.1525734670691701e-05, |
|
"loss": 0.5864, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.03629973207340612, |
|
"grad_norm": 0.7592278122901917, |
|
"learning_rate": 1.1269751908617277e-05, |
|
"loss": 0.559, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.036414969318083606, |
|
"grad_norm": 0.7965648770332336, |
|
"learning_rate": 1.1016282296838887e-05, |
|
"loss": 0.5493, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.03653020656276108, |
|
"grad_norm": 0.8162243366241455, |
|
"learning_rate": 1.0765342282636416e-05, |
|
"loss": 0.6576, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.03664544380743857, |
|
"grad_norm": 0.8158289194107056, |
|
"learning_rate": 1.0516948149147754e-05, |
|
"loss": 0.6477, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.036760681052116044, |
|
"grad_norm": 0.9171628355979919, |
|
"learning_rate": 1.0271116014312293e-05, |
|
"loss": 0.5811, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.03687591829679352, |
|
"grad_norm": 0.8957247138023376, |
|
"learning_rate": 1.0027861829824952e-05, |
|
"loss": 0.578, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.036991155541471005, |
|
"grad_norm": 0.8826591968536377, |
|
"learning_rate": 9.787201380101157e-06, |
|
"loss": 0.6718, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.03710639278614848, |
|
"grad_norm": 1.0377730131149292, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.7081, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.037221630030825965, |
|
"grad_norm": 0.8994547724723816, |
|
"learning_rate": 9.313723980074018e-06, |
|
"loss": 0.5559, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.03733686727550344, |
|
"grad_norm": 1.031438946723938, |
|
"learning_rate": 9.080937753040646e-06, |
|
"loss": 0.7033, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.03745210452018092, |
|
"grad_norm": 1.041542649269104, |
|
"learning_rate": 8.850806705317183e-06, |
|
"loss": 0.5513, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.0375673417648584, |
|
"grad_norm": 1.2182027101516724, |
|
"learning_rate": 8.623345769777514e-06, |
|
"loss": 0.8364, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.03768257900953588, |
|
"grad_norm": 0.9404509663581848, |
|
"learning_rate": 8.398569706035792e-06, |
|
"loss": 0.537, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.037797816254213364, |
|
"grad_norm": 1.1761248111724854, |
|
"learning_rate": 8.176493099488663e-06, |
|
"loss": 0.6538, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.03791305349889084, |
|
"grad_norm": 1.344883680343628, |
|
"learning_rate": 7.957130360368898e-06, |
|
"loss": 0.6001, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.038028290743568324, |
|
"grad_norm": 0.831625759601593, |
|
"learning_rate": 7.740495722810271e-06, |
|
"loss": 0.4481, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.0381435279882458, |
|
"grad_norm": 0.8212209343910217, |
|
"learning_rate": 7.526603243923957e-06, |
|
"loss": 0.4851, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.03825876523292328, |
|
"grad_norm": 0.902117908000946, |
|
"learning_rate": 7.315466802886401e-06, |
|
"loss": 0.5776, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.03837400247760076, |
|
"grad_norm": 0.8807656764984131, |
|
"learning_rate": 7.107100100038671e-06, |
|
"loss": 0.4749, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.03848923972227824, |
|
"grad_norm": 1.0799822807312012, |
|
"learning_rate": 6.901516655997536e-06, |
|
"loss": 0.6281, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.03860447696695572, |
|
"grad_norm": 0.8429787158966064, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.4242, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.0387197142116332, |
|
"grad_norm": 1.0487651824951172, |
|
"learning_rate": 6.498752722928042e-06, |
|
"loss": 0.6525, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.038834951456310676, |
|
"grad_norm": 1.1070475578308105, |
|
"learning_rate": 6.301598368674105e-06, |
|
"loss": 0.5714, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.03895018870098816, |
|
"grad_norm": 1.008655071258545, |
|
"learning_rate": 6.107279541079769e-06, |
|
"loss": 0.5121, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.03906542594566564, |
|
"grad_norm": 1.107851266860962, |
|
"learning_rate": 5.915808849215304e-06, |
|
"loss": 0.5654, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.03918066319034312, |
|
"grad_norm": 0.9551889300346375, |
|
"learning_rate": 5.727198717339511e-06, |
|
"loss": 0.4666, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.0392959004350206, |
|
"grad_norm": 1.1649274826049805, |
|
"learning_rate": 5.54146138409355e-06, |
|
"loss": 0.4918, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.03941113767969808, |
|
"grad_norm": 1.1512972116470337, |
|
"learning_rate": 5.358608901706802e-06, |
|
"loss": 0.5748, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.03952637492437556, |
|
"grad_norm": 1.2667012214660645, |
|
"learning_rate": 5.178653135214812e-06, |
|
"loss": 0.5099, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.039641612169053035, |
|
"grad_norm": 1.1603896617889404, |
|
"learning_rate": 5.001605761689398e-06, |
|
"loss": 0.5842, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.03975684941373052, |
|
"grad_norm": 1.1110482215881348, |
|
"learning_rate": 4.827478269480895e-06, |
|
"loss": 0.525, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.039872086658407996, |
|
"grad_norm": 1.1846598386764526, |
|
"learning_rate": 4.65628195747273e-06, |
|
"loss": 0.5712, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.03998732390308548, |
|
"grad_norm": 1.3512845039367676, |
|
"learning_rate": 4.488027934348271e-06, |
|
"loss": 0.5105, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.040102561147762956, |
|
"grad_norm": 1.2469724416732788, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.495, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.04021779839244043, |
|
"grad_norm": 1.583885908126831, |
|
"learning_rate": 4.16039023417088e-06, |
|
"loss": 0.4817, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.04033303563711792, |
|
"grad_norm": 1.6261035203933716, |
|
"learning_rate": 4.001027817058789e-06, |
|
"loss": 0.3422, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.04033303563711792, |
|
"eval_loss": 0.565327525138855, |
|
"eval_runtime": 545.9327, |
|
"eval_samples_per_second": 26.771, |
|
"eval_steps_per_second": 6.693, |
|
"step": 350 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 400, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.345339247276851e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |