{
  "best_metric": 0.5731000304222107,
  "best_model_checkpoint": "miner_id_24/checkpoint-400",
  "epoch": 0.450070323488045,
  "eval_steps": 50,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011251758087201125,
      "grad_norm": 0.791208028793335,
      "learning_rate": 1e-05,
      "loss": 0.5861,
      "step": 1
    },
    {
      "epoch": 0.0011251758087201125,
      "eval_loss": 0.9892504811286926,
      "eval_runtime": 121.4392,
      "eval_samples_per_second": 12.327,
      "eval_steps_per_second": 3.088,
      "step": 1
    },
    {
      "epoch": 0.002250351617440225,
      "grad_norm": 0.7348774075508118,
      "learning_rate": 2e-05,
      "loss": 0.592,
      "step": 2
    },
    {
      "epoch": 0.0033755274261603376,
      "grad_norm": 0.741549015045166,
      "learning_rate": 3e-05,
      "loss": 0.6838,
      "step": 3
    },
    {
      "epoch": 0.00450070323488045,
      "grad_norm": 0.5738378167152405,
      "learning_rate": 4e-05,
      "loss": 0.7513,
      "step": 4
    },
    {
      "epoch": 0.005625879043600563,
      "grad_norm": 0.47346198558807373,
      "learning_rate": 5e-05,
      "loss": 0.8137,
      "step": 5
    },
    {
      "epoch": 0.006751054852320675,
      "grad_norm": 0.39620643854141235,
      "learning_rate": 6e-05,
      "loss": 0.733,
      "step": 6
    },
    {
      "epoch": 0.007876230661040788,
      "grad_norm": 0.9768288135528564,
      "learning_rate": 7e-05,
      "loss": 0.9303,
      "step": 7
    },
    {
      "epoch": 0.0090014064697609,
      "grad_norm": 0.7520477175712585,
      "learning_rate": 8e-05,
      "loss": 0.8512,
      "step": 8
    },
    {
      "epoch": 0.010126582278481013,
      "grad_norm": 0.4297806918621063,
      "learning_rate": 9e-05,
      "loss": 0.8734,
      "step": 9
    },
    {
      "epoch": 0.011251758087201125,
      "grad_norm": 0.4498920440673828,
      "learning_rate": 0.0001,
      "loss": 0.9123,
      "step": 10
    },
    {
      "epoch": 0.012376933895921238,
      "grad_norm": 0.5254006385803223,
      "learning_rate": 9.99983777858264e-05,
      "loss": 0.9156,
      "step": 11
    },
    {
      "epoch": 0.01350210970464135,
      "grad_norm": 0.4119812548160553,
      "learning_rate": 9.999351124856874e-05,
      "loss": 0.8029,
      "step": 12
    },
    {
      "epoch": 0.014627285513361463,
      "grad_norm": 0.34345316886901855,
      "learning_rate": 9.998540070400966e-05,
      "loss": 0.936,
      "step": 13
    },
    {
      "epoch": 0.015752461322081576,
      "grad_norm": 0.37706848978996277,
      "learning_rate": 9.997404667843075e-05,
      "loss": 0.9711,
      "step": 14
    },
    {
      "epoch": 0.016877637130801686,
      "grad_norm": 0.4172216057777405,
      "learning_rate": 9.995944990857849e-05,
      "loss": 0.8853,
      "step": 15
    },
    {
      "epoch": 0.0180028129395218,
      "grad_norm": 0.33668506145477295,
      "learning_rate": 9.994161134161634e-05,
      "loss": 0.9898,
      "step": 16
    },
    {
      "epoch": 0.01912798874824191,
      "grad_norm": 0.2785027027130127,
      "learning_rate": 9.992053213506334e-05,
      "loss": 0.8462,
      "step": 17
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 0.3014741837978363,
      "learning_rate": 9.989621365671902e-05,
      "loss": 0.9212,
      "step": 18
    },
    {
      "epoch": 0.021378340365682136,
      "grad_norm": 0.3413006067276001,
      "learning_rate": 9.986865748457457e-05,
      "loss": 1.0007,
      "step": 19
    },
    {
      "epoch": 0.02250351617440225,
      "grad_norm": 0.29686427116394043,
      "learning_rate": 9.983786540671051e-05,
      "loss": 0.8971,
      "step": 20
    },
    {
      "epoch": 0.02362869198312236,
      "grad_norm": 0.30158573389053345,
      "learning_rate": 9.980383942118066e-05,
      "loss": 0.9584,
      "step": 21
    },
    {
      "epoch": 0.024753867791842476,
      "grad_norm": 0.2803223431110382,
      "learning_rate": 9.976658173588244e-05,
      "loss": 1.0235,
      "step": 22
    },
    {
      "epoch": 0.025879043600562587,
      "grad_norm": 0.2954846918582916,
      "learning_rate": 9.972609476841367e-05,
      "loss": 0.8306,
      "step": 23
    },
    {
      "epoch": 0.0270042194092827,
      "grad_norm": 0.29831573367118835,
      "learning_rate": 9.968238114591566e-05,
      "loss": 0.843,
      "step": 24
    },
    {
      "epoch": 0.02812939521800281,
      "grad_norm": 0.34613728523254395,
      "learning_rate": 9.96354437049027e-05,
      "loss": 0.7939,
      "step": 25
    },
    {
      "epoch": 0.029254571026722926,
      "grad_norm": 0.3171483874320984,
      "learning_rate": 9.95852854910781e-05,
      "loss": 0.7847,
      "step": 26
    },
    {
      "epoch": 0.030379746835443037,
      "grad_norm": 0.3092692494392395,
      "learning_rate": 9.953190975913647e-05,
      "loss": 0.9123,
      "step": 27
    },
    {
      "epoch": 0.03150492264416315,
      "grad_norm": 0.312860369682312,
      "learning_rate": 9.947531997255256e-05,
      "loss": 0.8217,
      "step": 28
    },
    {
      "epoch": 0.03263009845288326,
      "grad_norm": 0.34326767921447754,
      "learning_rate": 9.941551980335652e-05,
      "loss": 0.8803,
      "step": 29
    },
    {
      "epoch": 0.03375527426160337,
      "grad_norm": 0.3247174918651581,
      "learning_rate": 9.935251313189564e-05,
      "loss": 0.8407,
      "step": 30
    },
    {
      "epoch": 0.03488045007032349,
      "grad_norm": 0.31886929273605347,
      "learning_rate": 9.928630404658255e-05,
      "loss": 0.6957,
      "step": 31
    },
    {
      "epoch": 0.0360056258790436,
      "grad_norm": 0.30395761132240295,
      "learning_rate": 9.921689684362989e-05,
      "loss": 0.7102,
      "step": 32
    },
    {
      "epoch": 0.03713080168776371,
      "grad_norm": 0.32554319500923157,
      "learning_rate": 9.914429602677162e-05,
      "loss": 0.7969,
      "step": 33
    },
    {
      "epoch": 0.03825597749648382,
      "grad_norm": 0.335056871175766,
      "learning_rate": 9.906850630697068e-05,
      "loss": 0.6319,
      "step": 34
    },
    {
      "epoch": 0.03938115330520394,
      "grad_norm": 0.32416272163391113,
      "learning_rate": 9.898953260211338e-05,
      "loss": 0.6367,
      "step": 35
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 0.34065842628479004,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.7144,
      "step": 36
    },
    {
      "epoch": 0.04163150492264416,
      "grad_norm": 0.2950224280357361,
      "learning_rate": 9.882205394146361e-05,
      "loss": 0.5111,
      "step": 37
    },
    {
      "epoch": 0.04275668073136427,
      "grad_norm": 0.33300068974494934,
      "learning_rate": 9.87335598531214e-05,
      "loss": 0.5435,
      "step": 38
    },
    {
      "epoch": 0.04388185654008439,
      "grad_norm": 0.3185752034187317,
      "learning_rate": 9.864190351391822e-05,
      "loss": 0.4575,
      "step": 39
    },
    {
      "epoch": 0.0450070323488045,
      "grad_norm": 0.31263992190361023,
      "learning_rate": 9.85470908713026e-05,
      "loss": 0.5105,
      "step": 40
    },
    {
      "epoch": 0.04613220815752461,
      "grad_norm": 0.3303317129611969,
      "learning_rate": 9.844912807753104e-05,
      "loss": 0.5026,
      "step": 41
    },
    {
      "epoch": 0.04725738396624472,
      "grad_norm": 0.33453431725502014,
      "learning_rate": 9.834802148926882e-05,
      "loss": 0.4434,
      "step": 42
    },
    {
      "epoch": 0.04838255977496484,
      "grad_norm": 0.3641398251056671,
      "learning_rate": 9.824377766717759e-05,
      "loss": 0.5608,
      "step": 43
    },
    {
      "epoch": 0.04950773558368495,
      "grad_norm": 0.368456095457077,
      "learning_rate": 9.813640337548954e-05,
      "loss": 0.4456,
      "step": 44
    },
    {
      "epoch": 0.05063291139240506,
      "grad_norm": 0.38945502042770386,
      "learning_rate": 9.802590558156862e-05,
      "loss": 0.522,
      "step": 45
    },
    {
      "epoch": 0.05175808720112517,
      "grad_norm": 0.3877948224544525,
      "learning_rate": 9.791229145545831e-05,
      "loss": 0.3982,
      "step": 46
    },
    {
      "epoch": 0.05288326300984529,
      "grad_norm": 0.4522574245929718,
      "learning_rate": 9.779556836941645e-05,
      "loss": 0.3829,
      "step": 47
    },
    {
      "epoch": 0.0540084388185654,
      "grad_norm": 0.46986550092697144,
      "learning_rate": 9.767574389743682e-05,
      "loss": 0.4329,
      "step": 48
    },
    {
      "epoch": 0.05513361462728551,
      "grad_norm": 0.5505241751670837,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.4547,
      "step": 49
    },
    {
      "epoch": 0.05625879043600562,
      "grad_norm": 0.6366212368011475,
      "learning_rate": 9.742682209735727e-05,
      "loss": 0.4432,
      "step": 50
    },
    {
      "epoch": 0.05625879043600562,
      "eval_loss": 0.6768447160720825,
      "eval_runtime": 122.3079,
      "eval_samples_per_second": 12.24,
      "eval_steps_per_second": 3.066,
      "step": 50
    },
    {
      "epoch": 0.05738396624472574,
      "grad_norm": 0.3021714985370636,
      "learning_rate": 9.729774092143627e-05,
      "loss": 0.4182,
      "step": 51
    },
    {
      "epoch": 0.05850914205344585,
      "grad_norm": 0.2832154929637909,
      "learning_rate": 9.716559066288715e-05,
      "loss": 0.4495,
      "step": 52
    },
    {
      "epoch": 0.05963431786216596,
      "grad_norm": 0.2705238461494446,
      "learning_rate": 9.703037989675087e-05,
      "loss": 0.546,
      "step": 53
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 0.24180416762828827,
      "learning_rate": 9.689211739666023e-05,
      "loss": 0.5842,
      "step": 54
    },
    {
      "epoch": 0.06188466947960619,
      "grad_norm": 0.22725558280944824,
      "learning_rate": 9.675081213427076e-05,
      "loss": 0.5901,
      "step": 55
    },
    {
      "epoch": 0.0630098452883263,
      "grad_norm": 0.2603343725204468,
      "learning_rate": 9.66064732786784e-05,
      "loss": 0.6602,
      "step": 56
    },
    {
      "epoch": 0.06413502109704641,
      "grad_norm": 0.2453056126832962,
      "learning_rate": 9.645911019582467e-05,
      "loss": 0.6428,
      "step": 57
    },
    {
      "epoch": 0.06526019690576652,
      "grad_norm": 0.2500747740268707,
      "learning_rate": 9.630873244788883e-05,
      "loss": 0.6843,
      "step": 58
    },
    {
      "epoch": 0.06638537271448663,
      "grad_norm": 0.23956970870494843,
      "learning_rate": 9.615534979266745e-05,
      "loss": 0.7583,
      "step": 59
    },
    {
      "epoch": 0.06751054852320675,
      "grad_norm": 0.23142866790294647,
      "learning_rate": 9.599897218294122e-05,
      "loss": 0.7374,
      "step": 60
    },
    {
      "epoch": 0.06863572433192687,
      "grad_norm": 0.2538938820362091,
      "learning_rate": 9.583960976582913e-05,
      "loss": 0.8349,
      "step": 61
    },
    {
      "epoch": 0.06976090014064698,
      "grad_norm": 0.24747668206691742,
      "learning_rate": 9.567727288213005e-05,
      "loss": 0.7825,
      "step": 62
    },
    {
      "epoch": 0.07088607594936709,
      "grad_norm": 0.24818557500839233,
      "learning_rate": 9.551197206565173e-05,
      "loss": 0.7681,
      "step": 63
    },
    {
      "epoch": 0.0720112517580872,
      "grad_norm": 0.24176418781280518,
      "learning_rate": 9.534371804252728e-05,
      "loss": 0.7531,
      "step": 64
    },
    {
      "epoch": 0.07313642756680731,
      "grad_norm": 0.2653544843196869,
      "learning_rate": 9.517252173051911e-05,
      "loss": 0.912,
      "step": 65
    },
    {
      "epoch": 0.07426160337552742,
      "grad_norm": 0.2475324124097824,
      "learning_rate": 9.49983942383106e-05,
      "loss": 0.7183,
      "step": 66
    },
    {
      "epoch": 0.07538677918424753,
      "grad_norm": 0.25549477338790894,
      "learning_rate": 9.482134686478519e-05,
      "loss": 0.7015,
      "step": 67
    },
    {
      "epoch": 0.07651195499296765,
      "grad_norm": 0.2648347020149231,
      "learning_rate": 9.464139109829321e-05,
      "loss": 0.7304,
      "step": 68
    },
    {
      "epoch": 0.07763713080168777,
      "grad_norm": 0.2694757282733917,
      "learning_rate": 9.445853861590647e-05,
      "loss": 0.7315,
      "step": 69
    },
    {
      "epoch": 0.07876230661040788,
      "grad_norm": 0.2604469358921051,
      "learning_rate": 9.42728012826605e-05,
      "loss": 0.7452,
      "step": 70
    },
    {
      "epoch": 0.07988748241912799,
      "grad_norm": 0.2539106011390686,
      "learning_rate": 9.408419115078471e-05,
      "loss": 0.666,
      "step": 71
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 0.2885034680366516,
      "learning_rate": 9.389272045892024e-05,
      "loss": 0.9262,
      "step": 72
    },
    {
      "epoch": 0.08213783403656821,
      "grad_norm": 0.2839057743549347,
      "learning_rate": 9.36984016313259e-05,
      "loss": 0.7723,
      "step": 73
    },
    {
      "epoch": 0.08326300984528832,
      "grad_norm": 0.2611367702484131,
      "learning_rate": 9.350124727707197e-05,
      "loss": 0.6862,
      "step": 74
    },
    {
      "epoch": 0.08438818565400844,
      "grad_norm": 0.2865094542503357,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.8664,
      "step": 75
    },
    {
      "epoch": 0.08551336146272855,
      "grad_norm": 0.2991608679294586,
      "learning_rate": 9.309848334400246e-05,
      "loss": 0.8543,
      "step": 76
    },
    {
      "epoch": 0.08663853727144867,
      "grad_norm": 0.29869431257247925,
      "learning_rate": 9.289289989996133e-05,
      "loss": 0.7816,
      "step": 77
    },
    {
      "epoch": 0.08776371308016878,
      "grad_norm": 0.2918897867202759,
      "learning_rate": 9.268453319711363e-05,
      "loss": 0.7375,
      "step": 78
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 0.2945139706134796,
      "learning_rate": 9.247339675607605e-05,
      "loss": 0.797,
      "step": 79
    },
    {
      "epoch": 0.090014064697609,
      "grad_norm": 0.28907427191734314,
      "learning_rate": 9.225950427718975e-05,
      "loss": 0.6864,
      "step": 80
    },
    {
      "epoch": 0.09113924050632911,
      "grad_norm": 0.2843822240829468,
      "learning_rate": 9.204286963963111e-05,
      "loss": 0.6236,
      "step": 81
    },
    {
      "epoch": 0.09226441631504922,
      "grad_norm": 0.29010453820228577,
      "learning_rate": 9.182350690051133e-05,
      "loss": 0.6518,
      "step": 82
    },
    {
      "epoch": 0.09338959212376934,
      "grad_norm": 0.31116747856140137,
      "learning_rate": 9.160143029396422e-05,
      "loss": 0.6894,
      "step": 83
    },
    {
      "epoch": 0.09451476793248945,
      "grad_norm": 0.3072003424167633,
      "learning_rate": 9.13766542302225e-05,
      "loss": 0.6314,
      "step": 84
    },
    {
      "epoch": 0.09563994374120956,
      "grad_norm": 0.3316248953342438,
      "learning_rate": 9.114919329468282e-05,
      "loss": 0.6879,
      "step": 85
    },
    {
      "epoch": 0.09676511954992968,
      "grad_norm": 0.33323273062705994,
      "learning_rate": 9.091906224695935e-05,
      "loss": 0.6434,
      "step": 86
    },
    {
      "epoch": 0.09789029535864979,
      "grad_norm": 0.314699649810791,
      "learning_rate": 9.068627601992598e-05,
      "loss": 0.598,
      "step": 87
    },
    {
      "epoch": 0.0990154711673699,
      "grad_norm": 0.3086985647678375,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.5176,
      "step": 88
    },
    {
      "epoch": 0.10014064697609001,
      "grad_norm": 0.31014567613601685,
      "learning_rate": 9.021279861989885e-05,
      "loss": 0.5924,
      "step": 89
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 0.3152261972427368,
      "learning_rate": 8.997213817017507e-05,
      "loss": 0.459,
      "step": 90
    },
    {
      "epoch": 0.10239099859353024,
      "grad_norm": 0.31608080863952637,
      "learning_rate": 8.972888398568772e-05,
      "loss": 0.4356,
      "step": 91
    },
    {
      "epoch": 0.10351617440225035,
      "grad_norm": 0.312263548374176,
      "learning_rate": 8.948305185085225e-05,
      "loss": 0.3729,
      "step": 92
    },
    {
      "epoch": 0.10464135021097046,
      "grad_norm": 0.3479161858558655,
      "learning_rate": 8.92346577173636e-05,
      "loss": 0.4006,
      "step": 93
    },
    {
      "epoch": 0.10576652601969058,
      "grad_norm": 0.3391091823577881,
      "learning_rate": 8.898371770316111e-05,
      "loss": 0.4098,
      "step": 94
    },
    {
      "epoch": 0.10689170182841069,
      "grad_norm": 0.37615877389907837,
      "learning_rate": 8.873024809138272e-05,
      "loss": 0.3537,
      "step": 95
    },
    {
      "epoch": 0.1080168776371308,
      "grad_norm": 0.38181978464126587,
      "learning_rate": 8.847426532930831e-05,
      "loss": 0.4301,
      "step": 96
    },
    {
      "epoch": 0.10914205344585091,
      "grad_norm": 0.398378849029541,
      "learning_rate": 8.821578602729242e-05,
      "loss": 0.4368,
      "step": 97
    },
    {
      "epoch": 0.11026722925457103,
      "grad_norm": 0.4197014272212982,
      "learning_rate": 8.795482695768658e-05,
      "loss": 0.4488,
      "step": 98
    },
    {
      "epoch": 0.11139240506329114,
      "grad_norm": 0.4649895429611206,
      "learning_rate": 8.769140505375085e-05,
      "loss": 0.4162,
      "step": 99
    },
    {
      "epoch": 0.11251758087201125,
      "grad_norm": 0.6387284398078918,
      "learning_rate": 8.742553740855506e-05,
      "loss": 0.4822,
      "step": 100
    },
    {
      "epoch": 0.11251758087201125,
      "eval_loss": 0.644762396812439,
      "eval_runtime": 122.8498,
      "eval_samples_per_second": 12.186,
      "eval_steps_per_second": 3.053,
      "step": 100
    },
    {
      "epoch": 0.11364275668073136,
      "grad_norm": 0.28204846382141113,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.4037,
      "step": 101
    },
    {
      "epoch": 0.11476793248945148,
      "grad_norm": 0.2628364562988281,
      "learning_rate": 8.688653405904652e-05,
      "loss": 0.4282,
      "step": 102
    },
    {
      "epoch": 0.1158931082981716,
      "grad_norm": 0.24635303020477295,
      "learning_rate": 8.661343332988869e-05,
      "loss": 0.4933,
      "step": 103
    },
    {
      "epoch": 0.1170182841068917,
      "grad_norm": 0.22415153682231903,
      "learning_rate": 8.633795680751116e-05,
      "loss": 0.5442,
      "step": 104
    },
    {
      "epoch": 0.11814345991561181,
      "grad_norm": 0.2219788134098053,
      "learning_rate": 8.606012236719073e-05,
      "loss": 0.6066,
      "step": 105
    },
    {
      "epoch": 0.11926863572433193,
      "grad_norm": 0.2294948548078537,
      "learning_rate": 8.577994803720606e-05,
      "loss": 0.6212,
      "step": 106
    },
    {
      "epoch": 0.12039381153305204,
      "grad_norm": 0.23675329983234406,
      "learning_rate": 8.549745199766792e-05,
      "loss": 0.6921,
      "step": 107
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.24186259508132935,
      "learning_rate": 8.521265257933948e-05,
      "loss": 0.6363,
      "step": 108
    },
    {
      "epoch": 0.12264416315049226,
      "grad_norm": 0.23978343605995178,
      "learning_rate": 8.492556826244687e-05,
      "loss": 0.7421,
      "step": 109
    },
    {
      "epoch": 0.12376933895921238,
      "grad_norm": 0.22931262850761414,
      "learning_rate": 8.463621767547998e-05,
      "loss": 0.6781,
      "step": 110
    },
    {
      "epoch": 0.1248945147679325,
      "grad_norm": 0.2452748417854309,
      "learning_rate": 8.434461959398376e-05,
      "loss": 0.7911,
      "step": 111
    },
    {
      "epoch": 0.1260196905766526,
      "grad_norm": 0.25769487023353577,
      "learning_rate": 8.405079293933986e-05,
      "loss": 0.8928,
      "step": 112
    },
    {
      "epoch": 0.1271448663853727,
      "grad_norm": 0.26870453357696533,
      "learning_rate": 8.375475677753881e-05,
      "loss": 0.8826,
      "step": 113
    },
    {
      "epoch": 0.12827004219409283,
      "grad_norm": 0.2632165253162384,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.85,
      "step": 114
    },
    {
      "epoch": 0.12939521800281295,
      "grad_norm": 0.2665436267852783,
      "learning_rate": 8.315613291203976e-05,
      "loss": 0.9165,
      "step": 115
    },
    {
      "epoch": 0.13052039381153305,
      "grad_norm": 0.25475484132766724,
      "learning_rate": 8.285358405218655e-05,
      "loss": 0.7661,
      "step": 116
    },
    {
      "epoch": 0.13164556962025317,
      "grad_norm": 0.2527354657649994,
      "learning_rate": 8.25489033703452e-05,
      "loss": 0.7149,
      "step": 117
    },
    {
      "epoch": 0.13277074542897327,
      "grad_norm": 0.25752562284469604,
      "learning_rate": 8.224211063680853e-05,
      "loss": 0.8108,
      "step": 118
    },
    {
      "epoch": 0.1338959212376934,
      "grad_norm": 0.25537046790122986,
      "learning_rate": 8.19332257589174e-05,
      "loss": 0.6797,
      "step": 119
    },
    {
      "epoch": 0.1350210970464135,
      "grad_norm": 0.2648957371711731,
      "learning_rate": 8.162226877976887e-05,
      "loss": 0.7278,
      "step": 120
    },
    {
      "epoch": 0.13614627285513362,
      "grad_norm": 0.27850714325904846,
      "learning_rate": 8.130925987691569e-05,
      "loss": 0.718,
      "step": 121
    },
    {
      "epoch": 0.13727144866385374,
      "grad_norm": 0.2784062623977661,
      "learning_rate": 8.099421936105702e-05,
      "loss": 0.7789,
      "step": 122
    },
    {
      "epoch": 0.13839662447257384,
      "grad_norm": 0.2852023243904114,
      "learning_rate": 8.067716767472045e-05,
      "loss": 0.7814,
      "step": 123
    },
    {
      "epoch": 0.13952180028129396,
      "grad_norm": 0.3003855347633362,
      "learning_rate": 8.035812539093557e-05,
      "loss": 0.8263,
      "step": 124
    },
    {
      "epoch": 0.14064697609001406,
      "grad_norm": 0.2815362215042114,
      "learning_rate": 8.003711321189895e-05,
      "loss": 0.7245,
      "step": 125
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.2952372133731842,
      "learning_rate": 7.971415196763088e-05,
      "loss": 0.8003,
      "step": 126
    },
    {
      "epoch": 0.14289732770745428,
      "grad_norm": 0.30167871713638306,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.7914,
      "step": 127
    },
    {
      "epoch": 0.1440225035161744,
      "grad_norm": 0.30462878942489624,
      "learning_rate": 7.906246623448183e-05,
      "loss": 0.8007,
      "step": 128
    },
    {
      "epoch": 0.1451476793248945,
      "grad_norm": 0.3005084991455078,
      "learning_rate": 7.873378403255419e-05,
      "loss": 0.7998,
      "step": 129
    },
    {
      "epoch": 0.14627285513361463,
      "grad_norm": 0.30115005373954773,
      "learning_rate": 7.840323733655778e-05,
      "loss": 0.7084,
      "step": 130
    },
    {
      "epoch": 0.14739803094233475,
      "grad_norm": 0.29491057991981506,
      "learning_rate": 7.807084759519405e-05,
      "loss": 0.6415,
      "step": 131
    },
    {
      "epoch": 0.14852320675105485,
      "grad_norm": 0.30844321846961975,
      "learning_rate": 7.773663637675694e-05,
      "loss": 0.7082,
      "step": 132
    },
    {
      "epoch": 0.14964838255977497,
      "grad_norm": 0.30617377161979675,
      "learning_rate": 7.740062536773352e-05,
      "loss": 0.658,
      "step": 133
    },
    {
      "epoch": 0.15077355836849507,
      "grad_norm": 0.3148016929626465,
      "learning_rate": 7.706283637139658e-05,
      "loss": 0.6561,
      "step": 134
    },
    {
      "epoch": 0.1518987341772152,
      "grad_norm": 0.31739330291748047,
      "learning_rate": 7.672329130639005e-05,
      "loss": 0.6966,
      "step": 135
    },
    {
      "epoch": 0.1530239099859353,
      "grad_norm": 0.30230337381362915,
      "learning_rate": 7.638201220530665e-05,
      "loss": 0.482,
      "step": 136
    },
    {
      "epoch": 0.15414908579465542,
      "grad_norm": 0.3207411766052246,
      "learning_rate": 7.603902121325813e-05,
      "loss": 0.6072,
      "step": 137
    },
    {
      "epoch": 0.15527426160337554,
      "grad_norm": 0.3160305917263031,
      "learning_rate": 7.569434058643844e-05,
      "loss": 0.5652,
      "step": 138
    },
    {
      "epoch": 0.15639943741209564,
      "grad_norm": 0.30304470658302307,
      "learning_rate": 7.534799269067953e-05,
      "loss": 0.4681,
      "step": 139
    },
    {
      "epoch": 0.15752461322081576,
      "grad_norm": 0.3202410042285919,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.4403,
      "step": 140
    },
    {
      "epoch": 0.15864978902953586,
      "grad_norm": 0.32677364349365234,
      "learning_rate": 7.465038509514688e-05,
      "loss": 0.4632,
      "step": 141
    },
    {
      "epoch": 0.15977496483825598,
      "grad_norm": 0.35057735443115234,
      "learning_rate": 7.42991706621303e-05,
      "loss": 0.5051,
      "step": 142
    },
    {
      "epoch": 0.16090014064697608,
      "grad_norm": 0.34558597207069397,
      "learning_rate": 7.394637949075154e-05,
      "loss": 0.4052,
      "step": 143
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.35772836208343506,
      "learning_rate": 7.35920344731241e-05,
      "loss": 0.407,
      "step": 144
    },
    {
      "epoch": 0.1631504922644163,
      "grad_norm": 0.3630152642726898,
      "learning_rate": 7.323615860218843e-05,
      "loss": 0.4163,
      "step": 145
    },
    {
      "epoch": 0.16427566807313643,
      "grad_norm": 0.3928242325782776,
      "learning_rate": 7.287877497021978e-05,
      "loss": 0.4362,
      "step": 146
    },
    {
      "epoch": 0.16540084388185655,
      "grad_norm": 0.45405474305152893,
      "learning_rate": 7.251990676732984e-05,
      "loss": 0.5051,
      "step": 147
    },
    {
      "epoch": 0.16652601969057665,
      "grad_norm": 0.4049229323863983,
      "learning_rate": 7.215957727996207e-05,
      "loss": 0.353,
      "step": 148
    },
    {
      "epoch": 0.16765119549929677,
      "grad_norm": 0.49053892493247986,
      "learning_rate": 7.179780988938051e-05,
      "loss": 0.358,
      "step": 149
    },
    {
      "epoch": 0.16877637130801687,
      "grad_norm": 0.6350364685058594,
      "learning_rate": 7.143462807015271e-05,
      "loss": 0.4267,
      "step": 150
    },
    {
      "epoch": 0.16877637130801687,
      "eval_loss": 0.6191428303718567,
      "eval_runtime": 122.8049,
      "eval_samples_per_second": 12.19,
      "eval_steps_per_second": 3.054,
      "step": 150
    },
    {
      "epoch": 0.169901547116737,
      "grad_norm": 0.22611109912395477,
      "learning_rate": 7.107005538862646e-05,
      "loss": 0.3608,
      "step": 151
    },
    {
      "epoch": 0.1710267229254571,
      "grad_norm": 0.2486761510372162,
      "learning_rate": 7.07041155014006e-05,
      "loss": 0.4149,
      "step": 152
    },
    {
      "epoch": 0.17215189873417722,
      "grad_norm": 0.22183437645435333,
      "learning_rate": 7.033683215379002e-05,
      "loss": 0.4472,
      "step": 153
    },
    {
      "epoch": 0.17327707454289734,
      "grad_norm": 0.24657611548900604,
      "learning_rate": 6.996822917828477e-05,
      "loss": 0.5571,
      "step": 154
    },
    {
      "epoch": 0.17440225035161744,
      "grad_norm": 0.22693145275115967,
      "learning_rate": 6.959833049300377e-05,
      "loss": 0.5807,
      "step": 155
    },
    {
      "epoch": 0.17552742616033756,
      "grad_norm": 0.22520385682582855,
      "learning_rate": 6.922716010014255e-05,
      "loss": 0.5593,
      "step": 156
    },
    {
      "epoch": 0.17665260196905766,
      "grad_norm": 0.24080556631088257,
      "learning_rate": 6.885474208441603e-05,
      "loss": 0.6988,
      "step": 157
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 0.23336027562618256,
      "learning_rate": 6.848110061149556e-05,
      "loss": 0.6938,
      "step": 158
    },
    {
      "epoch": 0.17890295358649788,
      "grad_norm": 0.24269425868988037,
      "learning_rate": 6.810625992644085e-05,
      "loss": 0.6492,
      "step": 159
    },
    {
      "epoch": 0.180028129395218,
      "grad_norm": 0.257687509059906,
      "learning_rate": 6.773024435212678e-05,
      "loss": 0.7288,
      "step": 160
    },
    {
      "epoch": 0.1811533052039381,
      "grad_norm": 0.24201682209968567,
      "learning_rate": 6.735307828766515e-05,
      "loss": 0.7097,
      "step": 161
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.2506401538848877,
      "learning_rate": 6.697478620682137e-05,
      "loss": 0.7142,
      "step": 162
    },
    {
      "epoch": 0.18340365682137835,
      "grad_norm": 0.2325735092163086,
      "learning_rate": 6.659539265642643e-05,
      "loss": 0.6644,
      "step": 163
    },
    {
      "epoch": 0.18452883263009845,
      "grad_norm": 0.2502652704715729,
      "learning_rate": 6.621492225478414e-05,
      "loss": 0.7367,
      "step": 164
    },
    {
      "epoch": 0.18565400843881857,
      "grad_norm": 0.2414039522409439,
      "learning_rate": 6.583339969007363e-05,
      "loss": 0.7728,
      "step": 165
    },
    {
      "epoch": 0.18677918424753867,
      "grad_norm": 0.2699027359485626,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.8384,
      "step": 166
    },
    {
      "epoch": 0.1879043600562588,
      "grad_norm": 0.2658804655075073,
      "learning_rate": 6.506729716392481e-05,
      "loss": 0.8469,
      "step": 167
    },
    {
      "epoch": 0.1890295358649789,
      "grad_norm": 0.25071781873703003,
      "learning_rate": 6.468276691378155e-05,
      "loss": 0.6885,
      "step": 168
    },
    {
      "epoch": 0.19015471167369902,
      "grad_norm": 0.2700413763523102,
      "learning_rate": 6.429728391993446e-05,
      "loss": 0.825,
      "step": 169
    },
    {
      "epoch": 0.19127988748241911,
      "grad_norm": 0.27662935853004456,
      "learning_rate": 6.391087319582264e-05,
      "loss": 0.8351,
      "step": 170
    },
    {
      "epoch": 0.19240506329113924,
      "grad_norm": 0.26488763093948364,
      "learning_rate": 6.35235598150842e-05,
      "loss": 0.7827,
      "step": 171
    },
    {
      "epoch": 0.19353023909985936,
      "grad_norm": 0.2654742896556854,
      "learning_rate": 6.313536890992935e-05,
      "loss": 0.8177,
      "step": 172
    },
    {
      "epoch": 0.19465541490857946,
      "grad_norm": 0.2809375822544098,
      "learning_rate": 6.274632566950967e-05,
      "loss": 0.8218,
      "step": 173
    },
    {
      "epoch": 0.19578059071729959,
      "grad_norm": 0.26899024844169617,
      "learning_rate": 6.235645533828349e-05,
      "loss": 0.7878,
      "step": 174
    },
    {
      "epoch": 0.19690576652601968,
      "grad_norm": 0.2818717062473297,
      "learning_rate": 6.19657832143779e-05,
      "loss": 0.785,
      "step": 175
    },
    {
      "epoch": 0.1980309423347398,
      "grad_norm": 0.3077673316001892,
      "learning_rate": 6.157433464794716e-05,
      "loss": 0.8369,
      "step": 176
    },
    {
      "epoch": 0.1991561181434599,
      "grad_norm": 0.277925580739975,
      "learning_rate": 6.118213503952779e-05,
      "loss": 0.6012,
      "step": 177
    },
    {
      "epoch": 0.20028129395218003,
      "grad_norm": 0.28356897830963135,
      "learning_rate": 6.078920983839031e-05,
      "loss": 0.7352,
      "step": 178
    },
    {
      "epoch": 0.20140646976090015,
      "grad_norm": 0.2859312891960144,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 0.719,
      "step": 179
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.31765639781951904,
      "learning_rate": 6.0001284688802226e-05,
      "loss": 0.8586,
      "step": 180
    },
    {
      "epoch": 0.20365682137834037,
      "grad_norm": 0.2968887984752655,
      "learning_rate": 5.960633586768543e-05,
      "loss": 0.6802,
      "step": 181
    },
    {
      "epoch": 0.20478199718706047,
      "grad_norm": 0.294272780418396,
      "learning_rate": 5.921076370520058e-05,
      "loss": 0.6895,
      "step": 182
    },
    {
      "epoch": 0.2059071729957806,
      "grad_norm": 0.3082694709300995,
      "learning_rate": 5.8814593869458455e-05,
      "loss": 0.7071,
      "step": 183
    },
    {
      "epoch": 0.2070323488045007,
      "grad_norm": 0.3072359561920166,
      "learning_rate": 5.841785206735192e-05,
      "loss": 0.6542,
      "step": 184
    },
    {
      "epoch": 0.20815752461322082,
      "grad_norm": 0.28556135296821594,
      "learning_rate": 5.8020564042888015e-05,
      "loss": 0.5641,
      "step": 185
    },
    {
      "epoch": 0.20928270042194091,
      "grad_norm": 0.3505888283252716,
      "learning_rate": 5.762275557551727e-05,
      "loss": 0.8211,
      "step": 186
    },
    {
      "epoch": 0.21040787623066104,
      "grad_norm": 0.3167312443256378,
      "learning_rate": 5.7224452478461064e-05,
      "loss": 0.5715,
      "step": 187
    },
    {
      "epoch": 0.21153305203938116,
      "grad_norm": 0.30357834696769714,
      "learning_rate": 5.682568059703659e-05,
      "loss": 0.5422,
      "step": 188
    },
    {
      "epoch": 0.21265822784810126,
      "grad_norm": 0.28745442628860474,
      "learning_rate": 5.642646580697973e-05,
      "loss": 0.3673,
      "step": 189
    },
    {
      "epoch": 0.21378340365682139,
      "grad_norm": 0.32116490602493286,
      "learning_rate": 5.602683401276615e-05,
      "loss": 0.4719,
      "step": 190
    },
    {
      "epoch": 0.21490857946554148,
      "grad_norm": 0.31803691387176514,
      "learning_rate": 5.562681114593028e-05,
      "loss": 0.4755,
      "step": 191
    },
    {
      "epoch": 0.2160337552742616,
      "grad_norm": 0.3275885283946991,
      "learning_rate": 5.522642316338268e-05,
      "loss": 0.4121,
      "step": 192
    },
    {
      "epoch": 0.2171589310829817,
      "grad_norm": 0.3217777907848358,
      "learning_rate": 5.482569604572576e-05,
      "loss": 0.3525,
      "step": 193
    },
    {
      "epoch": 0.21828410689170183,
      "grad_norm": 0.33562275767326355,
      "learning_rate": 5.442465579556793e-05,
      "loss": 0.3489,
      "step": 194
    },
    {
      "epoch": 0.21940928270042195,
      "grad_norm": 0.3709050416946411,
      "learning_rate": 5.402332843583631e-05,
      "loss": 0.4602,
      "step": 195
    },
    {
      "epoch": 0.22053445850914205,
      "grad_norm": 0.3461264371871948,
      "learning_rate": 5.3621740008088126e-05,
      "loss": 0.3244,
      "step": 196
    },
    {
      "epoch": 0.22165963431786218,
      "grad_norm": 0.40534359216690063,
      "learning_rate": 5.321991657082097e-05,
      "loss": 0.4012,
      "step": 197
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.425788015127182,
      "learning_rate": 5.281788419778187e-05,
      "loss": 0.3741,
      "step": 198
    },
    {
      "epoch": 0.2239099859353024,
      "grad_norm": 0.47200363874435425,
      "learning_rate": 5.2415668976275355e-05,
      "loss": 0.3717,
      "step": 199
    },
    {
      "epoch": 0.2250351617440225,
      "grad_norm": 0.5842266082763672,
      "learning_rate": 5.201329700547076e-05,
      "loss": 0.4174,
      "step": 200
    },
    {
      "epoch": 0.2250351617440225,
      "eval_loss": 0.6018708348274231,
      "eval_runtime": 122.7147,
      "eval_samples_per_second": 12.199,
      "eval_steps_per_second": 3.056,
      "step": 200
    },
    {
      "epoch": 0.22616033755274262,
      "grad_norm": 0.19296161830425262,
      "learning_rate": 5.161079439470866e-05,
      "loss": 0.3172,
      "step": 201
    },
    {
      "epoch": 0.22728551336146272,
      "grad_norm": 0.2037908434867859,
      "learning_rate": 5.1208187261806615e-05,
      "loss": 0.4096,
      "step": 202
    },
    {
      "epoch": 0.22841068917018284,
      "grad_norm": 0.23043642938137054,
      "learning_rate": 5.080550173136457e-05,
      "loss": 0.4616,
      "step": 203
    },
    {
      "epoch": 0.22953586497890296,
      "grad_norm": 0.24173596501350403,
      "learning_rate": 5.0402763933069496e-05,
      "loss": 0.5913,
      "step": 204
    },
    {
      "epoch": 0.23066104078762306,
      "grad_norm": 0.22251668572425842,
      "learning_rate": 5e-05,
      "loss": 0.5383,
      "step": 205
    },
    {
      "epoch": 0.2317862165963432,
      "grad_norm": 0.23674923181533813,
      "learning_rate": 4.9597236066930516e-05,
      "loss": 0.6536,
      "step": 206
    },
    {
      "epoch": 0.23291139240506328,
      "grad_norm": 0.22485093772411346,
      "learning_rate": 4.919449826863544e-05,
      "loss": 0.5736,
      "step": 207
    },
    {
      "epoch": 0.2340365682137834,
      "grad_norm": 0.23937222361564636,
      "learning_rate": 4.87918127381934e-05,
      "loss": 0.7585,
      "step": 208
    },
    {
      "epoch": 0.2351617440225035,
      "grad_norm": 0.23238810896873474,
      "learning_rate": 4.8389205605291365e-05,
      "loss": 0.647,
      "step": 209
    },
    {
      "epoch": 0.23628691983122363,
      "grad_norm": 0.24751240015029907,
      "learning_rate": 4.798670299452926e-05,
      "loss": 0.7593,
      "step": 210
    },
    {
      "epoch": 0.23741209563994375,
      "grad_norm": 0.24134927988052368,
      "learning_rate": 4.758433102372466e-05,
      "loss": 0.6838,
      "step": 211
    },
    {
      "epoch": 0.23853727144866385,
      "grad_norm": 0.2430945336818695,
      "learning_rate": 4.7182115802218126e-05,
      "loss": 0.6795,
      "step": 212
    },
    {
      "epoch": 0.23966244725738398,
      "grad_norm": 0.24926406145095825,
      "learning_rate": 4.678008342917903e-05,
      "loss": 0.6832,
      "step": 213
    },
    {
      "epoch": 0.24078762306610407,
      "grad_norm": 0.27010077238082886,
      "learning_rate": 4.6378259991911886e-05,
      "loss": 0.8451,
      "step": 214
    },
    {
      "epoch": 0.2419127988748242,
      "grad_norm": 0.25659826397895813,
      "learning_rate": 4.597667156416371e-05,
      "loss": 0.8018,
      "step": 215
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.26555147767066956,
      "learning_rate": 4.5575344204432084e-05,
      "loss": 0.8489,
      "step": 216
    },
    {
      "epoch": 0.24416315049226442,
      "grad_norm": 0.26006457209587097,
      "learning_rate": 4.5174303954274244e-05,
      "loss": 0.7798,
      "step": 217
    },
    {
      "epoch": 0.24528832630098452,
      "grad_norm": 0.2641880214214325,
      "learning_rate": 4.477357683661734e-05,
      "loss": 0.7372,
      "step": 218
    },
    {
      "epoch": 0.24641350210970464,
      "grad_norm": 0.2819690406322479,
      "learning_rate": 4.437318885406973e-05,
      "loss": 0.9184,
      "step": 219
    },
    {
      "epoch": 0.24753867791842477,
      "grad_norm": 0.26662251353263855,
      "learning_rate": 4.397316598723385e-05,
      "loss": 0.788,
      "step": 220
    },
    {
      "epoch": 0.24866385372714486,
      "grad_norm": 0.2747672200202942,
      "learning_rate": 4.3573534193020274e-05,
      "loss": 0.7521,
      "step": 221
    },
    {
      "epoch": 0.249789029535865,
      "grad_norm": 0.2652323842048645,
      "learning_rate": 4.317431940296343e-05,
      "loss": 0.7034,
      "step": 222
    },
    {
      "epoch": 0.2509142053445851,
      "grad_norm": 0.2707549035549164,
      "learning_rate": 4.277554752153895e-05,
      "loss": 0.7233,
      "step": 223
    },
    {
      "epoch": 0.2520393811533052,
      "grad_norm": 0.25702497363090515,
      "learning_rate": 4.237724442448273e-05,
      "loss": 0.637,
      "step": 224
    },
    {
      "epoch": 0.25316455696202533,
      "grad_norm": 0.2797415852546692,
      "learning_rate": 4.197943595711198e-05,
      "loss": 0.7079,
      "step": 225
    },
    {
      "epoch": 0.2542897327707454,
      "grad_norm": 0.26651498675346375,
      "learning_rate": 4.1582147932648074e-05,
      "loss": 0.7022,
      "step": 226
    },
    {
      "epoch": 0.2554149085794655,
      "grad_norm": 0.28537195920944214,
      "learning_rate": 4.118540613054156e-05,
      "loss": 0.7823,
      "step": 227
    },
    {
      "epoch": 0.25654008438818565,
      "grad_norm": 0.301383912563324,
      "learning_rate": 4.078923629479943e-05,
      "loss": 0.7906,
      "step": 228
    },
    {
      "epoch": 0.2576652601969058,
      "grad_norm": 0.3013364374637604,
      "learning_rate": 4.039366413231458e-05,
      "loss": 0.7727,
      "step": 229
    },
    {
      "epoch": 0.2587904360056259,
      "grad_norm": 0.2738421857357025,
      "learning_rate": 3.9998715311197785e-05,
      "loss": 0.6249,
      "step": 230
    },
    {
      "epoch": 0.25991561181434597,
      "grad_norm": 0.31420156359672546,
      "learning_rate": 3.960441545911204e-05,
      "loss": 0.8525,
      "step": 231
    },
    {
      "epoch": 0.2610407876230661,
      "grad_norm": 0.28208857774734497,
      "learning_rate": 3.92107901616097e-05,
      "loss": 0.6143,
      "step": 232
    },
    {
      "epoch": 0.2621659634317862,
      "grad_norm": 0.29040223360061646,
      "learning_rate": 3.8817864960472236e-05,
      "loss": 0.6351,
      "step": 233
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.3167511224746704,
      "learning_rate": 3.842566535205286e-05,
      "loss": 0.5947,
      "step": 234
    },
    {
      "epoch": 0.26441631504922647,
      "grad_norm": 0.30433058738708496,
      "learning_rate": 3.803421678562213e-05,
      "loss": 0.5977,
      "step": 235
    },
    {
      "epoch": 0.26554149085794654,
      "grad_norm": 0.2989290952682495,
      "learning_rate": 3.764354466171652e-05,
      "loss": 0.5009,
      "step": 236
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.3097265064716339,
      "learning_rate": 3.725367433049033e-05,
      "loss": 0.5652,
      "step": 237
    },
    {
      "epoch": 0.2677918424753868,
      "grad_norm": 0.31169530749320984,
      "learning_rate": 3.6864631090070655e-05,
      "loss": 0.5104,
      "step": 238
    },
    {
      "epoch": 0.2689170182841069,
      "grad_norm": 0.3056240379810333,
      "learning_rate": 3.6476440184915815e-05,
      "loss": 0.4947,
      "step": 239
    },
    {
      "epoch": 0.270042194092827,
      "grad_norm": 0.3068471848964691,
      "learning_rate": 3.608912680417737e-05,
      "loss": 0.4709,
      "step": 240
    },
    {
      "epoch": 0.2711673699015471,
      "grad_norm": 0.3218245804309845,
      "learning_rate": 3.570271608006555e-05,
      "loss": 0.4978,
      "step": 241
    },
    {
      "epoch": 0.27229254571026723,
      "grad_norm": 0.2946934700012207,
      "learning_rate": 3.531723308621847e-05,
      "loss": 0.3584,
      "step": 242
    },
    {
      "epoch": 0.27341772151898736,
      "grad_norm": 0.3193693161010742,
      "learning_rate": 3.493270283607522e-05,
      "loss": 0.4092,
      "step": 243
    },
    {
      "epoch": 0.2745428973277075,
      "grad_norm": 0.32379648089408875,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.3391,
      "step": 244
    },
    {
      "epoch": 0.27566807313642755,
      "grad_norm": 0.36625102162361145,
      "learning_rate": 3.4166600309926387e-05,
      "loss": 0.417,
      "step": 245
    },
    {
      "epoch": 0.2767932489451477,
      "grad_norm": 0.3967529237270355,
      "learning_rate": 3.3785077745215873e-05,
      "loss": 0.4547,
      "step": 246
    },
    {
      "epoch": 0.2779184247538678,
      "grad_norm": 0.4104136824607849,
      "learning_rate": 3.340460734357359e-05,
      "loss": 0.4187,
      "step": 247
    },
    {
      "epoch": 0.2790436005625879,
      "grad_norm": 0.4106130301952362,
      "learning_rate": 3.3025213793178646e-05,
      "loss": 0.3858,
      "step": 248
    },
    {
      "epoch": 0.280168776371308,
      "grad_norm": 0.45233193039894104,
      "learning_rate": 3.264692171233485e-05,
      "loss": 0.352,
      "step": 249
    },
    {
      "epoch": 0.2812939521800281,
      "grad_norm": 0.47355005145072937,
      "learning_rate": 3.226975564787322e-05,
      "loss": 0.295,
      "step": 250
    },
    {
      "epoch": 0.2812939521800281,
      "eval_loss": 0.5867157578468323,
      "eval_runtime": 122.2413,
      "eval_samples_per_second": 12.246,
      "eval_steps_per_second": 3.068,
      "step": 250
    },
    {
      "epoch": 0.28241912798874824,
      "grad_norm": 0.211994931101799,
      "learning_rate": 3.189374007355917e-05,
      "loss": 0.4111,
      "step": 251
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.20463378727436066,
      "learning_rate": 3.151889938850445e-05,
      "loss": 0.382,
      "step": 252
    },
    {
      "epoch": 0.2846694796061885,
      "grad_norm": 0.1885218471288681,
      "learning_rate": 3.114525791558398e-05,
      "loss": 0.386,
      "step": 253
    },
    {
      "epoch": 0.28579465541490856,
      "grad_norm": 0.2250271886587143,
      "learning_rate": 3.0772839899857464e-05,
      "loss": 0.5242,
      "step": 254
    },
    {
      "epoch": 0.2869198312236287,
      "grad_norm": 0.22449398040771484,
      "learning_rate": 3.0401669506996256e-05,
      "loss": 0.4741,
      "step": 255
    },
    {
      "epoch": 0.2880450070323488,
      "grad_norm": 0.22009386122226715,
      "learning_rate": 3.003177082171523e-05,
      "loss": 0.5467,
      "step": 256
    },
    {
      "epoch": 0.28917018284106893,
      "grad_norm": 0.2301923781633377,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 0.6133,
      "step": 257
    },
    {
      "epoch": 0.290295358649789,
      "grad_norm": 0.22307683527469635,
      "learning_rate": 2.9295884498599414e-05,
      "loss": 0.5158,
      "step": 258
    },
    {
      "epoch": 0.29142053445850913,
      "grad_norm": 0.25228241086006165,
      "learning_rate": 2.8929944611373554e-05,
      "loss": 0.788,
      "step": 259
    },
    {
      "epoch": 0.29254571026722925,
      "grad_norm": 0.23485252261161804,
      "learning_rate": 2.8565371929847284e-05,
      "loss": 0.6315,
      "step": 260
    },
    {
      "epoch": 0.2936708860759494,
      "grad_norm": 0.24391689896583557,
      "learning_rate": 2.8202190110619493e-05,
      "loss": 0.7767,
      "step": 261
    },
    {
      "epoch": 0.2947960618846695,
      "grad_norm": 0.24884432554244995,
      "learning_rate": 2.784042272003794e-05,
      "loss": 0.7886,
      "step": 262
    },
    {
      "epoch": 0.29592123769338957,
      "grad_norm": 0.2370489090681076,
      "learning_rate": 2.7480093232670158e-05,
      "loss": 0.7015,
      "step": 263
    },
    {
      "epoch": 0.2970464135021097,
      "grad_norm": 0.25279098749160767,
      "learning_rate": 2.712122502978024e-05,
      "loss": 0.8111,
      "step": 264
    },
    {
      "epoch": 0.2981715893108298,
      "grad_norm": 0.24870732426643372,
      "learning_rate": 2.6763841397811573e-05,
      "loss": 0.7925,
      "step": 265
    },
    {
      "epoch": 0.29929676511954995,
      "grad_norm": 0.2534269094467163,
      "learning_rate": 2.64079655268759e-05,
      "loss": 0.7968,
      "step": 266
    },
    {
      "epoch": 0.30042194092827,
      "grad_norm": 0.25014856457710266,
      "learning_rate": 2.605362050924848e-05,
      "loss": 0.7739,
      "step": 267
    },
    {
      "epoch": 0.30154711673699014,
      "grad_norm": 0.2641415297985077,
      "learning_rate": 2.57008293378697e-05,
      "loss": 0.8149,
      "step": 268
    },
    {
      "epoch": 0.30267229254571026,
      "grad_norm": 0.25722652673721313,
      "learning_rate": 2.534961490485313e-05,
      "loss": 0.6831,
      "step": 269
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 0.2503587305545807,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.6551,
      "step": 270
    },
    {
      "epoch": 0.3049226441631505,
      "grad_norm": 0.2686573266983032,
      "learning_rate": 2.4652007309320498e-05,
      "loss": 0.7863,
      "step": 271
    },
    {
      "epoch": 0.3060478199718706,
      "grad_norm": 0.27820244431495667,
      "learning_rate": 2.430565941356157e-05,
      "loss": 0.7644,
      "step": 272
    },
    {
      "epoch": 0.3071729957805907,
      "grad_norm": 0.27415186166763306,
      "learning_rate": 2.3960978786741877e-05,
      "loss": 0.756,
      "step": 273
    },
    {
      "epoch": 0.30829817158931083,
      "grad_norm": 0.28485107421875,
      "learning_rate": 2.361798779469336e-05,
      "loss": 0.7773,
      "step": 274
    },
    {
      "epoch": 0.30942334739803096,
      "grad_norm": 0.2954002022743225,
      "learning_rate": 2.3276708693609943e-05,
      "loss": 0.7728,
      "step": 275
    },
    {
      "epoch": 0.3105485232067511,
      "grad_norm": 0.2666501998901367,
      "learning_rate": 2.2937163628603435e-05,
      "loss": 0.6268,
      "step": 276
    },
    {
      "epoch": 0.31167369901547115,
      "grad_norm": 0.27716466784477234,
      "learning_rate": 2.259937463226651e-05,
      "loss": 0.6935,
      "step": 277
    },
    {
      "epoch": 0.3127988748241913,
      "grad_norm": 0.2816527485847473,
      "learning_rate": 2.2263363623243054e-05,
      "loss": 0.7137,
      "step": 278
    },
    {
      "epoch": 0.3139240506329114,
      "grad_norm": 0.29209843277931213,
      "learning_rate": 2.192915240480596e-05,
      "loss": 0.7184,
      "step": 279
    },
    {
      "epoch": 0.3150492264416315,
      "grad_norm": 0.3039759397506714,
      "learning_rate": 2.1596762663442218e-05,
      "loss": 0.8377,
      "step": 280
    },
    {
      "epoch": 0.3161744022503516,
      "grad_norm": 0.29953083395957947,
      "learning_rate": 2.1266215967445824e-05,
      "loss": 0.7513,
      "step": 281
    },
    {
      "epoch": 0.3172995780590717,
      "grad_norm": 0.29667040705680847,
      "learning_rate": 2.0937533765518187e-05,
      "loss": 0.7013,
      "step": 282
    },
    {
      "epoch": 0.31842475386779184,
      "grad_norm": 0.30537861585617065,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.6985,
      "step": 283
    },
    {
      "epoch": 0.31954992967651197,
      "grad_norm": 0.2818076014518738,
      "learning_rate": 2.0285848032369137e-05,
      "loss": 0.5899,
      "step": 284
    },
    {
      "epoch": 0.3206751054852321,
      "grad_norm": 0.3025727868080139,
      "learning_rate": 1.996288678810105e-05,
      "loss": 0.6654,
      "step": 285
    },
    {
      "epoch": 0.32180028129395216,
      "grad_norm": 0.2751609683036804,
      "learning_rate": 1.9641874609064443e-05,
      "loss": 0.5649,
      "step": 286
    },
    {
      "epoch": 0.3229254571026723,
      "grad_norm": 0.29040881991386414,
      "learning_rate": 1.932283232527956e-05,
      "loss": 0.5611,
      "step": 287
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 0.2740771770477295,
      "learning_rate": 1.9005780638942982e-05,
      "loss": 0.4361,
      "step": 288
    },
    {
      "epoch": 0.32517580872011254,
      "grad_norm": 0.2771192789077759,
      "learning_rate": 1.8690740123084316e-05,
      "loss": 0.4164,
      "step": 289
    },
    {
      "epoch": 0.3263009845288326,
      "grad_norm": 0.305276483297348,
      "learning_rate": 1.837773122023114e-05,
      "loss": 0.5081,
      "step": 290
    },
    {
      "epoch": 0.32742616033755273,
      "grad_norm": 0.31528910994529724,
      "learning_rate": 1.8066774241082612e-05,
      "loss": 0.5404,
      "step": 291
    },
    {
      "epoch": 0.32855133614627285,
      "grad_norm": 0.34744343161582947,
      "learning_rate": 1.7757889363191483e-05,
      "loss": 0.587,
      "step": 292
    },
    {
      "epoch": 0.329676511954993,
      "grad_norm": 0.31660598516464233,
      "learning_rate": 1.745109662965481e-05,
      "loss": 0.389,
      "step": 293
    },
    {
      "epoch": 0.3308016877637131,
      "grad_norm": 0.3436177968978882,
      "learning_rate": 1.714641594781347e-05,
      "loss": 0.417,
      "step": 294
    },
    {
      "epoch": 0.3319268635724332,
      "grad_norm": 0.3294260799884796,
      "learning_rate": 1.684386708796025e-05,
      "loss": 0.3738,
      "step": 295
    },
    {
      "epoch": 0.3330520393811533,
      "grad_norm": 0.35129880905151367,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 0.3655,
      "step": 296
    },
    {
      "epoch": 0.3341772151898734,
      "grad_norm": 0.4063621759414673,
      "learning_rate": 1.62452432224612e-05,
      "loss": 0.4208,
      "step": 297
    },
    {
      "epoch": 0.33530239099859355,
      "grad_norm": 0.4242677092552185,
      "learning_rate": 1.5949207060660138e-05,
      "loss": 0.4233,
      "step": 298
    },
    {
      "epoch": 0.3364275668073136,
      "grad_norm": 0.43796253204345703,
      "learning_rate": 1.5655380406016235e-05,
      "loss": 0.3132,
      "step": 299
    },
    {
      "epoch": 0.33755274261603374,
      "grad_norm": 0.5782553553581238,
      "learning_rate": 1.536378232452003e-05,
      "loss": 0.4808,
      "step": 300
    },
    {
      "epoch": 0.33755274261603374,
      "eval_loss": 0.5779129862785339,
      "eval_runtime": 122.5982,
      "eval_samples_per_second": 12.211,
      "eval_steps_per_second": 3.059,
      "step": 300
    },
    {
      "epoch": 0.33867791842475387,
      "grad_norm": 0.201891228556633,
      "learning_rate": 1.5074431737553157e-05,
      "loss": 0.3725,
      "step": 301
    },
    {
      "epoch": 0.339803094233474,
      "grad_norm": 0.20395103096961975,
      "learning_rate": 1.4787347420660541e-05,
      "loss": 0.438,
      "step": 302
    },
    {
      "epoch": 0.3409282700421941,
      "grad_norm": 0.20372365415096283,
      "learning_rate": 1.4502548002332088e-05,
      "loss": 0.5009,
      "step": 303
    },
    {
      "epoch": 0.3420534458509142,
      "grad_norm": 0.20695868134498596,
      "learning_rate": 1.422005196279395e-05,
      "loss": 0.526,
      "step": 304
    },
    {
      "epoch": 0.3431786216596343,
      "grad_norm": 0.1962495595216751,
      "learning_rate": 1.3939877632809278e-05,
      "loss": 0.5019,
      "step": 305
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 0.21751022338867188,
      "learning_rate": 1.3662043192488849e-05,
      "loss": 0.5188,
      "step": 306
    },
    {
      "epoch": 0.34542897327707456,
      "grad_norm": 0.23039577901363373,
      "learning_rate": 1.338656667011134e-05,
      "loss": 0.6243,
      "step": 307
    },
    {
      "epoch": 0.3465541490857947,
      "grad_norm": 0.2354414016008377,
      "learning_rate": 1.3113465940953495e-05,
      "loss": 0.721,
      "step": 308
    },
    {
      "epoch": 0.34767932489451475,
      "grad_norm": 0.2358129769563675,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 0.654,
      "step": 309
    },
    {
      "epoch": 0.3488045007032349,
      "grad_norm": 0.22713761031627655,
      "learning_rate": 1.257446259144494e-05,
      "loss": 0.602,
      "step": 310
    },
    {
      "epoch": 0.349929676511955,
      "grad_norm": 0.24341897666454315,
      "learning_rate": 1.2308594946249163e-05,
      "loss": 0.7296,
      "step": 311
    },
    {
      "epoch": 0.3510548523206751,
      "grad_norm": 0.24240520596504211,
      "learning_rate": 1.204517304231343e-05,
      "loss": 0.7068,
      "step": 312
    },
    {
      "epoch": 0.3521800281293952,
      "grad_norm": 0.25806447863578796,
      "learning_rate": 1.178421397270758e-05,
      "loss": 0.8166,
      "step": 313
    },
    {
      "epoch": 0.3533052039381153,
      "grad_norm": 0.2570001780986786,
      "learning_rate": 1.1525734670691701e-05,
      "loss": 0.7651,
      "step": 314
    },
    {
      "epoch": 0.35443037974683544,
      "grad_norm": 0.2566550076007843,
      "learning_rate": 1.1269751908617277e-05,
      "loss": 0.7221,
      "step": 315
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 0.2717800736427307,
      "learning_rate": 1.1016282296838887e-05,
      "loss": 0.8671,
      "step": 316
    },
    {
      "epoch": 0.3566807313642757,
      "grad_norm": 0.276123046875,
      "learning_rate": 1.0765342282636416e-05,
      "loss": 0.8627,
      "step": 317
    },
    {
      "epoch": 0.35780590717299576,
      "grad_norm": 0.26286810636520386,
      "learning_rate": 1.0516948149147754e-05,
      "loss": 0.7519,
      "step": 318
    },
    {
      "epoch": 0.3589310829817159,
      "grad_norm": 0.27873629331588745,
      "learning_rate": 1.0271116014312293e-05,
      "loss": 0.811,
      "step": 319
    },
    {
      "epoch": 0.360056258790436,
      "grad_norm": 0.27732256054878235,
      "learning_rate": 1.0027861829824952e-05,
      "loss": 0.747,
      "step": 320
    },
    {
      "epoch": 0.36118143459915614,
      "grad_norm": 0.271786630153656,
      "learning_rate": 9.787201380101157e-06,
      "loss": 0.7525,
      "step": 321
    },
    {
      "epoch": 0.3623066104078762,
      "grad_norm": 0.28306639194488525,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.8081,
      "step": 322
    },
    {
      "epoch": 0.36343178621659633,
      "grad_norm": 0.288723349571228,
      "learning_rate": 9.313723980074018e-06,
      "loss": 0.8391,
      "step": 323
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 0.2829471528530121,
      "learning_rate": 9.080937753040646e-06,
      "loss": 0.8146,
      "step": 324
    },
    {
      "epoch": 0.3656821378340366,
      "grad_norm": 0.2665492594242096,
      "learning_rate": 8.850806705317183e-06,
      "loss": 0.6112,
      "step": 325
    },
    {
      "epoch": 0.3668073136427567,
      "grad_norm": 0.2935864329338074,
      "learning_rate": 8.623345769777514e-06,
      "loss": 0.8316,
      "step": 326
    },
    {
      "epoch": 0.3679324894514768,
      "grad_norm": 0.2984178066253662,
      "learning_rate": 8.398569706035792e-06,
      "loss": 0.789,
      "step": 327
    },
    {
      "epoch": 0.3690576652601969,
      "grad_norm": 0.2921222746372223,
      "learning_rate": 8.176493099488663e-06,
      "loss": 0.7701,
      "step": 328
    },
    {
      "epoch": 0.370182841068917,
      "grad_norm": 0.2875445485115051,
      "learning_rate": 7.957130360368898e-06,
      "loss": 0.708,
      "step": 329
    },
    {
      "epoch": 0.37130801687763715,
      "grad_norm": 0.2886877954006195,
      "learning_rate": 7.740495722810271e-06,
      "loss": 0.5826,
      "step": 330
    },
    {
      "epoch": 0.3724331926863572,
      "grad_norm": 0.29971715807914734,
      "learning_rate": 7.526603243923957e-06,
      "loss": 0.6209,
      "step": 331
    },
    {
      "epoch": 0.37355836849507734,
      "grad_norm": 0.28758934140205383,
      "learning_rate": 7.315466802886401e-06,
      "loss": 0.5842,
      "step": 332
    },
    {
      "epoch": 0.37468354430379747,
      "grad_norm": 0.29430586099624634,
      "learning_rate": 7.107100100038671e-06,
      "loss": 0.5808,
      "step": 333
    },
    {
      "epoch": 0.3758087201125176,
      "grad_norm": 0.29550546407699585,
      "learning_rate": 6.901516655997536e-06,
      "loss": 0.6218,
      "step": 334
    },
    {
      "epoch": 0.3769338959212377,
      "grad_norm": 0.30380144715309143,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.5913,
      "step": 335
    },
    {
      "epoch": 0.3780590717299578,
      "grad_norm": 0.3008062541484833,
      "learning_rate": 6.498752722928042e-06,
      "loss": 0.5062,
      "step": 336
    },
    {
      "epoch": 0.3791842475386779,
      "grad_norm": 0.3103947639465332,
      "learning_rate": 6.301598368674105e-06,
      "loss": 0.5587,
      "step": 337
    },
    {
      "epoch": 0.38030942334739803,
      "grad_norm": 0.310254842042923,
      "learning_rate": 6.107279541079769e-06,
      "loss": 0.5346,
      "step": 338
    },
    {
      "epoch": 0.38143459915611816,
      "grad_norm": 0.3149080276489258,
      "learning_rate": 5.915808849215304e-06,
      "loss": 0.4796,
      "step": 339
    },
    {
      "epoch": 0.38255977496483823,
      "grad_norm": 0.2941981852054596,
      "learning_rate": 5.727198717339511e-06,
      "loss": 0.3937,
      "step": 340
    },
    {
      "epoch": 0.38368495077355835,
      "grad_norm": 0.31065768003463745,
      "learning_rate": 5.54146138409355e-06,
      "loss": 0.3904,
      "step": 341
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 0.30580249428749084,
      "learning_rate": 5.358608901706802e-06,
      "loss": 0.3202,
      "step": 342
    },
    {
      "epoch": 0.3859353023909986,
      "grad_norm": 0.3562416136264801,
      "learning_rate": 5.178653135214812e-06,
      "loss": 0.4241,
      "step": 343
    },
    {
      "epoch": 0.3870604781997187,
      "grad_norm": 0.30645987391471863,
      "learning_rate": 5.001605761689398e-06,
      "loss": 0.3141,
      "step": 344
    },
    {
      "epoch": 0.3881856540084388,
      "grad_norm": 0.38345733284950256,
      "learning_rate": 4.827478269480895e-06,
      "loss": 0.49,
      "step": 345
    },
    {
      "epoch": 0.3893108298171589,
      "grad_norm": 0.3593797981739044,
      "learning_rate": 4.65628195747273e-06,
      "loss": 0.3769,
      "step": 346
    },
    {
      "epoch": 0.39043600562587905,
      "grad_norm": 0.4052690267562866,
      "learning_rate": 4.488027934348271e-06,
      "loss": 0.4271,
      "step": 347
    },
    {
      "epoch": 0.39156118143459917,
      "grad_norm": 0.4411349892616272,
      "learning_rate": 4.322727117869951e-06,
      "loss": 0.4206,
      "step": 348
    },
    {
      "epoch": 0.3926863572433193,
      "grad_norm": 0.4520365595817566,
      "learning_rate": 4.16039023417088e-06,
      "loss": 0.4025,
      "step": 349
    },
    {
      "epoch": 0.39381153305203936,
      "grad_norm": 0.6150334477424622,
      "learning_rate": 4.001027817058789e-06,
      "loss": 0.4425,
      "step": 350
    },
    {
      "epoch": 0.39381153305203936,
      "eval_loss": 0.5738900899887085,
      "eval_runtime": 123.3985,
      "eval_samples_per_second": 12.131,
      "eval_steps_per_second": 3.039,
      "step": 350
    },
    {
      "epoch": 0.3949367088607595,
      "grad_norm": 0.18382947146892548,
      "learning_rate": 3.844650207332562e-06,
      "loss": 0.372,
      "step": 351
    },
    {
      "epoch": 0.3960618846694796,
      "grad_norm": 0.18228398263454437,
      "learning_rate": 3.691267552111183e-06,
      "loss": 0.3865,
      "step": 352
    },
    {
      "epoch": 0.39718706047819974,
      "grad_norm": 0.19391196966171265,
      "learning_rate": 3.54088980417534e-06,
      "loss": 0.518,
      "step": 353
    },
    {
      "epoch": 0.3983122362869198,
      "grad_norm": 0.20379137992858887,
      "learning_rate": 3.393526721321616e-06,
      "loss": 0.6073,
      "step": 354
    },
    {
      "epoch": 0.39943741209563993,
      "grad_norm": 0.20377278327941895,
      "learning_rate": 3.249187865729264e-06,
      "loss": 0.5355,
      "step": 355
    },
    {
      "epoch": 0.40056258790436006,
      "grad_norm": 0.21739913523197174,
      "learning_rate": 3.1078826033397843e-06,
      "loss": 0.5892,
      "step": 356
    },
    {
      "epoch": 0.4016877637130802,
      "grad_norm": 0.20590797066688538,
      "learning_rate": 2.9696201032491434e-06,
      "loss": 0.5308,
      "step": 357
    },
    {
      "epoch": 0.4028129395218003,
      "grad_norm": 0.22945421934127808,
      "learning_rate": 2.8344093371128424e-06,
      "loss": 0.612,
      "step": 358
    },
    {
      "epoch": 0.4039381153305204,
      "grad_norm": 0.23170845210552216,
|
"grad_norm": 0.305276483297348, |
|
"learning_rate": 1.837773122023114e-05, |
|
"loss": 0.5081, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.32742616033755273, |
|
"grad_norm": 0.31528910994529724, |
|
"learning_rate": 1.8066774241082612e-05, |
|
"loss": 0.5404, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.32855133614627285, |
|
"grad_norm": 0.34744343161582947, |
|
"learning_rate": 1.7757889363191483e-05, |
|
"loss": 0.587, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.329676511954993, |
|
"grad_norm": 0.31660598516464233, |
|
"learning_rate": 1.745109662965481e-05, |
|
"loss": 0.389, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.3308016877637131, |
|
"grad_norm": 0.3436177968978882, |
|
"learning_rate": 1.714641594781347e-05, |
|
"loss": 0.417, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3319268635724332, |
|
"grad_norm": 0.3294260799884796, |
|
"learning_rate": 1.684386708796025e-05, |
|
"loss": 0.3738, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.3330520393811533, |
|
"grad_norm": 0.35129880905151367, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.3655, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.3341772151898734, |
|
"grad_norm": 0.4063621759414673, |
|
"learning_rate": 1.62452432224612e-05, |
|
"loss": 0.4208, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.33530239099859355, |
|
"grad_norm": 0.4242677092552185, |
|
"learning_rate": 1.5949207060660138e-05, |
|
"loss": 0.4233, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.3364275668073136, |
|
"grad_norm": 0.43796253204345703, |
|
"learning_rate": 1.5655380406016235e-05, |
|
"loss": 0.3132, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.33755274261603374, |
|
"grad_norm": 0.5782553553581238, |
|
"learning_rate": 1.536378232452003e-05, |
|
"loss": 0.4808, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.33755274261603374, |
|
"eval_loss": 0.5779129862785339, |
|
"eval_runtime": 122.5982, |
|
"eval_samples_per_second": 12.211, |
|
"eval_steps_per_second": 3.059, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.33867791842475387, |
|
"grad_norm": 0.201891228556633, |
|
"learning_rate": 1.5074431737553157e-05, |
|
"loss": 0.3725, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.339803094233474, |
|
"grad_norm": 0.20395103096961975, |
|
"learning_rate": 1.4787347420660541e-05, |
|
"loss": 0.438, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.3409282700421941, |
|
"grad_norm": 0.20372365415096283, |
|
"learning_rate": 1.4502548002332088e-05, |
|
"loss": 0.5009, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.3420534458509142, |
|
"grad_norm": 0.20695868134498596, |
|
"learning_rate": 1.422005196279395e-05, |
|
"loss": 0.526, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.3431786216596343, |
|
"grad_norm": 0.1962495595216751, |
|
"learning_rate": 1.3939877632809278e-05, |
|
"loss": 0.5019, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.34430379746835443, |
|
"grad_norm": 0.21751022338867188, |
|
"learning_rate": 1.3662043192488849e-05, |
|
"loss": 0.5188, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.34542897327707456, |
|
"grad_norm": 0.23039577901363373, |
|
"learning_rate": 1.338656667011134e-05, |
|
"loss": 0.6243, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.3465541490857947, |
|
"grad_norm": 0.2354414016008377, |
|
"learning_rate": 1.3113465940953495e-05, |
|
"loss": 0.721, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.34767932489451475, |
|
"grad_norm": 0.2358129769563675, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.654, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.3488045007032349, |
|
"grad_norm": 0.22713761031627655, |
|
"learning_rate": 1.257446259144494e-05, |
|
"loss": 0.602, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.349929676511955, |
|
"grad_norm": 0.24341897666454315, |
|
"learning_rate": 1.2308594946249163e-05, |
|
"loss": 0.7296, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.3510548523206751, |
|
"grad_norm": 0.24240520596504211, |
|
"learning_rate": 1.204517304231343e-05, |
|
"loss": 0.7068, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.3521800281293952, |
|
"grad_norm": 0.25806447863578796, |
|
"learning_rate": 1.178421397270758e-05, |
|
"loss": 0.8166, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.3533052039381153, |
|
"grad_norm": 0.2570001780986786, |
|
"learning_rate": 1.1525734670691701e-05, |
|
"loss": 0.7651, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.35443037974683544, |
|
"grad_norm": 0.2566550076007843, |
|
"learning_rate": 1.1269751908617277e-05, |
|
"loss": 0.7221, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 0.2717800736427307, |
|
"learning_rate": 1.1016282296838887e-05, |
|
"loss": 0.8671, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.3566807313642757, |
|
"grad_norm": 0.276123046875, |
|
"learning_rate": 1.0765342282636416e-05, |
|
"loss": 0.8627, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.35780590717299576, |
|
"grad_norm": 0.26286810636520386, |
|
"learning_rate": 1.0516948149147754e-05, |
|
"loss": 0.7519, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.3589310829817159, |
|
"grad_norm": 0.27873629331588745, |
|
"learning_rate": 1.0271116014312293e-05, |
|
"loss": 0.811, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.360056258790436, |
|
"grad_norm": 0.27732256054878235, |
|
"learning_rate": 1.0027861829824952e-05, |
|
"loss": 0.747, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.36118143459915614, |
|
"grad_norm": 0.271786630153656, |
|
"learning_rate": 9.787201380101157e-06, |
|
"loss": 0.7525, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.3623066104078762, |
|
"grad_norm": 0.28306639194488525, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.8081, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.36343178621659633, |
|
"grad_norm": 0.288723349571228, |
|
"learning_rate": 9.313723980074018e-06, |
|
"loss": 0.8391, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.36455696202531646, |
|
"grad_norm": 0.2829471528530121, |
|
"learning_rate": 9.080937753040646e-06, |
|
"loss": 0.8146, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.3656821378340366, |
|
"grad_norm": 0.2665492594242096, |
|
"learning_rate": 8.850806705317183e-06, |
|
"loss": 0.6112, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.3668073136427567, |
|
"grad_norm": 0.2935864329338074, |
|
"learning_rate": 8.623345769777514e-06, |
|
"loss": 0.8316, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.3679324894514768, |
|
"grad_norm": 0.2984178066253662, |
|
"learning_rate": 8.398569706035792e-06, |
|
"loss": 0.789, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.3690576652601969, |
|
"grad_norm": 0.2921222746372223, |
|
"learning_rate": 8.176493099488663e-06, |
|
"loss": 0.7701, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.370182841068917, |
|
"grad_norm": 0.2875445485115051, |
|
"learning_rate": 7.957130360368898e-06, |
|
"loss": 0.708, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.37130801687763715, |
|
"grad_norm": 0.2886877954006195, |
|
"learning_rate": 7.740495722810271e-06, |
|
"loss": 0.5826, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.3724331926863572, |
|
"grad_norm": 0.29971715807914734, |
|
"learning_rate": 7.526603243923957e-06, |
|
"loss": 0.6209, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.37355836849507734, |
|
"grad_norm": 0.28758934140205383, |
|
"learning_rate": 7.315466802886401e-06, |
|
"loss": 0.5842, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.37468354430379747, |
|
"grad_norm": 0.29430586099624634, |
|
"learning_rate": 7.107100100038671e-06, |
|
"loss": 0.5808, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.3758087201125176, |
|
"grad_norm": 0.29550546407699585, |
|
"learning_rate": 6.901516655997536e-06, |
|
"loss": 0.6218, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.3769338959212377, |
|
"grad_norm": 0.30380144715309143, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.5913, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.3780590717299578, |
|
"grad_norm": 0.3008062541484833, |
|
"learning_rate": 6.498752722928042e-06, |
|
"loss": 0.5062, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.3791842475386779, |
|
"grad_norm": 0.3103947639465332, |
|
"learning_rate": 6.301598368674105e-06, |
|
"loss": 0.5587, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.38030942334739803, |
|
"grad_norm": 0.310254842042923, |
|
"learning_rate": 6.107279541079769e-06, |
|
"loss": 0.5346, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.38143459915611816, |
|
"grad_norm": 0.3149080276489258, |
|
"learning_rate": 5.915808849215304e-06, |
|
"loss": 0.4796, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.38255977496483823, |
|
"grad_norm": 0.2941981852054596, |
|
"learning_rate": 5.727198717339511e-06, |
|
"loss": 0.3937, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.38368495077355835, |
|
"grad_norm": 0.31065768003463745, |
|
"learning_rate": 5.54146138409355e-06, |
|
"loss": 0.3904, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.3848101265822785, |
|
"grad_norm": 0.30580249428749084, |
|
"learning_rate": 5.358608901706802e-06, |
|
"loss": 0.3202, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.3859353023909986, |
|
"grad_norm": 0.3562416136264801, |
|
"learning_rate": 5.178653135214812e-06, |
|
"loss": 0.4241, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.3870604781997187, |
|
"grad_norm": 0.30645987391471863, |
|
"learning_rate": 5.001605761689398e-06, |
|
"loss": 0.3141, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.3881856540084388, |
|
"grad_norm": 0.38345733284950256, |
|
"learning_rate": 4.827478269480895e-06, |
|
"loss": 0.49, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.3893108298171589, |
|
"grad_norm": 0.3593797981739044, |
|
"learning_rate": 4.65628195747273e-06, |
|
"loss": 0.3769, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.39043600562587905, |
|
"grad_norm": 0.4052690267562866, |
|
"learning_rate": 4.488027934348271e-06, |
|
"loss": 0.4271, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.39156118143459917, |
|
"grad_norm": 0.4411349892616272, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.4206, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.3926863572433193, |
|
"grad_norm": 0.4520365595817566, |
|
"learning_rate": 4.16039023417088e-06, |
|
"loss": 0.4025, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.39381153305203936, |
|
"grad_norm": 0.6150334477424622, |
|
"learning_rate": 4.001027817058789e-06, |
|
"loss": 0.4425, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.39381153305203936, |
|
"eval_loss": 0.5738900899887085, |
|
"eval_runtime": 123.3985, |
|
"eval_samples_per_second": 12.131, |
|
"eval_steps_per_second": 3.039, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3949367088607595, |
|
"grad_norm": 0.18382947146892548, |
|
"learning_rate": 3.844650207332562e-06, |
|
"loss": 0.372, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.3960618846694796, |
|
"grad_norm": 0.18228398263454437, |
|
"learning_rate": 3.691267552111183e-06, |
|
"loss": 0.3865, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.39718706047819974, |
|
"grad_norm": 0.19391196966171265, |
|
"learning_rate": 3.54088980417534e-06, |
|
"loss": 0.518, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.3983122362869198, |
|
"grad_norm": 0.20379137992858887, |
|
"learning_rate": 3.393526721321616e-06, |
|
"loss": 0.6073, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.39943741209563993, |
|
"grad_norm": 0.20377278327941895, |
|
"learning_rate": 3.249187865729264e-06, |
|
"loss": 0.5355, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.40056258790436006, |
|
"grad_norm": 0.21739913523197174, |
|
"learning_rate": 3.1078826033397843e-06, |
|
"loss": 0.5892, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.4016877637130802, |
|
"grad_norm": 0.20590797066688538, |
|
"learning_rate": 2.9696201032491434e-06, |
|
"loss": 0.5308, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.4028129395218003, |
|
"grad_norm": 0.22945421934127808, |
|
"learning_rate": 2.8344093371128424e-06, |
|
"loss": 0.612, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.4039381153305204, |
|
"grad_norm": 0.23170845210552216, |
|
"learning_rate": 2.70225907856374e-06, |
|
"loss": 0.6246, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.4050632911392405, |
|
"grad_norm": 0.23253555595874786, |
|
"learning_rate": 2.573177902642726e-06, |
|
"loss": 0.6423, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4061884669479606, |
|
"grad_norm": 0.23829902708530426, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.6802, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.40731364275668075, |
|
"grad_norm": 0.24621927738189697, |
|
"learning_rate": 2.324256102563188e-06, |
|
"loss": 0.6475, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.4084388185654008, |
|
"grad_norm": 0.24897930026054382, |
|
"learning_rate": 2.204431630583548e-06, |
|
"loss": 0.7314, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.40956399437412094, |
|
"grad_norm": 0.25607559084892273, |
|
"learning_rate": 2.087708544541689e-06, |
|
"loss": 0.7108, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.41068917018284107, |
|
"grad_norm": 0.26042604446411133, |
|
"learning_rate": 1.974094418431388e-06, |
|
"loss": 0.8427, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.4118143459915612, |
|
"grad_norm": 0.2578832805156708, |
|
"learning_rate": 1.8635966245104664e-06, |
|
"loss": 0.7355, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4129395218002813, |
|
"grad_norm": 0.25586313009262085, |
|
"learning_rate": 1.7562223328224325e-06, |
|
"loss": 0.7442, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.4140646976090014, |
|
"grad_norm": 0.2767845094203949, |
|
"learning_rate": 1.6519785107311891e-06, |
|
"loss": 0.8506, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.4151898734177215, |
|
"grad_norm": 0.2640194892883301, |
|
"learning_rate": 1.5508719224689717e-06, |
|
"loss": 0.7084, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.41631504922644164, |
|
"grad_norm": 0.2685393989086151, |
|
"learning_rate": 1.4529091286973995e-06, |
|
"loss": 0.7258, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.41744022503516176, |
|
"grad_norm": 0.2706719636917114, |
|
"learning_rate": 1.358096486081778e-06, |
|
"loss": 0.7101, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.41856540084388183, |
|
"grad_norm": 0.27270835638046265, |
|
"learning_rate": 1.2664401468786114e-06, |
|
"loss": 0.7251, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.41969057665260195, |
|
"grad_norm": 0.28525063395500183, |
|
"learning_rate": 1.1779460585363944e-06, |
|
"loss": 0.7922, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.4208157524613221, |
|
"grad_norm": 0.2806113362312317, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 0.7772, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.4219409282700422, |
|
"grad_norm": 0.29236048460006714, |
|
"learning_rate": 1.0104673978866164e-06, |
|
"loss": 0.776, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.42306610407876233, |
|
"grad_norm": 0.2895606756210327, |
|
"learning_rate": 9.314936930293283e-07, |
|
"loss": 0.7037, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.4241912798874824, |
|
"grad_norm": 0.2893851101398468, |
|
"learning_rate": 8.557039732283944e-07, |
|
"loss": 0.7635, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.4253164556962025, |
|
"grad_norm": 0.2851521670818329, |
|
"learning_rate": 7.83103156370113e-07, |
|
"loss": 0.6912, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.42644163150492265, |
|
"grad_norm": 0.2914351224899292, |
|
"learning_rate": 7.136959534174592e-07, |
|
"loss": 0.6818, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.42756680731364277, |
|
"grad_norm": 0.2961958348751068, |
|
"learning_rate": 6.474868681043578e-07, |
|
"loss": 0.6924, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4286919831223629, |
|
"grad_norm": 0.30675557255744934, |
|
"learning_rate": 5.844801966434832e-07, |
|
"loss": 0.768, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.42981715893108297, |
|
"grad_norm": 0.303774356842041, |
|
"learning_rate": 5.246800274474439e-07, |
|
"loss": 0.6428, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.4309423347398031, |
|
"grad_norm": 0.3004835844039917, |
|
"learning_rate": 4.680902408635335e-07, |
|
"loss": 0.6931, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.4320675105485232, |
|
"grad_norm": 0.29980704188346863, |
|
"learning_rate": 4.1471450892189846e-07, |
|
"loss": 0.5863, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.43319268635724334, |
|
"grad_norm": 0.29202115535736084, |
|
"learning_rate": 3.6455629509730136e-07, |
|
"loss": 0.5396, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.4343178621659634, |
|
"grad_norm": 0.3045811653137207, |
|
"learning_rate": 3.1761885408435054e-07, |
|
"loss": 0.5696, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.43544303797468353, |
|
"grad_norm": 0.29848921298980713, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 0.4808, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.43656821378340366, |
|
"grad_norm": 0.29339462518692017, |
|
"learning_rate": 2.334182641175686e-07, |
|
"loss": 0.4606, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.4376933895921238, |
|
"grad_norm": 0.31360113620758057, |
|
"learning_rate": 1.9616057881935436e-07, |
|
"loss": 0.4696, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.4388185654008439, |
|
"grad_norm": 0.31095123291015625, |
|
"learning_rate": 1.6213459328950352e-07, |
|
"loss": 0.4802, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.439943741209564, |
|
"grad_norm": 0.30314531922340393, |
|
"learning_rate": 1.3134251542544774e-07, |
|
"loss": 0.4323, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.4410689170182841, |
|
"grad_norm": 0.34104612469673157, |
|
"learning_rate": 1.0378634328099269e-07, |
|
"loss": 0.4987, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.4421940928270042, |
|
"grad_norm": 0.3525398075580597, |
|
"learning_rate": 7.946786493666647e-08, |
|
"loss": 0.4656, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.44331926863572435, |
|
"grad_norm": 0.32821884751319885, |
|
"learning_rate": 5.838865838366792e-08, |
|
"loss": 0.3665, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.3422605097293854, |
|
"learning_rate": 4.055009142152067e-08, |
|
"loss": 0.3255, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.44556962025316454, |
|
"grad_norm": 0.3505786061286926, |
|
"learning_rate": 2.595332156925534e-08, |
|
"loss": 0.3839, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.44669479606188467, |
|
"grad_norm": 0.35224196314811707, |
|
"learning_rate": 1.4599295990352924e-08, |
|
"loss": 0.3063, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.4478199718706048, |
|
"grad_norm": 0.39496955275535583, |
|
"learning_rate": 6.488751431266149e-09, |
|
"loss": 0.3376, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.4489451476793249, |
|
"grad_norm": 0.4384694993495941, |
|
"learning_rate": 1.622214173602199e-09, |
|
"loss": 0.3727, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.450070323488045, |
|
"grad_norm": 0.5900406837463379, |
|
"learning_rate": 0.0, |
|
"loss": 0.3785, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.450070323488045, |
|
"eval_loss": 0.5731000304222107, |
|
"eval_runtime": 122.6526, |
|
"eval_samples_per_second": 12.205, |
|
"eval_steps_per_second": 3.057, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 400, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.392326289935565e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|