{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6776071995764955,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0033880359978824773,
      "grad_norm": 0.04680836945772171,
      "learning_rate": 7.499999999999999e-06,
      "loss": 10.3766,
      "step": 1
    },
    {
      "epoch": 0.0033880359978824773,
      "eval_loss": 10.376468658447266,
      "eval_runtime": 3.878,
      "eval_samples_per_second": 256.575,
      "eval_steps_per_second": 128.416,
      "step": 1
    },
    {
      "epoch": 0.006776071995764955,
      "grad_norm": 0.04365997761487961,
      "learning_rate": 1.4999999999999999e-05,
      "loss": 10.3771,
      "step": 2
    },
    {
      "epoch": 0.010164107993647432,
      "grad_norm": 0.041304562240839005,
      "learning_rate": 2.2499999999999998e-05,
      "loss": 10.3758,
      "step": 3
    },
    {
      "epoch": 0.01355214399152991,
      "grad_norm": 0.043574266135692596,
      "learning_rate": 2.9999999999999997e-05,
      "loss": 10.3764,
      "step": 4
    },
    {
      "epoch": 0.01694017998941239,
      "grad_norm": 0.045331861823797226,
      "learning_rate": 3.75e-05,
      "loss": 10.3757,
      "step": 5
    },
    {
      "epoch": 0.020328215987294864,
      "grad_norm": 0.04029256850481033,
      "learning_rate": 4.4999999999999996e-05,
      "loss": 10.3764,
      "step": 6
    },
    {
      "epoch": 0.023716251985177343,
      "grad_norm": 0.046235088258981705,
      "learning_rate": 5.2499999999999995e-05,
      "loss": 10.3752,
      "step": 7
    },
    {
      "epoch": 0.02710428798305982,
      "grad_norm": 0.04704303666949272,
      "learning_rate": 5.9999999999999995e-05,
      "loss": 10.3756,
      "step": 8
    },
    {
      "epoch": 0.030492323980942298,
      "grad_norm": 0.04604668170213699,
      "learning_rate": 6.75e-05,
      "loss": 10.3763,
      "step": 9
    },
    {
      "epoch": 0.03388035997882478,
      "grad_norm": 0.04410148411989212,
      "learning_rate": 7.5e-05,
      "loss": 10.3751,
      "step": 10
    },
    {
      "epoch": 0.03726839597670725,
      "grad_norm": 0.045992445200681686,
      "learning_rate": 8.25e-05,
      "loss": 10.3755,
      "step": 11
    },
    {
      "epoch": 0.04065643197458973,
      "grad_norm": 0.04755518212914467,
      "learning_rate": 8.999999999999999e-05,
      "loss": 10.374,
      "step": 12
    },
    {
      "epoch": 0.04404446797247221,
      "grad_norm": 0.0471210703253746,
      "learning_rate": 9.75e-05,
      "loss": 10.375,
      "step": 13
    },
    {
      "epoch": 0.047432503970354686,
      "grad_norm": 0.04655275121331215,
      "learning_rate": 0.00010499999999999999,
      "loss": 10.3753,
      "step": 14
    },
    {
      "epoch": 0.05082053996823716,
      "grad_norm": 0.04771905019879341,
      "learning_rate": 0.0001125,
      "loss": 10.3748,
      "step": 15
    },
    {
      "epoch": 0.05420857596611964,
      "grad_norm": 0.051401205360889435,
      "learning_rate": 0.00011999999999999999,
      "loss": 10.3749,
      "step": 16
    },
    {
      "epoch": 0.05759661196400212,
      "grad_norm": 0.045204389840364456,
      "learning_rate": 0.00012749999999999998,
      "loss": 10.3736,
      "step": 17
    },
    {
      "epoch": 0.060984647961884596,
      "grad_norm": 0.04979519173502922,
      "learning_rate": 0.000135,
      "loss": 10.3729,
      "step": 18
    },
    {
      "epoch": 0.06437268395976707,
      "grad_norm": 0.04958589747548103,
      "learning_rate": 0.0001425,
      "loss": 10.3735,
      "step": 19
    },
    {
      "epoch": 0.06776071995764955,
      "grad_norm": 0.054519593715667725,
      "learning_rate": 0.00015,
      "loss": 10.3723,
      "step": 20
    },
    {
      "epoch": 0.07114875595553202,
      "grad_norm": 0.060721371322870255,
      "learning_rate": 0.00014998857713672935,
      "loss": 10.372,
      "step": 21
    },
    {
      "epoch": 0.0745367919534145,
      "grad_norm": 0.06067592278122902,
      "learning_rate": 0.00014995431202643217,
      "loss": 10.3718,
      "step": 22
    },
    {
      "epoch": 0.07792482795129699,
      "grad_norm": 0.06362656503915787,
      "learning_rate": 0.000149897215106593,
      "loss": 10.3713,
      "step": 23
    },
    {
      "epoch": 0.08131286394917946,
      "grad_norm": 0.067110076546669,
      "learning_rate": 0.0001498173037694868,
      "loss": 10.3714,
      "step": 24
    },
    {
      "epoch": 0.08470089994706194,
      "grad_norm": 0.07370075583457947,
      "learning_rate": 0.0001497146023568809,
      "loss": 10.3712,
      "step": 25
    },
    {
      "epoch": 0.08808893594494442,
      "grad_norm": 0.0760367140173912,
      "learning_rate": 0.00014958914215262048,
      "loss": 10.371,
      "step": 26
    },
    {
      "epoch": 0.09147697194282689,
      "grad_norm": 0.08042097836732864,
      "learning_rate": 0.00014944096137309914,
      "loss": 10.37,
      "step": 27
    },
    {
      "epoch": 0.09486500794070937,
      "grad_norm": 0.08724083006381989,
      "learning_rate": 0.00014927010515561776,
      "loss": 10.3697,
      "step": 28
    },
    {
      "epoch": 0.09825304393859184,
      "grad_norm": 0.08779574185609818,
      "learning_rate": 0.00014907662554463532,
      "loss": 10.3689,
      "step": 29
    },
    {
      "epoch": 0.10164107993647432,
      "grad_norm": 0.09096319228410721,
      "learning_rate": 0.0001488605814759156,
      "loss": 10.3677,
      "step": 30
    },
    {
      "epoch": 0.1050291159343568,
      "grad_norm": 0.10570746660232544,
      "learning_rate": 0.00014862203875857477,
      "loss": 10.3666,
      "step": 31
    },
    {
      "epoch": 0.10841715193223928,
      "grad_norm": 0.1051798090338707,
      "learning_rate": 0.0001483610700550354,
      "loss": 10.3678,
      "step": 32
    },
    {
      "epoch": 0.11180518793012176,
      "grad_norm": 0.11076612770557404,
      "learning_rate": 0.00014807775485889264,
      "loss": 10.366,
      "step": 33
    },
    {
      "epoch": 0.11519322392800424,
      "grad_norm": 0.11073443293571472,
      "learning_rate": 0.0001477721794706997,
      "loss": 10.3658,
      "step": 34
    },
    {
      "epoch": 0.11858125992588671,
      "grad_norm": 0.11666611582040787,
      "learning_rate": 0.0001474444369716801,
      "loss": 10.3642,
      "step": 35
    },
    {
      "epoch": 0.12196929592376919,
      "grad_norm": 0.12058889120817184,
      "learning_rate": 0.0001470946271953739,
      "loss": 10.3624,
      "step": 36
    },
    {
      "epoch": 0.12535733192165166,
      "grad_norm": 0.13473331928253174,
      "learning_rate": 0.00014672285669722765,
      "loss": 10.3619,
      "step": 37
    },
    {
      "epoch": 0.12874536791953414,
      "grad_norm": 0.12408842891454697,
      "learning_rate": 0.00014632923872213652,
      "loss": 10.3616,
      "step": 38
    },
    {
      "epoch": 0.13213340391741663,
      "grad_norm": 0.12850341200828552,
      "learning_rate": 0.00014591389316994876,
      "loss": 10.3609,
      "step": 39
    },
    {
      "epoch": 0.1355214399152991,
      "grad_norm": 0.12435595691204071,
      "learning_rate": 0.0001454769465589431,
      "loss": 10.3597,
      "step": 40
    },
    {
      "epoch": 0.1389094759131816,
      "grad_norm": 0.11300837248563766,
      "learning_rate": 0.00014501853198729012,
      "loss": 10.3583,
      "step": 41
    },
    {
      "epoch": 0.14229751191106405,
      "grad_norm": 0.11374777555465698,
      "learning_rate": 0.00014453878909250904,
      "loss": 10.357,
      "step": 42
    },
    {
      "epoch": 0.14568554790894653,
      "grad_norm": 0.11273212730884552,
      "learning_rate": 0.00014403786400893302,
      "loss": 10.3559,
      "step": 43
    },
    {
      "epoch": 0.149073583906829,
      "grad_norm": 0.10369884222745895,
      "learning_rate": 0.00014351590932319504,
      "loss": 10.3545,
      "step": 44
    },
    {
      "epoch": 0.1524616199047115,
      "grad_norm": 0.09857185184955597,
      "learning_rate": 0.00014297308402774875,
      "loss": 10.3544,
      "step": 45
    },
    {
      "epoch": 0.15584965590259398,
      "grad_norm": 0.09635099023580551,
      "learning_rate": 0.0001424095534724375,
      "loss": 10.3537,
      "step": 46
    },
    {
      "epoch": 0.15923769190047643,
      "grad_norm": 0.08793843537569046,
      "learning_rate": 0.00014182548931412757,
      "loss": 10.3528,
      "step": 47
    },
    {
      "epoch": 0.1626257278983589,
      "grad_norm": 0.07799001038074493,
      "learning_rate": 0.0001412210694644195,
      "loss": 10.3516,
      "step": 48
    },
    {
      "epoch": 0.1660137638962414,
      "grad_norm": 0.07476358115673065,
      "learning_rate": 0.00014059647803545467,
      "loss": 10.3515,
      "step": 49
    },
    {
      "epoch": 0.16940179989412388,
      "grad_norm": 0.07342197000980377,
      "learning_rate": 0.0001399519052838329,
      "loss": 10.3523,
      "step": 50
    },
    {
      "epoch": 0.16940179989412388,
      "eval_loss": 10.351042747497559,
      "eval_runtime": 3.8684,
      "eval_samples_per_second": 257.211,
      "eval_steps_per_second": 128.735,
      "step": 50
    },
    {
      "epoch": 0.17278983589200636,
      "grad_norm": 0.06936267018318176,
      "learning_rate": 0.00013928754755265842,
      "loss": 10.3514,
      "step": 51
    },
    {
      "epoch": 0.17617787188988884,
      "grad_norm": 0.06112508103251457,
      "learning_rate": 0.00013860360721173193,
      "loss": 10.3511,
      "step": 52
    },
    {
      "epoch": 0.1795659078877713,
      "grad_norm": 0.06208924576640129,
      "learning_rate": 0.0001379002925959068,
      "loss": 10.3505,
      "step": 53
    },
    {
      "epoch": 0.18295394388565378,
      "grad_norm": 0.057701293379068375,
      "learning_rate": 0.0001371778179416281,
      "loss": 10.3505,
      "step": 54
    },
    {
      "epoch": 0.18634197988353626,
      "grad_norm": 0.050447553396224976,
      "learning_rate": 0.00013643640332167438,
      "loss": 10.349,
      "step": 55
    },
    {
      "epoch": 0.18973001588141875,
      "grad_norm": 0.05121852830052376,
      "learning_rate": 0.00013567627457812106,
      "loss": 10.3496,
      "step": 56
    },
    {
      "epoch": 0.19311805187930123,
      "grad_norm": 0.04376755282282829,
      "learning_rate": 0.00013489766325354695,
      "loss": 10.3488,
      "step": 57
    },
    {
      "epoch": 0.19650608787718368,
      "grad_norm": 0.04570882394909859,
      "learning_rate": 0.00013410080652050412,
      "loss": 10.3493,
      "step": 58
    },
    {
      "epoch": 0.19989412387506617,
      "grad_norm": 0.047347478568553925,
      "learning_rate": 0.0001332859471092728,
      "loss": 10.35,
      "step": 59
    },
    {
      "epoch": 0.20328215987294865,
      "grad_norm": 0.048713624477386475,
      "learning_rate": 0.00013245333323392333,
      "loss": 10.3484,
      "step": 60
    },
    {
      "epoch": 0.20667019587083113,
      "grad_norm": 0.04044219106435776,
      "learning_rate": 0.0001316032185167079,
      "loss": 10.3485,
      "step": 61
    },
    {
      "epoch": 0.2100582318687136,
      "grad_norm": 0.03352762386202812,
      "learning_rate": 0.00013073586191080457,
      "loss": 10.3494,
      "step": 62
    },
    {
      "epoch": 0.2134462678665961,
      "grad_norm": 0.03678755834698677,
      "learning_rate": 0.00012985152762143778,
      "loss": 10.3492,
      "step": 63
    },
    {
      "epoch": 0.21683430386447855,
      "grad_norm": 0.035951532423496246,
      "learning_rate": 0.00012895048502539882,
      "loss": 10.3475,
      "step": 64
    },
    {
      "epoch": 0.22022233986236103,
      "grad_norm": 0.04304710775613785,
      "learning_rate": 0.00012803300858899104,
      "loss": 10.349,
      "step": 65
    },
    {
      "epoch": 0.22361037586024352,
      "grad_norm": 0.035194698721170425,
      "learning_rate": 0.0001270993777844248,
      "loss": 10.3468,
      "step": 66
    },
    {
      "epoch": 0.226998411858126,
      "grad_norm": 0.029510466381907463,
      "learning_rate": 0.0001261498770046874,
      "loss": 10.3484,
      "step": 67
    },
    {
      "epoch": 0.23038644785600848,
      "grad_norm": 0.02603858895599842,
      "learning_rate": 0.00012518479547691435,
      "loss": 10.3485,
      "step": 68
    },
    {
      "epoch": 0.23377448385389094,
      "grad_norm": 0.028004931285977364,
      "learning_rate": 0.00012420442717428804,
      "loss": 10.3488,
      "step": 69
    },
    {
      "epoch": 0.23716251985177342,
      "grad_norm": 0.030328616499900818,
      "learning_rate": 0.00012320907072649044,
      "loss": 10.3479,
      "step": 70
    },
    {
      "epoch": 0.2405505558496559,
      "grad_norm": 0.027720727026462555,
      "learning_rate": 0.0001221990293287378,
      "loss": 10.3477,
      "step": 71
    },
    {
      "epoch": 0.24393859184753838,
      "grad_norm": 0.02903708443045616,
      "learning_rate": 0.00012117461064942435,
      "loss": 10.3479,
      "step": 72
    },
    {
      "epoch": 0.24732662784542087,
      "grad_norm": 0.026270205155014992,
      "learning_rate": 0.00012013612673640363,
      "loss": 10.3473,
      "step": 73
    },
    {
      "epoch": 0.2507146638433033,
      "grad_norm": 0.022023234516382217,
      "learning_rate": 0.00011908389392193547,
      "loss": 10.3462,
      "step": 74
    },
    {
      "epoch": 0.25410269984118583,
      "grad_norm": 0.018086234107613564,
      "learning_rate": 0.00011801823272632844,
      "loss": 10.3471,
      "step": 75
    },
    {
      "epoch": 0.2574907358390683,
      "grad_norm": 0.017115121707320213,
      "learning_rate": 0.00011693946776030599,
      "loss": 10.3481,
      "step": 76
    },
    {
      "epoch": 0.26087877183695074,
      "grad_norm": 0.022446399554610252,
      "learning_rate": 0.00011584792762612703,
      "loss": 10.3469,
      "step": 77
    },
    {
      "epoch": 0.26426680783483325,
      "grad_norm": 0.023726455867290497,
      "learning_rate": 0.00011474394481749035,
      "loss": 10.3475,
      "step": 78
    },
    {
      "epoch": 0.2676548438327157,
      "grad_norm": 0.02203121967613697,
      "learning_rate": 0.00011362785561825406,
      "loss": 10.3484,
      "step": 79
    },
    {
      "epoch": 0.2710428798305982,
      "grad_norm": 0.016930948942899704,
      "learning_rate": 0.0001125,
      "loss": 10.3476,
      "step": 80
    },
    {
      "epoch": 0.27443091582848067,
      "grad_norm": 0.017117898911237717,
      "learning_rate": 0.00011136072151847529,
      "loss": 10.3474,
      "step": 81
    },
    {
      "epoch": 0.2778189518263632,
      "grad_norm": 0.02053241617977619,
      "learning_rate": 0.00011021036720894179,
      "loss": 10.3467,
      "step": 82
    },
    {
      "epoch": 0.28120698782424564,
      "grad_norm": 0.022768596187233925,
      "learning_rate": 0.00010904928748046599,
      "loss": 10.3479,
      "step": 83
    },
    {
      "epoch": 0.2845950238221281,
      "grad_norm": 0.016698352992534637,
      "learning_rate": 0.0001078778360091808,
      "loss": 10.3468,
      "step": 84
    },
    {
      "epoch": 0.2879830598200106,
      "grad_norm": 0.020199725404381752,
      "learning_rate": 0.00010669636963055245,
      "loss": 10.3473,
      "step": 85
    },
    {
      "epoch": 0.29137109581789306,
      "grad_norm": 0.014243320561945438,
      "learning_rate": 0.00010550524823068502,
      "loss": 10.3472,
      "step": 86
    },
    {
      "epoch": 0.29475913181577557,
      "grad_norm": 0.019790155813097954,
      "learning_rate": 0.00010430483463669551,
      "loss": 10.3472,
      "step": 87
    },
    {
      "epoch": 0.298147167813658,
      "grad_norm": 0.01632939651608467,
      "learning_rate": 0.0001030954945061934,
      "loss": 10.3474,
      "step": 88
    },
    {
      "epoch": 0.3015352038115405,
      "grad_norm": 0.015530891716480255,
      "learning_rate": 0.0001018775962158975,
      "loss": 10.347,
      "step": 89
    },
    {
      "epoch": 0.304923239809423,
      "grad_norm": 0.015950841829180717,
      "learning_rate": 0.00010065151074942516,
      "loss": 10.3475,
      "step": 90
    },
    {
      "epoch": 0.30831127580730544,
      "grad_norm": 0.020643549039959908,
      "learning_rate": 9.941761158428674e-05,
      "loss": 10.347,
      "step": 91
    },
    {
      "epoch": 0.31169931180518795,
      "grad_norm": 0.019012747332453728,
      "learning_rate": 9.817627457812105e-05,
      "loss": 10.3465,
      "step": 92
    },
    {
      "epoch": 0.3150873478030704,
      "grad_norm": 0.014910330064594746,
      "learning_rate": 9.692787785420525e-05,
      "loss": 10.3482,
      "step": 93
    },
    {
      "epoch": 0.31847538380095286,
      "grad_norm": 0.01823591999709606,
      "learning_rate": 9.567280168627493e-05,
      "loss": 10.3467,
      "step": 94
    },
    {
      "epoch": 0.32186341979883537,
      "grad_norm": 0.01734175719320774,
      "learning_rate": 9.441142838268905e-05,
      "loss": 10.3468,
      "step": 95
    },
    {
      "epoch": 0.3252514557967178,
      "grad_norm": 0.021044116467237473,
      "learning_rate": 9.314414216997507e-05,
      "loss": 10.3477,
      "step": 96
    },
    {
      "epoch": 0.32863949179460034,
      "grad_norm": 0.01741754449903965,
      "learning_rate": 9.187132907578987e-05,
      "loss": 10.3468,
      "step": 97
    },
    {
      "epoch": 0.3320275277924828,
      "grad_norm": 0.018304290249943733,
      "learning_rate": 9.059337681133192e-05,
      "loss": 10.3482,
      "step": 98
    },
    {
      "epoch": 0.33541556379036525,
      "grad_norm": 0.017597520723938942,
      "learning_rate": 8.931067465324085e-05,
      "loss": 10.3468,
      "step": 99
    },
    {
      "epoch": 0.33880359978824776,
      "grad_norm": 0.01578284054994583,
      "learning_rate": 8.802361332501978e-05,
      "loss": 10.3476,
      "step": 100
    },
    {
      "epoch": 0.33880359978824776,
      "eval_loss": 10.34636116027832,
      "eval_runtime": 3.8912,
      "eval_samples_per_second": 255.704,
      "eval_steps_per_second": 127.981,
      "step": 100
    },
    {
      "epoch": 0.3421916357861302,
      "grad_norm": 0.02010318450629711,
      "learning_rate": 8.673258487801731e-05,
      "loss": 10.3469,
      "step": 101
    },
    {
      "epoch": 0.3455796717840127,
      "grad_norm": 0.02163493074476719,
      "learning_rate": 8.54379825720049e-05,
      "loss": 10.346,
      "step": 102
    },
    {
      "epoch": 0.3489677077818952,
      "grad_norm": 0.02064266800880432,
      "learning_rate": 8.414020075538605e-05,
      "loss": 10.3461,
      "step": 103
    },
    {
      "epoch": 0.3523557437797777,
      "grad_norm": 0.023022258654236794,
      "learning_rate": 8.2839634745074e-05,
      "loss": 10.3463,
      "step": 104
    },
    {
      "epoch": 0.35574377977766014,
      "grad_norm": 0.017432276159524918,
      "learning_rate": 8.153668070607437e-05,
      "loss": 10.3465,
      "step": 105
    },
    {
      "epoch": 0.3591318157755426,
      "grad_norm": 0.01911778748035431,
      "learning_rate": 8.023173553080938e-05,
      "loss": 10.3459,
      "step": 106
    },
    {
      "epoch": 0.3625198517734251,
      "grad_norm": 0.01831880584359169,
      "learning_rate": 7.89251967182208e-05,
      "loss": 10.3455,
      "step": 107
    },
    {
      "epoch": 0.36590788777130756,
      "grad_norm": 0.018690194934606552,
      "learning_rate": 7.761746225268758e-05,
      "loss": 10.3471,
      "step": 108
    },
    {
      "epoch": 0.36929592376919007,
      "grad_norm": 0.021015219390392303,
      "learning_rate": 7.630893048279627e-05,
      "loss": 10.3453,
      "step": 109
    },
    {
      "epoch": 0.3726839597670725,
      "grad_norm": 0.01983080990612507,
      "learning_rate": 7.5e-05,
      "loss": 10.3462,
      "step": 110
    },
    {
      "epoch": 0.376071995764955,
      "grad_norm": 0.021141625940799713,
      "learning_rate": 7.369106951720373e-05,
      "loss": 10.3457,
      "step": 111
    },
    {
      "epoch": 0.3794600317628375,
      "grad_norm": 0.021793803200125694,
      "learning_rate": 7.238253774731244e-05,
      "loss": 10.3457,
      "step": 112
    },
    {
      "epoch": 0.38284806776071995,
      "grad_norm": 0.019963612779974937,
      "learning_rate": 7.10748032817792e-05,
      "loss": 10.3463,
      "step": 113
    },
    {
      "epoch": 0.38623610375860246,
      "grad_norm": 0.020090965554118156,
      "learning_rate": 6.976826446919059e-05,
      "loss": 10.3458,
      "step": 114
    },
    {
      "epoch": 0.3896241397564849,
      "grad_norm": 0.02083776332437992,
      "learning_rate": 6.846331929392562e-05,
      "loss": 10.3469,
      "step": 115
    },
    {
      "epoch": 0.39301217575436737,
      "grad_norm": 0.01785002276301384,
      "learning_rate": 6.7160365254926e-05,
      "loss": 10.3465,
      "step": 116
    },
    {
      "epoch": 0.3964002117522499,
      "grad_norm": 0.02254386991262436,
      "learning_rate": 6.585979924461394e-05,
      "loss": 10.3452,
      "step": 117
    },
    {
      "epoch": 0.39978824775013233,
      "grad_norm": 0.023428700864315033,
      "learning_rate": 6.45620174279951e-05,
      "loss": 10.3475,
      "step": 118
    },
    {
      "epoch": 0.40317628374801484,
      "grad_norm": 0.01672559231519699,
      "learning_rate": 6.326741512198266e-05,
      "loss": 10.346,
      "step": 119
    },
    {
      "epoch": 0.4065643197458973,
      "grad_norm": 0.020479971542954445,
      "learning_rate": 6.197638667498022e-05,
      "loss": 10.3456,
      "step": 120
    },
    {
      "epoch": 0.40995235574377975,
      "grad_norm": 0.023177366703748703,
      "learning_rate": 6.068932534675913e-05,
      "loss": 10.3449,
      "step": 121
    },
    {
      "epoch": 0.41334039174166226,
      "grad_norm": 0.01870677061378956,
      "learning_rate": 5.9406623188668055e-05,
      "loss": 10.346,
      "step": 122
    },
    {
      "epoch": 0.4167284277395447,
      "grad_norm": 0.0195186547935009,
      "learning_rate": 5.812867092421013e-05,
      "loss": 10.3459,
      "step": 123
    },
    {
      "epoch": 0.4201164637374272,
      "grad_norm": 0.02066197618842125,
      "learning_rate": 5.685585783002493e-05,
      "loss": 10.3451,
      "step": 124
    },
    {
      "epoch": 0.4235044997353097,
      "grad_norm": 0.018614448606967926,
      "learning_rate": 5.558857161731093e-05,
      "loss": 10.3456,
      "step": 125
    },
    {
      "epoch": 0.4268925357331922,
      "grad_norm": 0.02485392615199089,
      "learning_rate": 5.4327198313725064e-05,
      "loss": 10.3456,
      "step": 126
    },
    {
      "epoch": 0.43028057173107465,
      "grad_norm": 0.019080353900790215,
      "learning_rate": 5.307212214579474e-05,
      "loss": 10.3463,
      "step": 127
    },
    {
      "epoch": 0.4336686077289571,
      "grad_norm": 0.02017894946038723,
      "learning_rate": 5.182372542187895e-05,
      "loss": 10.3456,
      "step": 128
    },
    {
      "epoch": 0.4370566437268396,
      "grad_norm": 0.02289474382996559,
      "learning_rate": 5.058238841571326e-05,
      "loss": 10.3443,
      "step": 129
    },
    {
      "epoch": 0.44044467972472207,
      "grad_norm": 0.02067600190639496,
      "learning_rate": 4.934848925057484e-05,
      "loss": 10.3459,
      "step": 130
    },
    {
      "epoch": 0.4438327157226046,
      "grad_norm": 0.02071257308125496,
      "learning_rate": 4.812240378410248e-05,
      "loss": 10.3452,
      "step": 131
    },
    {
      "epoch": 0.44722075172048703,
      "grad_norm": 0.02436411753296852,
      "learning_rate": 4.690450549380659e-05,
      "loss": 10.3449,
      "step": 132
    },
    {
      "epoch": 0.4506087877183695,
      "grad_norm": 0.0237566027790308,
      "learning_rate": 4.569516536330447e-05,
      "loss": 10.3462,
      "step": 133
    },
    {
      "epoch": 0.453996823716252,
      "grad_norm": 0.029039116576313972,
      "learning_rate": 4.449475176931499e-05,
      "loss": 10.3455,
      "step": 134
    },
    {
      "epoch": 0.45738485971413445,
      "grad_norm": 0.021290864795446396,
      "learning_rate": 4.3303630369447554e-05,
      "loss": 10.3446,
      "step": 135
    },
    {
      "epoch": 0.46077289571201696,
      "grad_norm": 0.01688864268362522,
      "learning_rate": 4.212216399081918e-05,
      "loss": 10.3439,
      "step": 136
    },
    {
      "epoch": 0.4641609317098994,
      "grad_norm": 0.026069054380059242,
      "learning_rate": 4.095071251953399e-05,
      "loss": 10.3461,
      "step": 137
    },
    {
      "epoch": 0.46754896770778187,
      "grad_norm": 0.020276112481951714,
      "learning_rate": 3.978963279105821e-05,
      "loss": 10.3445,
      "step": 138
    },
    {
      "epoch": 0.4709370037056644,
      "grad_norm": 0.021585950627923012,
      "learning_rate": 3.863927848152472e-05,
      "loss": 10.3442,
      "step": 139
    },
    {
      "epoch": 0.47432503970354684,
      "grad_norm": 0.025700606405735016,
      "learning_rate": 3.750000000000001e-05,
      "loss": 10.3458,
      "step": 140
    },
    {
      "epoch": 0.47771307570142935,
      "grad_norm": 0.020632240921258926,
      "learning_rate": 3.637214438174593e-05,
      "loss": 10.3446,
      "step": 141
    },
    {
      "epoch": 0.4811011116993118,
      "grad_norm": 0.020391074940562248,
      "learning_rate": 3.525605518250964e-05,
      "loss": 10.3442,
      "step": 142
    },
    {
      "epoch": 0.48448914769719426,
      "grad_norm": 0.021430406719446182,
      "learning_rate": 3.415207237387297e-05,
      "loss": 10.3444,
      "step": 143
    },
    {
      "epoch": 0.48787718369507677,
      "grad_norm": 0.01741032488644123,
      "learning_rate": 3.3060532239693994e-05,
      "loss": 10.3448,
      "step": 144
    },
    {
      "epoch": 0.4912652196929592,
      "grad_norm": 0.026816904544830322,
      "learning_rate": 3.198176727367156e-05,
      "loss": 10.3467,
      "step": 145
    },
    {
      "epoch": 0.49465325569084173,
      "grad_norm": 0.0245627723634243,
      "learning_rate": 3.091610607806452e-05,
      "loss": 10.3448,
      "step": 146
    },
    {
      "epoch": 0.4980412916887242,
      "grad_norm": 0.024026039987802505,
      "learning_rate": 2.986387326359637e-05,
      "loss": 10.3459,
      "step": 147
    },
    {
      "epoch": 0.5014293276866066,
      "grad_norm": 0.023174704983830452,
      "learning_rate": 2.8825389350575624e-05,
      "loss": 10.3454,
      "step": 148
    },
    {
      "epoch": 0.5048173636844892,
      "grad_norm": 0.029328398406505585,
      "learning_rate": 2.78009706712622e-05,
      "loss": 10.3446,
      "step": 149
    },
    {
      "epoch": 0.5082053996823717,
      "grad_norm": 0.021034657955169678,
      "learning_rate": 2.6790929273509545e-05,
      "loss": 10.344,
      "step": 150
    },
    {
      "epoch": 0.5082053996823717,
      "eval_loss": 10.344643592834473,
      "eval_runtime": 3.8713,
      "eval_samples_per_second": 257.02,
      "eval_steps_per_second": 128.639,
      "step": 150
    },
    {
      "epoch": 0.5115934356802541,
      "grad_norm": 0.02339405193924904,
      "learning_rate": 2.579557282571196e-05,
      "loss": 10.3449,
      "step": 151
    },
    {
      "epoch": 0.5149814716781366,
      "grad_norm": 0.02014802023768425,
      "learning_rate": 2.4815204523085654e-05,
      "loss": 10.3447,
      "step": 152
    },
    {
      "epoch": 0.5183695076760191,
      "grad_norm": 0.02366970293223858,
      "learning_rate": 2.385012299531262e-05,
      "loss": 10.3456,
      "step": 153
    },
    {
      "epoch": 0.5217575436739015,
      "grad_norm": 0.01631753146648407,
      "learning_rate": 2.2900622215575197e-05,
      "loss": 10.3453,
      "step": 154
    },
    {
      "epoch": 0.525145579671784,
      "grad_norm": 0.02035635896027088,
      "learning_rate": 2.1966991411008938e-05,
      "loss": 10.3455,
      "step": 155
    },
    {
      "epoch": 0.5285336156696665,
      "grad_norm": 0.02124161832034588,
      "learning_rate": 2.1049514974601175e-05,
      "loss": 10.3443,
      "step": 156
    },
    {
      "epoch": 0.531921651667549,
      "grad_norm": 0.020465832203626633,
      "learning_rate": 2.0148472378562215e-05,
      "loss": 10.3438,
      "step": 157
    },
    {
      "epoch": 0.5353096876654314,
      "grad_norm": 0.022870277985930443,
      "learning_rate": 1.926413808919542e-05,
      "loss": 10.3436,
      "step": 158
    },
    {
      "epoch": 0.5386977236633139,
      "grad_norm": 0.02524011954665184,
      "learning_rate": 1.8396781483292098e-05,
      "loss": 10.3448,
      "step": 159
    },
    {
      "epoch": 0.5420857596611964,
      "grad_norm": 0.019947601482272148,
      "learning_rate": 1.7546666766076655e-05,
      "loss": 10.3449,
      "step": 160
    },
    {
      "epoch": 0.5454737956590788,
      "grad_norm": 0.058847565203905106,
      "learning_rate": 1.671405289072718e-05,
      "loss": 10.3437,
      "step": 161
    },
    {
      "epoch": 0.5488618316569613,
      "grad_norm": 0.022570470348000526,
      "learning_rate": 1.5899193479495857e-05,
      "loss": 10.3451,
      "step": 162
    },
    {
      "epoch": 0.5522498676548439,
      "grad_norm": 0.019783953204751015,
      "learning_rate": 1.5102336746453053e-05,
      "loss": 10.3431,
      "step": 163
    },
    {
      "epoch": 0.5556379036527264,
      "grad_norm": 0.017986372113227844,
      "learning_rate": 1.4323725421878949e-05,
      "loss": 10.344,
      "step": 164
    },
    {
      "epoch": 0.5590259396506088,
      "grad_norm": 0.02159409038722515,
      "learning_rate": 1.3563596678325606e-05,
      "loss": 10.3442,
      "step": 165
    },
    {
      "epoch": 0.5624139756484913,
      "grad_norm": 0.020207397639751434,
      "learning_rate": 1.2822182058371878e-05,
      "loss": 10.344,
      "step": 166
    },
    {
      "epoch": 0.5658020116463738,
      "grad_norm": 0.02228572405874729,
      "learning_rate": 1.2099707404093203e-05,
      "loss": 10.343,
      "step": 167
    },
    {
      "epoch": 0.5691900476442562,
      "grad_norm": 0.02297716587781906,
      "learning_rate": 1.1396392788268052e-05,
      "loss": 10.3455,
      "step": 168
    },
    {
      "epoch": 0.5725780836421387,
      "grad_norm": 0.019231807440519333,
      "learning_rate": 1.0712452447341582e-05,
      "loss": 10.3439,
      "step": 169
    },
    {
      "epoch": 0.5759661196400212,
      "grad_norm": 0.02405407838523388,
      "learning_rate": 1.0048094716167095e-05,
      "loss": 10.344,
      "step": 170
    },
    {
      "epoch": 0.5793541556379036,
      "grad_norm": 0.02616795524954796,
      "learning_rate": 9.40352196454532e-06,
      "loss": 10.3444,
      "step": 171
    },
    {
      "epoch": 0.5827421916357861,
      "grad_norm": 0.01909734308719635,
      "learning_rate": 8.778930535580474e-06,
      "loss": 10.3453,
      "step": 172
    },
    {
      "epoch": 0.5861302276336686,
      "grad_norm": 0.0253478791564703,
      "learning_rate": 8.174510685872415e-06,
      "loss": 10.3448,
      "step": 173
    },
    {
      "epoch": 0.5895182636315511,
      "grad_norm": 0.020116323605179787,
      "learning_rate": 7.5904465275624884e-06,
      "loss": 10.3443,
      "step": 174
    },
    {
      "epoch": 0.5929062996294335,
      "grad_norm": 0.0158366896212101,
      "learning_rate": 7.026915972251254e-06,
      "loss": 10.3452,
      "step": 175
    },
    {
      "epoch": 0.596294335627316,
      "grad_norm": 0.020508933812379837,
      "learning_rate": 6.484090676804926e-06,
      "loss": 10.3454,
      "step": 176
    },
    {
      "epoch": 0.5996823716251986,
      "grad_norm": 0.020298492163419724,
      "learning_rate": 5.962135991066971e-06,
      "loss": 10.344,
      "step": 177
    },
    {
      "epoch": 0.603070407623081,
      "grad_norm": 0.01768598146736622,
      "learning_rate": 5.461210907490951e-06,
      "loss": 10.3442,
      "step": 178
    },
    {
      "epoch": 0.6064584436209635,
      "grad_norm": 0.02038932777941227,
      "learning_rate": 4.981468012709877e-06,
      "loss": 10.3456,
      "step": 179
    },
    {
      "epoch": 0.609846479618846,
      "grad_norm": 0.01764788292348385,
      "learning_rate": 4.523053441056876e-06,
      "loss": 10.3448,
      "step": 180
    },
    {
      "epoch": 0.6132345156167285,
      "grad_norm": 0.023541666567325592,
      "learning_rate": 4.086106830051236e-06,
      "loss": 10.3449,
      "step": 181
    },
    {
      "epoch": 0.6166225516146109,
      "grad_norm": 0.06664617359638214,
      "learning_rate": 3.670761277863485e-06,
      "loss": 10.3473,
      "step": 182
    },
    {
      "epoch": 0.6200105876124934,
      "grad_norm": 0.022008830681443214,
      "learning_rate": 3.277143302772342e-06,
      "loss": 10.345,
      "step": 183
    },
    {
      "epoch": 0.6233986236103759,
      "grad_norm": 0.020388372242450714,
      "learning_rate": 2.9053728046260825e-06,
      "loss": 10.3434,
      "step": 184
    },
    {
      "epoch": 0.6267866596082583,
      "grad_norm": 0.02203419804573059,
      "learning_rate": 2.555563028319885e-06,
      "loss": 10.3441,
      "step": 185
    },
    {
      "epoch": 0.6301746956061408,
      "grad_norm": 0.020321670919656754,
      "learning_rate": 2.227820529300264e-06,
      "loss": 10.3448,
      "step": 186
    },
    {
      "epoch": 0.6335627316040233,
      "grad_norm": 0.0204358771443367,
      "learning_rate": 1.9222451411073645e-06,
      "loss": 10.3445,
      "step": 187
    },
    {
      "epoch": 0.6369507676019057,
      "grad_norm": 0.02741316892206669,
      "learning_rate": 1.6389299449645733e-06,
      "loss": 10.3437,
      "step": 188
    },
    {
      "epoch": 0.6403388035997882,
      "grad_norm": 0.018788231536746025,
      "learning_rate": 1.3779612414252017e-06,
      "loss": 10.3452,
      "step": 189
    },
    {
      "epoch": 0.6437268395976707,
      "grad_norm": 0.020280800759792328,
      "learning_rate": 1.1394185240843983e-06,
      "loss": 10.3459,
      "step": 190
    },
    {
      "epoch": 0.6471148755955533,
      "grad_norm": 0.01888551376760006,
      "learning_rate": 9.233744553646754e-07,
      "loss": 10.3457,
      "step": 191
    },
    {
      "epoch": 0.6505029115934357,
      "grad_norm": 0.020312048494815826,
      "learning_rate": 7.298948443822228e-07,
      "loss": 10.3441,
      "step": 192
    },
    {
      "epoch": 0.6538909475913182,
      "grad_norm": 0.01959800161421299,
      "learning_rate": 5.590386269008512e-07,
      "loss": 10.3434,
      "step": 193
    },
    {
      "epoch": 0.6572789835892007,
      "grad_norm": 0.024537065997719765,
      "learning_rate": 4.108578473795032e-07,
      "loss": 10.3446,
      "step": 194
    },
    {
      "epoch": 0.6606670195870831,
      "grad_norm": 0.020832853391766548,
      "learning_rate": 2.8539764311908407e-07,
      "loss": 10.345,
      "step": 195
    },
    {
      "epoch": 0.6640550555849656,
      "grad_norm": 0.020389897748827934,
      "learning_rate": 1.8269623051318515e-07,
      "loss": 10.3452,
      "step": 196
    },
    {
      "epoch": 0.6674430915828481,
      "grad_norm": 0.018793173134326935,
      "learning_rate": 1.027848934069625e-07,
      "loss": 10.3462,
      "step": 197
    },
    {
      "epoch": 0.6708311275807305,
      "grad_norm": 0.03216562792658806,
      "learning_rate": 4.568797356781784e-08,
      "loss": 10.3447,
      "step": 198
    },
    {
      "epoch": 0.674219163578613,
      "grad_norm": 0.02798965945839882,
      "learning_rate": 1.142286327065478e-08,
      "loss": 10.3446,
      "step": 199
    },
    {
      "epoch": 0.6776071995764955,
      "grad_norm": 0.021245818585157394,
      "learning_rate": 0.0,
      "loss": 10.3461,
      "step": 200
    },
    {
      "epoch": 0.6776071995764955,
      "eval_loss": 10.3442964553833,
      "eval_runtime": 3.9275,
      "eval_samples_per_second": 253.34,
      "eval_steps_per_second": 126.797,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 17,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 85669769379840.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}