{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 209,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004784688995215311,
      "grad_norm": 0.14907115697860718,
      "learning_rate": 2e-05,
      "loss": 1.2491,
      "step": 1
    },
    {
      "epoch": 0.009569377990430622,
      "grad_norm": 0.17295576632022858,
      "learning_rate": 4e-05,
      "loss": 1.4909,
      "step": 2
    },
    {
      "epoch": 0.014354066985645933,
      "grad_norm": 0.18045826256275177,
      "learning_rate": 6e-05,
      "loss": 1.5363,
      "step": 3
    },
    {
      "epoch": 0.019138755980861243,
      "grad_norm": 0.19427408277988434,
      "learning_rate": 8e-05,
      "loss": 1.6397,
      "step": 4
    },
    {
      "epoch": 0.023923444976076555,
      "grad_norm": 0.2216596156358719,
      "learning_rate": 0.0001,
      "loss": 1.7394,
      "step": 5
    },
    {
      "epoch": 0.028708133971291867,
      "grad_norm": 0.20222336053848267,
      "learning_rate": 9.999407114490384e-05,
      "loss": 1.6014,
      "step": 6
    },
    {
      "epoch": 0.03349282296650718,
      "grad_norm": 0.21694432199001312,
      "learning_rate": 9.99762859856683e-05,
      "loss": 1.5539,
      "step": 7
    },
    {
      "epoch": 0.03827751196172249,
      "grad_norm": 0.21403349936008453,
      "learning_rate": 9.994664874011863e-05,
      "loss": 1.4513,
      "step": 8
    },
    {
      "epoch": 0.0430622009569378,
      "grad_norm": 0.22947654128074646,
      "learning_rate": 9.990516643685222e-05,
      "loss": 1.4614,
      "step": 9
    },
    {
      "epoch": 0.04784688995215311,
      "grad_norm": 0.23931479454040527,
      "learning_rate": 9.985184891357164e-05,
      "loss": 1.5114,
      "step": 10
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 0.23815925419330597,
      "learning_rate": 9.978670881475172e-05,
      "loss": 1.5015,
      "step": 11
    },
    {
      "epoch": 0.05741626794258373,
      "grad_norm": 0.25154542922973633,
      "learning_rate": 9.970976158864073e-05,
      "loss": 1.5904,
      "step": 12
    },
    {
      "epoch": 0.06220095693779904,
      "grad_norm": 0.2565719783306122,
      "learning_rate": 9.96210254835968e-05,
      "loss": 1.4701,
      "step": 13
    },
    {
      "epoch": 0.06698564593301436,
      "grad_norm": 0.2682940363883972,
      "learning_rate": 9.952052154376026e-05,
      "loss": 1.5519,
      "step": 14
    },
    {
      "epoch": 0.07177033492822966,
      "grad_norm": 0.26406288146972656,
      "learning_rate": 9.940827360406297e-05,
      "loss": 1.405,
      "step": 15
    },
    {
      "epoch": 0.07655502392344497,
      "grad_norm": 0.29123812913894653,
      "learning_rate": 9.928430828457572e-05,
      "loss": 1.3998,
      "step": 16
    },
    {
      "epoch": 0.08133971291866028,
      "grad_norm": 0.3094870448112488,
      "learning_rate": 9.91486549841951e-05,
      "loss": 1.6672,
      "step": 17
    },
    {
      "epoch": 0.0861244019138756,
      "grad_norm": 0.2948465049266815,
      "learning_rate": 9.90013458736716e-05,
      "loss": 1.5797,
      "step": 18
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 0.29864171147346497,
      "learning_rate": 9.884241588798005e-05,
      "loss": 1.4558,
      "step": 19
    },
    {
      "epoch": 0.09569377990430622,
      "grad_norm": 0.31656384468078613,
      "learning_rate": 9.867190271803465e-05,
      "loss": 1.4255,
      "step": 20
    },
    {
      "epoch": 0.10047846889952153,
      "grad_norm": 0.33296871185302734,
      "learning_rate": 9.848984680175049e-05,
      "loss": 1.5504,
      "step": 21
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 0.3384315073490143,
      "learning_rate": 9.829629131445342e-05,
      "loss": 1.5032,
      "step": 22
    },
    {
      "epoch": 0.11004784688995216,
      "grad_norm": 0.3654746413230896,
      "learning_rate": 9.809128215864097e-05,
      "loss": 1.7055,
      "step": 23
    },
    {
      "epoch": 0.11483253588516747,
      "grad_norm": 0.3699825406074524,
      "learning_rate": 9.787486795309621e-05,
      "loss": 1.7864,
      "step": 24
    },
    {
      "epoch": 0.11961722488038277,
      "grad_norm": 0.38653433322906494,
      "learning_rate": 9.764710002135784e-05,
      "loss": 1.5302,
      "step": 25
    },
    {
      "epoch": 0.12440191387559808,
      "grad_norm": 0.39083021879196167,
      "learning_rate": 9.74080323795483e-05,
      "loss": 1.687,
      "step": 26
    },
    {
      "epoch": 0.1291866028708134,
      "grad_norm": 0.4041406810283661,
      "learning_rate": 9.715772172356388e-05,
      "loss": 1.7685,
      "step": 27
    },
    {
      "epoch": 0.1339712918660287,
      "grad_norm": 0.43206948041915894,
      "learning_rate": 9.689622741562892e-05,
      "loss": 1.6886,
      "step": 28
    },
    {
      "epoch": 0.13875598086124402,
      "grad_norm": 0.4564664959907532,
      "learning_rate": 9.662361147021779e-05,
      "loss": 1.7957,
      "step": 29
    },
    {
      "epoch": 0.14354066985645933,
      "grad_norm": 0.4699450135231018,
      "learning_rate": 9.633993853934803e-05,
      "loss": 2.04,
      "step": 30
    },
    {
      "epoch": 0.14832535885167464,
      "grad_norm": 0.4722703695297241,
      "learning_rate": 9.60452758972477e-05,
      "loss": 1.9598,
      "step": 31
    },
    {
      "epoch": 0.15311004784688995,
      "grad_norm": 0.4768441319465637,
      "learning_rate": 9.573969342440106e-05,
      "loss": 1.7342,
      "step": 32
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 0.5047369599342346,
      "learning_rate": 9.542326359097619e-05,
      "loss": 1.7313,
      "step": 33
    },
    {
      "epoch": 0.16267942583732056,
      "grad_norm": 0.49944713711738586,
      "learning_rate": 9.509606143963832e-05,
      "loss": 1.8511,
      "step": 34
    },
    {
      "epoch": 0.1674641148325359,
      "grad_norm": 0.5391089916229248,
      "learning_rate": 9.475816456775313e-05,
      "loss": 1.9546,
      "step": 35
    },
    {
      "epoch": 0.1722488038277512,
      "grad_norm": 0.584690272808075,
      "learning_rate": 9.440965310898424e-05,
      "loss": 1.9743,
      "step": 36
    },
    {
      "epoch": 0.17703349282296652,
      "grad_norm": 0.6214085817337036,
      "learning_rate": 9.405060971428923e-05,
      "loss": 1.9413,
      "step": 37
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.5669549703598022,
      "learning_rate": 9.368111953231848e-05,
      "loss": 1.9106,
      "step": 38
    },
    {
      "epoch": 0.18660287081339713,
      "grad_norm": 0.6074210405349731,
      "learning_rate": 9.330127018922194e-05,
      "loss": 1.9595,
      "step": 39
    },
    {
      "epoch": 0.19138755980861244,
      "grad_norm": 0.6308673620223999,
      "learning_rate": 9.291115176786814e-05,
      "loss": 2.0112,
      "step": 40
    },
    {
      "epoch": 0.19617224880382775,
      "grad_norm": 0.6849383115768433,
      "learning_rate": 9.251085678648072e-05,
      "loss": 1.9215,
      "step": 41
    },
    {
      "epoch": 0.20095693779904306,
      "grad_norm": 0.8266085386276245,
      "learning_rate": 9.210048017669726e-05,
      "loss": 2.0382,
      "step": 42
    },
    {
      "epoch": 0.20574162679425836,
      "grad_norm": 0.7204662561416626,
      "learning_rate": 9.168011926105598e-05,
      "loss": 1.9057,
      "step": 43
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.8284960389137268,
      "learning_rate": 9.124987372991511e-05,
      "loss": 1.9009,
      "step": 44
    },
    {
      "epoch": 0.215311004784689,
      "grad_norm": 0.8978604674339294,
      "learning_rate": 9.08098456178111e-05,
      "loss": 2.0893,
      "step": 45
    },
    {
      "epoch": 0.22009569377990432,
      "grad_norm": 1.1396929025650024,
      "learning_rate": 9.036013927926048e-05,
      "loss": 2.2963,
      "step": 46
    },
    {
      "epoch": 0.22488038277511962,
      "grad_norm": 1.2800486087799072,
      "learning_rate": 8.9900861364012e-05,
      "loss": 2.0588,
      "step": 47
    },
    {
      "epoch": 0.22966507177033493,
      "grad_norm": 1.6371203660964966,
      "learning_rate": 8.943212079175391e-05,
      "loss": 1.8706,
      "step": 48
    },
    {
      "epoch": 0.23444976076555024,
      "grad_norm": 2.216677188873291,
      "learning_rate": 8.895402872628352e-05,
      "loss": 2.0251,
      "step": 49
    },
    {
      "epoch": 0.23923444976076555,
      "grad_norm": 3.7424046993255615,
      "learning_rate": 8.846669854914396e-05,
      "loss": 2.6344,
      "step": 50
    },
    {
      "epoch": 0.24401913875598086,
      "grad_norm": 0.3241454064846039,
      "learning_rate": 8.797024583273537e-05,
      "loss": 1.562,
      "step": 51
    },
    {
      "epoch": 0.24880382775119617,
      "grad_norm": 0.36598026752471924,
      "learning_rate": 8.746478831290648e-05,
      "loss": 1.6533,
      "step": 52
    },
    {
      "epoch": 0.2535885167464115,
      "grad_norm": 0.4164574444293976,
      "learning_rate": 8.695044586103296e-05,
      "loss": 1.5149,
      "step": 53
    },
    {
      "epoch": 0.2583732057416268,
      "grad_norm": 0.4113615155220032,
      "learning_rate": 8.642734045558952e-05,
      "loss": 1.5673,
      "step": 54
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 0.39313098788261414,
      "learning_rate": 8.58955961532221e-05,
      "loss": 1.6382,
      "step": 55
    },
    {
      "epoch": 0.2679425837320574,
      "grad_norm": 0.3647348880767822,
      "learning_rate": 8.535533905932738e-05,
      "loss": 1.6045,
      "step": 56
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.35326749086380005,
      "learning_rate": 8.480669729814635e-05,
      "loss": 1.3358,
      "step": 57
    },
    {
      "epoch": 0.27751196172248804,
      "grad_norm": 0.3498622477054596,
      "learning_rate": 8.424980098237903e-05,
      "loss": 1.5901,
      "step": 58
    },
    {
      "epoch": 0.2822966507177033,
      "grad_norm": 0.34479665756225586,
      "learning_rate": 8.368478218232787e-05,
      "loss": 1.5684,
      "step": 59
    },
    {
      "epoch": 0.28708133971291866,
      "grad_norm": 0.30399441719055176,
      "learning_rate": 8.311177489457652e-05,
      "loss": 1.6434,
      "step": 60
    },
    {
      "epoch": 0.291866028708134,
      "grad_norm": 0.3355162739753723,
      "learning_rate": 8.25309150102121e-05,
      "loss": 1.6636,
      "step": 61
    },
    {
      "epoch": 0.2966507177033493,
      "grad_norm": 0.2852919101715088,
      "learning_rate": 8.194234028259806e-05,
      "loss": 1.4544,
      "step": 62
    },
    {
      "epoch": 0.3014354066985646,
      "grad_norm": 0.3010583221912384,
      "learning_rate": 8.134619029470534e-05,
      "loss": 1.5694,
      "step": 63
    },
    {
      "epoch": 0.3062200956937799,
      "grad_norm": 0.3184051811695099,
      "learning_rate": 8.074260642600964e-05,
      "loss": 1.4966,
      "step": 64
    },
    {
      "epoch": 0.31100478468899523,
      "grad_norm": 0.30305036902427673,
      "learning_rate": 8.013173181896283e-05,
      "loss": 1.5011,
      "step": 65
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 0.307675302028656,
      "learning_rate": 7.951371134504599e-05,
      "loss": 1.3939,
      "step": 66
    },
    {
      "epoch": 0.32057416267942584,
      "grad_norm": 0.3339138627052307,
      "learning_rate": 7.888869157041257e-05,
      "loss": 1.6544,
      "step": 67
    },
    {
      "epoch": 0.3253588516746411,
      "grad_norm": 0.3280654549598694,
      "learning_rate": 7.82568207211296e-05,
      "loss": 1.4717,
      "step": 68
    },
    {
      "epoch": 0.33014354066985646,
      "grad_norm": 0.3183118999004364,
      "learning_rate": 7.76182486480253e-05,
      "loss": 1.497,
      "step": 69
    },
    {
      "epoch": 0.3349282296650718,
      "grad_norm": 0.31861793994903564,
      "learning_rate": 7.697312679115125e-05,
      "loss": 1.5132,
      "step": 70
    },
    {
      "epoch": 0.3397129186602871,
      "grad_norm": 0.346038818359375,
      "learning_rate": 7.63216081438678e-05,
      "loss": 1.4482,
      "step": 71
    },
    {
      "epoch": 0.3444976076555024,
      "grad_norm": 0.3373595178127289,
      "learning_rate": 7.566384721656104e-05,
      "loss": 1.5695,
      "step": 72
    },
    {
      "epoch": 0.3492822966507177,
      "grad_norm": 0.35197165608406067,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.5399,
      "step": 73
    },
    {
      "epoch": 0.35406698564593303,
      "grad_norm": 0.3390144109725952,
      "learning_rate": 7.433022392834282e-05,
      "loss": 1.4598,
      "step": 74
    },
    {
      "epoch": 0.3588516746411483,
      "grad_norm": 0.36730819940567017,
      "learning_rate": 7.365467784180051e-05,
      "loss": 1.6108,
      "step": 75
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.37005236744880676,
      "learning_rate": 7.297352194896739e-05,
      "loss": 1.534,
      "step": 76
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 0.44057509303092957,
      "learning_rate": 7.228691778882693e-05,
      "loss": 1.6591,
      "step": 77
    },
    {
      "epoch": 0.37320574162679426,
      "grad_norm": 0.35913902521133423,
      "learning_rate": 7.159502819244206e-05,
      "loss": 1.5481,
      "step": 78
    },
    {
      "epoch": 0.37799043062200954,
      "grad_norm": 0.4454534947872162,
      "learning_rate": 7.089801724433917e-05,
      "loss": 1.8177,
      "step": 79
    },
    {
      "epoch": 0.3827751196172249,
      "grad_norm": 0.40797775983810425,
      "learning_rate": 7.019605024359474e-05,
      "loss": 1.7416,
      "step": 80
    },
    {
      "epoch": 0.3875598086124402,
      "grad_norm": 0.44979557394981384,
      "learning_rate": 6.948929366463396e-05,
      "loss": 1.8288,
      "step": 81
    },
    {
      "epoch": 0.3923444976076555,
      "grad_norm": 0.47842079401016235,
      "learning_rate": 6.877791511775063e-05,
      "loss": 1.6143,
      "step": 82
    },
    {
      "epoch": 0.39712918660287083,
      "grad_norm": 0.46703702211380005,
      "learning_rate": 6.806208330935766e-05,
      "loss": 1.7399,
      "step": 83
    },
    {
      "epoch": 0.4019138755980861,
      "grad_norm": 0.5180630087852478,
      "learning_rate": 6.734196800197762e-05,
      "loss": 1.8192,
      "step": 84
    },
    {
      "epoch": 0.40669856459330145,
      "grad_norm": 0.559816837310791,
      "learning_rate": 6.661773997398298e-05,
      "loss": 2.0921,
      "step": 85
    },
    {
      "epoch": 0.41148325358851673,
      "grad_norm": 0.4981471300125122,
      "learning_rate": 6.588957097909508e-05,
      "loss": 1.877,
      "step": 86
    },
    {
      "epoch": 0.41626794258373206,
      "grad_norm": 0.5296308398246765,
      "learning_rate": 6.515763370565218e-05,
      "loss": 1.8035,
      "step": 87
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.580813467502594,
      "learning_rate": 6.442210173565561e-05,
      "loss": 2.0139,
      "step": 88
    },
    {
      "epoch": 0.4258373205741627,
      "grad_norm": 0.5659003257751465,
      "learning_rate": 6.368314950360415e-05,
      "loss": 1.9686,
      "step": 89
    },
    {
      "epoch": 0.430622009569378,
      "grad_norm": 0.6458680629730225,
      "learning_rate": 6.294095225512603e-05,
      "loss": 1.7827,
      "step": 90
    },
    {
      "epoch": 0.4354066985645933,
      "grad_norm": 0.6537598967552185,
      "learning_rate": 6.219568600541886e-05,
      "loss": 1.8706,
      "step": 91
    },
    {
      "epoch": 0.44019138755980863,
      "grad_norm": 0.7065061330795288,
      "learning_rate": 6.14475274975067e-05,
      "loss": 2.0038,
      "step": 92
    },
    {
      "epoch": 0.4449760765550239,
      "grad_norm": 0.7320758700370789,
      "learning_rate": 6.069665416032487e-05,
      "loss": 2.0265,
      "step": 93
    },
    {
      "epoch": 0.44976076555023925,
      "grad_norm": 0.8753424882888794,
      "learning_rate": 5.9943244066641834e-05,
      "loss": 2.1909,
      "step": 94
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.988920271396637,
      "learning_rate": 5.918747589082853e-05,
      "loss": 2.1724,
      "step": 95
    },
    {
      "epoch": 0.45933014354066987,
      "grad_norm": 1.0871882438659668,
      "learning_rate": 5.842952886648496e-05,
      "loss": 2.1458,
      "step": 96
    },
    {
      "epoch": 0.46411483253588515,
      "grad_norm": 1.315832257270813,
      "learning_rate": 5.7669582743934284e-05,
      "loss": 2.0194,
      "step": 97
    },
    {
      "epoch": 0.4688995215311005,
      "grad_norm": 1.2786567211151123,
      "learning_rate": 5.6907817747594116e-05,
      "loss": 1.5575,
      "step": 98
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 2.389514923095703,
      "learning_rate": 5.614441453323571e-05,
      "loss": 2.0557,
      "step": 99
    },
    {
      "epoch": 0.4784688995215311,
      "grad_norm": 4.508440971374512,
      "learning_rate": 5.5379554145140574e-05,
      "loss": 2.1291,
      "step": 100
    },
    {
      "epoch": 0.48325358851674644,
      "grad_norm": 0.2242479771375656,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 1.4759,
      "step": 101
    },
    {
      "epoch": 0.4880382775119617,
      "grad_norm": 0.24106784164905548,
      "learning_rate": 5.38461877097232e-05,
      "loss": 1.3887,
      "step": 102
    },
    {
      "epoch": 0.49282296650717705,
      "grad_norm": 0.26973956823349,
      "learning_rate": 5.307804530669716e-05,
      "loss": 1.5937,
      "step": 103
    },
    {
      "epoch": 0.49760765550239233,
      "grad_norm": 0.30369481444358826,
      "learning_rate": 5.230917293228699e-05,
      "loss": 1.723,
      "step": 104
    },
    {
      "epoch": 0.5023923444976076,
      "grad_norm": 0.3147946000099182,
      "learning_rate": 5.153975292780853e-05,
      "loss": 1.4143,
      "step": 105
    },
    {
      "epoch": 0.507177033492823,
      "grad_norm": 0.3020610213279724,
      "learning_rate": 5.0769967764450345e-05,
      "loss": 1.4558,
      "step": 106
    },
    {
      "epoch": 0.5119617224880383,
      "grad_norm": 0.33380070328712463,
      "learning_rate": 5e-05,
      "loss": 1.4098,
      "step": 107
    },
    {
      "epoch": 0.5167464114832536,
      "grad_norm": 0.2972123324871063,
      "learning_rate": 4.9230032235549667e-05,
      "loss": 1.5126,
      "step": 108
    },
    {
      "epoch": 0.5215311004784688,
      "grad_norm": 0.3250975012779236,
      "learning_rate": 4.8460247072191496e-05,
      "loss": 1.5187,
      "step": 109
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.3138869106769562,
      "learning_rate": 4.7690827067713035e-05,
      "loss": 1.4622,
      "step": 110
    },
    {
      "epoch": 0.5311004784688995,
      "grad_norm": 0.31304407119750977,
      "learning_rate": 4.692195469330286e-05,
      "loss": 1.5879,
      "step": 111
    },
    {
      "epoch": 0.5358851674641149,
      "grad_norm": 0.3148694932460785,
      "learning_rate": 4.6153812290276813e-05,
      "loss": 1.4633,
      "step": 112
    },
    {
      "epoch": 0.5406698564593302,
      "grad_norm": 0.31697309017181396,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 1.6007,
      "step": 113
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.3104463517665863,
      "learning_rate": 4.462044585485944e-05,
      "loss": 1.5146,
      "step": 114
    },
    {
      "epoch": 0.5502392344497608,
      "grad_norm": 0.3110423684120178,
      "learning_rate": 4.38555854667643e-05,
      "loss": 1.3926,
      "step": 115
    },
    {
      "epoch": 0.5550239234449761,
      "grad_norm": 0.3278864622116089,
      "learning_rate": 4.30921822524059e-05,
      "loss": 1.5483,
      "step": 116
    },
    {
      "epoch": 0.5598086124401914,
      "grad_norm": 0.31767237186431885,
      "learning_rate": 4.233041725606572e-05,
      "loss": 1.4713,
      "step": 117
    },
    {
      "epoch": 0.5645933014354066,
      "grad_norm": 0.3416193425655365,
      "learning_rate": 4.157047113351504e-05,
      "loss": 1.4755,
      "step": 118
    },
    {
      "epoch": 0.569377990430622,
      "grad_norm": 0.34195205569267273,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 1.3458,
      "step": 119
    },
    {
      "epoch": 0.5741626794258373,
      "grad_norm": 0.34631067514419556,
      "learning_rate": 4.0056755933358184e-05,
      "loss": 1.6436,
      "step": 120
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 0.3526613116264343,
      "learning_rate": 3.930334583967514e-05,
      "loss": 1.69,
      "step": 121
    },
    {
      "epoch": 0.583732057416268,
      "grad_norm": 0.36171069741249084,
      "learning_rate": 3.855247250249331e-05,
      "loss": 1.4521,
      "step": 122
    },
    {
      "epoch": 0.5885167464114832,
      "grad_norm": 0.3751888871192932,
      "learning_rate": 3.780431399458114e-05,
      "loss": 1.7037,
      "step": 123
    },
    {
      "epoch": 0.5933014354066986,
      "grad_norm": 0.38322314620018005,
      "learning_rate": 3.705904774487396e-05,
      "loss": 1.5647,
      "step": 124
    },
    {
      "epoch": 0.5980861244019139,
      "grad_norm": 0.3606765568256378,
      "learning_rate": 3.631685049639586e-05,
      "loss": 1.6243,
      "step": 125
    },
    {
      "epoch": 0.6028708133971292,
      "grad_norm": 0.39295509457588196,
      "learning_rate": 3.557789826434439e-05,
      "loss": 1.6738,
      "step": 126
    },
    {
      "epoch": 0.6076555023923444,
      "grad_norm": 0.38839149475097656,
      "learning_rate": 3.484236629434783e-05,
      "loss": 1.5688,
      "step": 127
    },
    {
      "epoch": 0.6124401913875598,
      "grad_norm": 0.39863213896751404,
      "learning_rate": 3.411042902090492e-05,
      "loss": 1.5511,
      "step": 128
    },
    {
      "epoch": 0.6172248803827751,
      "grad_norm": 0.40311485528945923,
      "learning_rate": 3.338226002601703e-05,
      "loss": 1.7209,
      "step": 129
    },
    {
      "epoch": 0.6220095693779905,
      "grad_norm": 0.46205297112464905,
      "learning_rate": 3.265803199802237e-05,
      "loss": 1.8635,
      "step": 130
    },
    {
      "epoch": 0.6267942583732058,
      "grad_norm": 0.48374590277671814,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 1.903,
      "step": 131
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.4481237232685089,
      "learning_rate": 3.1222084882249375e-05,
      "loss": 1.6987,
      "step": 132
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 0.4824537932872772,
      "learning_rate": 3.0510706335366035e-05,
      "loss": 1.8271,
      "step": 133
    },
    {
      "epoch": 0.6411483253588517,
      "grad_norm": 0.4872766435146332,
      "learning_rate": 2.980394975640526e-05,
      "loss": 1.7831,
      "step": 134
    },
    {
      "epoch": 0.645933014354067,
      "grad_norm": 0.5188060402870178,
      "learning_rate": 2.910198275566085e-05,
      "loss": 1.7229,
      "step": 135
    },
    {
      "epoch": 0.6507177033492823,
      "grad_norm": 0.5814325213432312,
      "learning_rate": 2.8404971807557957e-05,
      "loss": 1.6868,
      "step": 136
    },
    {
      "epoch": 0.6555023923444976,
      "grad_norm": 0.5563578605651855,
      "learning_rate": 2.771308221117309e-05,
      "loss": 1.9105,
      "step": 137
    },
    {
      "epoch": 0.6602870813397129,
      "grad_norm": 0.7155702114105225,
      "learning_rate": 2.7026478051032623e-05,
      "loss": 2.0886,
      "step": 138
    },
    {
      "epoch": 0.6650717703349283,
      "grad_norm": 0.6362230181694031,
      "learning_rate": 2.6345322158199503e-05,
      "loss": 1.8675,
      "step": 139
    },
    {
      "epoch": 0.6698564593301436,
      "grad_norm": 0.7237346172332764,
      "learning_rate": 2.5669776071657192e-05,
      "loss": 1.9425,
      "step": 140
    },
    {
      "epoch": 0.6746411483253588,
      "grad_norm": 0.6796498894691467,
      "learning_rate": 2.500000000000001e-05,
      "loss": 1.7823,
      "step": 141
    },
    {
      "epoch": 0.6794258373205742,
      "grad_norm": 0.9227808117866516,
      "learning_rate": 2.4336152783438982e-05,
      "loss": 2.08,
      "step": 142
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 0.8645951747894287,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 2.072,
      "step": 143
    },
    {
      "epoch": 0.6889952153110048,
      "grad_norm": 0.9071637988090515,
      "learning_rate": 2.302687320884876e-05,
      "loss": 1.9359,
      "step": 144
    },
    {
      "epoch": 0.69377990430622,
      "grad_norm": 0.9696687459945679,
      "learning_rate": 2.238175135197471e-05,
      "loss": 1.9997,
      "step": 145
    },
    {
      "epoch": 0.6985645933014354,
      "grad_norm": 1.3215420246124268,
      "learning_rate": 2.1743179278870407e-05,
      "loss": 2.4706,
      "step": 146
    },
    {
      "epoch": 0.7033492822966507,
      "grad_norm": 1.3039065599441528,
      "learning_rate": 2.1111308429587444e-05,
      "loss": 1.8514,
      "step": 147
    },
    {
      "epoch": 0.7081339712918661,
      "grad_norm": 1.5614275932312012,
      "learning_rate": 2.0486288654954028e-05,
      "loss": 1.6851,
      "step": 148
    },
    {
      "epoch": 0.7129186602870813,
      "grad_norm": 1.524907112121582,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 1.4203,
      "step": 149
    },
    {
      "epoch": 0.7177033492822966,
      "grad_norm": 2.6234681606292725,
      "learning_rate": 1.925739357399038e-05,
      "loss": 1.613,
      "step": 150
    },
    {
      "epoch": 0.722488038277512,
      "grad_norm": 0.20270580053329468,
      "learning_rate": 1.8653809705294688e-05,
      "loss": 1.5107,
      "step": 151
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.2348601371049881,
      "learning_rate": 1.8057659717401947e-05,
      "loss": 1.4045,
      "step": 152
    },
    {
      "epoch": 0.7320574162679426,
      "grad_norm": 0.22996756434440613,
      "learning_rate": 1.746908498978791e-05,
      "loss": 1.4323,
      "step": 153
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.22488181293010712,
      "learning_rate": 1.6888225105423507e-05,
      "loss": 1.3006,
      "step": 154
    },
    {
      "epoch": 0.7416267942583732,
      "grad_norm": 0.2689021825790405,
      "learning_rate": 1.631521781767214e-05,
      "loss": 1.5859,
      "step": 155
    },
    {
      "epoch": 0.7464114832535885,
      "grad_norm": 0.24600256979465485,
      "learning_rate": 1.575019901762097e-05,
      "loss": 1.3297,
      "step": 156
    },
    {
      "epoch": 0.7511961722488039,
      "grad_norm": 0.2761135995388031,
      "learning_rate": 1.5193302701853673e-05,
      "loss": 1.3502,
      "step": 157
    },
    {
      "epoch": 0.7559808612440191,
      "grad_norm": 0.2814018130302429,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 1.5272,
      "step": 158
    },
    {
      "epoch": 0.7607655502392344,
      "grad_norm": 0.29007115960121155,
      "learning_rate": 1.4104403846777909e-05,
      "loss": 1.4903,
      "step": 159
    },
    {
      "epoch": 0.7655502392344498,
      "grad_norm": 0.2952757477760315,
      "learning_rate": 1.3572659544410494e-05,
      "loss": 1.5378,
      "step": 160
    },
    {
      "epoch": 0.7703349282296651,
      "grad_norm": 0.311739057302475,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 1.5632,
      "step": 161
    },
    {
      "epoch": 0.7751196172248804,
      "grad_norm": 0.32148003578186035,
      "learning_rate": 1.2535211687093535e-05,
      "loss": 1.5378,
      "step": 162
    },
    {
      "epoch": 0.7799043062200957,
      "grad_norm": 0.326665461063385,
      "learning_rate": 1.202975416726464e-05,
      "loss": 1.5604,
      "step": 163
    },
    {
      "epoch": 0.784688995215311,
      "grad_norm": 0.33070483803749084,
      "learning_rate": 1.1533301450856054e-05,
      "loss": 1.4631,
      "step": 164
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.34155699610710144,
      "learning_rate": 1.1045971273716477e-05,
      "loss": 1.53,
      "step": 165
    },
    {
      "epoch": 0.7942583732057417,
      "grad_norm": 0.3466791808605194,
      "learning_rate": 1.0567879208246084e-05,
      "loss": 1.5503,
      "step": 166
    },
    {
      "epoch": 0.7990430622009569,
      "grad_norm": 0.3450649082660675,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 1.5562,
      "step": 167
    },
    {
      "epoch": 0.8038277511961722,
      "grad_norm": 0.3394903242588043,
      "learning_rate": 9.639860720739525e-06,
      "loss": 1.3966,
      "step": 168
    },
    {
      "epoch": 0.8086124401913876,
      "grad_norm": 0.34013882279396057,
      "learning_rate": 9.190154382188921e-06,
      "loss": 1.4578,
      "step": 169
    },
    {
      "epoch": 0.8133971291866029,
      "grad_norm": 0.40502288937568665,
      "learning_rate": 8.75012627008489e-06,
      "loss": 1.4135,
      "step": 170
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.3667527437210083,
      "learning_rate": 8.31988073894403e-06,
      "loss": 1.539,
      "step": 171
    },
    {
      "epoch": 0.8229665071770335,
      "grad_norm": 0.39868226647377014,
      "learning_rate": 7.899519823302743e-06,
      "loss": 1.7387,
      "step": 172
    },
    {
      "epoch": 0.8277511961722488,
      "grad_norm": 0.3658325672149658,
      "learning_rate": 7.489143213519301e-06,
      "loss": 1.5761,
      "step": 173
    },
    {
      "epoch": 0.8325358851674641,
      "grad_norm": 0.3974853456020355,
      "learning_rate": 7.088848232131861e-06,
      "loss": 1.5148,
      "step": 174
    },
    {
      "epoch": 0.8373205741626795,
      "grad_norm": 0.4094543755054474,
      "learning_rate": 6.698729810778065e-06,
      "loss": 1.5331,
      "step": 175
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.4123484790325165,
      "learning_rate": 6.318880467681526e-06,
      "loss": 1.7296,
      "step": 176
    },
    {
      "epoch": 0.84688995215311,
      "grad_norm": 0.44035616517066956,
      "learning_rate": 5.949390285710776e-06,
      "loss": 1.6646,
      "step": 177
    },
    {
      "epoch": 0.8516746411483254,
      "grad_norm": 0.40625545382499695,
      "learning_rate": 5.590346891015758e-06,
      "loss": 1.5258,
      "step": 178
    },
    {
      "epoch": 0.8564593301435407,
      "grad_norm": 0.5013072490692139,
      "learning_rate": 5.241835432246889e-06,
      "loss": 1.8122,
      "step": 179
    },
    {
      "epoch": 0.861244019138756,
      "grad_norm": 0.4660516083240509,
      "learning_rate": 4.903938560361698e-06,
      "loss": 1.5325,
      "step": 180
    },
    {
      "epoch": 0.8660287081339713,
      "grad_norm": 0.48506811261177063,
      "learning_rate": 4.576736409023813e-06,
      "loss": 1.7429,
      "step": 181
    },
    {
      "epoch": 0.8708133971291866,
      "grad_norm": 0.5250177979469299,
      "learning_rate": 4.260306575598949e-06,
      "loss": 1.6214,
      "step": 182
    },
    {
      "epoch": 0.8755980861244019,
      "grad_norm": 0.5322927236557007,
      "learning_rate": 3.954724102752316e-06,
      "loss": 1.732,
      "step": 183
    },
    {
      "epoch": 0.8803827751196173,
      "grad_norm": 0.5320829153060913,
      "learning_rate": 3.660061460651981e-06,
      "loss": 1.8157,
      "step": 184
    },
    {
      "epoch": 0.8851674641148325,
      "grad_norm": 0.5766329169273376,
      "learning_rate": 3.376388529782215e-06,
      "loss": 1.9319,
      "step": 185
    },
    {
      "epoch": 0.8899521531100478,
      "grad_norm": 0.5621324777603149,
      "learning_rate": 3.1037725843711062e-06,
      "loss": 1.7481,
      "step": 186
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 0.654734194278717,
      "learning_rate": 2.842278276436128e-06,
      "loss": 2.079,
      "step": 187
    },
    {
      "epoch": 0.8995215311004785,
      "grad_norm": 0.6522572636604309,
      "learning_rate": 2.591967620451707e-06,
      "loss": 1.8302,
      "step": 188
    },
    {
      "epoch": 0.9043062200956937,
      "grad_norm": 0.7326610088348389,
      "learning_rate": 2.3528999786421756e-06,
      "loss": 1.9044,
      "step": 189
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.7005221247673035,
      "learning_rate": 2.1251320469037827e-06,
      "loss": 1.9704,
      "step": 190
    },
    {
      "epoch": 0.9138755980861244,
      "grad_norm": 0.6769828200340271,
      "learning_rate": 1.908717841359048e-06,
      "loss": 1.7863,
      "step": 191
    },
    {
      "epoch": 0.9186602870813397,
      "grad_norm": 0.7701701521873474,
      "learning_rate": 1.70370868554659e-06,
      "loss": 1.8417,
      "step": 192
    },
    {
      "epoch": 0.9234449760765551,
      "grad_norm": 0.781731903553009,
      "learning_rate": 1.5101531982495308e-06,
      "loss": 1.7861,
      "step": 193
    },
    {
      "epoch": 0.9282296650717703,
      "grad_norm": 0.9508692026138306,
      "learning_rate": 1.328097281965357e-06,
      "loss": 1.975,
      "step": 194
    },
    {
      "epoch": 0.9330143540669856,
      "grad_norm": 0.9699769616127014,
      "learning_rate": 1.157584112019966e-06,
      "loss": 2.0208,
      "step": 195
    },
    {
      "epoch": 0.937799043062201,
      "grad_norm": 1.1134250164031982,
      "learning_rate": 9.986541263284077e-07,
      "loss": 1.7791,
      "step": 196
    },
    {
      "epoch": 0.9425837320574163,
      "grad_norm": 1.3487857580184937,
      "learning_rate": 8.513450158049108e-07,
      "loss": 1.9681,
      "step": 197
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 1.709845781326294,
      "learning_rate": 7.156917154243048e-07,
      "loss": 1.983,
      "step": 198
    },
    {
      "epoch": 0.9521531100478469,
      "grad_norm": 2.0014631748199463,
      "learning_rate": 5.917263959370312e-07,
      "loss": 2.0637,
      "step": 199
    },
    {
      "epoch": 0.9569377990430622,
      "grad_norm": 2.2905828952789307,
      "learning_rate": 4.794784562397458e-07,
      "loss": 1.5265,
      "step": 200
    },
    {
      "epoch": 0.9617224880382775,
      "grad_norm": 0.2522173821926117,
      "learning_rate": 3.7897451640321323e-07,
      "loss": 1.4622,
      "step": 201
    },
    {
      "epoch": 0.9665071770334929,
      "grad_norm": 0.2915673851966858,
      "learning_rate": 2.902384113592782e-07,
      "loss": 1.7226,
      "step": 202
    },
    {
      "epoch": 0.9712918660287081,
      "grad_norm": 0.31829988956451416,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 1.444,
      "step": 203
    },
    {
      "epoch": 0.9760765550239234,
      "grad_norm": 0.35566747188568115,
      "learning_rate": 1.481510864283553e-07,
      "loss": 1.6455,
      "step": 204
    },
    {
      "epoch": 0.9808612440191388,
      "grad_norm": 0.43001261353492737,
      "learning_rate": 9.483356314779479e-08,
      "loss": 1.7939,
      "step": 205
    },
    {
      "epoch": 0.9856459330143541,
      "grad_norm": 0.5118626356124878,
      "learning_rate": 5.3351259881379014e-08,
      "loss": 1.823,
      "step": 206
    },
    {
      "epoch": 0.9904306220095693,
      "grad_norm": 0.6911094784736633,
      "learning_rate": 2.371401433170495e-08,
      "loss": 2.0949,
      "step": 207
    },
    {
      "epoch": 0.9952153110047847,
      "grad_norm": 0.847305953502655,
      "learning_rate": 5.928855096154484e-09,
      "loss": 1.5913,
      "step": 208
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.5929991006851196,
      "learning_rate": 0.0,
      "loss": 1.6612,
      "step": 209
    }
  ],
  "logging_steps": 1,
  "max_steps": 209,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 53,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.769507127708877e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}