{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.44518642181413465,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00044518642181413465,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 4.4716,
"step": 1
},
{
"epoch": 0.0008903728436282693,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 4.9872,
"step": 2
},
{
"epoch": 0.001335559265442404,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 5.0735,
"step": 3
},
{
"epoch": 0.0017807456872565386,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 7.0202,
"step": 4
},
{
"epoch": 0.0022259321090706734,
"grad_norm": 3.0090935230255127,
"learning_rate": 0.00019999950652018584,
"loss": 4.1176,
"step": 5
},
{
"epoch": 0.002671118530884808,
"grad_norm": 5.011908531188965,
"learning_rate": 0.0001999980260856137,
"loss": 4.3713,
"step": 6
},
{
"epoch": 0.0031163049526989426,
"grad_norm": null,
"learning_rate": 0.0001999980260856137,
"loss": 5.3384,
"step": 7
},
{
"epoch": 0.003561491374513077,
"grad_norm": null,
"learning_rate": 0.0001999980260856137,
"loss": 3.6488,
"step": 8
},
{
"epoch": 0.004006677796327212,
"grad_norm": 4.769114971160889,
"learning_rate": 0.000199995558710895,
"loss": 3.5996,
"step": 9
},
{
"epoch": 0.004451864218141347,
"grad_norm": 6.694155216217041,
"learning_rate": 0.00019999210442038162,
"loss": 4.4521,
"step": 10
},
{
"epoch": 0.004897050639955481,
"grad_norm": 3.8457248210906982,
"learning_rate": 0.00019998766324816607,
"loss": 3.4679,
"step": 11
},
{
"epoch": 0.005342237061769616,
"grad_norm": 7.944019794464111,
"learning_rate": 0.0001999822352380809,
"loss": 4.2213,
"step": 12
},
{
"epoch": 0.005787423483583751,
"grad_norm": 5.603764057159424,
"learning_rate": 0.00019997582044369843,
"loss": 3.1073,
"step": 13
},
{
"epoch": 0.006232609905397885,
"grad_norm": 5.0515265464782715,
"learning_rate": 0.00019996841892833,
"loss": 2.6351,
"step": 14
},
{
"epoch": 0.00667779632721202,
"grad_norm": 4.5042572021484375,
"learning_rate": 0.00019996003076502565,
"loss": 3.238,
"step": 15
},
{
"epoch": 0.007122982749026154,
"grad_norm": 6.955883502960205,
"learning_rate": 0.00019995065603657316,
"loss": 2.5309,
"step": 16
},
{
"epoch": 0.007568169170840289,
"grad_norm": 5.884995937347412,
"learning_rate": 0.0001999402948354973,
"loss": 2.6348,
"step": 17
},
{
"epoch": 0.008013355592654424,
"grad_norm": 5.377790927886963,
"learning_rate": 0.00019992894726405893,
"loss": 3.7344,
"step": 18
},
{
"epoch": 0.00845854201446856,
"grad_norm": 4.829436302185059,
"learning_rate": 0.000199916613434254,
"loss": 2.2568,
"step": 19
},
{
"epoch": 0.008903728436282694,
"grad_norm": 4.930129051208496,
"learning_rate": 0.0001999032934678125,
"loss": 2.2034,
"step": 20
},
{
"epoch": 0.009348914858096828,
"grad_norm": 4.200410842895508,
"learning_rate": 0.00019988898749619702,
"loss": 2.2395,
"step": 21
},
{
"epoch": 0.009794101279910962,
"grad_norm": 3.6214070320129395,
"learning_rate": 0.00019987369566060176,
"loss": 2.1656,
"step": 22
},
{
"epoch": 0.010239287701725098,
"grad_norm": 3.9661879539489746,
"learning_rate": 0.00019985741811195097,
"loss": 2.4032,
"step": 23
},
{
"epoch": 0.010684474123539232,
"grad_norm": 4.426929473876953,
"learning_rate": 0.00019984015501089752,
"loss": 2.4649,
"step": 24
},
{
"epoch": 0.011129660545353366,
"grad_norm": 6.472476005554199,
"learning_rate": 0.0001998219065278212,
"loss": 2.5907,
"step": 25
},
{
"epoch": 0.011574846967167502,
"grad_norm": 6.279055118560791,
"learning_rate": 0.00019980267284282717,
"loss": 1.987,
"step": 26
},
{
"epoch": 0.012020033388981636,
"grad_norm": 3.898387908935547,
"learning_rate": 0.00019978245414574417,
"loss": 1.93,
"step": 27
},
{
"epoch": 0.01246521981079577,
"grad_norm": 8.439789772033691,
"learning_rate": 0.00019976125063612252,
"loss": 2.1414,
"step": 28
},
{
"epoch": 0.012910406232609905,
"grad_norm": 3.3945260047912598,
"learning_rate": 0.00019973906252323238,
"loss": 2.2744,
"step": 29
},
{
"epoch": 0.01335559265442404,
"grad_norm": 3.3326220512390137,
"learning_rate": 0.0001997158900260614,
"loss": 1.7339,
"step": 30
},
{
"epoch": 0.013800779076238175,
"grad_norm": 3.9388749599456787,
"learning_rate": 0.0001996917333733128,
"loss": 2.3125,
"step": 31
},
{
"epoch": 0.014245965498052309,
"grad_norm": 4.969825744628906,
"learning_rate": 0.00019966659280340297,
"loss": 1.5139,
"step": 32
},
{
"epoch": 0.014691151919866445,
"grad_norm": 4.162886619567871,
"learning_rate": 0.00019964046856445924,
"loss": 2.8482,
"step": 33
},
{
"epoch": 0.015136338341680579,
"grad_norm": 2.171025037765503,
"learning_rate": 0.00019961336091431727,
"loss": 1.5546,
"step": 34
},
{
"epoch": 0.015581524763494713,
"grad_norm": 4.523052215576172,
"learning_rate": 0.00019958527012051857,
"loss": 2.5814,
"step": 35
},
{
"epoch": 0.016026711185308847,
"grad_norm": 3.6901893615722656,
"learning_rate": 0.00019955619646030802,
"loss": 1.8633,
"step": 36
},
{
"epoch": 0.016471897607122983,
"grad_norm": 3.451967477798462,
"learning_rate": 0.00019952614022063084,
"loss": 2.2273,
"step": 37
},
{
"epoch": 0.01691708402893712,
"grad_norm": 5.3913164138793945,
"learning_rate": 0.00019949510169813003,
"loss": 2.5587,
"step": 38
},
{
"epoch": 0.01736227045075125,
"grad_norm": 3.112423896789551,
"learning_rate": 0.00019946308119914323,
"loss": 1.417,
"step": 39
},
{
"epoch": 0.017807456872565387,
"grad_norm": 4.377839088439941,
"learning_rate": 0.0001994300790396999,
"loss": 2.1685,
"step": 40
},
{
"epoch": 0.01825264329437952,
"grad_norm": 3.380156993865967,
"learning_rate": 0.000199396095545518,
"loss": 2.0514,
"step": 41
},
{
"epoch": 0.018697829716193656,
"grad_norm": 4.578589916229248,
"learning_rate": 0.00019936113105200085,
"loss": 2.7112,
"step": 42
},
{
"epoch": 0.01914301613800779,
"grad_norm": 7.042427062988281,
"learning_rate": 0.00019932518590423394,
"loss": 2.165,
"step": 43
},
{
"epoch": 0.019588202559821924,
"grad_norm": 1.9593662023544312,
"learning_rate": 0.00019928826045698136,
"loss": 1.1432,
"step": 44
},
{
"epoch": 0.02003338898163606,
"grad_norm": 3.4116411209106445,
"learning_rate": 0.0001992503550746824,
"loss": 1.9726,
"step": 45
},
{
"epoch": 0.020478575403450196,
"grad_norm": 6.172439098358154,
"learning_rate": 0.0001992114701314478,
"loss": 3.1525,
"step": 46
},
{
"epoch": 0.020923761825264328,
"grad_norm": 3.173847198486328,
"learning_rate": 0.0001991716060110563,
"loss": 1.7376,
"step": 47
},
{
"epoch": 0.021368948247078464,
"grad_norm": 5.255223751068115,
"learning_rate": 0.00019913076310695068,
"loss": 1.9631,
"step": 48
},
{
"epoch": 0.0218141346688926,
"grad_norm": 3.278393268585205,
"learning_rate": 0.00019908894182223388,
"loss": 2.2257,
"step": 49
},
{
"epoch": 0.022259321090706732,
"grad_norm": 7.197827339172363,
"learning_rate": 0.00019904614256966512,
"loss": 2.1086,
"step": 50
},
{
"epoch": 0.02270450751252087,
"grad_norm": 3.401571273803711,
"learning_rate": 0.00019900236577165576,
"loss": 1.8664,
"step": 51
},
{
"epoch": 0.023149693934335004,
"grad_norm": 3.7931394577026367,
"learning_rate": 0.0001989576118602651,
"loss": 1.4463,
"step": 52
},
{
"epoch": 0.023594880356149137,
"grad_norm": 3.639777421951294,
"learning_rate": 0.00019891188127719618,
"loss": 2.1317,
"step": 53
},
{
"epoch": 0.024040066777963272,
"grad_norm": 3.713106155395508,
"learning_rate": 0.0001988651744737914,
"loss": 2.371,
"step": 54
},
{
"epoch": 0.02448525319977741,
"grad_norm": 6.107675552368164,
"learning_rate": 0.00019881749191102808,
"loss": 2.0907,
"step": 55
},
{
"epoch": 0.02493043962159154,
"grad_norm": 3.0008957386016846,
"learning_rate": 0.00019876883405951377,
"loss": 1.7736,
"step": 56
},
{
"epoch": 0.025375626043405677,
"grad_norm": 3.111758232116699,
"learning_rate": 0.00019871920139948192,
"loss": 1.9832,
"step": 57
},
{
"epoch": 0.02582081246521981,
"grad_norm": 3.809453248977661,
"learning_rate": 0.0001986685944207868,
"loss": 2.4095,
"step": 58
},
{
"epoch": 0.026265998887033945,
"grad_norm": 3.9784367084503174,
"learning_rate": 0.0001986170136228989,
"loss": 1.7365,
"step": 59
},
{
"epoch": 0.02671118530884808,
"grad_norm": 4.848536014556885,
"learning_rate": 0.00019856445951489982,
"loss": 2.1358,
"step": 60
},
{
"epoch": 0.027156371730662213,
"grad_norm": 2.1415553092956543,
"learning_rate": 0.0001985109326154774,
"loss": 1.3609,
"step": 61
},
{
"epoch": 0.02760155815247635,
"grad_norm": 2.9329774379730225,
"learning_rate": 0.00019845643345292054,
"loss": 1.9755,
"step": 62
},
{
"epoch": 0.028046744574290485,
"grad_norm": 4.407453536987305,
"learning_rate": 0.00019840096256511398,
"loss": 3.6484,
"step": 63
},
{
"epoch": 0.028491930996104618,
"grad_norm": 1.9221078157424927,
"learning_rate": 0.00019834452049953297,
"loss": 0.9992,
"step": 64
},
{
"epoch": 0.028937117417918753,
"grad_norm": 3.787074089050293,
"learning_rate": 0.00019828710781323792,
"loss": 1.6102,
"step": 65
},
{
"epoch": 0.02938230383973289,
"grad_norm": 2.174247980117798,
"learning_rate": 0.0001982287250728689,
"loss": 1.3549,
"step": 66
},
{
"epoch": 0.029827490261547022,
"grad_norm": 1.8840477466583252,
"learning_rate": 0.0001981693728546399,
"loss": 0.9458,
"step": 67
},
{
"epoch": 0.030272676683361158,
"grad_norm": 2.486109733581543,
"learning_rate": 0.0001981090517443334,
"loss": 1.7881,
"step": 68
},
{
"epoch": 0.030717863105175294,
"grad_norm": 2.583346366882324,
"learning_rate": 0.00019804776233729444,
"loss": 1.6065,
"step": 69
},
{
"epoch": 0.031163049526989426,
"grad_norm": 3.2725138664245605,
"learning_rate": 0.0001979855052384247,
"loss": 1.8852,
"step": 70
},
{
"epoch": 0.03160823594880356,
"grad_norm": 2.918576240539551,
"learning_rate": 0.00019792228106217658,
"loss": 1.2993,
"step": 71
},
{
"epoch": 0.032053422370617694,
"grad_norm": 4.293739318847656,
"learning_rate": 0.00019785809043254722,
"loss": 1.8252,
"step": 72
},
{
"epoch": 0.032498608792431834,
"grad_norm": 3.2163848876953125,
"learning_rate": 0.0001977929339830722,
"loss": 1.8469,
"step": 73
},
{
"epoch": 0.032943795214245966,
"grad_norm": 4.199680328369141,
"learning_rate": 0.00019772681235681936,
"loss": 2.658,
"step": 74
},
{
"epoch": 0.0333889816360601,
"grad_norm": 7.452486991882324,
"learning_rate": 0.00019765972620638248,
"loss": 2.4333,
"step": 75
},
{
"epoch": 0.03383416805787424,
"grad_norm": 3.0536255836486816,
"learning_rate": 0.00019759167619387476,
"loss": 2.1119,
"step": 76
},
{
"epoch": 0.03427935447968837,
"grad_norm": 4.044293403625488,
"learning_rate": 0.00019752266299092236,
"loss": 2.2781,
"step": 77
},
{
"epoch": 0.0347245409015025,
"grad_norm": 2.646242141723633,
"learning_rate": 0.00019745268727865774,
"loss": 1.7457,
"step": 78
},
{
"epoch": 0.03516972732331664,
"grad_norm": 5.419813632965088,
"learning_rate": 0.0001973817497477129,
"loss": 3.1021,
"step": 79
},
{
"epoch": 0.035614913745130775,
"grad_norm": 3.2200138568878174,
"learning_rate": 0.00019730985109821266,
"loss": 2.5992,
"step": 80
},
{
"epoch": 0.03606010016694491,
"grad_norm": 4.368217468261719,
"learning_rate": 0.00019723699203976766,
"loss": 2.5135,
"step": 81
},
{
"epoch": 0.03650528658875904,
"grad_norm": 3.3963205814361572,
"learning_rate": 0.0001971631732914674,
"loss": 1.7087,
"step": 82
},
{
"epoch": 0.03695047301057318,
"grad_norm": 2.7550411224365234,
"learning_rate": 0.0001970883955818731,
"loss": 1.307,
"step": 83
},
{
"epoch": 0.03739565943238731,
"grad_norm": 3.3400115966796875,
"learning_rate": 0.0001970126596490106,
"loss": 1.8352,
"step": 84
},
{
"epoch": 0.037840845854201444,
"grad_norm": 2.74469256401062,
"learning_rate": 0.00019693596624036292,
"loss": 1.9022,
"step": 85
},
{
"epoch": 0.03828603227601558,
"grad_norm": 2.932953357696533,
"learning_rate": 0.0001968583161128631,
"loss": 1.9438,
"step": 86
},
{
"epoch": 0.038731218697829715,
"grad_norm": 4.120993137359619,
"learning_rate": 0.00019677971003288655,
"loss": 1.8208,
"step": 87
},
{
"epoch": 0.03917640511964385,
"grad_norm": 4.330765247344971,
"learning_rate": 0.00019670014877624353,
"loss": 2.5833,
"step": 88
},
{
"epoch": 0.03962159154145799,
"grad_norm": 3.0520567893981934,
"learning_rate": 0.00019661963312817148,
"loss": 1.9228,
"step": 89
},
{
"epoch": 0.04006677796327212,
"grad_norm": 3.913396120071411,
"learning_rate": 0.0001965381638833274,
"loss": 2.9555,
"step": 90
},
{
"epoch": 0.04051196438508625,
"grad_norm": 3.679295539855957,
"learning_rate": 0.00019645574184577982,
"loss": 1.9205,
"step": 91
},
{
"epoch": 0.04095715080690039,
"grad_norm": 2.6744461059570312,
"learning_rate": 0.000196372367829001,
"loss": 1.8619,
"step": 92
},
{
"epoch": 0.041402337228714524,
"grad_norm": 3.623929977416992,
"learning_rate": 0.00019628804265585877,
"loss": 2.8998,
"step": 93
},
{
"epoch": 0.041847523650528656,
"grad_norm": 2.4871249198913574,
"learning_rate": 0.0001962027671586086,
"loss": 1.41,
"step": 94
},
{
"epoch": 0.042292710072342796,
"grad_norm": 2.6301159858703613,
"learning_rate": 0.0001961165421788852,
"loss": 2.1916,
"step": 95
},
{
"epoch": 0.04273789649415693,
"grad_norm": 3.1040453910827637,
"learning_rate": 0.0001960293685676943,
"loss": 1.8362,
"step": 96
},
{
"epoch": 0.04318308291597106,
"grad_norm": 3.9120583534240723,
"learning_rate": 0.0001959412471854043,
"loss": 2.3609,
"step": 97
},
{
"epoch": 0.0436282693377852,
"grad_norm": 4.234198093414307,
"learning_rate": 0.0001958521789017376,
"loss": 2.615,
"step": 98
},
{
"epoch": 0.04407345575959933,
"grad_norm": 3.9530892372131348,
"learning_rate": 0.00019576216459576222,
"loss": 2.349,
"step": 99
},
{
"epoch": 0.044518642181413465,
"grad_norm": 3.4594247341156006,
"learning_rate": 0.00019567120515588308,
"loss": 2.6695,
"step": 100
},
{
"epoch": 0.044963828603227604,
"grad_norm": 2.3701462745666504,
"learning_rate": 0.00019557930147983302,
"loss": 1.2688,
"step": 101
},
{
"epoch": 0.04540901502504174,
"grad_norm": 2.673154354095459,
"learning_rate": 0.00019548645447466431,
"loss": 1.5608,
"step": 102
},
{
"epoch": 0.04585420144685587,
"grad_norm": 2.1529808044433594,
"learning_rate": 0.00019539266505673938,
"loss": 1.4572,
"step": 103
},
{
"epoch": 0.04629938786867001,
"grad_norm": 2.52852463722229,
"learning_rate": 0.00019529793415172192,
"loss": 1.436,
"step": 104
},
{
"epoch": 0.04674457429048414,
"grad_norm": 6.402322769165039,
"learning_rate": 0.00019520226269456768,
"loss": 1.6831,
"step": 105
},
{
"epoch": 0.04718976071229827,
"grad_norm": 2.9657320976257324,
"learning_rate": 0.00019510565162951537,
"loss": 1.9188,
"step": 106
},
{
"epoch": 0.04763494713411241,
"grad_norm": 3.413729667663574,
"learning_rate": 0.00019500810191007718,
"loss": 2.6164,
"step": 107
},
{
"epoch": 0.048080133555926545,
"grad_norm": 2.1516737937927246,
"learning_rate": 0.00019490961449902946,
"loss": 1.6051,
"step": 108
},
{
"epoch": 0.04852531997774068,
"grad_norm": 2.9398021697998047,
"learning_rate": 0.0001948101903684032,
"loss": 1.9419,
"step": 109
},
{
"epoch": 0.04897050639955482,
"grad_norm": 3.36769962310791,
"learning_rate": 0.00019470983049947444,
"loss": 1.5534,
"step": 110
},
{
"epoch": 0.04941569282136895,
"grad_norm": 2.4700193405151367,
"learning_rate": 0.00019460853588275454,
"loss": 2.0116,
"step": 111
},
{
"epoch": 0.04986087924318308,
"grad_norm": 3.086195230484009,
"learning_rate": 0.00019450630751798048,
"loss": 2.2248,
"step": 112
},
{
"epoch": 0.05030606566499722,
"grad_norm": 4.200692653656006,
"learning_rate": 0.000194403146414105,
"loss": 3.0367,
"step": 113
},
{
"epoch": 0.05075125208681135,
"grad_norm": 3.112865924835205,
"learning_rate": 0.00019429905358928646,
"loss": 2.1513,
"step": 114
},
{
"epoch": 0.051196438508625486,
"grad_norm": 2.8886055946350098,
"learning_rate": 0.00019419403007087907,
"loss": 2.0628,
"step": 115
},
{
"epoch": 0.05164162493043962,
"grad_norm": 3.146010398864746,
"learning_rate": 0.00019408807689542257,
"loss": 1.2535,
"step": 116
},
{
"epoch": 0.05208681135225376,
"grad_norm": 2.0565860271453857,
"learning_rate": 0.00019398119510863197,
"loss": 1.9781,
"step": 117
},
{
"epoch": 0.05253199777406789,
"grad_norm": 3.192265748977661,
"learning_rate": 0.00019387338576538744,
"loss": 1.7527,
"step": 118
},
{
"epoch": 0.05297718419588202,
"grad_norm": 1.8156938552856445,
"learning_rate": 0.00019376464992972356,
"loss": 1.3864,
"step": 119
},
{
"epoch": 0.05342237061769616,
"grad_norm": 2.6510698795318604,
"learning_rate": 0.00019365498867481923,
"loss": 1.3648,
"step": 120
},
{
"epoch": 0.053867557039510294,
"grad_norm": 3.3322391510009766,
"learning_rate": 0.00019354440308298675,
"loss": 2.0549,
"step": 121
},
{
"epoch": 0.05431274346132443,
"grad_norm": 2.9640331268310547,
"learning_rate": 0.00019343289424566122,
"loss": 1.7579,
"step": 122
},
{
"epoch": 0.054757929883138566,
"grad_norm": 3.904458999633789,
"learning_rate": 0.00019332046326338986,
"loss": 1.8362,
"step": 123
},
{
"epoch": 0.0552031163049527,
"grad_norm": 4.279350280761719,
"learning_rate": 0.0001932071112458211,
"loss": 2.2549,
"step": 124
},
{
"epoch": 0.05564830272676683,
"grad_norm": 4.328281879425049,
"learning_rate": 0.00019309283931169356,
"loss": 3.2525,
"step": 125
},
{
"epoch": 0.05609348914858097,
"grad_norm": 2.8716776371002197,
"learning_rate": 0.00019297764858882514,
"loss": 1.3097,
"step": 126
},
{
"epoch": 0.0565386755703951,
"grad_norm": 3.0296878814697266,
"learning_rate": 0.00019286154021410173,
"loss": 1.7506,
"step": 127
},
{
"epoch": 0.056983861992209235,
"grad_norm": 2.415759563446045,
"learning_rate": 0.00019274451533346615,
"loss": 2.2494,
"step": 128
},
{
"epoch": 0.057429048414023375,
"grad_norm": 3.0764763355255127,
"learning_rate": 0.00019262657510190666,
"loss": 2.1633,
"step": 129
},
{
"epoch": 0.05787423483583751,
"grad_norm": 2.2252795696258545,
"learning_rate": 0.0001925077206834458,
"loss": 1.7298,
"step": 130
},
{
"epoch": 0.05831942125765164,
"grad_norm": 2.305452823638916,
"learning_rate": 0.0001923879532511287,
"loss": 1.5171,
"step": 131
},
{
"epoch": 0.05876460767946578,
"grad_norm": 5.750187873840332,
"learning_rate": 0.0001922672739870115,
"loss": 3.4236,
"step": 132
},
{
"epoch": 0.05920979410127991,
"grad_norm": 4.094027519226074,
"learning_rate": 0.00019214568408214985,
"loss": 3.6172,
"step": 133
},
{
"epoch": 0.059654980523094044,
"grad_norm": 4.096864700317383,
"learning_rate": 0.00019202318473658705,
"loss": 1.8016,
"step": 134
},
{
"epoch": 0.06010016694490818,
"grad_norm": 2.9913458824157715,
"learning_rate": 0.00019189977715934213,
"loss": 2.2077,
"step": 135
},
{
"epoch": 0.060545353366722315,
"grad_norm": 3.893141508102417,
"learning_rate": 0.00019177546256839812,
"loss": 1.8356,
"step": 136
},
{
"epoch": 0.06099053978853645,
"grad_norm": 3.7156870365142822,
"learning_rate": 0.0001916502421906898,
"loss": 2.6727,
"step": 137
},
{
"epoch": 0.06143572621035059,
"grad_norm": 2.921959638595581,
"learning_rate": 0.00019152411726209176,
"loss": 2.133,
"step": 138
},
{
"epoch": 0.06188091263216472,
"grad_norm": 1.890904188156128,
"learning_rate": 0.00019139708902740613,
"loss": 1.2856,
"step": 139
},
{
"epoch": 0.06232609905397885,
"grad_norm": 4.972781658172607,
"learning_rate": 0.0001912691587403503,
"loss": 2.561,
"step": 140
},
{
"epoch": 0.06277128547579298,
"grad_norm": 4.031817436218262,
"learning_rate": 0.00019114032766354453,
"loss": 1.4916,
"step": 141
},
{
"epoch": 0.06321647189760712,
"grad_norm": 3.651207208633423,
"learning_rate": 0.00019101059706849957,
"loss": 2.6341,
"step": 142
},
{
"epoch": 0.06366165831942126,
"grad_norm": 4.809238433837891,
"learning_rate": 0.00019087996823560402,
"loss": 2.7431,
"step": 143
},
{
"epoch": 0.06410684474123539,
"grad_norm": 3.8684816360473633,
"learning_rate": 0.0001907484424541117,
"loss": 1.716,
"step": 144
},
{
"epoch": 0.06455203116304953,
"grad_norm": 4.384403705596924,
"learning_rate": 0.00019061602102212898,
"loss": 1.511,
"step": 145
},
{
"epoch": 0.06499721758486367,
"grad_norm": 3.2156827449798584,
"learning_rate": 0.00019048270524660196,
"loss": 2.7144,
"step": 146
},
{
"epoch": 0.06544240400667779,
"grad_norm": 3.423051118850708,
"learning_rate": 0.0001903484964433035,
"loss": 2.4857,
"step": 147
},
{
"epoch": 0.06588759042849193,
"grad_norm": 3.9671273231506348,
"learning_rate": 0.00019021339593682028,
"loss": 2.7275,
"step": 148
},
{
"epoch": 0.06633277685030607,
"grad_norm": 2.884432077407837,
"learning_rate": 0.00019007740506053983,
"loss": 1.6876,
"step": 149
},
{
"epoch": 0.0667779632721202,
"grad_norm": 2.6939189434051514,
"learning_rate": 0.0001899405251566371,
"loss": 2.4106,
"step": 150
},
{
"epoch": 0.06722314969393434,
"grad_norm": 3.268193244934082,
"learning_rate": 0.00018980275757606157,
"loss": 2.7097,
"step": 151
},
{
"epoch": 0.06766833611574848,
"grad_norm": 2.9582340717315674,
"learning_rate": 0.00018966410367852362,
"loss": 2.1443,
"step": 152
},
{
"epoch": 0.0681135225375626,
"grad_norm": 3.300041913986206,
"learning_rate": 0.00018952456483248119,
"loss": 2.6703,
"step": 153
},
{
"epoch": 0.06855870895937674,
"grad_norm": 2.3364670276641846,
"learning_rate": 0.0001893841424151264,
"loss": 1.7084,
"step": 154
},
{
"epoch": 0.06900389538119088,
"grad_norm": 3.9670588970184326,
"learning_rate": 0.0001892428378123718,
"loss": 2.4215,
"step": 155
},
{
"epoch": 0.069449081803005,
"grad_norm": 2.3109562397003174,
"learning_rate": 0.0001891006524188368,
"loss": 2.001,
"step": 156
},
{
"epoch": 0.06989426822481914,
"grad_norm": 3.1594605445861816,
"learning_rate": 0.00018895758763783383,
"loss": 2.1514,
"step": 157
},
{
"epoch": 0.07033945464663328,
"grad_norm": 3.3163321018218994,
"learning_rate": 0.00018881364488135448,
"loss": 1.7621,
"step": 158
},
{
"epoch": 0.07078464106844741,
"grad_norm": 2.604219913482666,
"learning_rate": 0.00018866882557005567,
"loss": 1.6635,
"step": 159
},
{
"epoch": 0.07122982749026155,
"grad_norm": 3.6924543380737305,
"learning_rate": 0.00018852313113324552,
"loss": 2.2035,
"step": 160
},
{
"epoch": 0.07167501391207569,
"grad_norm": 2.356276035308838,
"learning_rate": 0.00018837656300886937,
"loss": 1.9358,
"step": 161
},
{
"epoch": 0.07212020033388981,
"grad_norm": 4.977180004119873,
"learning_rate": 0.00018822912264349534,
"loss": 2.3076,
"step": 162
},
{
"epoch": 0.07256538675570395,
"grad_norm": 3.712543487548828,
"learning_rate": 0.00018808081149230036,
"loss": 1.7823,
"step": 163
},
{
"epoch": 0.07301057317751808,
"grad_norm": 2.742771625518799,
"learning_rate": 0.00018793163101905563,
"loss": 2.1218,
"step": 164
},
{
"epoch": 0.07345575959933222,
"grad_norm": 2.7484240531921387,
"learning_rate": 0.00018778158269611218,
"loss": 2.3076,
"step": 165
},
{
"epoch": 0.07390094602114636,
"grad_norm": 3.3283402919769287,
"learning_rate": 0.00018763066800438636,
"loss": 2.5205,
"step": 166
},
{
"epoch": 0.07434613244296048,
"grad_norm": 2.589799404144287,
"learning_rate": 0.0001874788884333453,
"loss": 1.7938,
"step": 167
},
{
"epoch": 0.07479131886477462,
"grad_norm": 2.3176369667053223,
"learning_rate": 0.00018732624548099204,
"loss": 2.125,
"step": 168
},
{
"epoch": 0.07523650528658876,
"grad_norm": 2.814786672592163,
"learning_rate": 0.0001871727406538509,
"loss": 2.1019,
"step": 169
},
{
"epoch": 0.07568169170840289,
"grad_norm": 5.515918731689453,
"learning_rate": 0.0001870183754669526,
"loss": 1.652,
"step": 170
},
{
"epoch": 0.07612687813021703,
"grad_norm": 2.5525944232940674,
"learning_rate": 0.00018686315144381913,
"loss": 2.3976,
"step": 171
},
{
"epoch": 0.07657206455203117,
"grad_norm": 3.176581382751465,
"learning_rate": 0.000186707070116449,
"loss": 2.2002,
"step": 172
},
{
"epoch": 0.07701725097384529,
"grad_norm": 4.414255619049072,
"learning_rate": 0.0001865501330253019,
"loss": 2.2484,
"step": 173
},
{
"epoch": 0.07746243739565943,
"grad_norm": 2.5007452964782715,
"learning_rate": 0.00018639234171928353,
"loss": 1.7283,
"step": 174
},
{
"epoch": 0.07790762381747357,
"grad_norm": 2.847289800643921,
"learning_rate": 0.0001862336977557304,
"loss": 1.6054,
"step": 175
},
{
"epoch": 0.0783528102392877,
"grad_norm": 3.44081449508667,
"learning_rate": 0.0001860742027003944,
"loss": 2.174,
"step": 176
},
{
"epoch": 0.07879799666110184,
"grad_norm": 2.025012254714966,
"learning_rate": 0.00018591385812742725,
"loss": 1.4577,
"step": 177
},
{
"epoch": 0.07924318308291597,
"grad_norm": 2.9062042236328125,
"learning_rate": 0.00018575266561936523,
"loss": 2.4368,
"step": 178
},
{
"epoch": 0.0796883695047301,
"grad_norm": 2.5875933170318604,
"learning_rate": 0.00018559062676711332,
"loss": 1.5715,
"step": 179
},
{
"epoch": 0.08013355592654424,
"grad_norm": 1.958118200302124,
"learning_rate": 0.0001854277431699295,
"loss": 1.3609,
"step": 180
},
{
"epoch": 0.08057874234835838,
"grad_norm": 3.3691253662109375,
"learning_rate": 0.00018526401643540922,
"loss": 1.7653,
"step": 181
},
{
"epoch": 0.0810239287701725,
"grad_norm": 3.036341667175293,
"learning_rate": 0.00018509944817946922,
"loss": 1.6798,
"step": 182
},
{
"epoch": 0.08146911519198664,
"grad_norm": 2.7306416034698486,
"learning_rate": 0.00018493404002633166,
"loss": 1.8532,
"step": 183
},
{
"epoch": 0.08191430161380078,
"grad_norm": 3.7763404846191406,
"learning_rate": 0.00018476779360850832,
"loss": 2.2009,
"step": 184
},
{
"epoch": 0.08235948803561491,
"grad_norm": 2.890293836593628,
"learning_rate": 0.00018460071056678422,
"loss": 1.27,
"step": 185
},
{
"epoch": 0.08280467445742905,
"grad_norm": 2.616748094558716,
"learning_rate": 0.00018443279255020152,
"loss": 1.5601,
"step": 186
},
{
"epoch": 0.08324986087924319,
"grad_norm": 3.860060930252075,
"learning_rate": 0.00018426404121604323,
"loss": 2.3942,
"step": 187
},
{
"epoch": 0.08369504730105731,
"grad_norm": 2.253901243209839,
"learning_rate": 0.00018409445822981693,
"loss": 1.8034,
"step": 188
},
{
"epoch": 0.08414023372287145,
"grad_norm": 3.9860846996307373,
"learning_rate": 0.00018392404526523817,
"loss": 2.2563,
"step": 189
},
{
"epoch": 0.08458542014468559,
"grad_norm": 3.047419548034668,
"learning_rate": 0.0001837528040042142,
"loss": 2.0222,
"step": 190
},
{
"epoch": 0.08503060656649972,
"grad_norm": 3.0273172855377197,
"learning_rate": 0.00018358073613682706,
"loss": 1.9349,
"step": 191
},
{
"epoch": 0.08547579298831386,
"grad_norm": 2.851856231689453,
"learning_rate": 0.00018340784336131713,
"loss": 1.4659,
"step": 192
},
{
"epoch": 0.085920979410128,
"grad_norm": 3.09624981880188,
"learning_rate": 0.00018323412738406635,
"loss": 2.5376,
"step": 193
},
{
"epoch": 0.08636616583194212,
"grad_norm": 3.4956867694854736,
"learning_rate": 0.00018305958991958127,
"loss": 2.1087,
"step": 194
},
{
"epoch": 0.08681135225375626,
"grad_norm": 2.766479253768921,
"learning_rate": 0.0001828842326904762,
"loss": 2.0002,
"step": 195
},
{
"epoch": 0.0872565386755704,
"grad_norm": 2.8506453037261963,
"learning_rate": 0.00018270805742745617,
"loss": 1.6929,
"step": 196
},
{
"epoch": 0.08770172509738453,
"grad_norm": 16.174423217773438,
"learning_rate": 0.00018253106586929997,
"loss": 2.5689,
"step": 197
},
{
"epoch": 0.08814691151919866,
"grad_norm": 2.555532932281494,
"learning_rate": 0.00018235325976284275,
"loss": 2.2229,
"step": 198
},
{
"epoch": 0.0885920979410128,
"grad_norm": 3.642751932144165,
"learning_rate": 0.00018217464086295904,
"loss": 2.5422,
"step": 199
},
{
"epoch": 0.08903728436282693,
"grad_norm": 3.288435220718384,
"learning_rate": 0.00018199521093254523,
"loss": 1.628,
"step": 200
},
{
"epoch": 0.08948247078464107,
"grad_norm": 2.8634743690490723,
"learning_rate": 0.00018181497174250236,
"loss": 2.0577,
"step": 201
},
{
"epoch": 0.08992765720645521,
"grad_norm": 3.5794100761413574,
"learning_rate": 0.00018163392507171842,
"loss": 2.8942,
"step": 202
},
{
"epoch": 0.09037284362826933,
"grad_norm": 2.6064746379852295,
"learning_rate": 0.00018145207270705096,
"loss": 1.769,
"step": 203
},
{
"epoch": 0.09081803005008347,
"grad_norm": 2.4976682662963867,
"learning_rate": 0.0001812694164433094,
"loss": 2.0236,
"step": 204
},
{
"epoch": 0.09126321647189761,
"grad_norm": 3.3556509017944336,
"learning_rate": 0.00018108595808323736,
"loss": 2.4135,
"step": 205
},
{
"epoch": 0.09170840289371174,
"grad_norm": 2.855940580368042,
"learning_rate": 0.00018090169943749476,
"loss": 2.0044,
"step": 206
},
{
"epoch": 0.09215358931552588,
"grad_norm": 5.23508358001709,
"learning_rate": 0.00018071664232464002,
"loss": 3.1566,
"step": 207
},
{
"epoch": 0.09259877573734002,
"grad_norm": 2.203725576400757,
"learning_rate": 0.0001805307885711122,
"loss": 1.8375,
"step": 208
},
{
"epoch": 0.09304396215915414,
"grad_norm": 1.9931917190551758,
"learning_rate": 0.00018034414001121278,
"loss": 1.8984,
"step": 209
},
{
"epoch": 0.09348914858096828,
"grad_norm": 3.4173572063446045,
"learning_rate": 0.00018015669848708767,
"loss": 3.1145,
"step": 210
},
{
"epoch": 0.09393433500278242,
"grad_norm": 2.123898983001709,
"learning_rate": 0.00017996846584870908,
"loss": 1.5743,
"step": 211
},
{
"epoch": 0.09437952142459655,
"grad_norm": 2.5022401809692383,
"learning_rate": 0.0001797794439538571,
"loss": 1.8138,
"step": 212
},
{
"epoch": 0.09482470784641069,
"grad_norm": 4.588496685028076,
"learning_rate": 0.0001795896346681016,
"loss": 2.0505,
"step": 213
},
{
"epoch": 0.09526989426822483,
"grad_norm": 2.4845874309539795,
"learning_rate": 0.00017939903986478355,
"loss": 1.7597,
"step": 214
},
{
"epoch": 0.09571508069003895,
"grad_norm": 3.9968199729919434,
"learning_rate": 0.00017920766142499672,
"loss": 2.6499,
"step": 215
},
{
"epoch": 0.09616026711185309,
"grad_norm": 2.016294479370117,
"learning_rate": 0.00017901550123756906,
"loss": 2.0903,
"step": 216
},
{
"epoch": 0.09660545353366723,
"grad_norm": 2.111436605453491,
"learning_rate": 0.00017882256119904403,
"loss": 1.8273,
"step": 217
},
{
"epoch": 0.09705063995548135,
"grad_norm": 3.4320459365844727,
"learning_rate": 0.00017862884321366188,
"loss": 2.3938,
"step": 218
},
{
"epoch": 0.0974958263772955,
"grad_norm": 3.473053216934204,
"learning_rate": 0.000178434349193341,
"loss": 2.0303,
"step": 219
},
{
"epoch": 0.09794101279910963,
"grad_norm": 3.058842658996582,
"learning_rate": 0.0001782390810576588,
"loss": 1.9517,
"step": 220
},
{
"epoch": 0.09838619922092376,
"grad_norm": 3.2113101482391357,
"learning_rate": 0.000178043040733833,
"loss": 1.3175,
"step": 221
},
{
"epoch": 0.0988313856427379,
"grad_norm": 2.1675655841827393,
"learning_rate": 0.00017784623015670238,
"loss": 1.9149,
"step": 222
},
{
"epoch": 0.09927657206455204,
"grad_norm": 2.9513309001922607,
"learning_rate": 0.00017764865126870786,
"loss": 2.916,
"step": 223
},
{
"epoch": 0.09972175848636616,
"grad_norm": 2.7082040309906006,
"learning_rate": 0.00017745030601987337,
"loss": 1.5118,
"step": 224
},
{
"epoch": 0.1001669449081803,
"grad_norm": 2.039851188659668,
"learning_rate": 0.00017725119636778644,
"loss": 1.8884,
"step": 225
},
{
"epoch": 0.10061213132999444,
"grad_norm": 2.2612671852111816,
"learning_rate": 0.00017705132427757895,
"loss": 1.7989,
"step": 226
},
{
"epoch": 0.10105731775180857,
"grad_norm": 3.061992883682251,
"learning_rate": 0.00017685069172190766,
"loss": 2.6819,
"step": 227
},
{
"epoch": 0.1015025041736227,
"grad_norm": null,
"learning_rate": 0.00017685069172190766,
"loss": 3.6896,
"step": 228
},
{
"epoch": 0.10194769059543683,
"grad_norm": 2.9173364639282227,
"learning_rate": 0.00017664930068093498,
"loss": 1.7822,
"step": 229
},
{
"epoch": 0.10239287701725097,
"grad_norm": 3.1690425872802734,
"learning_rate": 0.00017644715314230918,
"loss": 1.7367,
"step": 230
},
{
"epoch": 0.10283806343906511,
"grad_norm": 1.5905847549438477,
"learning_rate": 0.0001762442511011448,
"loss": 1.2002,
"step": 231
},
{
"epoch": 0.10328324986087924,
"grad_norm": 2.388805866241455,
"learning_rate": 0.0001760405965600031,
"loss": 1.911,
"step": 232
},
{
"epoch": 0.10372843628269338,
"grad_norm": 2.208155393600464,
"learning_rate": 0.0001758361915288722,
"loss": 2.1999,
"step": 233
},
{
"epoch": 0.10417362270450752,
"grad_norm": 2.7173423767089844,
"learning_rate": 0.0001756310380251472,
"loss": 2.4822,
"step": 234
},
{
"epoch": 0.10461880912632164,
"grad_norm": 2.548349618911743,
"learning_rate": 0.00017542513807361037,
"loss": 2.043,
"step": 235
},
{
"epoch": 0.10506399554813578,
"grad_norm": 3.7509632110595703,
"learning_rate": 0.00017521849370641114,
"loss": 4.1206,
"step": 236
},
{
"epoch": 0.10550918196994992,
"grad_norm": 3.5168652534484863,
"learning_rate": 0.00017501110696304596,
"loss": 1.638,
"step": 237
},
{
"epoch": 0.10595436839176404,
"grad_norm": 2.3908824920654297,
"learning_rate": 0.00017480297989033825,
"loss": 1.5586,
"step": 238
},
{
"epoch": 0.10639955481357818,
"grad_norm": 2.5945377349853516,
"learning_rate": 0.00017459411454241822,
"loss": 2.0376,
"step": 239
},
{
"epoch": 0.10684474123539232,
"grad_norm": 2.566469430923462,
"learning_rate": 0.00017438451298070252,
"loss": 1.7843,
"step": 240
},
{
"epoch": 0.10728992765720645,
"grad_norm": 2.0323879718780518,
"learning_rate": 0.00017417417727387394,
"loss": 0.9602,
"step": 241
},
{
"epoch": 0.10773511407902059,
"grad_norm": 2.077362537384033,
"learning_rate": 0.000173963109497861,
"loss": 2.1987,
"step": 242
},
{
"epoch": 0.10818030050083473,
"grad_norm": 1.803091049194336,
"learning_rate": 0.0001737513117358174,
"loss": 1.0866,
"step": 243
},
{
"epoch": 0.10862548692264885,
"grad_norm": 1.6945303678512573,
"learning_rate": 0.0001735387860781016,
"loss": 1.3323,
"step": 244
},
{
"epoch": 0.10907067334446299,
"grad_norm": 5.757752418518066,
"learning_rate": 0.00017332553462225602,
"loss": 3.8289,
"step": 245
},
{
"epoch": 0.10951585976627713,
"grad_norm": 2.5788002014160156,
"learning_rate": 0.00017311155947298643,
"loss": 1.9719,
"step": 246
},
{
"epoch": 0.10996104618809126,
"grad_norm": 3.206275463104248,
"learning_rate": 0.00017289686274214118,
"loss": 3.0306,
"step": 247
},
{
"epoch": 0.1104062326099054,
"grad_norm": 3.6661014556884766,
"learning_rate": 0.0001726814465486903,
"loss": 2.2205,
"step": 248
},
{
"epoch": 0.11085141903171954,
"grad_norm": 2.7304251194000244,
"learning_rate": 0.0001724653130187047,
"loss": 1.9815,
"step": 249
},
{
"epoch": 0.11129660545353366,
"grad_norm": 1.8175365924835205,
"learning_rate": 0.00017224846428533499,
"loss": 1.5728,
"step": 250
},
{
"epoch": 0.1117417918753478,
"grad_norm": 2.614417791366577,
"learning_rate": 0.0001720309024887907,
"loss": 2.3802,
"step": 251
},
{
"epoch": 0.11218697829716194,
"grad_norm": 1.9326399564743042,
"learning_rate": 0.00017181262977631888,
"loss": 1.9611,
"step": 252
},
{
"epoch": 0.11263216471897607,
"grad_norm": 3.356496572494507,
"learning_rate": 0.00017159364830218312,
"loss": 2.9872,
"step": 253
},
{
"epoch": 0.1130773511407902,
"grad_norm": 2.0015017986297607,
"learning_rate": 0.00017137396022764214,
"loss": 1.9562,
"step": 254
},
{
"epoch": 0.11352253756260434,
"grad_norm": 3.3400707244873047,
"learning_rate": 0.00017115356772092857,
"loss": 2.4441,
"step": 255
},
{
"epoch": 0.11396772398441847,
"grad_norm": 2.803450345993042,
"learning_rate": 0.0001709324729572274,
"loss": 1.9406,
"step": 256
},
{
"epoch": 0.11441291040623261,
"grad_norm": 2.935906410217285,
"learning_rate": 0.00017071067811865476,
"loss": 1.8791,
"step": 257
},
{
"epoch": 0.11485809682804675,
"grad_norm": 2.608060598373413,
"learning_rate": 0.00017048818539423615,
"loss": 0.9475,
"step": 258
},
{
"epoch": 0.11530328324986087,
"grad_norm": 2.941483736038208,
"learning_rate": 0.00017026499697988493,
"loss": 1.5754,
"step": 259
},
{
"epoch": 0.11574846967167501,
"grad_norm": 3.5497794151306152,
"learning_rate": 0.00017004111507838064,
"loss": 2.3999,
"step": 260
},
{
"epoch": 0.11619365609348915,
"grad_norm": 2.6955342292785645,
"learning_rate": 0.00016981654189934727,
"loss": 2.0135,
"step": 261
},
{
"epoch": 0.11663884251530328,
"grad_norm": 3.7114124298095703,
"learning_rate": 0.00016959127965923142,
"loss": 2.4143,
"step": 262
},
{
"epoch": 0.11708402893711742,
"grad_norm": 3.3473169803619385,
"learning_rate": 0.0001693653305812805,
"loss": 2.8757,
"step": 263
},
{
"epoch": 0.11752921535893156,
"grad_norm": 4.7687554359436035,
"learning_rate": 0.00016913869689552064,
"loss": 2.3467,
"step": 264
},
{
"epoch": 0.11797440178074568,
"grad_norm": 3.388779401779175,
"learning_rate": 0.00016891138083873487,
"loss": 2.851,
"step": 265
},
{
"epoch": 0.11841958820255982,
"grad_norm": 2.956942319869995,
"learning_rate": 0.00016868338465444085,
"loss": 2.3852,
"step": 266
},
{
"epoch": 0.11886477462437396,
"grad_norm": 2.6591806411743164,
"learning_rate": 0.00016845471059286887,
"loss": 2.0056,
"step": 267
},
{
"epoch": 0.11930996104618809,
"grad_norm": 3.0301601886749268,
"learning_rate": 0.00016822536091093965,
"loss": 3.0666,
"step": 268
},
{
"epoch": 0.11975514746800223,
"grad_norm": 4.273592948913574,
"learning_rate": 0.00016799533787224192,
"loss": 3.084,
"step": 269
},
{
"epoch": 0.12020033388981637,
"grad_norm": 3.4681904315948486,
"learning_rate": 0.00016776464374701025,
"loss": 1.8521,
"step": 270
},
{
"epoch": 0.12064552031163049,
"grad_norm": 2.7582857608795166,
"learning_rate": 0.00016753328081210245,
"loss": 1.9692,
"step": 271
},
{
"epoch": 0.12109070673344463,
"grad_norm": 5.249884605407715,
"learning_rate": 0.00016730125135097735,
"loss": 2.2755,
"step": 272
},
{
"epoch": 0.12153589315525877,
"grad_norm": 2.75067400932312,
"learning_rate": 0.000167068557653672,
"loss": 1.8455,
"step": 273
},
{
"epoch": 0.1219810795770729,
"grad_norm": 3.4320733547210693,
"learning_rate": 0.0001668352020167793,
"loss": 1.5247,
"step": 274
},
{
"epoch": 0.12242626599888703,
"grad_norm": 4.216282844543457,
"learning_rate": 0.00016660118674342517,
"loss": 1.0498,
"step": 275
},
{
"epoch": 0.12287145242070117,
"grad_norm": 2.9553184509277344,
"learning_rate": 0.00016636651414324587,
"loss": 2.1679,
"step": 276
},
{
"epoch": 0.1233166388425153,
"grad_norm": 3.903003215789795,
"learning_rate": 0.00016613118653236518,
"loss": 2.1111,
"step": 277
},
{
"epoch": 0.12376182526432944,
"grad_norm": 2.639263868331909,
"learning_rate": 0.0001658952062333717,
"loss": 1.6075,
"step": 278
},
{
"epoch": 0.12420701168614358,
"grad_norm": 4.093780517578125,
"learning_rate": 0.00016565857557529566,
"loss": 1.9396,
"step": 279
},
{
"epoch": 0.1246521981079577,
"grad_norm": 2.228785514831543,
"learning_rate": 0.00016542129689358612,
"loss": 1.4554,
"step": 280
},
{
"epoch": 0.12509738452977184,
"grad_norm": 3.1095426082611084,
"learning_rate": 0.0001651833725300879,
"loss": 1.5483,
"step": 281
},
{
"epoch": 0.12554257095158597,
"grad_norm": 5.153223514556885,
"learning_rate": 0.00016494480483301836,
"loss": 2.3421,
"step": 282
},
{
"epoch": 0.12598775737340012,
"grad_norm": 4.7425360679626465,
"learning_rate": 0.00016470559615694446,
"loss": 2.9086,
"step": 283
},
{
"epoch": 0.12643294379521425,
"grad_norm": 5.0155720710754395,
"learning_rate": 0.00016446574886275913,
"loss": 2.037,
"step": 284
},
{
"epoch": 0.12687813021702837,
"grad_norm": 4.832873344421387,
"learning_rate": 0.00016422526531765846,
"loss": 3.072,
"step": 285
},
{
"epoch": 0.12732331663884253,
"grad_norm": 2.0803449153900146,
"learning_rate": 0.00016398414789511786,
"loss": 1.438,
"step": 286
},
{
"epoch": 0.12776850306065665,
"grad_norm": 5.869335174560547,
"learning_rate": 0.000163742398974869,
"loss": 1.9414,
"step": 287
},
{
"epoch": 0.12821368948247078,
"grad_norm": 4.237220287322998,
"learning_rate": 0.00016350002094287609,
"loss": 2.1311,
"step": 288
},
{
"epoch": 0.12865887590428493,
"grad_norm": 4.472570896148682,
"learning_rate": 0.00016325701619131246,
"loss": 2.9077,
"step": 289
},
{
"epoch": 0.12910406232609906,
"grad_norm": 5.2327728271484375,
"learning_rate": 0.00016301338711853693,
"loss": 2.9067,
"step": 290
},
{
"epoch": 0.12954924874791318,
"grad_norm": 4.579474925994873,
"learning_rate": 0.00016276913612907007,
"loss": 2.2166,
"step": 291
},
{
"epoch": 0.12999443516972733,
"grad_norm": 3.3287553787231445,
"learning_rate": 0.00016252426563357055,
"loss": 1.8055,
"step": 292
},
{
"epoch": 0.13043962159154146,
"grad_norm": 1.91664457321167,
"learning_rate": 0.00016227877804881127,
"loss": 1.2989,
"step": 293
},
{
"epoch": 0.13088480801335559,
"grad_norm": 2.971626043319702,
"learning_rate": 0.00016203267579765563,
"loss": 2.4503,
"step": 294
},
{
"epoch": 0.13132999443516974,
"grad_norm": 5.920880317687988,
"learning_rate": 0.00016178596130903344,
"loss": 2.3126,
"step": 295
},
{
"epoch": 0.13177518085698386,
"grad_norm": 4.491692066192627,
"learning_rate": 0.00016153863701791717,
"loss": 2.0189,
"step": 296
},
{
"epoch": 0.132220367278798,
"grad_norm": 3.163790702819824,
"learning_rate": 0.00016129070536529766,
"loss": 2.0132,
"step": 297
},
{
"epoch": 0.13266555370061214,
"grad_norm": 3.0796022415161133,
"learning_rate": 0.00016104216879816026,
"loss": 1.9845,
"step": 298
},
{
"epoch": 0.13311074012242627,
"grad_norm": 5.9430646896362305,
"learning_rate": 0.00016079302976946055,
"loss": 1.9985,
"step": 299
},
{
"epoch": 0.1335559265442404,
"grad_norm": 3.705900192260742,
"learning_rate": 0.00016054329073810015,
"loss": 1.7144,
"step": 300
},
{
"epoch": 0.13400111296605455,
"grad_norm": 2.9216806888580322,
"learning_rate": 0.00016029295416890248,
"loss": 1.5975,
"step": 301
},
{
"epoch": 0.13444629938786867,
"grad_norm": 2.5859384536743164,
"learning_rate": 0.00016004202253258842,
"loss": 0.9565,
"step": 302
},
{
"epoch": 0.1348914858096828,
"grad_norm": 3.337036609649658,
"learning_rate": 0.0001597904983057519,
"loss": 2.1839,
"step": 303
},
{
"epoch": 0.13533667223149695,
"grad_norm": 3.213444709777832,
"learning_rate": 0.00015953838397083552,
"loss": 2.0066,
"step": 304
},
{
"epoch": 0.13578185865331108,
"grad_norm": 3.1022112369537354,
"learning_rate": 0.00015928568201610595,
"loss": 2.1541,
"step": 305
},
{
"epoch": 0.1362270450751252,
"grad_norm": 3.345914840698242,
"learning_rate": 0.00015903239493562948,
"loss": 1.9975,
"step": 306
},
{
"epoch": 0.13667223149693936,
"grad_norm": 3.352506399154663,
"learning_rate": 0.00015877852522924732,
"loss": 1.6579,
"step": 307
},
{
"epoch": 0.13711741791875348,
"grad_norm": 3.894771099090576,
"learning_rate": 0.00015852407540255104,
"loss": 1.441,
"step": 308
},
{
"epoch": 0.1375626043405676,
"grad_norm": 2.2265045642852783,
"learning_rate": 0.00015826904796685762,
"loss": 1.5189,
"step": 309
},
{
"epoch": 0.13800779076238176,
"grad_norm": 3.3169307708740234,
"learning_rate": 0.00015801344543918495,
"loss": 1.9754,
"step": 310
},
{
"epoch": 0.13845297718419589,
"grad_norm": 3.269655466079712,
"learning_rate": 0.00015775727034222675,
"loss": 2.1918,
"step": 311
},
{
"epoch": 0.13889816360601,
"grad_norm": 3.389246702194214,
"learning_rate": 0.00015750052520432787,
"loss": 2.1055,
"step": 312
},
{
"epoch": 0.13934335002782416,
"grad_norm": 3.9781131744384766,
"learning_rate": 0.0001572432125594591,
"loss": 2.5854,
"step": 313
},
{
"epoch": 0.1397885364496383,
"grad_norm": 3.8214235305786133,
"learning_rate": 0.00015698533494719238,
"loss": 2.4634,
"step": 314
},
{
"epoch": 0.14023372287145242,
"grad_norm": 3.8951199054718018,
"learning_rate": 0.00015672689491267567,
"loss": 1.3678,
"step": 315
},
{
"epoch": 0.14067890929326657,
"grad_norm": 3.5023081302642822,
"learning_rate": 0.00015646789500660773,
"loss": 1.4961,
"step": 316
},
{
"epoch": 0.1411240957150807,
"grad_norm": 1.9665567874908447,
"learning_rate": 0.00015620833778521307,
"loss": 1.0397,
"step": 317
},
{
"epoch": 0.14156928213689482,
"grad_norm": 2.015803575515747,
"learning_rate": 0.0001559482258102167,
"loss": 1.6607,
"step": 318
},
{
"epoch": 0.14201446855870897,
"grad_norm": 3.766350746154785,
"learning_rate": 0.00015568756164881882,
"loss": 1.6466,
"step": 319
},
{
"epoch": 0.1424596549805231,
"grad_norm": 2.1854238510131836,
"learning_rate": 0.00015542634787366942,
"loss": 2.359,
"step": 320
},
{
"epoch": 0.14290484140233722,
"grad_norm": 2.5207645893096924,
"learning_rate": 0.00015516458706284303,
"loss": 1.3911,
"step": 321
},
{
"epoch": 0.14335002782415138,
"grad_norm": 2.2049384117126465,
"learning_rate": 0.0001549022817998132,
"loss": 1.7231,
"step": 322
},
{
"epoch": 0.1437952142459655,
"grad_norm": 6.676117420196533,
"learning_rate": 0.00015463943467342693,
"loss": 2.9389,
"step": 323
},
{
"epoch": 0.14424040066777963,
"grad_norm": 2.7219367027282715,
"learning_rate": 0.00015437604827787927,
"loss": 1.9098,
"step": 324
},
{
"epoch": 0.14468558708959378,
"grad_norm": 2.9971964359283447,
"learning_rate": 0.00015411212521268758,
"loss": 1.8154,
"step": 325
},
{
"epoch": 0.1451307735114079,
"grad_norm": 3.812910795211792,
"learning_rate": 0.00015384766808266602,
"loss": 2.1349,
"step": 326
},
{
"epoch": 0.14557595993322203,
"grad_norm": 1.8337472677230835,
"learning_rate": 0.00015358267949789966,
"loss": 1.7524,
"step": 327
},
{
"epoch": 0.14602114635503616,
"grad_norm": 2.9395253658294678,
"learning_rate": 0.00015331716207371888,
"loss": 1.9965,
"step": 328
},
{
"epoch": 0.1464663327768503,
"grad_norm": 2.7258734703063965,
"learning_rate": 0.0001530511184306734,
"loss": 1.824,
"step": 329
},
{
"epoch": 0.14691151919866444,
"grad_norm": 5.46239709854126,
"learning_rate": 0.00015278455119450664,
"loss": 3.3704,
"step": 330
},
{
"epoch": 0.14735670562047856,
"grad_norm": 2.961097002029419,
"learning_rate": 0.0001525174629961296,
"loss": 2.4186,
"step": 331
},
{
"epoch": 0.14780189204229272,
"grad_norm": 3.7786366939544678,
"learning_rate": 0.0001522498564715949,
"loss": 1.9667,
"step": 332
},
{
"epoch": 0.14824707846410684,
"grad_norm": 1.9760524034500122,
"learning_rate": 0.00015198173426207094,
"loss": 1.4929,
"step": 333
},
{
"epoch": 0.14869226488592097,
"grad_norm": 3.354715347290039,
"learning_rate": 0.00015171309901381572,
"loss": 2.3631,
"step": 334
},
{
"epoch": 0.14913745130773512,
"grad_norm": 3.134253740310669,
"learning_rate": 0.00015144395337815064,
"loss": 1.9308,
"step": 335
},
{
"epoch": 0.14958263772954924,
"grad_norm": 1.8403511047363281,
"learning_rate": 0.00015117430001143452,
"loss": 1.5778,
"step": 336
},
{
"epoch": 0.15002782415136337,
"grad_norm": 3.9706308841705322,
"learning_rate": 0.00015090414157503714,
"loss": 3.1083,
"step": 337
},
{
"epoch": 0.15047301057317752,
"grad_norm": 5.338191509246826,
"learning_rate": 0.00015063348073531324,
"loss": 2.761,
"step": 338
},
{
"epoch": 0.15091819699499165,
"grad_norm": 2.094376802444458,
"learning_rate": 0.0001503623201635761,
"loss": 1.3146,
"step": 339
},
{
"epoch": 0.15136338341680577,
"grad_norm": 3.034886360168457,
"learning_rate": 0.000150090662536071,
"loss": 1.9302,
"step": 340
},
{
"epoch": 0.15180856983861993,
"grad_norm": 2.9534401893615723,
"learning_rate": 0.0001498185105339491,
"loss": 1.6089,
"step": 341
},
{
"epoch": 0.15225375626043405,
"grad_norm": 2.7792069911956787,
"learning_rate": 0.00014954586684324078,
"loss": 2.5257,
"step": 342
},
{
"epoch": 0.15269894268224818,
"grad_norm": 6.320833206176758,
"learning_rate": 0.00014927273415482915,
"loss": 3.6341,
"step": 343
},
{
"epoch": 0.15314412910406233,
"grad_norm": 1.7262303829193115,
"learning_rate": 0.00014899911516442365,
"loss": 1.5187,
"step": 344
},
{
"epoch": 0.15358931552587646,
"grad_norm": 4.237273216247559,
"learning_rate": 0.00014872501257253323,
"loss": 2.6972,
"step": 345
},
{
"epoch": 0.15403450194769058,
"grad_norm": 2.0071420669555664,
"learning_rate": 0.0001484504290844398,
"loss": 1.9343,
"step": 346
},
{
"epoch": 0.15447968836950474,
"grad_norm": 4.065686225891113,
"learning_rate": 0.00014817536741017152,
"loss": 3.2849,
"step": 347
},
{
"epoch": 0.15492487479131886,
"grad_norm": 3.3194217681884766,
"learning_rate": 0.00014789983026447612,
"loss": 2.3392,
"step": 348
},
{
"epoch": 0.155370061213133,
"grad_norm": 2.6346261501312256,
"learning_rate": 0.0001476238203667939,
"loss": 1.5951,
"step": 349
},
{
"epoch": 0.15581524763494714,
"grad_norm": 2.0923845767974854,
"learning_rate": 0.0001473473404412312,
"loss": 2.1046,
"step": 350
},
{
"epoch": 0.15626043405676127,
"grad_norm": 2.5452301502227783,
"learning_rate": 0.0001470703932165333,
"loss": 1.6933,
"step": 351
},
{
"epoch": 0.1567056204785754,
"grad_norm": 3.4971985816955566,
"learning_rate": 0.00014679298142605734,
"loss": 2.6632,
"step": 352
},
{
"epoch": 0.15715080690038954,
"grad_norm": 2.550041913986206,
"learning_rate": 0.00014651510780774583,
"loss": 1.4933,
"step": 353
},
{
"epoch": 0.15759599332220367,
"grad_norm": 1.7662994861602783,
"learning_rate": 0.00014623677510409918,
"loss": 1.876,
"step": 354
},
{
"epoch": 0.1580411797440178,
"grad_norm": 2.723386764526367,
"learning_rate": 0.00014595798606214882,
"loss": 2.3278,
"step": 355
},
{
"epoch": 0.15848636616583195,
"grad_norm": 4.134690761566162,
"learning_rate": 0.00014567874343342997,
"loss": 3.0174,
"step": 356
},
{
"epoch": 0.15893155258764607,
"grad_norm": 2.940185785293579,
"learning_rate": 0.00014539904997395468,
"loss": 2.019,
"step": 357
},
{
"epoch": 0.1593767390094602,
"grad_norm": 2.911198854446411,
"learning_rate": 0.00014511890844418453,
"loss": 2.0522,
"step": 358
},
{
"epoch": 0.15982192543127435,
"grad_norm": 1.9790756702423096,
"learning_rate": 0.00014483832160900326,
"loss": 1.8049,
"step": 359
},
{
"epoch": 0.16026711185308848,
"grad_norm": 3.8680615425109863,
"learning_rate": 0.00014455729223768966,
"loss": 1.9089,
"step": 360
},
{
"epoch": 0.1607122982749026,
"grad_norm": 2.533252477645874,
"learning_rate": 0.0001442758231038902,
"loss": 2.2968,
"step": 361
},
{
"epoch": 0.16115748469671676,
"grad_norm": 3.094660758972168,
"learning_rate": 0.00014399391698559152,
"loss": 2.6112,
"step": 362
},
{
"epoch": 0.16160267111853088,
"grad_norm": 2.3190393447875977,
"learning_rate": 0.0001437115766650933,
"loss": 1.5047,
"step": 363
},
{
"epoch": 0.162047857540345,
"grad_norm": 2.6006054878234863,
"learning_rate": 0.00014342880492898048,
"loss": 1.8432,
"step": 364
},
{
"epoch": 0.16249304396215916,
"grad_norm": 1.5267677307128906,
"learning_rate": 0.0001431456045680959,
"loss": 1.1295,
"step": 365
},
{
"epoch": 0.1629382303839733,
"grad_norm": 2.2319841384887695,
"learning_rate": 0.00014286197837751286,
"loss": 1.7568,
"step": 366
},
{
"epoch": 0.1633834168057874,
"grad_norm": 3.55242919921875,
"learning_rate": 0.00014257792915650728,
"loss": 1.6084,
"step": 367
},
{
"epoch": 0.16382860322760157,
"grad_norm": 2.2210030555725098,
"learning_rate": 0.00014229345970853032,
"loss": 2.615,
"step": 368
},
{
"epoch": 0.1642737896494157,
"grad_norm": 2.4402711391448975,
"learning_rate": 0.00014200857284118066,
"loss": 1.7663,
"step": 369
},
{
"epoch": 0.16471897607122982,
"grad_norm": 2.149660110473633,
"learning_rate": 0.00014172327136617656,
"loss": 1.4058,
"step": 370
},
{
"epoch": 0.16516416249304397,
"grad_norm": 2.59019136428833,
"learning_rate": 0.00014143755809932845,
"loss": 2.3203,
"step": 371
},
{
"epoch": 0.1656093489148581,
"grad_norm": 2.770490884780884,
"learning_rate": 0.00014115143586051088,
"loss": 2.0733,
"step": 372
},
{
"epoch": 0.16605453533667222,
"grad_norm": 1.6338651180267334,
"learning_rate": 0.00014086490747363493,
"loss": 1.7117,
"step": 373
},
{
"epoch": 0.16649972175848637,
"grad_norm": 2.856350898742676,
"learning_rate": 0.00014057797576662,
"loss": 2.2055,
"step": 374
},
{
"epoch": 0.1669449081803005,
"grad_norm": 2.6866633892059326,
"learning_rate": 0.00014029064357136628,
"loss": 1.5605,
"step": 375
},
{
"epoch": 0.16739009460211463,
"grad_norm": 3.7765796184539795,
"learning_rate": 0.00014000291372372647,
"loss": 2.924,
"step": 376
},
{
"epoch": 0.16783528102392878,
"grad_norm": 3.816124439239502,
"learning_rate": 0.00013971478906347806,
"loss": 3.0261,
"step": 377
},
{
"epoch": 0.1682804674457429,
"grad_norm": 1.5007274150848389,
"learning_rate": 0.00013942627243429512,
"loss": 1.0276,
"step": 378
},
{
"epoch": 0.16872565386755703,
"grad_norm": 2.2790749073028564,
"learning_rate": 0.00013913736668372026,
"loss": 2.5021,
"step": 379
},
{
"epoch": 0.16917084028937118,
"grad_norm": 2.2356507778167725,
"learning_rate": 0.00013884807466313663,
"loss": 1.8988,
"step": 380
},
{
"epoch": 0.1696160267111853,
"grad_norm": 2.7027039527893066,
"learning_rate": 0.00013855839922773968,
"loss": 1.9612,
"step": 381
},
{
"epoch": 0.17006121313299943,
"grad_norm": 7.827208042144775,
"learning_rate": 0.000138268343236509,
"loss": 3.1185,
"step": 382
},
{
"epoch": 0.1705063995548136,
"grad_norm": 4.195032119750977,
"learning_rate": 0.00013797790955218014,
"loss": 2.3052,
"step": 383
},
{
"epoch": 0.1709515859766277,
"grad_norm": 1.5661183595657349,
"learning_rate": 0.00013768710104121627,
"loss": 1.2437,
"step": 384
},
{
"epoch": 0.17139677239844184,
"grad_norm": 1.8515113592147827,
"learning_rate": 0.00013739592057378003,
"loss": 1.4918,
"step": 385
},
{
"epoch": 0.171841958820256,
"grad_norm": 2.7802538871765137,
"learning_rate": 0.0001371043710237051,
"loss": 1.9919,
"step": 386
},
{
"epoch": 0.17228714524207012,
"grad_norm": 2.7814526557922363,
"learning_rate": 0.00013681245526846783,
"loss": 1.1622,
"step": 387
},
{
"epoch": 0.17273233166388424,
"grad_norm": 2.9777095317840576,
"learning_rate": 0.0001365201761891588,
"loss": 1.6679,
"step": 388
},
{
"epoch": 0.1731775180856984,
"grad_norm": 3.84218692779541,
"learning_rate": 0.00013622753667045457,
"loss": 2.1753,
"step": 389
},
{
"epoch": 0.17362270450751252,
"grad_norm": 2.0334346294403076,
"learning_rate": 0.00013593453960058908,
"loss": 1.5844,
"step": 390
},
{
"epoch": 0.17406789092932665,
"grad_norm": 2.971250057220459,
"learning_rate": 0.00013564118787132506,
"loss": 1.6105,
"step": 391
},
{
"epoch": 0.1745130773511408,
"grad_norm": 3.5902481079101562,
"learning_rate": 0.00013534748437792573,
"loss": 2.641,
"step": 392
},
{
"epoch": 0.17495826377295493,
"grad_norm": 2.412871837615967,
"learning_rate": 0.0001350534320191259,
"loss": 1.54,
"step": 393
},
{
"epoch": 0.17540345019476905,
"grad_norm": 2.215141534805298,
"learning_rate": 0.0001347590336971037,
"loss": 1.8944,
"step": 394
},
{
"epoch": 0.1758486366165832,
"grad_norm": 3.511486768722534,
"learning_rate": 0.0001344642923174517,
"loss": 1.9907,
"step": 395
},
{
"epoch": 0.17629382303839733,
"grad_norm": 3.9856975078582764,
"learning_rate": 0.00013416921078914835,
"loss": 2.0401,
"step": 396
},
{
"epoch": 0.17673900946021145,
"grad_norm": 4.489439010620117,
"learning_rate": 0.00013387379202452917,
"loss": 1.5596,
"step": 397
},
{
"epoch": 0.1771841958820256,
"grad_norm": 2.6045870780944824,
"learning_rate": 0.00013357803893925807,
"loss": 1.3855,
"step": 398
},
{
"epoch": 0.17762938230383973,
"grad_norm": 2.2161664962768555,
"learning_rate": 0.00013328195445229868,
"loss": 1.391,
"step": 399
},
{
"epoch": 0.17807456872565386,
"grad_norm": 2.083101987838745,
"learning_rate": 0.00013298554148588528,
"loss": 1.3238,
"step": 400
},
{
"epoch": 0.178519755147468,
"grad_norm": 2.943004846572876,
"learning_rate": 0.00013268880296549425,
"loss": 1.7093,
"step": 401
},
{
"epoch": 0.17896494156928214,
"grad_norm": 2.9320528507232666,
"learning_rate": 0.00013239174181981495,
"loss": 1.2862,
"step": 402
},
{
"epoch": 0.17941012799109626,
"grad_norm": 2.143991470336914,
"learning_rate": 0.00013209436098072095,
"loss": 1.9759,
"step": 403
},
{
"epoch": 0.17985531441291042,
"grad_norm": 3.475919485092163,
"learning_rate": 0.00013179666338324108,
"loss": 2.2233,
"step": 404
},
{
"epoch": 0.18030050083472454,
"grad_norm": 2.583845853805542,
"learning_rate": 0.0001314986519655305,
"loss": 2.3193,
"step": 405
},
{
"epoch": 0.18074568725653867,
"grad_norm": 2.5285942554473877,
"learning_rate": 0.0001312003296688415,
"loss": 1.5733,
"step": 406
},
{
"epoch": 0.18119087367835282,
"grad_norm": 2.6708908081054688,
"learning_rate": 0.00013090169943749476,
"loss": 1.3843,
"step": 407
},
{
"epoch": 0.18163606010016695,
"grad_norm": 3.256469249725342,
"learning_rate": 0.0001306027642188501,
"loss": 2.4698,
"step": 408
},
{
"epoch": 0.18208124652198107,
"grad_norm": 2.4278006553649902,
"learning_rate": 0.00013030352696327742,
"loss": 1.987,
"step": 409
},
{
"epoch": 0.18252643294379522,
"grad_norm": 3.7837703227996826,
"learning_rate": 0.00013000399062412763,
"loss": 2.4242,
"step": 410
},
{
"epoch": 0.18297161936560935,
"grad_norm": 2.245476245880127,
"learning_rate": 0.0001297041581577035,
"loss": 1.385,
"step": 411
},
{
"epoch": 0.18341680578742348,
"grad_norm": 2.3928139209747314,
"learning_rate": 0.0001294040325232304,
"loss": 1.8256,
"step": 412
},
{
"epoch": 0.18386199220923763,
"grad_norm": 1.4292818307876587,
"learning_rate": 0.00012910361668282719,
"loss": 1.1733,
"step": 413
},
{
"epoch": 0.18430717863105175,
"grad_norm": 2.100421905517578,
"learning_rate": 0.00012880291360147693,
"loss": 1.2818,
"step": 414
},
{
"epoch": 0.18475236505286588,
"grad_norm": 2.6342415809631348,
"learning_rate": 0.0001285019262469976,
"loss": 1.7543,
"step": 415
},
{
"epoch": 0.18519755147468003,
"grad_norm": 2.015045642852783,
"learning_rate": 0.00012820065759001293,
"loss": 1.2298,
"step": 416
},
{
"epoch": 0.18564273789649416,
"grad_norm": 3.4002110958099365,
"learning_rate": 0.00012789911060392294,
"loss": 1.7277,
"step": 417
},
{
"epoch": 0.18608792431830828,
"grad_norm": 2.60213565826416,
"learning_rate": 0.0001275972882648746,
"loss": 1.916,
"step": 418
},
{
"epoch": 0.18653311074012244,
"grad_norm": 3.615614891052246,
"learning_rate": 0.00012729519355173254,
"loss": 2.3712,
"step": 419
},
{
"epoch": 0.18697829716193656,
"grad_norm": 3.994493246078491,
"learning_rate": 0.00012699282944604967,
"loss": 2.2538,
"step": 420
},
{
"epoch": 0.1874234835837507,
"grad_norm": 2.8048856258392334,
"learning_rate": 0.00012669019893203759,
"loss": 2.0137,
"step": 421
},
{
"epoch": 0.18786867000556484,
"grad_norm": 2.4496052265167236,
"learning_rate": 0.0001263873049965373,
"loss": 1.7419,
"step": 422
},
{
"epoch": 0.18831385642737897,
"grad_norm": 1.7106574773788452,
"learning_rate": 0.00012608415062898972,
"loss": 1.32,
"step": 423
},
{
"epoch": 0.1887590428491931,
"grad_norm": 2.7997119426727295,
"learning_rate": 0.000125780738821406,
"loss": 2.0131,
"step": 424
},
{
"epoch": 0.18920422927100725,
"grad_norm": 3.381044864654541,
"learning_rate": 0.00012547707256833823,
"loss": 1.9459,
"step": 425
},
{
"epoch": 0.18964941569282137,
"grad_norm": 2.4843053817749023,
"learning_rate": 0.00012517315486684972,
"loss": 1.8847,
"step": 426
},
{
"epoch": 0.1900946021146355,
"grad_norm": 2.7945895195007324,
"learning_rate": 0.0001248689887164855,
"loss": 2.2822,
"step": 427
},
{
"epoch": 0.19053978853644965,
"grad_norm": 3.6827800273895264,
"learning_rate": 0.00012456457711924266,
"loss": 3.0184,
"step": 428
},
{
"epoch": 0.19098497495826378,
"grad_norm": 1.9260412454605103,
"learning_rate": 0.00012425992307954075,
"loss": 1.2819,
"step": 429
},
{
"epoch": 0.1914301613800779,
"grad_norm": 3.411238431930542,
"learning_rate": 0.0001239550296041922,
"loss": 1.7672,
"step": 430
},
{
"epoch": 0.19187534780189205,
"grad_norm": 5.014751434326172,
"learning_rate": 0.00012364989970237248,
"loss": 2.9558,
"step": 431
},
{
"epoch": 0.19232053422370618,
"grad_norm": 2.5855774879455566,
"learning_rate": 0.00012334453638559057,
"loss": 2.3954,
"step": 432
},
{
"epoch": 0.1927657206455203,
"grad_norm": 3.727348566055298,
"learning_rate": 0.00012303894266765908,
"loss": 2.7071,
"step": 433
},
{
"epoch": 0.19321090706733446,
"grad_norm": 1.514435887336731,
"learning_rate": 0.00012273312156466464,
"loss": 1.0601,
"step": 434
},
{
"epoch": 0.19365609348914858,
"grad_norm": 3.1652112007141113,
"learning_rate": 0.00012242707609493814,
"loss": 1.8037,
"step": 435
},
{
"epoch": 0.1941012799109627,
"grad_norm": 2.0706193447113037,
"learning_rate": 0.00012212080927902474,
"loss": 1.4704,
"step": 436
},
{
"epoch": 0.19454646633277686,
"grad_norm": 2.991508722305298,
"learning_rate": 0.00012181432413965428,
"loss": 2.6486,
"step": 437
},
{
"epoch": 0.194991652754591,
"grad_norm": 2.859058141708374,
"learning_rate": 0.00012150762370171136,
"loss": 2.1157,
"step": 438
},
{
"epoch": 0.1954368391764051,
"grad_norm": 2.6109142303466797,
"learning_rate": 0.00012120071099220549,
"loss": 1.9313,
"step": 439
},
{
"epoch": 0.19588202559821927,
"grad_norm": 3.324293375015259,
"learning_rate": 0.00012089358904024117,
"loss": 1.4513,
"step": 440
},
{
"epoch": 0.1963272120200334,
"grad_norm": 4.9398722648620605,
"learning_rate": 0.00012058626087698814,
"loss": 2.2132,
"step": 441
},
{
"epoch": 0.19677239844184752,
"grad_norm": 2.100003480911255,
"learning_rate": 0.00012027872953565125,
"loss": 1.4436,
"step": 442
},
{
"epoch": 0.19721758486366167,
"grad_norm": 3.5456857681274414,
"learning_rate": 0.00011997099805144069,
"loss": 2.3848,
"step": 443
},
{
"epoch": 0.1976627712854758,
"grad_norm": 3.4976227283477783,
"learning_rate": 0.000119663069461542,
"loss": 2.0566,
"step": 444
},
{
"epoch": 0.19810795770728992,
"grad_norm": 2.21435284614563,
"learning_rate": 0.00011935494680508606,
"loss": 1.6255,
"step": 445
},
{
"epoch": 0.19855314412910408,
"grad_norm": 3.514770984649658,
"learning_rate": 0.00011904663312311901,
"loss": 1.6344,
"step": 446
},
{
"epoch": 0.1989983305509182,
"grad_norm": 4.509243488311768,
"learning_rate": 0.00011873813145857249,
"loss": 1.8616,
"step": 447
},
{
"epoch": 0.19944351697273233,
"grad_norm": 2.5948128700256348,
"learning_rate": 0.00011842944485623335,
"loss": 1.8176,
"step": 448
},
{
"epoch": 0.19988870339454648,
"grad_norm": 3.1495344638824463,
"learning_rate": 0.00011812057636271374,
"loss": 1.964,
"step": 449
},
{
"epoch": 0.2003338898163606,
"grad_norm": 3.056257486343384,
"learning_rate": 0.000117811529026421,
"loss": 2.5386,
"step": 450
},
{
"epoch": 0.20077907623817473,
"grad_norm": 3.0034544467926025,
"learning_rate": 0.00011750230589752762,
"loss": 1.7149,
"step": 451
},
{
"epoch": 0.20122426265998888,
"grad_norm": 3.060884714126587,
"learning_rate": 0.00011719291002794096,
"loss": 1.2396,
"step": 452
},
{
"epoch": 0.201669449081803,
"grad_norm": 2.2568047046661377,
"learning_rate": 0.00011688334447127338,
"loss": 1.534,
"step": 453
},
{
"epoch": 0.20211463550361713,
"grad_norm": 2.048640727996826,
"learning_rate": 0.00011657361228281199,
"loss": 1.709,
"step": 454
},
{
"epoch": 0.2025598219254313,
"grad_norm": 2.9789860248565674,
"learning_rate": 0.00011626371651948838,
"loss": 2.4558,
"step": 455
},
{
"epoch": 0.2030050083472454,
"grad_norm": 2.8929457664489746,
"learning_rate": 0.00011595366023984864,
"loss": 2.3674,
"step": 456
},
{
"epoch": 0.20345019476905954,
"grad_norm": 2.2031073570251465,
"learning_rate": 0.0001156434465040231,
"loss": 1.1918,
"step": 457
},
{
"epoch": 0.20389538119087366,
"grad_norm": 2.2434680461883545,
"learning_rate": 0.00011533307837369607,
"loss": 2.1253,
"step": 458
},
{
"epoch": 0.20434056761268782,
"grad_norm": 2.9706263542175293,
"learning_rate": 0.00011502255891207572,
"loss": 2.2269,
"step": 459
},
{
"epoch": 0.20478575403450194,
"grad_norm": 2.349477767944336,
"learning_rate": 0.00011471189118386375,
"loss": 1.6526,
"step": 460
},
{
"epoch": 0.20523094045631607,
"grad_norm": 4.1461181640625,
"learning_rate": 0.00011440107825522521,
"loss": 2.8745,
"step": 461
},
{
"epoch": 0.20567612687813022,
"grad_norm": 2.6058058738708496,
"learning_rate": 0.00011409012319375827,
"loss": 1.8548,
"step": 462
},
{
"epoch": 0.20612131329994435,
"grad_norm": 3.3619179725646973,
"learning_rate": 0.0001137790290684638,
"loss": 2.7486,
"step": 463
},
{
"epoch": 0.20656649972175847,
"grad_norm": 2.5370032787323,
"learning_rate": 0.00011346779894971527,
"loss": 1.9156,
"step": 464
},
{
"epoch": 0.20701168614357263,
"grad_norm": 3.8200554847717285,
"learning_rate": 0.00011315643590922827,
"loss": 2.6811,
"step": 465
},
{
"epoch": 0.20745687256538675,
"grad_norm": 2.52695369720459,
"learning_rate": 0.0001128449430200303,
"loss": 2.3269,
"step": 466
},
{
"epoch": 0.20790205898720088,
"grad_norm": 3.2745585441589355,
"learning_rate": 0.00011253332335643043,
"loss": 1.6872,
"step": 467
},
{
"epoch": 0.20834724540901503,
"grad_norm": 2.0885512828826904,
"learning_rate": 0.00011222157999398895,
"loss": 1.2987,
"step": 468
},
{
"epoch": 0.20879243183082916,
"grad_norm": 1.8260122537612915,
"learning_rate": 0.00011190971600948699,
"loss": 1.8033,
"step": 469
},
{
"epoch": 0.20923761825264328,
"grad_norm": 2.1838929653167725,
"learning_rate": 0.00011159773448089614,
"loss": 2.0944,
"step": 470
},
{
"epoch": 0.20968280467445743,
"grad_norm": 3.3871777057647705,
"learning_rate": 0.00011128563848734816,
"loss": 2.2967,
"step": 471
},
{
"epoch": 0.21012799109627156,
"grad_norm": 1.639346718788147,
"learning_rate": 0.00011097343110910452,
"loss": 1.0565,
"step": 472
},
{
"epoch": 0.21057317751808569,
"grad_norm": 2.0020196437835693,
"learning_rate": 0.000110661115427526,
"loss": 1.4049,
"step": 473
},
{
"epoch": 0.21101836393989984,
"grad_norm": 3.8000659942626953,
"learning_rate": 0.00011034869452504226,
"loss": 2.369,
"step": 474
},
{
"epoch": 0.21146355036171396,
"grad_norm": 2.5505828857421875,
"learning_rate": 0.00011003617148512149,
"loss": 2.0883,
"step": 475
},
{
"epoch": 0.2119087367835281,
"grad_norm": 2.576690196990967,
"learning_rate": 0.00010972354939223996,
"loss": 1.3852,
"step": 476
},
{
"epoch": 0.21235392320534224,
"grad_norm": 3.0029373168945312,
"learning_rate": 0.00010941083133185146,
"loss": 1.9155,
"step": 477
},
{
"epoch": 0.21279910962715637,
"grad_norm": 2.3589234352111816,
"learning_rate": 0.00010909802039035701,
"loss": 1.57,
"step": 478
},
{
"epoch": 0.2132442960489705,
"grad_norm": 1.7912325859069824,
"learning_rate": 0.00010878511965507434,
"loss": 1.1136,
"step": 479
},
{
"epoch": 0.21368948247078465,
"grad_norm": 3.358750820159912,
"learning_rate": 0.00010847213221420736,
"loss": 2.4167,
"step": 480
},
{
"epoch": 0.21413466889259877,
"grad_norm": 2.7186977863311768,
"learning_rate": 0.00010815906115681578,
"loss": 1.6694,
"step": 481
},
{
"epoch": 0.2145798553144129,
"grad_norm": 2.201023817062378,
"learning_rate": 0.0001078459095727845,
"loss": 1.7678,
"step": 482
},
{
"epoch": 0.21502504173622705,
"grad_norm": 2.136697769165039,
"learning_rate": 0.00010753268055279329,
"loss": 1.2536,
"step": 483
},
{
"epoch": 0.21547022815804118,
"grad_norm": 2.5579357147216797,
"learning_rate": 0.0001072193771882861,
"loss": 1.9963,
"step": 484
},
{
"epoch": 0.2159154145798553,
"grad_norm": 4.265329837799072,
"learning_rate": 0.00010690600257144061,
"loss": 2.2109,
"step": 485
},
{
"epoch": 0.21636060100166946,
"grad_norm": 3.6019909381866455,
"learning_rate": 0.0001065925597951378,
"loss": 2.2145,
"step": 486
},
{
"epoch": 0.21680578742348358,
"grad_norm": 17.6555118560791,
"learning_rate": 0.00010627905195293135,
"loss": 1.8007,
"step": 487
},
{
"epoch": 0.2172509738452977,
"grad_norm": 3.238166570663452,
"learning_rate": 0.00010596548213901708,
"loss": 1.9748,
"step": 488
},
{
"epoch": 0.21769616026711186,
"grad_norm": 2.759385824203491,
"learning_rate": 0.00010565185344820247,
"loss": 1.9488,
"step": 489
},
{
"epoch": 0.21814134668892599,
"grad_norm": 3.2196052074432373,
"learning_rate": 0.00010533816897587606,
"loss": 1.697,
"step": 490
},
{
"epoch": 0.2185865331107401,
"grad_norm": 3.861132860183716,
"learning_rate": 0.00010502443181797697,
"loss": 2.3017,
"step": 491
},
{
"epoch": 0.21903171953255426,
"grad_norm": 3.6513118743896484,
"learning_rate": 0.00010471064507096426,
"loss": 2.5697,
"step": 492
},
{
"epoch": 0.2194769059543684,
"grad_norm": 3.5032055377960205,
"learning_rate": 0.0001043968118317865,
"loss": 2.4045,
"step": 493
},
{
"epoch": 0.21992209237618252,
"grad_norm": 1.864540696144104,
"learning_rate": 0.00010408293519785101,
"loss": 1.1518,
"step": 494
},
{
"epoch": 0.22036727879799667,
"grad_norm": 3.349238872528076,
"learning_rate": 0.00010376901826699348,
"loss": 2.2104,
"step": 495
},
{
"epoch": 0.2208124652198108,
"grad_norm": 2.841289520263672,
"learning_rate": 0.00010345506413744726,
"loss": 1.696,
"step": 496
},
{
"epoch": 0.22125765164162492,
"grad_norm": 2.52150297164917,
"learning_rate": 0.00010314107590781284,
"loss": 2.0612,
"step": 497
},
{
"epoch": 0.22170283806343907,
"grad_norm": 2.2152016162872314,
"learning_rate": 0.00010282705667702734,
"loss": 1.6126,
"step": 498
},
{
"epoch": 0.2221480244852532,
"grad_norm": 1.3674941062927246,
"learning_rate": 0.00010251300954433376,
"loss": 0.9631,
"step": 499
},
{
"epoch": 0.22259321090706732,
"grad_norm": 3.501506805419922,
"learning_rate": 0.00010219893760925052,
"loss": 2.3345,
"step": 500
},
{
"epoch": 0.22303839732888148,
"grad_norm": 5.884027004241943,
"learning_rate": 0.00010188484397154084,
"loss": 1.8717,
"step": 501
},
{
"epoch": 0.2234835837506956,
"grad_norm": 2.9893875122070312,
"learning_rate": 0.00010157073173118208,
"loss": 1.5762,
"step": 502
},
{
"epoch": 0.22392877017250973,
"grad_norm": 2.135575532913208,
"learning_rate": 0.00010125660398833528,
"loss": 1.922,
"step": 503
},
{
"epoch": 0.22437395659432388,
"grad_norm": 2.745893955230713,
"learning_rate": 0.00010094246384331442,
"loss": 2.1196,
"step": 504
},
{
"epoch": 0.224819143016138,
"grad_norm": 2.79962158203125,
"learning_rate": 0.00010062831439655591,
"loss": 1.5614,
"step": 505
},
{
"epoch": 0.22526432943795213,
"grad_norm": 2.848219871520996,
"learning_rate": 0.00010031415874858797,
"loss": 2.0875,
"step": 506
},
{
"epoch": 0.22570951585976629,
"grad_norm": 1.9362163543701172,
"learning_rate": 0.0001,
"loss": 1.7142,
"step": 507
},
{
"epoch": 0.2261547022815804,
"grad_norm": 2.283254623413086,
"learning_rate": 9.968584125141204e-05,
"loss": 1.8632,
"step": 508
},
{
"epoch": 0.22659988870339454,
"grad_norm": 3.0278165340423584,
"learning_rate": 9.937168560344412e-05,
"loss": 2.1201,
"step": 509
},
{
"epoch": 0.2270450751252087,
"grad_norm": 3.8611364364624023,
"learning_rate": 9.90575361566856e-05,
"loss": 2.3123,
"step": 510
},
{
"epoch": 0.22749026154702282,
"grad_norm": 3.6970300674438477,
"learning_rate": 9.874339601166473e-05,
"loss": 3.4317,
"step": 511
},
{
"epoch": 0.22793544796883694,
"grad_norm": 2.2782552242279053,
"learning_rate": 9.842926826881796e-05,
"loss": 1.9245,
"step": 512
},
{
"epoch": 0.2283806343906511,
"grad_norm": 4.838232040405273,
"learning_rate": 9.81151560284592e-05,
"loss": 2.3966,
"step": 513
},
{
"epoch": 0.22882582081246522,
"grad_norm": 2.7265279293060303,
"learning_rate": 9.78010623907495e-05,
"loss": 1.7596,
"step": 514
},
{
"epoch": 0.22927100723427934,
"grad_norm": 2.08231782913208,
"learning_rate": 9.748699045566626e-05,
"loss": 2.1319,
"step": 515
},
{
"epoch": 0.2297161936560935,
"grad_norm": 2.751110315322876,
"learning_rate": 9.717294332297268e-05,
"loss": 2.1303,
"step": 516
},
{
"epoch": 0.23016138007790762,
"grad_norm": 2.737786293029785,
"learning_rate": 9.685892409218717e-05,
"loss": 2.0345,
"step": 517
},
{
"epoch": 0.23060656649972175,
"grad_norm": 1.4824129343032837,
"learning_rate": 9.654493586255278e-05,
"loss": 1.308,
"step": 518
},
{
"epoch": 0.2310517529215359,
"grad_norm": 3.695892333984375,
"learning_rate": 9.623098173300654e-05,
"loss": 2.7335,
"step": 519
},
{
"epoch": 0.23149693934335003,
"grad_norm": 7.445088863372803,
"learning_rate": 9.591706480214901e-05,
"loss": 1.8408,
"step": 520
},
{
"epoch": 0.23194212576516415,
"grad_norm": 1.7280323505401611,
"learning_rate": 9.560318816821353e-05,
"loss": 0.9995,
"step": 521
},
{
"epoch": 0.2323873121869783,
"grad_norm": 5.599155426025391,
"learning_rate": 9.528935492903575e-05,
"loss": 2.7664,
"step": 522
},
{
"epoch": 0.23283249860879243,
"grad_norm": 2.3892910480499268,
"learning_rate": 9.497556818202306e-05,
"loss": 1.9636,
"step": 523
},
{
"epoch": 0.23327768503060656,
"grad_norm": 1.4825445413589478,
"learning_rate": 9.466183102412395e-05,
"loss": 0.9973,
"step": 524
},
{
"epoch": 0.2337228714524207,
"grad_norm": 3.159691572189331,
"learning_rate": 9.434814655179755e-05,
"loss": 2.2085,
"step": 525
},
{
"epoch": 0.23416805787423484,
"grad_norm": 2.600054979324341,
"learning_rate": 9.403451786098294e-05,
"loss": 1.877,
"step": 526
},
{
"epoch": 0.23461324429604896,
"grad_norm": 5.829646587371826,
"learning_rate": 9.372094804706867e-05,
"loss": 1.6461,
"step": 527
},
{
"epoch": 0.23505843071786311,
"grad_norm": 1.8458348512649536,
"learning_rate": 9.340744020486222e-05,
"loss": 1.0875,
"step": 528
},
{
"epoch": 0.23550361713967724,
"grad_norm": 2.3733482360839844,
"learning_rate": 9.309399742855942e-05,
"loss": 1.9736,
"step": 529
},
{
"epoch": 0.23594880356149137,
"grad_norm": 4.013689041137695,
"learning_rate": 9.278062281171393e-05,
"loss": 2.1294,
"step": 530
},
{
"epoch": 0.23639398998330552,
"grad_norm": 2.0923335552215576,
"learning_rate": 9.246731944720675e-05,
"loss": 1.3463,
"step": 531
},
{
"epoch": 0.23683917640511964,
"grad_norm": 2.23264479637146,
"learning_rate": 9.215409042721552e-05,
"loss": 1.3212,
"step": 532
},
{
"epoch": 0.23728436282693377,
"grad_norm": 4.511864185333252,
"learning_rate": 9.184093884318425e-05,
"loss": 2.1561,
"step": 533
},
{
"epoch": 0.23772954924874792,
"grad_norm": 3.2981691360473633,
"learning_rate": 9.152786778579267e-05,
"loss": 2.9609,
"step": 534
},
{
"epoch": 0.23817473567056205,
"grad_norm": 3.0374643802642822,
"learning_rate": 9.121488034492569e-05,
"loss": 1.5647,
"step": 535
},
{
"epoch": 0.23861992209237617,
"grad_norm": 3.0681955814361572,
"learning_rate": 9.090197960964301e-05,
"loss": 2.2617,
"step": 536
},
{
"epoch": 0.23906510851419033,
"grad_norm": 2.6293389797210693,
"learning_rate": 9.058916866814858e-05,
"loss": 2.0123,
"step": 537
},
{
"epoch": 0.23951029493600445,
"grad_norm": 1.9579615592956543,
"learning_rate": 9.027645060776006e-05,
"loss": 1.4156,
"step": 538
},
{
"epoch": 0.23995548135781858,
"grad_norm": 3.5243144035339355,
"learning_rate": 8.99638285148785e-05,
"loss": 1.5838,
"step": 539
},
{
"epoch": 0.24040066777963273,
"grad_norm": 2.244328022003174,
"learning_rate": 8.965130547495776e-05,
"loss": 1.7732,
"step": 540
},
{
"epoch": 0.24084585420144686,
"grad_norm": 3.2589073181152344,
"learning_rate": 8.933888457247402e-05,
"loss": 2.2833,
"step": 541
},
{
"epoch": 0.24129104062326098,
"grad_norm": 5.108296871185303,
"learning_rate": 8.902656889089548e-05,
"loss": 2.0562,
"step": 542
},
{
"epoch": 0.24173622704507514,
"grad_norm": 2.2016584873199463,
"learning_rate": 8.871436151265184e-05,
"loss": 1.1455,
"step": 543
},
{
"epoch": 0.24218141346688926,
"grad_norm": 3.027379035949707,
"learning_rate": 8.840226551910387e-05,
"loss": 1.3459,
"step": 544
},
{
"epoch": 0.2426265998887034,
"grad_norm": 3.1470062732696533,
"learning_rate": 8.809028399051302e-05,
"loss": 2.2754,
"step": 545
},
{
"epoch": 0.24307178631051754,
"grad_norm": 2.895399570465088,
"learning_rate": 8.777842000601105e-05,
"loss": 2.0696,
"step": 546
},
{
"epoch": 0.24351697273233167,
"grad_norm": 2.655974864959717,
"learning_rate": 8.746667664356956e-05,
"loss": 2.0362,
"step": 547
},
{
"epoch": 0.2439621591541458,
"grad_norm": 3.3608624935150146,
"learning_rate": 8.715505697996971e-05,
"loss": 2.4649,
"step": 548
},
{
"epoch": 0.24440734557595994,
"grad_norm": 1.6335474252700806,
"learning_rate": 8.684356409077176e-05,
"loss": 0.9378,
"step": 549
},
{
"epoch": 0.24485253199777407,
"grad_norm": 3.400189161300659,
"learning_rate": 8.653220105028474e-05,
"loss": 2.4242,
"step": 550
},
{
"epoch": 0.2452977184195882,
"grad_norm": 1.9661957025527954,
"learning_rate": 8.62209709315362e-05,
"loss": 1.6024,
"step": 551
},
{
"epoch": 0.24574290484140235,
"grad_norm": 2.605260133743286,
"learning_rate": 8.590987680624174e-05,
"loss": 1.9463,
"step": 552
},
{
"epoch": 0.24618809126321647,
"grad_norm": 2.2247166633605957,
"learning_rate": 8.559892174477479e-05,
"loss": 2.2634,
"step": 553
},
{
"epoch": 0.2466332776850306,
"grad_norm": 3.3085622787475586,
"learning_rate": 8.528810881613626e-05,
"loss": 2.0767,
"step": 554
},
{
"epoch": 0.24707846410684475,
"grad_norm": 3.5857832431793213,
"learning_rate": 8.497744108792429e-05,
"loss": 1.9394,
"step": 555
},
{
"epoch": 0.24752365052865888,
"grad_norm": 3.7488949298858643,
"learning_rate": 8.466692162630392e-05,
"loss": 1.7949,
"step": 556
},
{
"epoch": 0.247968836950473,
"grad_norm": 2.594888925552368,
"learning_rate": 8.435655349597689e-05,
"loss": 1.8403,
"step": 557
},
{
"epoch": 0.24841402337228716,
"grad_norm": 2.8361175060272217,
"learning_rate": 8.404633976015134e-05,
"loss": 2.0542,
"step": 558
},
{
"epoch": 0.24885920979410128,
"grad_norm": 2.4254066944122314,
"learning_rate": 8.373628348051165e-05,
"loss": 2.7412,
"step": 559
},
{
"epoch": 0.2493043962159154,
"grad_norm": 2.855569362640381,
"learning_rate": 8.342638771718802e-05,
"loss": 1.6349,
"step": 560
},
{
"epoch": 0.24974958263772956,
"grad_norm": 1.6968199014663696,
"learning_rate": 8.311665552872662e-05,
"loss": 1.5019,
"step": 561
},
{
"epoch": 0.2501947690595437,
"grad_norm": 2.3553712368011475,
"learning_rate": 8.280708997205904e-05,
"loss": 1.7379,
"step": 562
},
{
"epoch": 0.2506399554813578,
"grad_norm": 1.8103502988815308,
"learning_rate": 8.249769410247239e-05,
"loss": 1.5911,
"step": 563
},
{
"epoch": 0.25108514190317194,
"grad_norm": 2.619528293609619,
"learning_rate": 8.218847097357898e-05,
"loss": 1.4286,
"step": 564
},
{
"epoch": 0.25153032832498606,
"grad_norm": 2.888099431991577,
"learning_rate": 8.187942363728625e-05,
"loss": 2.643,
"step": 565
},
{
"epoch": 0.25197551474680024,
"grad_norm": 2.008424758911133,
"learning_rate": 8.157055514376666e-05,
"loss": 1.7771,
"step": 566
},
{
"epoch": 0.25242070116861437,
"grad_norm": 3.2794687747955322,
"learning_rate": 8.126186854142752e-05,
"loss": 2.4204,
"step": 567
},
{
"epoch": 0.2528658875904285,
"grad_norm": 4.072325229644775,
"learning_rate": 8.095336687688102e-05,
"loss": 2.0796,
"step": 568
},
{
"epoch": 0.2533110740122426,
"grad_norm": 2.2015626430511475,
"learning_rate": 8.064505319491398e-05,
"loss": 2.1884,
"step": 569
},
{
"epoch": 0.25375626043405675,
"grad_norm": 2.523545742034912,
"learning_rate": 8.033693053845801e-05,
"loss": 2.0094,
"step": 570
},
{
"epoch": 0.25420144685587087,
"grad_norm": 4.743039608001709,
"learning_rate": 8.002900194855932e-05,
"loss": 3.0156,
"step": 571
},
{
"epoch": 0.25464663327768505,
"grad_norm": 2.6447269916534424,
"learning_rate": 7.972127046434878e-05,
"loss": 1.6751,
"step": 572
},
{
"epoch": 0.2550918196994992,
"grad_norm": 2.480916738510132,
"learning_rate": 7.941373912301189e-05,
"loss": 1.9236,
"step": 573
},
{
"epoch": 0.2555370061213133,
"grad_norm": 2.005185842514038,
"learning_rate": 7.910641095975886e-05,
"loss": 1.0807,
"step": 574
},
{
"epoch": 0.25598219254312743,
"grad_norm": 4.589725971221924,
"learning_rate": 7.879928900779456e-05,
"loss": 3.2364,
"step": 575
},
{
"epoch": 0.25642737896494155,
"grad_norm": 3.0145387649536133,
"learning_rate": 7.849237629828869e-05,
"loss": 1.6721,
"step": 576
},
{
"epoch": 0.2568725653867557,
"grad_norm": 2.3507320880889893,
"learning_rate": 7.818567586034577e-05,
"loss": 1.4727,
"step": 577
},
{
"epoch": 0.25731775180856986,
"grad_norm": 3.227512836456299,
"learning_rate": 7.787919072097531e-05,
"loss": 2.0266,
"step": 578
},
{
"epoch": 0.257762938230384,
"grad_norm": 2.394864082336426,
"learning_rate": 7.75729239050619e-05,
"loss": 1.8211,
"step": 579
},
{
"epoch": 0.2582081246521981,
"grad_norm": 2.832284688949585,
"learning_rate": 7.726687843533538e-05,
"loss": 2.1158,
"step": 580
},
{
"epoch": 0.25865331107401224,
"grad_norm": 2.7681491374969482,
"learning_rate": 7.696105733234098e-05,
"loss": 2.0653,
"step": 581
},
{
"epoch": 0.25909849749582636,
"grad_norm": 4.447993278503418,
"learning_rate": 7.66554636144095e-05,
"loss": 1.7896,
"step": 582
},
{
"epoch": 0.2595436839176405,
"grad_norm": 2.437788248062134,
"learning_rate": 7.635010029762756e-05,
"loss": 1.5824,
"step": 583
},
{
"epoch": 0.25998887033945467,
"grad_norm": 2.262251853942871,
"learning_rate": 7.604497039580785e-05,
"loss": 1.3313,
"step": 584
},
{
"epoch": 0.2604340567612688,
"grad_norm": 2.7486987113952637,
"learning_rate": 7.574007692045928e-05,
"loss": 2.56,
"step": 585
},
{
"epoch": 0.2608792431830829,
"grad_norm": 4.241098403930664,
"learning_rate": 7.543542288075739e-05,
"loss": 2.5701,
"step": 586
},
{
"epoch": 0.26132442960489705,
"grad_norm": 2.46081805229187,
"learning_rate": 7.513101128351454e-05,
"loss": 2.6092,
"step": 587
},
{
"epoch": 0.26176961602671117,
"grad_norm": 1.9531766176223755,
"learning_rate": 7.48268451331503e-05,
"loss": 1.479,
"step": 588
},
{
"epoch": 0.2622148024485253,
"grad_norm": 3.3573310375213623,
"learning_rate": 7.45229274316618e-05,
"loss": 2.4908,
"step": 589
},
{
"epoch": 0.2626599888703395,
"grad_norm": 3.4041333198547363,
"learning_rate": 7.421926117859403e-05,
"loss": 2.7018,
"step": 590
},
{
"epoch": 0.2631051752921536,
"grad_norm": 1.837587833404541,
"learning_rate": 7.391584937101033e-05,
"loss": 1.3851,
"step": 591
},
{
"epoch": 0.26355036171396773,
"grad_norm": 3.824810028076172,
"learning_rate": 7.361269500346274e-05,
"loss": 2.0024,
"step": 592
},
{
"epoch": 0.26399554813578185,
"grad_norm": 3.523655652999878,
"learning_rate": 7.330980106796246e-05,
"loss": 2.3259,
"step": 593
},
{
"epoch": 0.264440734557596,
"grad_norm": 2.3373308181762695,
"learning_rate": 7.300717055395039e-05,
"loss": 1.0866,
"step": 594
},
{
"epoch": 0.2648859209794101,
"grad_norm": 3.652315139770508,
"learning_rate": 7.270480644826749e-05,
"loss": 2.5877,
"step": 595
},
{
"epoch": 0.2653311074012243,
"grad_norm": 3.174226760864258,
"learning_rate": 7.240271173512546e-05,
"loss": 2.0778,
"step": 596
},
{
"epoch": 0.2657762938230384,
"grad_norm": 3.0619466304779053,
"learning_rate": 7.210088939607708e-05,
"loss": 2.7003,
"step": 597
},
{
"epoch": 0.26622148024485254,
"grad_norm": 2.6117167472839355,
"learning_rate": 7.179934240998706e-05,
"loss": 2.4971,
"step": 598
},
{
"epoch": 0.26666666666666666,
"grad_norm": 3.0407607555389404,
"learning_rate": 7.149807375300239e-05,
"loss": 1.8027,
"step": 599
},
{
"epoch": 0.2671118530884808,
"grad_norm": 1.9023070335388184,
"learning_rate": 7.119708639852312e-05,
"loss": 1.7886,
"step": 600
},
{
"epoch": 0.2675570395102949,
"grad_norm": 2.4321749210357666,
"learning_rate": 7.089638331717284e-05,
"loss": 2.2824,
"step": 601
},
{
"epoch": 0.2680022259321091,
"grad_norm": 2.2802674770355225,
"learning_rate": 7.059596747676962e-05,
"loss": 1.2629,
"step": 602
},
{
"epoch": 0.2684474123539232,
"grad_norm": 1.8967430591583252,
"learning_rate": 7.029584184229653e-05,
"loss": 1.5382,
"step": 603
},
{
"epoch": 0.26889259877573735,
"grad_norm": 2.444330930709839,
"learning_rate": 6.999600937587239e-05,
"loss": 2.0665,
"step": 604
},
{
"epoch": 0.26933778519755147,
"grad_norm": 7.994617462158203,
"learning_rate": 6.969647303672262e-05,
"loss": 2.1398,
"step": 605
},
{
"epoch": 0.2697829716193656,
"grad_norm": 1.7072683572769165,
"learning_rate": 6.939723578114993e-05,
"loss": 1.3992,
"step": 606
},
{
"epoch": 0.2702281580411797,
"grad_norm": 3.6613316535949707,
"learning_rate": 6.909830056250527e-05,
"loss": 2.4227,
"step": 607
},
{
"epoch": 0.2706733444629939,
"grad_norm": 1.5923817157745361,
"learning_rate": 6.879967033115853e-05,
"loss": 1.1673,
"step": 608
},
{
"epoch": 0.27111853088480803,
"grad_norm": 2.9673140048980713,
"learning_rate": 6.850134803446954e-05,
"loss": 1.8865,
"step": 609
},
{
"epoch": 0.27156371730662215,
"grad_norm": 2.5711653232574463,
"learning_rate": 6.820333661675893e-05,
"loss": 1.924,
"step": 610
},
{
"epoch": 0.2720089037284363,
"grad_norm": 2.23542857170105,
"learning_rate": 6.790563901927907e-05,
"loss": 2.3644,
"step": 611
},
{
"epoch": 0.2724540901502504,
"grad_norm": 1.7758067846298218,
"learning_rate": 6.760825818018508e-05,
"loss": 1.8761,
"step": 612
},
{
"epoch": 0.27289927657206453,
"grad_norm": 1.9661537408828735,
"learning_rate": 6.731119703450577e-05,
"loss": 1.3787,
"step": 613
},
{
"epoch": 0.2733444629938787,
"grad_norm": 2.7200584411621094,
"learning_rate": 6.701445851411472e-05,
"loss": 2.1951,
"step": 614
},
{
"epoch": 0.27378964941569284,
"grad_norm": 3.6363039016723633,
"learning_rate": 6.671804554770135e-05,
"loss": 2.3486,
"step": 615
},
{
"epoch": 0.27423483583750696,
"grad_norm": 8.95522403717041,
"learning_rate": 6.642196106074194e-05,
"loss": 2.2566,
"step": 616
},
{
"epoch": 0.2746800222593211,
"grad_norm": 3.6754941940307617,
"learning_rate": 6.612620797547087e-05,
"loss": 2.0391,
"step": 617
},
{
"epoch": 0.2751252086811352,
"grad_norm": 1.4028700590133667,
"learning_rate": 6.583078921085167e-05,
"loss": 1.0158,
"step": 618
},
{
"epoch": 0.27557039510294934,
"grad_norm": 2.4848408699035645,
"learning_rate": 6.55357076825483e-05,
"loss": 2.1528,
"step": 619
},
{
"epoch": 0.2760155815247635,
"grad_norm": 2.78027081489563,
"learning_rate": 6.52409663028963e-05,
"loss": 1.8844,
"step": 620
},
{
"epoch": 0.27646076794657765,
"grad_norm": 2.730515480041504,
"learning_rate": 6.494656798087412e-05,
"loss": 2.3197,
"step": 621
},
{
"epoch": 0.27690595436839177,
"grad_norm": 9.125295639038086,
"learning_rate": 6.465251562207431e-05,
"loss": 2.5809,
"step": 622
},
{
"epoch": 0.2773511407902059,
"grad_norm": 4.174372673034668,
"learning_rate": 6.435881212867493e-05,
"loss": 2.1089,
"step": 623
},
{
"epoch": 0.27779632721202,
"grad_norm": 1.8563311100006104,
"learning_rate": 6.406546039941094e-05,
"loss": 1.6145,
"step": 624
},
{
"epoch": 0.27824151363383415,
"grad_norm": 2.578172445297241,
"learning_rate": 6.377246332954544e-05,
"loss": 1.5995,
"step": 625
},
{
"epoch": 0.27868670005564833,
"grad_norm": 1.9171854257583618,
"learning_rate": 6.347982381084123e-05,
"loss": 1.2731,
"step": 626
},
{
"epoch": 0.27913188647746245,
"grad_norm": 2.240983724594116,
"learning_rate": 6.318754473153221e-05,
"loss": 1.7384,
"step": 627
},
{
"epoch": 0.2795770728992766,
"grad_norm": 1.9845826625823975,
"learning_rate": 6.289562897629492e-05,
"loss": 1.7666,
"step": 628
},
{
"epoch": 0.2800222593210907,
"grad_norm": 3.1939802169799805,
"learning_rate": 6.260407942621998e-05,
"loss": 2.0571,
"step": 629
},
{
"epoch": 0.28046744574290483,
"grad_norm": 3.2095515727996826,
"learning_rate": 6.231289895878375e-05,
"loss": 1.8829,
"step": 630
},
{
"epoch": 0.28091263216471896,
"grad_norm": 2.775292158126831,
"learning_rate": 6.20220904478199e-05,
"loss": 1.5986,
"step": 631
},
{
"epoch": 0.28135781858653314,
"grad_norm": 3.0598955154418945,
"learning_rate": 6.173165676349103e-05,
"loss": 1.6686,
"step": 632
},
{
"epoch": 0.28180300500834726,
"grad_norm": 1.9700332880020142,
"learning_rate": 6.144160077226036e-05,
"loss": 1.5556,
"step": 633
},
{
"epoch": 0.2822481914301614,
"grad_norm": 2.914013624191284,
"learning_rate": 6.11519253368634e-05,
"loss": 1.5223,
"step": 634
},
{
"epoch": 0.2826933778519755,
"grad_norm": 3.7892260551452637,
"learning_rate": 6.086263331627976e-05,
"loss": 2.5045,
"step": 635
},
{
"epoch": 0.28313856427378964,
"grad_norm": 3.4939780235290527,
"learning_rate": 6.05737275657049e-05,
"loss": 1.3862,
"step": 636
},
{
"epoch": 0.28358375069560376,
"grad_norm": 2.4467647075653076,
"learning_rate": 6.0285210936521955e-05,
"loss": 1.7271,
"step": 637
},
{
"epoch": 0.28402893711741795,
"grad_norm": 3.2485249042510986,
"learning_rate": 5.999708627627354e-05,
"loss": 2.1898,
"step": 638
},
{
"epoch": 0.28447412353923207,
"grad_norm": 2.478696584701538,
"learning_rate": 5.9709356428633746e-05,
"loss": 1.7902,
"step": 639
},
{
"epoch": 0.2849193099610462,
"grad_norm": 2.8872196674346924,
"learning_rate": 5.9422024233380013e-05,
"loss": 2.8058,
"step": 640
},
{
"epoch": 0.2853644963828603,
"grad_norm": 2.1622273921966553,
"learning_rate": 5.913509252636511e-05,
"loss": 1.0766,
"step": 641
},
{
"epoch": 0.28580968280467445,
"grad_norm": 2.5097506046295166,
"learning_rate": 5.884856413948913e-05,
"loss": 1.652,
"step": 642
},
{
"epoch": 0.2862548692264886,
"grad_norm": 2.859157085418701,
"learning_rate": 5.856244190067159e-05,
"loss": 1.946,
"step": 643
},
{
"epoch": 0.28670005564830275,
"grad_norm": 2.902848958969116,
"learning_rate": 5.82767286338235e-05,
"loss": 1.8221,
"step": 644
},
{
"epoch": 0.2871452420701169,
"grad_norm": 2.4782211780548096,
"learning_rate": 5.799142715881938e-05,
"loss": 1.178,
"step": 645
},
{
"epoch": 0.287590428491931,
"grad_norm": 1.7475439310073853,
"learning_rate": 5.770654029146969e-05,
"loss": 1.3781,
"step": 646
},
{
"epoch": 0.28803561491374513,
"grad_norm": 3.172783136367798,
"learning_rate": 5.7422070843492734e-05,
"loss": 2.3546,
"step": 647
},
{
"epoch": 0.28848080133555926,
"grad_norm": 2.8005614280700684,
"learning_rate": 5.713802162248718e-05,
"loss": 1.9669,
"step": 648
},
{
"epoch": 0.2889259877573734,
"grad_norm": 3.17270565032959,
"learning_rate": 5.6854395431904094e-05,
"loss": 2.7537,
"step": 649
},
{
"epoch": 0.28937117417918756,
"grad_norm": 1.226949691772461,
"learning_rate": 5.657119507101954e-05,
"loss": 1.1319,
"step": 650
},
{
"epoch": 0.2898163606010017,
"grad_norm": 3.1507577896118164,
"learning_rate": 5.6288423334906735e-05,
"loss": 2.1694,
"step": 651
},
{
"epoch": 0.2902615470228158,
"grad_norm": 3.0338058471679688,
"learning_rate": 5.6006083014408484e-05,
"loss": 2.559,
"step": 652
},
{
"epoch": 0.29070673344462994,
"grad_norm": 2.516258955001831,
"learning_rate": 5.572417689610987e-05,
"loss": 1.8034,
"step": 653
},
{
"epoch": 0.29115191986644406,
"grad_norm": 3.0269787311553955,
"learning_rate": 5.544270776231038e-05,
"loss": 2.4885,
"step": 654
},
{
"epoch": 0.2915971062882582,
"grad_norm": 3.1893317699432373,
"learning_rate": 5.5161678390996796e-05,
"loss": 2.0873,
"step": 655
},
{
"epoch": 0.2920422927100723,
"grad_norm": 3.51753306388855,
"learning_rate": 5.488109155581549e-05,
"loss": 2.4275,
"step": 656
},
{
"epoch": 0.2924874791318865,
"grad_norm": 4.858884811401367,
"learning_rate": 5.4600950026045326e-05,
"loss": 1.5764,
"step": 657
},
{
"epoch": 0.2929326655537006,
"grad_norm": 3.3828039169311523,
"learning_rate": 5.4321256566570036e-05,
"loss": 1.8998,
"step": 658
},
{
"epoch": 0.29337785197551475,
"grad_norm": 3.035909652709961,
"learning_rate": 5.404201393785122e-05,
"loss": 2.2468,
"step": 659
},
{
"epoch": 0.2938230383973289,
"grad_norm": 4.460206985473633,
"learning_rate": 5.3763224895900846e-05,
"loss": 1.9738,
"step": 660
},
{
"epoch": 0.294268224819143,
"grad_norm": 3.6244258880615234,
"learning_rate": 5.348489219225416e-05,
"loss": 2.4416,
"step": 661
},
{
"epoch": 0.2947134112409571,
"grad_norm": 1.7858965396881104,
"learning_rate": 5.320701857394268e-05,
"loss": 2.0355,
"step": 662
},
{
"epoch": 0.2951585976627713,
"grad_norm": 3.171755075454712,
"learning_rate": 5.292960678346675e-05,
"loss": 1.4905,
"step": 663
},
{
"epoch": 0.29560378408458543,
"grad_norm": 2.858747959136963,
"learning_rate": 5.265265955876879e-05,
"loss": 1.8204,
"step": 664
},
{
"epoch": 0.29604897050639956,
"grad_norm": 2.019382953643799,
"learning_rate": 5.237617963320608e-05,
"loss": 1.6461,
"step": 665
},
{
"epoch": 0.2964941569282137,
"grad_norm": 1.86653733253479,
"learning_rate": 5.210016973552391e-05,
"loss": 1.1889,
"step": 666
},
{
"epoch": 0.2969393433500278,
"grad_norm": 2.653991937637329,
"learning_rate": 5.182463258982846e-05,
"loss": 1.9415,
"step": 667
},
{
"epoch": 0.29738452977184193,
"grad_norm": 1.676102638244629,
"learning_rate": 5.1549570915560206e-05,
"loss": 1.4348,
"step": 668
},
{
"epoch": 0.2978297161936561,
"grad_norm": 2.8851091861724854,
"learning_rate": 5.127498742746675e-05,
"loss": 2.2143,
"step": 669
},
{
"epoch": 0.29827490261547024,
"grad_norm": 2.4972410202026367,
"learning_rate": 5.100088483557634e-05,
"loss": 1.5418,
"step": 670
},
{
"epoch": 0.29872008903728436,
"grad_norm": 2.2214536666870117,
"learning_rate": 5.072726584517086e-05,
"loss": 1.2384,
"step": 671
},
{
"epoch": 0.2991652754590985,
"grad_norm": 2.9166228771209717,
"learning_rate": 5.045413315675924e-05,
"loss": 2.0722,
"step": 672
},
{
"epoch": 0.2996104618809126,
"grad_norm": 1.8348429203033447,
"learning_rate": 5.018148946605092e-05,
"loss": 1.6178,
"step": 673
},
{
"epoch": 0.30005564830272674,
"grad_norm": 2.423125982284546,
"learning_rate": 4.990933746392899e-05,
"loss": 1.9328,
"step": 674
},
{
"epoch": 0.3005008347245409,
"grad_norm": 7.734646797180176,
"learning_rate": 4.9637679836423924e-05,
"loss": 2.3782,
"step": 675
},
{
"epoch": 0.30094602114635505,
"grad_norm": 2.6672346591949463,
"learning_rate": 4.9366519264686725e-05,
"loss": 2.1491,
"step": 676
},
{
"epoch": 0.3013912075681692,
"grad_norm": 2.255370616912842,
"learning_rate": 4.909585842496287e-05,
"loss": 1.8773,
"step": 677
},
{
"epoch": 0.3018363939899833,
"grad_norm": 4.085122108459473,
"learning_rate": 4.8825699988565485e-05,
"loss": 2.5306,
"step": 678
},
{
"epoch": 0.3022815804117974,
"grad_norm": 3.448923110961914,
"learning_rate": 4.8556046621849346e-05,
"loss": 2.3004,
"step": 679
},
{
"epoch": 0.30272676683361155,
"grad_norm": 2.694648265838623,
"learning_rate": 4.828690098618429e-05,
"loss": 1.9922,
"step": 680
},
{
"epoch": 0.30317195325542573,
"grad_norm": 2.3106231689453125,
"learning_rate": 4.8018265737929044e-05,
"loss": 1.3284,
"step": 681
},
{
"epoch": 0.30361713967723986,
"grad_norm": 3.892047882080078,
"learning_rate": 4.7750143528405126e-05,
"loss": 2.185,
"step": 682
},
{
"epoch": 0.304062326099054,
"grad_norm": 2.5504989624023438,
"learning_rate": 4.748253700387042e-05,
"loss": 2.0292,
"step": 683
},
{
"epoch": 0.3045075125208681,
"grad_norm": 1.9344456195831299,
"learning_rate": 4.721544880549337e-05,
"loss": 1.6198,
"step": 684
},
{
"epoch": 0.30495269894268223,
"grad_norm": 2.216498613357544,
"learning_rate": 4.694888156932658e-05,
"loss": 2.1632,
"step": 685
},
{
"epoch": 0.30539788536449636,
"grad_norm": 2.8042759895324707,
"learning_rate": 4.668283792628114e-05,
"loss": 2.94,
"step": 686
},
{
"epoch": 0.30584307178631054,
"grad_norm": 3.1668999195098877,
"learning_rate": 4.6417320502100316e-05,
"loss": 2.6903,
"step": 687
},
{
"epoch": 0.30628825820812466,
"grad_norm": 2.5685513019561768,
"learning_rate": 4.615233191733398e-05,
"loss": 2.1633,
"step": 688
},
{
"epoch": 0.3067334446299388,
"grad_norm": 3.140465497970581,
"learning_rate": 4.588787478731242e-05,
"loss": 2.6672,
"step": 689
},
{
"epoch": 0.3071786310517529,
"grad_norm": 5.06754207611084,
"learning_rate": 4.5623951722120736e-05,
"loss": 1.5968,
"step": 690
},
{
"epoch": 0.30762381747356704,
"grad_norm": 3.0402727127075195,
"learning_rate": 4.5360565326573104e-05,
"loss": 2.5534,
"step": 691
},
{
"epoch": 0.30806900389538117,
"grad_norm": 2.324404239654541,
"learning_rate": 4.5097718200186814e-05,
"loss": 2.1212,
"step": 692
},
{
"epoch": 0.30851419031719535,
"grad_norm": 2.1986916065216064,
"learning_rate": 4.483541293715698e-05,
"loss": 1.3312,
"step": 693
},
{
"epoch": 0.3089593767390095,
"grad_norm": 1.8250253200531006,
"learning_rate": 4.457365212633058e-05,
"loss": 1.6241,
"step": 694
},
{
"epoch": 0.3094045631608236,
"grad_norm": 2.8651695251464844,
"learning_rate": 4.431243835118124e-05,
"loss": 1.9434,
"step": 695
},
{
"epoch": 0.3098497495826377,
"grad_norm": 2.2517638206481934,
"learning_rate": 4.4051774189783315e-05,
"loss": 1.5921,
"step": 696
},
{
"epoch": 0.31029493600445185,
"grad_norm": 2.332831382751465,
"learning_rate": 4.379166221478697e-05,
"loss": 2.1504,
"step": 697
},
{
"epoch": 0.310740122426266,
"grad_norm": 2.4555563926696777,
"learning_rate": 4.3532104993392306e-05,
"loss": 2.285,
"step": 698
},
{
"epoch": 0.31118530884808016,
"grad_norm": 2.427152156829834,
"learning_rate": 4.327310508732437e-05,
"loss": 2.2453,
"step": 699
},
{
"epoch": 0.3116304952698943,
"grad_norm": 3.151796340942383,
"learning_rate": 4.301466505280762e-05,
"loss": 1.7703,
"step": 700
},
{
"epoch": 0.3120756816917084,
"grad_norm": 2.0332770347595215,
"learning_rate": 4.2756787440540936e-05,
"loss": 1.6522,
"step": 701
},
{
"epoch": 0.31252086811352253,
"grad_norm": 2.4257619380950928,
"learning_rate": 4.249947479567218e-05,
"loss": 1.5509,
"step": 702
},
{
"epoch": 0.31296605453533666,
"grad_norm": 2.1774954795837402,
"learning_rate": 4.224272965777326e-05,
"loss": 1.637,
"step": 703
},
{
"epoch": 0.3134112409571508,
"grad_norm": 2.6510019302368164,
"learning_rate": 4.1986554560815096e-05,
"loss": 1.8324,
"step": 704
},
{
"epoch": 0.31385642737896496,
"grad_norm": 2.63783860206604,
"learning_rate": 4.173095203314241e-05,
"loss": 2.2903,
"step": 705
},
{
"epoch": 0.3143016138007791,
"grad_norm": 2.778245210647583,
"learning_rate": 4.1475924597449024e-05,
"loss": 1.8128,
"step": 706
},
{
"epoch": 0.3147468002225932,
"grad_norm": 1.6933324337005615,
"learning_rate": 4.12214747707527e-05,
"loss": 1.6582,
"step": 707
},
{
"epoch": 0.31519198664440734,
"grad_norm": 2.9144859313964844,
"learning_rate": 4.096760506437057e-05,
"loss": 1.4754,
"step": 708
},
{
"epoch": 0.31563717306622147,
"grad_norm": 2.8968605995178223,
"learning_rate": 4.071431798389408e-05,
"loss": 1.2524,
"step": 709
},
{
"epoch": 0.3160823594880356,
"grad_norm": 3.108628273010254,
"learning_rate": 4.0461616029164526e-05,
"loss": 1.9153,
"step": 710
},
{
"epoch": 0.3165275459098498,
"grad_norm": 4.1388092041015625,
"learning_rate": 4.020950169424815e-05,
"loss": 1.956,
"step": 711
},
{
"epoch": 0.3169727323316639,
"grad_norm": 4.648570537567139,
"learning_rate": 3.9957977467411615e-05,
"loss": 2.7696,
"step": 712
},
{
"epoch": 0.317417918753478,
"grad_norm": 2.6900453567504883,
"learning_rate": 3.9707045831097555e-05,
"loss": 2.2918,
"step": 713
},
{
"epoch": 0.31786310517529215,
"grad_norm": 3.3735849857330322,
"learning_rate": 3.945670926189987e-05,
"loss": 2.1827,
"step": 714
},
{
"epoch": 0.3183082915971063,
"grad_norm": 3.3142940998077393,
"learning_rate": 3.920697023053949e-05,
"loss": 1.8947,
"step": 715
},
{
"epoch": 0.3187534780189204,
"grad_norm": 2.8147215843200684,
"learning_rate": 3.895783120183976e-05,
"loss": 1.9302,
"step": 716
},
{
"epoch": 0.3191986644407346,
"grad_norm": 2.287973642349243,
"learning_rate": 3.8709294634702376e-05,
"loss": 1.8818,
"step": 717
},
{
"epoch": 0.3196438508625487,
"grad_norm": 3.5175046920776367,
"learning_rate": 3.846136298208285e-05,
"loss": 1.8394,
"step": 718
},
{
"epoch": 0.32008903728436283,
"grad_norm": 1.9966739416122437,
"learning_rate": 3.821403869096658e-05,
"loss": 1.3228,
"step": 719
},
{
"epoch": 0.32053422370617696,
"grad_norm": 2.7500932216644287,
"learning_rate": 3.796732420234443e-05,
"loss": 1.41,
"step": 720
},
{
"epoch": 0.3209794101279911,
"grad_norm": 2.388460159301758,
"learning_rate": 3.7721221951188765e-05,
"loss": 2.0217,
"step": 721
},
{
"epoch": 0.3214245965498052,
"grad_norm": 2.3415017127990723,
"learning_rate": 3.747573436642951e-05,
"loss": 2.0357,
"step": 722
},
{
"epoch": 0.3218697829716194,
"grad_norm": 1.972862958908081,
"learning_rate": 3.7230863870929964e-05,
"loss": 1.322,
"step": 723
},
{
"epoch": 0.3223149693934335,
"grad_norm": 3.0771045684814453,
"learning_rate": 3.698661288146311e-05,
"loss": 2.3352,
"step": 724
},
{
"epoch": 0.32276015581524764,
"grad_norm": 4.927875518798828,
"learning_rate": 3.674298380868756e-05,
"loss": 1.1276,
"step": 725
},
{
"epoch": 0.32320534223706177,
"grad_norm": 2.021491527557373,
"learning_rate": 3.649997905712396e-05,
"loss": 1.5137,
"step": 726
},
{
"epoch": 0.3236505286588759,
"grad_norm": 1.8273261785507202,
"learning_rate": 3.6257601025131026e-05,
"loss": 1.8926,
"step": 727
},
{
"epoch": 0.32409571508069,
"grad_norm": 1.9073346853256226,
"learning_rate": 3.601585210488218e-05,
"loss": 1.6412,
"step": 728
},
{
"epoch": 0.3245409015025042,
"grad_norm": 3.6371872425079346,
"learning_rate": 3.577473468234156e-05,
"loss": 2.7159,
"step": 729
},
{
"epoch": 0.3249860879243183,
"grad_norm": 2.4259378910064697,
"learning_rate": 3.553425113724088e-05,
"loss": 1.8411,
"step": 730
},
{
"epoch": 0.32543127434613245,
"grad_norm": 3.0658254623413086,
"learning_rate": 3.52944038430556e-05,
"loss": 1.5821,
"step": 731
},
{
"epoch": 0.3258764607679466,
"grad_norm": 2.293248414993286,
"learning_rate": 3.5055195166981645e-05,
"loss": 2.3265,
"step": 732
},
{
"epoch": 0.3263216471897607,
"grad_norm": 5.806252479553223,
"learning_rate": 3.481662746991214e-05,
"loss": 3.428,
"step": 733
},
{
"epoch": 0.3267668336115748,
"grad_norm": 2.4732987880706787,
"learning_rate": 3.4578703106413904e-05,
"loss": 1.3308,
"step": 734
},
{
"epoch": 0.327212020033389,
"grad_norm": 1.5391736030578613,
"learning_rate": 3.4341424424704375e-05,
"loss": 1.2824,
"step": 735
},
{
"epoch": 0.32765720645520313,
"grad_norm": 2.5730936527252197,
"learning_rate": 3.4104793766628304e-05,
"loss": 1.4633,
"step": 736
},
{
"epoch": 0.32810239287701726,
"grad_norm": 1.4847195148468018,
"learning_rate": 3.386881346763483e-05,
"loss": 1.0787,
"step": 737
},
{
"epoch": 0.3285475792988314,
"grad_norm": 5.096831321716309,
"learning_rate": 3.363348585675414e-05,
"loss": 2.0878,
"step": 738
},
{
"epoch": 0.3289927657206455,
"grad_norm": 2.9128997325897217,
"learning_rate": 3.339881325657484e-05,
"loss": 1.7954,
"step": 739
},
{
"epoch": 0.32943795214245963,
"grad_norm": 3.4480271339416504,
"learning_rate": 3.316479798322072e-05,
"loss": 1.6608,
"step": 740
},
{
"epoch": 0.3298831385642738,
"grad_norm": 1.7849931716918945,
"learning_rate": 3.2931442346328004e-05,
"loss": 1.8315,
"step": 741
},
{
"epoch": 0.33032832498608794,
"grad_norm": 5.605867862701416,
"learning_rate": 3.269874864902269e-05,
"loss": 2.7352,
"step": 742
},
{
"epoch": 0.33077351140790207,
"grad_norm": 2.2710115909576416,
"learning_rate": 3.246671918789755e-05,
"loss": 1.4972,
"step": 743
},
{
"epoch": 0.3312186978297162,
"grad_norm": 40.97933578491211,
"learning_rate": 3.223535625298979e-05,
"loss": 2.0871,
"step": 744
},
{
"epoch": 0.3316638842515303,
"grad_norm": 2.290867567062378,
"learning_rate": 3.200466212775808e-05,
"loss": 1.7768,
"step": 745
},
{
"epoch": 0.33210907067334444,
"grad_norm": 2.331289291381836,
"learning_rate": 3.1774639089060363e-05,
"loss": 1.7775,
"step": 746
},
{
"epoch": 0.3325542570951586,
"grad_norm": 2.2418811321258545,
"learning_rate": 3.154528940713113e-05,
"loss": 1.7201,
"step": 747
},
{
"epoch": 0.33299944351697275,
"grad_norm": 2.4780850410461426,
"learning_rate": 3.1316615345559185e-05,
"loss": 1.2194,
"step": 748
},
{
"epoch": 0.3334446299387869,
"grad_norm": 2.2363805770874023,
"learning_rate": 3.108861916126518e-05,
"loss": 2.0451,
"step": 749
},
{
"epoch": 0.333889816360601,
"grad_norm": 3.651272773742676,
"learning_rate": 3.086130310447937e-05,
"loss": 1.4697,
"step": 750
},
{
"epoch": 0.3343350027824151,
"grad_norm": 2.626664161682129,
"learning_rate": 3.063466941871952e-05,
"loss": 2.0324,
"step": 751
},
{
"epoch": 0.33478018920422925,
"grad_norm": 2.636204957962036,
"learning_rate": 3.0408720340768572e-05,
"loss": 1.7141,
"step": 752
},
{
"epoch": 0.33522537562604343,
"grad_norm": 3.1656999588012695,
"learning_rate": 3.018345810065275e-05,
"loss": 2.3117,
"step": 753
},
{
"epoch": 0.33567056204785756,
"grad_norm": 4.307328224182129,
"learning_rate": 2.9958884921619367e-05,
"loss": 2.2475,
"step": 754
},
{
"epoch": 0.3361157484696717,
"grad_norm": 2.5681815147399902,
"learning_rate": 2.9735003020115092e-05,
"loss": 2.0617,
"step": 755
},
{
"epoch": 0.3365609348914858,
"grad_norm": 4.749504566192627,
"learning_rate": 2.9511814605763855e-05,
"loss": 2.7351,
"step": 756
},
{
"epoch": 0.33700612131329993,
"grad_norm": 3.8484888076782227,
"learning_rate": 2.9289321881345254e-05,
"loss": 1.8341,
"step": 757
},
{
"epoch": 0.33745130773511406,
"grad_norm": 2.8804805278778076,
"learning_rate": 2.9067527042772636e-05,
"loss": 1.3797,
"step": 758
},
{
"epoch": 0.33789649415692824,
"grad_norm": 2.2779343128204346,
"learning_rate": 2.8846432279071467e-05,
"loss": 1.9275,
"step": 759
},
{
"epoch": 0.33834168057874237,
"grad_norm": 2.432508707046509,
"learning_rate": 2.8626039772357882e-05,
"loss": 1.8774,
"step": 760
},
{
"epoch": 0.3387868670005565,
"grad_norm": 4.494417190551758,
"learning_rate": 2.840635169781688e-05,
"loss": 2.0044,
"step": 761
},
{
"epoch": 0.3392320534223706,
"grad_norm": 9.79641056060791,
"learning_rate": 2.8187370223681132e-05,
"loss": 1.7594,
"step": 762
},
{
"epoch": 0.33967723984418474,
"grad_norm": 4.837268829345703,
"learning_rate": 2.7969097511209308e-05,
"loss": 2.1129,
"step": 763
},
{
"epoch": 0.34012242626599887,
"grad_norm": 3.084362268447876,
"learning_rate": 2.775153571466502e-05,
"loss": 1.6698,
"step": 764
},
{
"epoch": 0.34056761268781305,
"grad_norm": 1.84263014793396,
"learning_rate": 2.753468698129533e-05,
"loss": 1.3117,
"step": 765
},
{
"epoch": 0.3410127991096272,
"grad_norm": 2.350165843963623,
"learning_rate": 2.7318553451309726e-05,
"loss": 1.8815,
"step": 766
},
{
"epoch": 0.3414579855314413,
"grad_norm": 3.2090234756469727,
"learning_rate": 2.7103137257858868e-05,
"loss": 2.5512,
"step": 767
},
{
"epoch": 0.3419031719532554,
"grad_norm": 1.9890962839126587,
"learning_rate": 2.688844052701359e-05,
"loss": 1.3836,
"step": 768
},
{
"epoch": 0.34234835837506955,
"grad_norm": 4.618078708648682,
"learning_rate": 2.6674465377744017e-05,
"loss": 2.4582,
"step": 769
},
{
"epoch": 0.3427935447968837,
"grad_norm": 2.055314779281616,
"learning_rate": 2.646121392189841e-05,
"loss": 1.7066,
"step": 770
},
{
"epoch": 0.34323873121869786,
"grad_norm": 2.973315477371216,
"learning_rate": 2.624868826418262e-05,
"loss": 2.8903,
"step": 771
},
{
"epoch": 0.343683917640512,
"grad_norm": 3.261991500854492,
"learning_rate": 2.603689050213902e-05,
"loss": 2.6735,
"step": 772
},
{
"epoch": 0.3441291040623261,
"grad_norm": 2.076058864593506,
"learning_rate": 2.582582272612609e-05,
"loss": 1.381,
"step": 773
},
{
"epoch": 0.34457429048414023,
"grad_norm": 3.1660914421081543,
"learning_rate": 2.561548701929749e-05,
"loss": 1.6826,
"step": 774
},
{
"epoch": 0.34501947690595436,
"grad_norm": 5.427780628204346,
"learning_rate": 2.540588545758179e-05,
"loss": 1.5369,
"step": 775
},
{
"epoch": 0.3454646633277685,
"grad_norm": 2.2618162631988525,
"learning_rate": 2.5197020109661772e-05,
"loss": 1.8258,
"step": 776
},
{
"epoch": 0.34590984974958267,
"grad_norm": 4.402366638183594,
"learning_rate": 2.4988893036954043e-05,
"loss": 2.3216,
"step": 777
},
{
"epoch": 0.3463550361713968,
"grad_norm": 2.900803804397583,
"learning_rate": 2.4781506293588873e-05,
"loss": 2.4731,
"step": 778
},
{
"epoch": 0.3468002225932109,
"grad_norm": 2.6909384727478027,
"learning_rate": 2.4574861926389615e-05,
"loss": 2.3296,
"step": 779
},
{
"epoch": 0.34724540901502504,
"grad_norm": 2.7630159854888916,
"learning_rate": 2.436896197485282e-05,
"loss": 1.7521,
"step": 780
},
{
"epoch": 0.34769059543683917,
"grad_norm": 2.2162787914276123,
"learning_rate": 2.4163808471127812e-05,
"loss": 1.3404,
"step": 781
},
{
"epoch": 0.3481357818586533,
"grad_norm": 7.932774543762207,
"learning_rate": 2.3959403439996907e-05,
"loss": 2.2814,
"step": 782
},
{
"epoch": 0.3485809682804674,
"grad_norm": 2.8942651748657227,
"learning_rate": 2.37557488988552e-05,
"loss": 2.1944,
"step": 783
},
{
"epoch": 0.3490261547022816,
"grad_norm": 3.0805366039276123,
"learning_rate": 2.3552846857690846e-05,
"loss": 1.9972,
"step": 784
},
{
"epoch": 0.3494713411240957,
"grad_norm": 2.1117780208587646,
"learning_rate": 2.3350699319065026e-05,
"loss": 1.7653,
"step": 785
},
{
"epoch": 0.34991652754590985,
"grad_norm": 2.495792865753174,
"learning_rate": 2.3149308278092342e-05,
"loss": 1.7668,
"step": 786
},
{
"epoch": 0.350361713967724,
"grad_norm": 3.875079870223999,
"learning_rate": 2.2948675722421086e-05,
"loss": 2.92,
"step": 787
},
{
"epoch": 0.3508069003895381,
"grad_norm": 1.6618883609771729,
"learning_rate": 2.2748803632213557e-05,
"loss": 1.3489,
"step": 788
},
{
"epoch": 0.3512520868113522,
"grad_norm": 2.81689190864563,
"learning_rate": 2.254969398012663e-05,
"loss": 2.3773,
"step": 789
},
{
"epoch": 0.3516972732331664,
"grad_norm": 2.7152371406555176,
"learning_rate": 2.235134873129213e-05,
"loss": 1.8859,
"step": 790
},
{
"epoch": 0.35214245965498053,
"grad_norm": 2.9427664279937744,
"learning_rate": 2.2153769843297667e-05,
"loss": 1.7977,
"step": 791
},
{
"epoch": 0.35258764607679466,
"grad_norm": 2.521787643432617,
"learning_rate": 2.195695926616702e-05,
"loss": 1.0702,
"step": 792
},
{
"epoch": 0.3530328324986088,
"grad_norm": 3.2543270587921143,
"learning_rate": 2.1760918942341192e-05,
"loss": 2.4008,
"step": 793
},
{
"epoch": 0.3534780189204229,
"grad_norm": 1.7347383499145508,
"learning_rate": 2.1565650806658975e-05,
"loss": 1.3265,
"step": 794
},
{
"epoch": 0.35392320534223703,
"grad_norm": 3.650092363357544,
"learning_rate": 2.137115678633811e-05,
"loss": 1.9098,
"step": 795
},
{
"epoch": 0.3543683917640512,
"grad_norm": 3.2362852096557617,
"learning_rate": 2.1177438800956007e-05,
"loss": 2.4286,
"step": 796
},
{
"epoch": 0.35481357818586534,
"grad_norm": 2.8814990520477295,
"learning_rate": 2.098449876243096e-05,
"loss": 1.453,
"step": 797
},
{
"epoch": 0.35525876460767947,
"grad_norm": 2.3015761375427246,
"learning_rate": 2.07923385750033e-05,
"loss": 1.7499,
"step": 798
},
{
"epoch": 0.3557039510294936,
"grad_norm": 2.598961353302002,
"learning_rate": 2.0600960135216462e-05,
"loss": 2.2751,
"step": 799
},
{
"epoch": 0.3561491374513077,
"grad_norm": 1.2370599508285522,
"learning_rate": 2.0410365331898416e-05,
"loss": 0.9129,
"step": 800
},
{
"epoch": 0.35659432387312184,
"grad_norm": 3.447509527206421,
"learning_rate": 2.0220556046142893e-05,
"loss": 1.4874,
"step": 801
},
{
"epoch": 0.357039510294936,
"grad_norm": 3.167005777359009,
"learning_rate": 2.0031534151290943e-05,
"loss": 1.5479,
"step": 802
},
{
"epoch": 0.35748469671675015,
"grad_norm": 2.0752930641174316,
"learning_rate": 1.9843301512912327e-05,
"loss": 1.1736,
"step": 803
},
{
"epoch": 0.3579298831385643,
"grad_norm": 1.605804204940796,
"learning_rate": 1.965585998878724e-05,
"loss": 1.336,
"step": 804
},
{
"epoch": 0.3583750695603784,
"grad_norm": 1.9330936670303345,
"learning_rate": 1.946921142888781e-05,
"loss": 1.6619,
"step": 805
},
{
"epoch": 0.3588202559821925,
"grad_norm": 2.9422507286071777,
"learning_rate": 1.928335767535997e-05,
"loss": 2.3493,
"step": 806
},
{
"epoch": 0.35926544240400665,
"grad_norm": 4.052491664886475,
"learning_rate": 1.9098300562505266e-05,
"loss": 1.7118,
"step": 807
},
{
"epoch": 0.35971062882582083,
"grad_norm": 2.3159127235412598,
"learning_rate": 1.891404191676265e-05,
"loss": 1.7483,
"step": 808
},
{
"epoch": 0.36015581524763496,
"grad_norm": 2.81177020072937,
"learning_rate": 1.8730583556690605e-05,
"loss": 2.1032,
"step": 809
},
{
"epoch": 0.3606010016694491,
"grad_norm": 2.669597625732422,
"learning_rate": 1.854792729294905e-05,
"loss": 1.7215,
"step": 810
},
{
"epoch": 0.3610461880912632,
"grad_norm": 1.9452595710754395,
"learning_rate": 1.8366074928281607e-05,
"loss": 1.5899,
"step": 811
},
{
"epoch": 0.36149137451307733,
"grad_norm": 3.1938717365264893,
"learning_rate": 1.818502825749764e-05,
"loss": 2.4713,
"step": 812
},
{
"epoch": 0.36193656093489146,
"grad_norm": 2.9424474239349365,
"learning_rate": 1.8004789067454764e-05,
"loss": 2.0817,
"step": 813
},
{
"epoch": 0.36238174735670564,
"grad_norm": 1.8135279417037964,
"learning_rate": 1.7825359137040988e-05,
"loss": 1.2497,
"step": 814
},
{
"epoch": 0.36282693377851977,
"grad_norm": 2.171995162963867,
"learning_rate": 1.7646740237157256e-05,
"loss": 1.2897,
"step": 815
},
{
"epoch": 0.3632721202003339,
"grad_norm": 3.4379913806915283,
"learning_rate": 1.7468934130700044e-05,
"loss": 2.2097,
"step": 816
},
{
"epoch": 0.363717306622148,
"grad_norm": 6.185148239135742,
"learning_rate": 1.7291942572543807e-05,
"loss": 2.1192,
"step": 817
},
{
"epoch": 0.36416249304396214,
"grad_norm": 2.1236114501953125,
"learning_rate": 1.7115767309523812e-05,
"loss": 1.6035,
"step": 818
},
{
"epoch": 0.36460767946577627,
"grad_norm": 3.2910990715026855,
"learning_rate": 1.6940410080418723e-05,
"loss": 2.4617,
"step": 819
},
{
"epoch": 0.36505286588759045,
"grad_norm": 2.3775298595428467,
"learning_rate": 1.6765872615933677e-05,
"loss": 1.9845,
"step": 820
},
{
"epoch": 0.3654980523094046,
"grad_norm": 1.7690722942352295,
"learning_rate": 1.6592156638682886e-05,
"loss": 1.5974,
"step": 821
},
{
"epoch": 0.3659432387312187,
"grad_norm": 2.4220054149627686,
"learning_rate": 1.6419263863172997e-05,
"loss": 1.6484,
"step": 822
},
{
"epoch": 0.3663884251530328,
"grad_norm": 2.0820889472961426,
"learning_rate": 1.6247195995785837e-05,
"loss": 1.675,
"step": 823
},
{
"epoch": 0.36683361157484695,
"grad_norm": 2.522348642349243,
"learning_rate": 1.6075954734761845e-05,
"loss": 2.4212,
"step": 824
},
{
"epoch": 0.3672787979966611,
"grad_norm": 1.4906548261642456,
"learning_rate": 1.5905541770183096e-05,
"loss": 1.0927,
"step": 825
},
{
"epoch": 0.36772398441847526,
"grad_norm": 2.353506565093994,
"learning_rate": 1.5735958783956794e-05,
"loss": 1.6243,
"step": 826
},
{
"epoch": 0.3681691708402894,
"grad_norm": 2.1150946617126465,
"learning_rate": 1.5567207449798515e-05,
"loss": 1.8702,
"step": 827
},
{
"epoch": 0.3686143572621035,
"grad_norm": 3.695554256439209,
"learning_rate": 1.539928943321579e-05,
"loss": 2.8629,
"step": 828
},
{
"epoch": 0.36905954368391763,
"grad_norm": 2.1603634357452393,
"learning_rate": 1.5232206391491699e-05,
"loss": 1.5569,
"step": 829
},
{
"epoch": 0.36950473010573176,
"grad_norm": 1.2208006381988525,
"learning_rate": 1.5065959973668353e-05,
"loss": 1.1422,
"step": 830
},
{
"epoch": 0.3699499165275459,
"grad_norm": 4.04674768447876,
"learning_rate": 1.4900551820530828e-05,
"loss": 2.4647,
"step": 831
},
{
"epoch": 0.37039510294936007,
"grad_norm": 2.1937062740325928,
"learning_rate": 1.4735983564590783e-05,
"loss": 1.5351,
"step": 832
},
{
"epoch": 0.3708402893711742,
"grad_norm": 1.7129528522491455,
"learning_rate": 1.4572256830070497e-05,
"loss": 1.1443,
"step": 833
},
{
"epoch": 0.3712854757929883,
"grad_norm": 3.113435745239258,
"learning_rate": 1.4409373232886702e-05,
"loss": 2.2822,
"step": 834
},
{
"epoch": 0.37173066221480244,
"grad_norm": 3.254114866256714,
"learning_rate": 1.4247334380634792e-05,
"loss": 1.9679,
"step": 835
},
{
"epoch": 0.37217584863661657,
"grad_norm": 2.4401051998138428,
"learning_rate": 1.4086141872572789e-05,
"loss": 1.435,
"step": 836
},
{
"epoch": 0.3726210350584307,
"grad_norm": 3.798807382583618,
"learning_rate": 1.3925797299605647e-05,
"loss": 2.3262,
"step": 837
},
{
"epoch": 0.3730662214802449,
"grad_norm": 1.5593875646591187,
"learning_rate": 1.3766302244269624e-05,
"loss": 1.0388,
"step": 838
},
{
"epoch": 0.373511407902059,
"grad_norm": 2.430449962615967,
"learning_rate": 1.3607658280716473e-05,
"loss": 1.7574,
"step": 839
},
{
"epoch": 0.3739565943238731,
"grad_norm": 4.148708343505859,
"learning_rate": 1.3449866974698122e-05,
"loss": 2.53,
"step": 840
},
{
"epoch": 0.37440178074568725,
"grad_norm": 2.0616252422332764,
"learning_rate": 1.3292929883550998e-05,
"loss": 1.8342,
"step": 841
},
{
"epoch": 0.3748469671675014,
"grad_norm": 1.8223658800125122,
"learning_rate": 1.3136848556180892e-05,
"loss": 1.6677,
"step": 842
},
{
"epoch": 0.3752921535893155,
"grad_norm": 3.7050740718841553,
"learning_rate": 1.2981624533047432e-05,
"loss": 2.0134,
"step": 843
},
{
"epoch": 0.3757373400111297,
"grad_norm": 3.2520573139190674,
"learning_rate": 1.2827259346149122e-05,
"loss": 2.7152,
"step": 844
},
{
"epoch": 0.3761825264329438,
"grad_norm": 3.439446449279785,
"learning_rate": 1.2673754519008008e-05,
"loss": 2.5235,
"step": 845
},
{
"epoch": 0.37662771285475793,
"grad_norm": 3.737055778503418,
"learning_rate": 1.2521111566654731e-05,
"loss": 1.692,
"step": 846
},
{
"epoch": 0.37707289927657206,
"grad_norm": 5.928926944732666,
"learning_rate": 1.2369331995613665e-05,
"loss": 1.8147,
"step": 847
},
{
"epoch": 0.3775180856983862,
"grad_norm": 1.5324987173080444,
"learning_rate": 1.2218417303887842e-05,
"loss": 1.4275,
"step": 848
},
{
"epoch": 0.3779632721202003,
"grad_norm": 3.3122949600219727,
"learning_rate": 1.206836898094439e-05,
"loss": 2.386,
"step": 849
},
{
"epoch": 0.3784084585420145,
"grad_norm": 1.8216137886047363,
"learning_rate": 1.191918850769964e-05,
"loss": 1.0651,
"step": 850
},
{
"epoch": 0.3788536449638286,
"grad_norm": 2.842923641204834,
"learning_rate": 1.1770877356504683e-05,
"loss": 1.9827,
"step": 851
},
{
"epoch": 0.37929883138564274,
"grad_norm": 4.741792678833008,
"learning_rate": 1.1623436991130654e-05,
"loss": 1.9713,
"step": 852
},
{
"epoch": 0.37974401780745687,
"grad_norm": 2.5982000827789307,
"learning_rate": 1.1476868866754486e-05,
"loss": 1.3794,
"step": 853
},
{
"epoch": 0.380189204229271,
"grad_norm": 2.446061611175537,
"learning_rate": 1.1331174429944347e-05,
"loss": 1.6679,
"step": 854
},
{
"epoch": 0.3806343906510851,
"grad_norm": 3.426534414291382,
"learning_rate": 1.1186355118645554e-05,
"loss": 2.7746,
"step": 855
},
{
"epoch": 0.3810795770728993,
"grad_norm": 3.0752968788146973,
"learning_rate": 1.1042412362166222e-05,
"loss": 1.8427,
"step": 856
},
{
"epoch": 0.3815247634947134,
"grad_norm": 7.70589542388916,
"learning_rate": 1.0899347581163221e-05,
"loss": 2.7767,
"step": 857
},
{
"epoch": 0.38196994991652755,
"grad_norm": 2.923967123031616,
"learning_rate": 1.0757162187628222e-05,
"loss": 2.0666,
"step": 858
},
{
"epoch": 0.3824151363383417,
"grad_norm": 4.278809547424316,
"learning_rate": 1.0615857584873623e-05,
"loss": 2.1183,
"step": 859
},
{
"epoch": 0.3828603227601558,
"grad_norm": 2.635345935821533,
"learning_rate": 1.0475435167518843e-05,
"loss": 1.6819,
"step": 860
},
{
"epoch": 0.38330550918196993,
"grad_norm": 2.319185733795166,
"learning_rate": 1.0335896321476413e-05,
"loss": 1.7721,
"step": 861
},
{
"epoch": 0.3837506956037841,
"grad_norm": 3.1770641803741455,
"learning_rate": 1.0197242423938446e-05,
"loss": 3.0742,
"step": 862
},
{
"epoch": 0.38419588202559823,
"grad_norm": 3.24739933013916,
"learning_rate": 1.0059474843362892e-05,
"loss": 2.3675,
"step": 863
},
{
"epoch": 0.38464106844741236,
"grad_norm": 2.699204683303833,
"learning_rate": 9.922594939460194e-06,
"loss": 1.9462,
"step": 864
},
{
"epoch": 0.3850862548692265,
"grad_norm": 2.9695088863372803,
"learning_rate": 9.786604063179728e-06,
"loss": 2.204,
"step": 865
},
{
"epoch": 0.3855314412910406,
"grad_norm": 4.052135467529297,
"learning_rate": 9.651503556696516e-06,
"loss": 2.354,
"step": 866
},
{
"epoch": 0.38597662771285474,
"grad_norm": 2.3410732746124268,
"learning_rate": 9.517294753398064e-06,
"loss": 1.3547,
"step": 867
},
{
"epoch": 0.3864218141346689,
"grad_norm": 2.827927827835083,
"learning_rate": 9.383978977871021e-06,
"loss": 2.5106,
"step": 868
},
{
"epoch": 0.38686700055648304,
"grad_norm": 2.7326576709747314,
"learning_rate": 9.251557545888312e-06,
"loss": 4.0232,
"step": 869
},
{
"epoch": 0.38731218697829717,
"grad_norm": 2.105860710144043,
"learning_rate": 9.120031764395987e-06,
"loss": 1.4466,
"step": 870
},
{
"epoch": 0.3877573734001113,
"grad_norm": 2.9740326404571533,
"learning_rate": 8.989402931500434e-06,
"loss": 2.0867,
"step": 871
},
{
"epoch": 0.3882025598219254,
"grad_norm": 8.592368125915527,
"learning_rate": 8.85967233645547e-06,
"loss": 2.7982,
"step": 872
},
{
"epoch": 0.38864774624373954,
"grad_norm": 2.596602201461792,
"learning_rate": 8.730841259649725e-06,
"loss": 2.2047,
"step": 873
},
{
"epoch": 0.3890929326655537,
"grad_norm": 2.2201178073883057,
"learning_rate": 8.602910972593892e-06,
"loss": 1.9367,
"step": 874
},
{
"epoch": 0.38953811908736785,
"grad_norm": 3.1746363639831543,
"learning_rate": 8.475882737908248e-06,
"loss": 2.2672,
"step": 875
},
{
"epoch": 0.389983305509182,
"grad_norm": 3.8068130016326904,
"learning_rate": 8.34975780931021e-06,
"loss": 1.9147,
"step": 876
},
{
"epoch": 0.3904284919309961,
"grad_norm": 2.8877782821655273,
"learning_rate": 8.224537431601886e-06,
"loss": 2.1708,
"step": 877
},
{
"epoch": 0.3908736783528102,
"grad_norm": 2.659292221069336,
"learning_rate": 8.100222840657878e-06,
"loss": 1.7599,
"step": 878
},
{
"epoch": 0.39131886477462435,
"grad_norm": 2.6380879878997803,
"learning_rate": 7.976815263412963e-06,
"loss": 1.6982,
"step": 879
},
{
"epoch": 0.39176405119643853,
"grad_norm": 2.541360378265381,
"learning_rate": 7.854315917850163e-06,
"loss": 2.0257,
"step": 880
},
{
"epoch": 0.39220923761825266,
"grad_norm": 2.434187650680542,
"learning_rate": 7.73272601298851e-06,
"loss": 1.7154,
"step": 881
},
{
"epoch": 0.3926544240400668,
"grad_norm": 1.727720856666565,
"learning_rate": 7.612046748871327e-06,
"loss": 1.1091,
"step": 882
},
{
"epoch": 0.3930996104618809,
"grad_norm": 3.544562578201294,
"learning_rate": 7.492279316554207e-06,
"loss": 2.8993,
"step": 883
},
{
"epoch": 0.39354479688369504,
"grad_norm": 2.7014670372009277,
"learning_rate": 7.3734248980933395e-06,
"loss": 1.7446,
"step": 884
},
{
"epoch": 0.39398998330550916,
"grad_norm": 2.4355177879333496,
"learning_rate": 7.255484666533874e-06,
"loss": 1.7447,
"step": 885
},
{
"epoch": 0.39443516972732334,
"grad_norm": 2.3996999263763428,
"learning_rate": 7.138459785898266e-06,
"loss": 1.7492,
"step": 886
},
{
"epoch": 0.39488035614913747,
"grad_norm": 2.208282232284546,
"learning_rate": 7.022351411174866e-06,
"loss": 1.9822,
"step": 887
},
{
"epoch": 0.3953255425709516,
"grad_norm": 2.583613395690918,
"learning_rate": 6.907160688306425e-06,
"loss": 1.9487,
"step": 888
},
{
"epoch": 0.3957707289927657,
"grad_norm": 2.559744358062744,
"learning_rate": 6.7928887541789055e-06,
"loss": 2.5036,
"step": 889
},
{
"epoch": 0.39621591541457984,
"grad_norm": 2.1296982765197754,
"learning_rate": 6.679536736610137e-06,
"loss": 1.7707,
"step": 890
},
{
"epoch": 0.39666110183639397,
"grad_norm": 2.857872724533081,
"learning_rate": 6.5671057543387985e-06,
"loss": 2.0831,
"step": 891
},
{
"epoch": 0.39710628825820815,
"grad_norm": 2.2674055099487305,
"learning_rate": 6.455596917013273e-06,
"loss": 1.7374,
"step": 892
},
{
"epoch": 0.3975514746800223,
"grad_norm": 1.8583672046661377,
"learning_rate": 6.345011325180772e-06,
"loss": 1.6326,
"step": 893
},
{
"epoch": 0.3979966611018364,
"grad_norm": 2.2417795658111572,
"learning_rate": 6.235350070276447e-06,
"loss": 1.8506,
"step": 894
},
{
"epoch": 0.3984418475236505,
"grad_norm": 2.7897722721099854,
"learning_rate": 6.126614234612593e-06,
"loss": 1.5891,
"step": 895
},
{
"epoch": 0.39888703394546465,
"grad_norm": 1.4530127048492432,
"learning_rate": 6.018804891368035e-06,
"loss": 1.2882,
"step": 896
},
{
"epoch": 0.3993322203672788,
"grad_norm": 4.077309608459473,
"learning_rate": 5.911923104577455e-06,
"loss": 2.4773,
"step": 897
},
{
"epoch": 0.39977740678909296,
"grad_norm": 2.6346118450164795,
"learning_rate": 5.805969929120947e-06,
"loss": 1.9211,
"step": 898
},
{
"epoch": 0.4002225932109071,
"grad_norm": 2.4013400077819824,
"learning_rate": 5.700946410713548e-06,
"loss": 2.2058,
"step": 899
},
{
"epoch": 0.4006677796327212,
"grad_norm": 2.5028560161590576,
"learning_rate": 5.5968535858950345e-06,
"loss": 2.1112,
"step": 900
},
{
"epoch": 0.40111296605453534,
"grad_norm": 2.262838363647461,
"learning_rate": 5.49369248201953e-06,
"loss": 1.792,
"step": 901
},
{
"epoch": 0.40155815247634946,
"grad_norm": 3.0734193325042725,
"learning_rate": 5.39146411724547e-06,
"loss": 1.8429,
"step": 902
},
{
"epoch": 0.4020033388981636,
"grad_norm": 2.408612012863159,
"learning_rate": 5.290169500525577e-06,
"loss": 2.1481,
"step": 903
},
{
"epoch": 0.40244852531997777,
"grad_norm": 2.9177372455596924,
"learning_rate": 5.189809631596798e-06,
"loss": 1.653,
"step": 904
},
{
"epoch": 0.4028937117417919,
"grad_norm": 3.3481597900390625,
"learning_rate": 5.0903855009705514e-06,
"loss": 2.0301,
"step": 905
},
{
"epoch": 0.403338898163606,
"grad_norm": 3.205288887023926,
"learning_rate": 4.991898089922819e-06,
"loss": 1.9313,
"step": 906
},
{
"epoch": 0.40378408458542014,
"grad_norm": 4.120809078216553,
"learning_rate": 4.8943483704846475e-06,
"loss": 2.4249,
"step": 907
},
{
"epoch": 0.40422927100723427,
"grad_norm": 2.6746208667755127,
"learning_rate": 4.797737305432337e-06,
"loss": 2.1356,
"step": 908
},
{
"epoch": 0.4046744574290484,
"grad_norm": 2.167707920074463,
"learning_rate": 4.702065848278126e-06,
"loss": 1.4489,
"step": 909
},
{
"epoch": 0.4051196438508626,
"grad_norm": 3.323460578918457,
"learning_rate": 4.607334943260655e-06,
"loss": 2.0664,
"step": 910
},
{
"epoch": 0.4055648302726767,
"grad_norm": 2.3525609970092773,
"learning_rate": 4.513545525335705e-06,
"loss": 1.7016,
"step": 911
},
{
"epoch": 0.4060100166944908,
"grad_norm": 2.3468728065490723,
"learning_rate": 4.420698520166988e-06,
"loss": 1.9747,
"step": 912
},
{
"epoch": 0.40645520311630495,
"grad_norm": 3.5238194465637207,
"learning_rate": 4.328794844116946e-06,
"loss": 2.0,
"step": 913
},
{
"epoch": 0.4069003895381191,
"grad_norm": 2.4445719718933105,
"learning_rate": 4.237835404237778e-06,
"loss": 1.7141,
"step": 914
},
{
"epoch": 0.4073455759599332,
"grad_norm": 6.485393047332764,
"learning_rate": 4.147821098262405e-06,
"loss": 1.7179,
"step": 915
},
{
"epoch": 0.40779076238174733,
"grad_norm": 3.072964906692505,
"learning_rate": 4.0587528145957235e-06,
"loss": 1.9091,
"step": 916
},
{
"epoch": 0.4082359488035615,
"grad_norm": 3.583833932876587,
"learning_rate": 3.970631432305694e-06,
"loss": 2.2719,
"step": 917
},
{
"epoch": 0.40868113522537564,
"grad_norm": 2.661090850830078,
"learning_rate": 3.883457821114811e-06,
"loss": 1.6855,
"step": 918
},
{
"epoch": 0.40912632164718976,
"grad_norm": 2.690599203109741,
"learning_rate": 3.797232841391407e-06,
"loss": 1.5438,
"step": 919
},
{
"epoch": 0.4095715080690039,
"grad_norm": 2.358945846557617,
"learning_rate": 3.711957344141237e-06,
"loss": 1.8729,
"step": 920
},
{
"epoch": 0.410016694490818,
"grad_norm": 2.3557281494140625,
"learning_rate": 3.627632170999029e-06,
"loss": 1.6729,
"step": 921
},
{
"epoch": 0.41046188091263214,
"grad_norm": 3.3151612281799316,
"learning_rate": 3.5442581542201923e-06,
"loss": 2.3729,
"step": 922
},
{
"epoch": 0.4109070673344463,
"grad_norm": 2.004413604736328,
"learning_rate": 3.461836116672612e-06,
"loss": 1.3912,
"step": 923
},
{
"epoch": 0.41135225375626044,
"grad_norm": 1.6248087882995605,
"learning_rate": 3.380366871828522e-06,
"loss": 1.0217,
"step": 924
},
{
"epoch": 0.41179744017807457,
"grad_norm": 3.879448652267456,
"learning_rate": 3.2998512237565005e-06,
"loss": 2.5093,
"step": 925
},
{
"epoch": 0.4122426265998887,
"grad_norm": 2.8478055000305176,
"learning_rate": 3.2202899671134546e-06,
"loss": 2.1615,
"step": 926
},
{
"epoch": 0.4126878130217028,
"grad_norm": 3.20760178565979,
"learning_rate": 3.1416838871368924e-06,
"loss": 1.8088,
"step": 927
},
{
"epoch": 0.41313299944351695,
"grad_norm": 1.640320897102356,
"learning_rate": 3.064033759637064e-06,
"loss": 1.7017,
"step": 928
},
{
"epoch": 0.4135781858653311,
"grad_norm": 3.4903106689453125,
"learning_rate": 2.9873403509894203e-06,
"loss": 1.799,
"step": 929
},
{
"epoch": 0.41402337228714525,
"grad_norm": 2.6346209049224854,
"learning_rate": 2.9116044181269007e-06,
"loss": 1.8041,
"step": 930
},
{
"epoch": 0.4144685587089594,
"grad_norm": 2.9832823276519775,
"learning_rate": 2.836826708532603e-06,
"loss": 1.5375,
"step": 931
},
{
"epoch": 0.4149137451307735,
"grad_norm": 1.6513136625289917,
"learning_rate": 2.7630079602323442e-06,
"loss": 0.5627,
"step": 932
},
{
"epoch": 0.41535893155258763,
"grad_norm": 5.0608015060424805,
"learning_rate": 2.690148901787337e-06,
"loss": 1.5045,
"step": 933
},
{
"epoch": 0.41580411797440175,
"grad_norm": 2.82723331451416,
"learning_rate": 2.618250252287113e-06,
"loss": 2.0713,
"step": 934
},
{
"epoch": 0.41624930439621594,
"grad_norm": 1.8556444644927979,
"learning_rate": 2.5473127213422763e-06,
"loss": 1.8779,
"step": 935
},
{
"epoch": 0.41669449081803006,
"grad_norm": 3.8842921257019043,
"learning_rate": 2.4773370090776626e-06,
"loss": 1.9951,
"step": 936
},
{
"epoch": 0.4171396772398442,
"grad_norm": 2.5429670810699463,
"learning_rate": 2.4083238061252567e-06,
"loss": 1.9011,
"step": 937
},
{
"epoch": 0.4175848636616583,
"grad_norm": 2.3649942874908447,
"learning_rate": 2.3402737936175425e-06,
"loss": 2.4083,
"step": 938
},
{
"epoch": 0.41803005008347244,
"grad_norm": 2.2096188068389893,
"learning_rate": 2.273187643180652e-06,
"loss": 1.9797,
"step": 939
},
{
"epoch": 0.41847523650528656,
"grad_norm": 2.303684949874878,
"learning_rate": 2.2070660169278166e-06,
"loss": 2.1345,
"step": 940
},
{
"epoch": 0.41892042292710074,
"grad_norm": 2.5213372707366943,
"learning_rate": 2.141909567452793e-06,
"loss": 1.7136,
"step": 941
},
{
"epoch": 0.41936560934891487,
"grad_norm": 2.287384033203125,
"learning_rate": 2.0777189378234143e-06,
"loss": 2.0467,
"step": 942
},
{
"epoch": 0.419810795770729,
"grad_norm": 2.4261367321014404,
"learning_rate": 2.014494761575314e-06,
"loss": 1.3849,
"step": 943
},
{
"epoch": 0.4202559821925431,
"grad_norm": 2.379718065261841,
"learning_rate": 1.9522376627055583e-06,
"loss": 1.5418,
"step": 944
},
{
"epoch": 0.42070116861435725,
"grad_norm": 2.5570926666259766,
"learning_rate": 1.8909482556666024e-06,
"loss": 1.6895,
"step": 945
},
{
"epoch": 0.42114635503617137,
"grad_norm": 3.3661398887634277,
"learning_rate": 1.8306271453601199e-06,
"loss": 2.2852,
"step": 946
},
{
"epoch": 0.42159154145798555,
"grad_norm": 2.8509480953216553,
"learning_rate": 1.771274927131139e-06,
"loss": 2.4994,
"step": 947
},
{
"epoch": 0.4220367278797997,
"grad_norm": 2.0693445205688477,
"learning_rate": 1.712892186762083e-06,
"loss": 1.7586,
"step": 948
},
{
"epoch": 0.4224819143016138,
"grad_norm": 3.6371309757232666,
"learning_rate": 1.6554795004670388e-06,
"loss": 2.0685,
"step": 949
},
{
"epoch": 0.42292710072342793,
"grad_norm": 2.525317907333374,
"learning_rate": 1.5990374348860305e-06,
"loss": 1.6713,
"step": 950
},
{
"epoch": 0.42337228714524205,
"grad_norm": 2.3136887550354004,
"learning_rate": 1.543566547079467e-06,
"loss": 2.0899,
"step": 951
},
{
"epoch": 0.4238174735670562,
"grad_norm": 2.1782240867614746,
"learning_rate": 1.4890673845226133e-06,
"loss": 1.8084,
"step": 952
},
{
"epoch": 0.42426265998887036,
"grad_norm": 1.9394268989562988,
"learning_rate": 1.4355404851001952e-06,
"loss": 1.3746,
"step": 953
},
{
"epoch": 0.4247078464106845,
"grad_norm": 2.159578323364258,
"learning_rate": 1.3829863771011253e-06,
"loss": 1.9826,
"step": 954
},
{
"epoch": 0.4251530328324986,
"grad_norm": 6.865352153778076,
"learning_rate": 1.3314055792131964e-06,
"loss": 2.2541,
"step": 955
},
{
"epoch": 0.42559821925431274,
"grad_norm": 3.5491487979888916,
"learning_rate": 1.280798600518085e-06,
"loss": 2.3958,
"step": 956
},
{
"epoch": 0.42604340567612686,
"grad_norm": 1.782197117805481,
"learning_rate": 1.231165940486234e-06,
"loss": 2.0068,
"step": 957
},
{
"epoch": 0.426488592097941,
"grad_norm": 2.2239012718200684,
"learning_rate": 1.1825080889719563e-06,
"loss": 1.695,
"step": 958
},
{
"epoch": 0.42693377851975517,
"grad_norm": 2.1969902515411377,
"learning_rate": 1.134825526208605e-06,
"loss": 1.5873,
"step": 959
},
{
"epoch": 0.4273789649415693,
"grad_norm": 1.4430875778198242,
"learning_rate": 1.0881187228038215e-06,
"loss": 0.9703,
"step": 960
},
{
"epoch": 0.4278241513633834,
"grad_norm": 2.2270007133483887,
"learning_rate": 1.0423881397349068e-06,
"loss": 1.8217,
"step": 961
},
{
"epoch": 0.42826933778519755,
"grad_norm": 2.846492290496826,
"learning_rate": 9.976342283442463e-07,
"loss": 2.0558,
"step": 962
},
{
"epoch": 0.42871452420701167,
"grad_norm": 3.4389944076538086,
"learning_rate": 9.538574303348813e-07,
"loss": 2.1505,
"step": 963
},
{
"epoch": 0.4291597106288258,
"grad_norm": 2.7002594470977783,
"learning_rate": 9.110581777661331e-07,
"loss": 1.4517,
"step": 964
},
{
"epoch": 0.42960489705064,
"grad_norm": 3.454148292541504,
"learning_rate": 8.692368930493521e-07,
"loss": 2.8175,
"step": 965
},
{
"epoch": 0.4300500834724541,
"grad_norm": 3.0087950229644775,
"learning_rate": 8.283939889437209e-07,
"loss": 2.0103,
"step": 966
},
{
"epoch": 0.43049526989426823,
"grad_norm": 2.8295722007751465,
"learning_rate": 7.885298685522235e-07,
"loss": 1.6394,
"step": 967
},
{
"epoch": 0.43094045631608235,
"grad_norm": 3.0573501586914062,
"learning_rate": 7.496449253176274e-07,
"loss": 3.1388,
"step": 968
},
{
"epoch": 0.4313856427378965,
"grad_norm": 2.893523931503296,
"learning_rate": 7.117395430186414e-07,
"loss": 1.8017,
"step": 969
},
{
"epoch": 0.4318308291597106,
"grad_norm": 4.128028869628906,
"learning_rate": 6.748140957660631e-07,
"loss": 1.7412,
"step": 970
},
{
"epoch": 0.4322760155815248,
"grad_norm": 1.7751336097717285,
"learning_rate": 6.388689479991605e-07,
"loss": 0.8345,
"step": 971
},
{
"epoch": 0.4327212020033389,
"grad_norm": 3.487071990966797,
"learning_rate": 6.039044544820404e-07,
"loss": 1.2831,
"step": 972
},
{
"epoch": 0.43316638842515304,
"grad_norm": 4.514010906219482,
"learning_rate": 5.699209603001076e-07,
"loss": 1.6334,
"step": 973
},
{
"epoch": 0.43361157484696716,
"grad_norm": 4.002710819244385,
"learning_rate": 5.369188008567672e-07,
"loss": 2.7366,
"step": 974
},
{
"epoch": 0.4340567612687813,
"grad_norm": 2.2446699142456055,
"learning_rate": 5.048983018699827e-07,
"loss": 1.6826,
"step": 975
},
{
"epoch": 0.4345019476905954,
"grad_norm": 2.6733949184417725,
"learning_rate": 4.738597793691679e-07,
"loss": 2.561,
"step": 976
},
{
"epoch": 0.4349471341124096,
"grad_norm": 1.9742475748062134,
"learning_rate": 4.438035396920004e-07,
"loss": 1.632,
"step": 977
},
{
"epoch": 0.4353923205342237,
"grad_norm": 2.39536714553833,
"learning_rate": 4.1472987948143473e-07,
"loss": 1.8493,
"step": 978
},
{
"epoch": 0.43583750695603785,
"grad_norm": 2.0998411178588867,
"learning_rate": 3.866390856827495e-07,
"loss": 1.4321,
"step": 979
},
{
"epoch": 0.43628269337785197,
"grad_norm": 4.382582187652588,
"learning_rate": 3.595314355407609e-07,
"loss": 1.9031,
"step": 980
},
{
"epoch": 0.4367278797996661,
"grad_norm": 2.58941912651062,
"learning_rate": 3.3340719659701313e-07,
"loss": 2.1195,
"step": 981
},
{
"epoch": 0.4371730662214802,
"grad_norm": 2.468071460723877,
"learning_rate": 3.0826662668720364e-07,
"loss": 1.1861,
"step": 982
},
{
"epoch": 0.4376182526432944,
"grad_norm": 2.3609464168548584,
"learning_rate": 2.841099739386066e-07,
"loss": 2.0167,
"step": 983
},
{
"epoch": 0.43806343906510853,
"grad_norm": 2.617172956466675,
"learning_rate": 2.609374767676309e-07,
"loss": 2.0974,
"step": 984
},
{
"epoch": 0.43850862548692265,
"grad_norm": 2.8748812675476074,
"learning_rate": 2.387493638774774e-07,
"loss": 2.28,
"step": 985
},
{
"epoch": 0.4389538119087368,
"grad_norm": 2.907883644104004,
"learning_rate": 2.175458542558517e-07,
"loss": 2.9018,
"step": 986
},
{
"epoch": 0.4393989983305509,
"grad_norm": 2.2215635776519775,
"learning_rate": 1.973271571728441e-07,
"loss": 2.2156,
"step": 987
},
{
"epoch": 0.43984418475236503,
"grad_norm": 4.6469597816467285,
"learning_rate": 1.7809347217881966e-07,
"loss": 2.4033,
"step": 988
},
{
"epoch": 0.4402893711741792,
"grad_norm": 1.8456226587295532,
"learning_rate": 1.598449891024978e-07,
"loss": 1.8562,
"step": 989
},
{
"epoch": 0.44073455759599334,
"grad_norm": 2.2966156005859375,
"learning_rate": 1.425818880490315e-07,
"loss": 1.5724,
"step": 990
},
{
"epoch": 0.44117974401780746,
"grad_norm": 1.6308859586715698,
"learning_rate": 1.2630433939825327e-07,
"loss": 1.4085,
"step": 991
},
{
"epoch": 0.4416249304396216,
"grad_norm": 1.9520773887634277,
"learning_rate": 1.1101250380300965e-07,
"loss": 1.746,
"step": 992
},
{
"epoch": 0.4420701168614357,
"grad_norm": 3.1653501987457275,
"learning_rate": 9.670653218752934e-08,
"loss": 2.2131,
"step": 993
},
{
"epoch": 0.44251530328324984,
"grad_norm": 3.58504319190979,
"learning_rate": 8.33865657459909e-08,
"loss": 1.3358,
"step": 994
},
{
"epoch": 0.442960489705064,
"grad_norm": 2.2316412925720215,
"learning_rate": 7.105273594107953e-08,
"loss": 1.3569,
"step": 995
},
{
"epoch": 0.44340567612687815,
"grad_norm": 1.7121682167053223,
"learning_rate": 5.970516450271025e-08,
"loss": 1.0737,
"step": 996
},
{
"epoch": 0.44385086254869227,
"grad_norm": 3.018528699874878,
"learning_rate": 4.934396342684e-08,
"loss": 2.1253,
"step": 997
},
{
"epoch": 0.4442960489705064,
"grad_norm": 2.544288396835327,
"learning_rate": 3.996923497434635e-08,
"loss": 2.3852,
"step": 998
},
{
"epoch": 0.4447412353923205,
"grad_norm": 2.447824716567993,
"learning_rate": 3.1581071670006015e-08,
"loss": 1.4901,
"step": 999
},
{
"epoch": 0.44518642181413465,
"grad_norm": 2.373067855834961,
"learning_rate": 2.417955630159563e-08,
"loss": 1.9986,
"step": 1000
},
{
"epoch": 0.44518642181413465,
"eval_loss": 1.9380669593811035,
"eval_runtime": 136.1191,
"eval_samples_per_second": 3.475,
"eval_steps_per_second": 3.475,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.173290868736e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}