|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.5008908119114278, |
|
"eval_steps": 500, |
|
"global_step": 492, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0010180707559175363, |
|
"grad_norm": 13.50528621673584, |
|
"learning_rate": 2e-05, |
|
"loss": 5.1969, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0020361415118350726, |
|
"grad_norm": 19.771379470825195, |
|
"learning_rate": 4e-05, |
|
"loss": 7.7524, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.003054212267752609, |
|
"grad_norm": 19.734294891357422, |
|
"learning_rate": 6e-05, |
|
"loss": 8.0409, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004072283023670145, |
|
"grad_norm": 24.712677001953125, |
|
"learning_rate": 8e-05, |
|
"loss": 7.7622, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0050903537795876815, |
|
"grad_norm": 26.32750701904297, |
|
"learning_rate": 0.0001, |
|
"loss": 9.8505, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.006108424535505218, |
|
"grad_norm": 29.443405151367188, |
|
"learning_rate": 9.999974203447433e-05, |
|
"loss": 9.1155, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.007126495291422754, |
|
"grad_norm": 22.42616844177246, |
|
"learning_rate": 9.999896814055916e-05, |
|
"loss": 6.3676, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.00814456604734029, |
|
"grad_norm": 27.119375228881836, |
|
"learning_rate": 9.999767832624001e-05, |
|
"loss": 7.4125, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.009162636803257827, |
|
"grad_norm": 15.422072410583496, |
|
"learning_rate": 9.999587260482597e-05, |
|
"loss": 3.5393, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.010180707559175363, |
|
"grad_norm": 13.826335906982422, |
|
"learning_rate": 9.999355099494962e-05, |
|
"loss": 2.7136, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0111987783150929, |
|
"grad_norm": 14.258417129516602, |
|
"learning_rate": 9.999071352056675e-05, |
|
"loss": 2.6158, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.012216849071010435, |
|
"grad_norm": 14.128898620605469, |
|
"learning_rate": 9.99873602109562e-05, |
|
"loss": 3.0587, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.013234919826927972, |
|
"grad_norm": 12.319880485534668, |
|
"learning_rate": 9.998349110071949e-05, |
|
"loss": 2.6488, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.014252990582845508, |
|
"grad_norm": 15.33985424041748, |
|
"learning_rate": 9.99791062297805e-05, |
|
"loss": 3.1476, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.015271061338763044, |
|
"grad_norm": 13.602853775024414, |
|
"learning_rate": 9.99742056433851e-05, |
|
"loss": 2.796, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01628913209468058, |
|
"grad_norm": 9.083526611328125, |
|
"learning_rate": 9.996878939210049e-05, |
|
"loss": 2.0671, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.017307202850598117, |
|
"grad_norm": 10.980738639831543, |
|
"learning_rate": 9.9962857531815e-05, |
|
"loss": 2.629, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.018325273606515653, |
|
"grad_norm": 11.885966300964355, |
|
"learning_rate": 9.99564101237372e-05, |
|
"loss": 3.1609, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01934334436243319, |
|
"grad_norm": 11.95373249053955, |
|
"learning_rate": 9.994944723439546e-05, |
|
"loss": 3.2291, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.020361415118350726, |
|
"grad_norm": 10.973458290100098, |
|
"learning_rate": 9.994196893563721e-05, |
|
"loss": 2.7778, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.021379485874268262, |
|
"grad_norm": 12.775362014770508, |
|
"learning_rate": 9.993397530462818e-05, |
|
"loss": 3.136, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0223975566301858, |
|
"grad_norm": 10.916693687438965, |
|
"learning_rate": 9.992546642385158e-05, |
|
"loss": 2.3531, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.023415627386103335, |
|
"grad_norm": 12.338353157043457, |
|
"learning_rate": 9.99164423811074e-05, |
|
"loss": 3.6968, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02443369814202087, |
|
"grad_norm": 13.168731689453125, |
|
"learning_rate": 9.990690326951126e-05, |
|
"loss": 3.0163, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.025451768897938407, |
|
"grad_norm": 11.88056755065918, |
|
"learning_rate": 9.989684918749365e-05, |
|
"loss": 3.0201, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.026469839653855944, |
|
"grad_norm": 9.301902770996094, |
|
"learning_rate": 9.988628023879883e-05, |
|
"loss": 2.6577, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02748791040977348, |
|
"grad_norm": 10.02252197265625, |
|
"learning_rate": 9.987519653248378e-05, |
|
"loss": 2.4519, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.028505981165691016, |
|
"grad_norm": 11.7409029006958, |
|
"learning_rate": 9.986359818291706e-05, |
|
"loss": 3.1898, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.029524051921608552, |
|
"grad_norm": 10.492666244506836, |
|
"learning_rate": 9.985148530977767e-05, |
|
"loss": 2.7226, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03054212267752609, |
|
"grad_norm": 12.711854934692383, |
|
"learning_rate": 9.983885803805372e-05, |
|
"loss": 2.7205, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03156019343344362, |
|
"grad_norm": 12.937368392944336, |
|
"learning_rate": 9.982571649804126e-05, |
|
"loss": 3.204, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03257826418936116, |
|
"grad_norm": 13.292618751525879, |
|
"learning_rate": 9.981206082534286e-05, |
|
"loss": 3.4911, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.033596334945278694, |
|
"grad_norm": 9.724308967590332, |
|
"learning_rate": 9.979789116086625e-05, |
|
"loss": 2.8603, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.034614405701196234, |
|
"grad_norm": 8.932650566101074, |
|
"learning_rate": 9.978320765082278e-05, |
|
"loss": 2.465, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03563247645711377, |
|
"grad_norm": 10.732564926147461, |
|
"learning_rate": 9.976801044672608e-05, |
|
"loss": 2.9517, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.036650547213031306, |
|
"grad_norm": 11.971003532409668, |
|
"learning_rate": 9.97522997053903e-05, |
|
"loss": 3.2085, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03766861796894884, |
|
"grad_norm": 10.34869384765625, |
|
"learning_rate": 9.973607558892864e-05, |
|
"loss": 2.9294, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03868668872486638, |
|
"grad_norm": 12.5684814453125, |
|
"learning_rate": 9.97193382647516e-05, |
|
"loss": 3.6198, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03970475948078391, |
|
"grad_norm": 13.42013931274414, |
|
"learning_rate": 9.970208790556532e-05, |
|
"loss": 2.8409, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.04072283023670145, |
|
"grad_norm": 10.357503890991211, |
|
"learning_rate": 9.968432468936967e-05, |
|
"loss": 2.6345, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.041740900992618984, |
|
"grad_norm": 12.668771743774414, |
|
"learning_rate": 9.966604879945659e-05, |
|
"loss": 3.2825, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.042758971748536524, |
|
"grad_norm": 9.444086074829102, |
|
"learning_rate": 9.964726042440802e-05, |
|
"loss": 2.7562, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.04377704250445406, |
|
"grad_norm": 10.448949813842773, |
|
"learning_rate": 9.962795975809411e-05, |
|
"loss": 2.8796, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0447951132603716, |
|
"grad_norm": 10.916976928710938, |
|
"learning_rate": 9.960814699967112e-05, |
|
"loss": 2.7582, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04581318401628913, |
|
"grad_norm": 11.323358535766602, |
|
"learning_rate": 9.958782235357938e-05, |
|
"loss": 2.6436, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04683125477220667, |
|
"grad_norm": 10.28471851348877, |
|
"learning_rate": 9.956698602954124e-05, |
|
"loss": 2.8325, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0478493255281242, |
|
"grad_norm": 13.5079984664917, |
|
"learning_rate": 9.954563824255878e-05, |
|
"loss": 3.0585, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04886739628404174, |
|
"grad_norm": 11.250194549560547, |
|
"learning_rate": 9.952377921291178e-05, |
|
"loss": 2.7686, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.049885467039959275, |
|
"grad_norm": 12.554705619812012, |
|
"learning_rate": 9.950140916615526e-05, |
|
"loss": 3.0617, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.050903537795876815, |
|
"grad_norm": 12.552079200744629, |
|
"learning_rate": 9.947852833311724e-05, |
|
"loss": 2.08, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05192160855179435, |
|
"grad_norm": 11.248369216918945, |
|
"learning_rate": 9.945513694989639e-05, |
|
"loss": 5.133, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.05293967930771189, |
|
"grad_norm": 12.866747856140137, |
|
"learning_rate": 9.943123525785952e-05, |
|
"loss": 5.7232, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.05395775006362942, |
|
"grad_norm": 12.395757675170898, |
|
"learning_rate": 9.940682350363912e-05, |
|
"loss": 4.6422, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.05497582081954696, |
|
"grad_norm": 12.23355770111084, |
|
"learning_rate": 9.938190193913083e-05, |
|
"loss": 4.8131, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05599389157546449, |
|
"grad_norm": 14.62759017944336, |
|
"learning_rate": 9.935647082149086e-05, |
|
"loss": 6.0114, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05701196233138203, |
|
"grad_norm": 13.613059997558594, |
|
"learning_rate": 9.933053041313325e-05, |
|
"loss": 4.794, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.058030033087299565, |
|
"grad_norm": 13.422719955444336, |
|
"learning_rate": 9.930408098172725e-05, |
|
"loss": 4.5392, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.059048103843217105, |
|
"grad_norm": 17.745412826538086, |
|
"learning_rate": 9.92771228001945e-05, |
|
"loss": 7.1147, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.06006617459913464, |
|
"grad_norm": 13.955183982849121, |
|
"learning_rate": 9.924965614670629e-05, |
|
"loss": 3.619, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.06108424535505218, |
|
"grad_norm": 11.067267417907715, |
|
"learning_rate": 9.922168130468059e-05, |
|
"loss": 2.6905, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06210231611096971, |
|
"grad_norm": 11.641958236694336, |
|
"learning_rate": 9.91931985627792e-05, |
|
"loss": 2.398, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.06312038686688724, |
|
"grad_norm": 8.590779304504395, |
|
"learning_rate": 9.916420821490472e-05, |
|
"loss": 1.9248, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.06413845762280479, |
|
"grad_norm": 8.852486610412598, |
|
"learning_rate": 9.91347105601976e-05, |
|
"loss": 2.3876, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.06515652837872232, |
|
"grad_norm": 9.158111572265625, |
|
"learning_rate": 9.910470590303293e-05, |
|
"loss": 1.9339, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.06617459913463986, |
|
"grad_norm": 8.361588478088379, |
|
"learning_rate": 9.907419455301741e-05, |
|
"loss": 2.3266, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06719266989055739, |
|
"grad_norm": 7.891152858734131, |
|
"learning_rate": 9.904317682498608e-05, |
|
"loss": 1.9775, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.06821074064647493, |
|
"grad_norm": 8.722708702087402, |
|
"learning_rate": 9.901165303899916e-05, |
|
"loss": 2.2988, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06922881140239247, |
|
"grad_norm": 10.848478317260742, |
|
"learning_rate": 9.897962352033861e-05, |
|
"loss": 2.2087, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.07024688215831, |
|
"grad_norm": 7.828042984008789, |
|
"learning_rate": 9.89470885995049e-05, |
|
"loss": 2.1694, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.07126495291422753, |
|
"grad_norm": 7.928416728973389, |
|
"learning_rate": 9.891404861221356e-05, |
|
"loss": 1.7946, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07228302367014508, |
|
"grad_norm": 8.273153305053711, |
|
"learning_rate": 9.888050389939172e-05, |
|
"loss": 2.2472, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.07330109442606261, |
|
"grad_norm": 7.866210460662842, |
|
"learning_rate": 9.884645480717451e-05, |
|
"loss": 1.9656, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.07431916518198015, |
|
"grad_norm": 9.140717506408691, |
|
"learning_rate": 9.881190168690164e-05, |
|
"loss": 2.5084, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.07533723593789768, |
|
"grad_norm": 10.078163146972656, |
|
"learning_rate": 9.877684489511366e-05, |
|
"loss": 2.8882, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.07635530669381523, |
|
"grad_norm": 8.583365440368652, |
|
"learning_rate": 9.874128479354832e-05, |
|
"loss": 2.2404, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07737337744973276, |
|
"grad_norm": 10.980644226074219, |
|
"learning_rate": 9.870522174913682e-05, |
|
"loss": 2.9591, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.07839144820565029, |
|
"grad_norm": 9.829695701599121, |
|
"learning_rate": 9.866865613400008e-05, |
|
"loss": 2.5868, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.07940951896156782, |
|
"grad_norm": 9.993083000183105, |
|
"learning_rate": 9.863158832544477e-05, |
|
"loss": 2.7386, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.08042758971748537, |
|
"grad_norm": 9.227055549621582, |
|
"learning_rate": 9.859401870595959e-05, |
|
"loss": 2.3334, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.0814456604734029, |
|
"grad_norm": 9.135334968566895, |
|
"learning_rate": 9.855594766321122e-05, |
|
"loss": 2.6064, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08246373122932044, |
|
"grad_norm": 9.216446876525879, |
|
"learning_rate": 9.85173755900403e-05, |
|
"loss": 2.9289, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.08348180198523797, |
|
"grad_norm": 12.71446418762207, |
|
"learning_rate": 9.847830288445745e-05, |
|
"loss": 3.5027, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.08449987274115552, |
|
"grad_norm": 9.071185111999512, |
|
"learning_rate": 9.843872994963911e-05, |
|
"loss": 3.1217, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.08551794349707305, |
|
"grad_norm": 7.825349807739258, |
|
"learning_rate": 9.839865719392339e-05, |
|
"loss": 2.4812, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.08653601425299058, |
|
"grad_norm": 11.979453086853027, |
|
"learning_rate": 9.835808503080585e-05, |
|
"loss": 3.6076, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08755408500890811, |
|
"grad_norm": 10.889570236206055, |
|
"learning_rate": 9.831701387893533e-05, |
|
"loss": 3.9539, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.08857215576482566, |
|
"grad_norm": 6.638063430786133, |
|
"learning_rate": 9.827544416210941e-05, |
|
"loss": 2.1225, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.0895902265207432, |
|
"grad_norm": 11.630864143371582, |
|
"learning_rate": 9.823337630927026e-05, |
|
"loss": 2.8508, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.09060829727666073, |
|
"grad_norm": 11.906623840332031, |
|
"learning_rate": 9.819081075450014e-05, |
|
"loss": 3.0837, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.09162636803257826, |
|
"grad_norm": 12.019804000854492, |
|
"learning_rate": 9.814774793701687e-05, |
|
"loss": 3.6106, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0926444387884958, |
|
"grad_norm": 7.91819953918457, |
|
"learning_rate": 9.810418830116932e-05, |
|
"loss": 2.3236, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.09366250954441334, |
|
"grad_norm": 9.185378074645996, |
|
"learning_rate": 9.806013229643289e-05, |
|
"loss": 2.6397, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.09468058030033087, |
|
"grad_norm": 12.451518058776855, |
|
"learning_rate": 9.801558037740478e-05, |
|
"loss": 3.3661, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0956986510562484, |
|
"grad_norm": 9.665090560913086, |
|
"learning_rate": 9.797053300379937e-05, |
|
"loss": 2.7933, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.09671672181216595, |
|
"grad_norm": 9.512073516845703, |
|
"learning_rate": 9.792499064044342e-05, |
|
"loss": 3.1669, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09773479256808348, |
|
"grad_norm": 11.063192367553711, |
|
"learning_rate": 9.787895375727136e-05, |
|
"loss": 2.4502, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.09875286332400102, |
|
"grad_norm": 11.608457565307617, |
|
"learning_rate": 9.783242282932028e-05, |
|
"loss": 2.5691, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.09977093407991855, |
|
"grad_norm": 10.834481239318848, |
|
"learning_rate": 9.778539833672524e-05, |
|
"loss": 2.8208, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1007890048358361, |
|
"grad_norm": 9.476598739624023, |
|
"learning_rate": 9.773788076471414e-05, |
|
"loss": 2.4245, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.10180707559175363, |
|
"grad_norm": 10.453302383422852, |
|
"learning_rate": 9.768987060360279e-05, |
|
"loss": 2.1369, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.10282514634767116, |
|
"grad_norm": 8.380644798278809, |
|
"learning_rate": 9.764136834878986e-05, |
|
"loss": 4.4008, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1038432171035887, |
|
"grad_norm": 10.45700740814209, |
|
"learning_rate": 9.759237450075174e-05, |
|
"loss": 3.8277, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.10486128785950624, |
|
"grad_norm": 11.106316566467285, |
|
"learning_rate": 9.754288956503736e-05, |
|
"loss": 4.3912, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.10587935861542377, |
|
"grad_norm": 12.727373123168945, |
|
"learning_rate": 9.749291405226305e-05, |
|
"loss": 5.0723, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.10689742937134131, |
|
"grad_norm": 11.3184175491333, |
|
"learning_rate": 9.744244847810716e-05, |
|
"loss": 4.6612, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10791550012725884, |
|
"grad_norm": 11.49225902557373, |
|
"learning_rate": 9.739149336330482e-05, |
|
"loss": 5.2688, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.10893357088317639, |
|
"grad_norm": 9.92116928100586, |
|
"learning_rate": 9.734004923364257e-05, |
|
"loss": 3.1285, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.10995164163909392, |
|
"grad_norm": 16.322154998779297, |
|
"learning_rate": 9.728811661995288e-05, |
|
"loss": 4.3573, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.11096971239501145, |
|
"grad_norm": 11.590410232543945, |
|
"learning_rate": 9.723569605810871e-05, |
|
"loss": 3.3457, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.11198778315092899, |
|
"grad_norm": 6.267991065979004, |
|
"learning_rate": 9.718278808901797e-05, |
|
"loss": 1.8973, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11300585390684653, |
|
"grad_norm": 7.807132720947266, |
|
"learning_rate": 9.712939325861794e-05, |
|
"loss": 2.2999, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.11402392466276406, |
|
"grad_norm": 5.800601005554199, |
|
"learning_rate": 9.707551211786965e-05, |
|
"loss": 1.0863, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1150419954186816, |
|
"grad_norm": 7.150589466094971, |
|
"learning_rate": 9.702114522275216e-05, |
|
"loss": 1.9172, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.11606006617459913, |
|
"grad_norm": 8.134252548217773, |
|
"learning_rate": 9.696629313425686e-05, |
|
"loss": 2.2173, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.11707813693051668, |
|
"grad_norm": 7.6389689445495605, |
|
"learning_rate": 9.691095641838169e-05, |
|
"loss": 1.8046, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11809620768643421, |
|
"grad_norm": 6.845970153808594, |
|
"learning_rate": 9.685513564612521e-05, |
|
"loss": 1.9059, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.11911427844235174, |
|
"grad_norm": 10.888468742370605, |
|
"learning_rate": 9.679883139348082e-05, |
|
"loss": 2.9148, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.12013234919826928, |
|
"grad_norm": 6.594396114349365, |
|
"learning_rate": 9.674204424143078e-05, |
|
"loss": 1.8292, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.12115041995418682, |
|
"grad_norm": 7.157876491546631, |
|
"learning_rate": 9.66847747759402e-05, |
|
"loss": 1.6858, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.12216849071010435, |
|
"grad_norm": 7.298995494842529, |
|
"learning_rate": 9.662702358795098e-05, |
|
"loss": 1.7957, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.12318656146602189, |
|
"grad_norm": 9.0108003616333, |
|
"learning_rate": 9.656879127337571e-05, |
|
"loss": 2.2843, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.12420463222193942, |
|
"grad_norm": 8.476913452148438, |
|
"learning_rate": 9.651007843309163e-05, |
|
"loss": 2.1026, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.12522270297785695, |
|
"grad_norm": 9.930148124694824, |
|
"learning_rate": 9.645088567293426e-05, |
|
"loss": 2.6976, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1262407737337745, |
|
"grad_norm": 8.574073791503906, |
|
"learning_rate": 9.639121360369126e-05, |
|
"loss": 1.7768, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.12725884448969205, |
|
"grad_norm": 13.36725902557373, |
|
"learning_rate": 9.63310628410961e-05, |
|
"loss": 2.7559, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.12827691524560958, |
|
"grad_norm": 8.55522346496582, |
|
"learning_rate": 9.627043400582172e-05, |
|
"loss": 2.3419, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.1292949860015271, |
|
"grad_norm": 9.948506355285645, |
|
"learning_rate": 9.620932772347408e-05, |
|
"loss": 3.0092, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.13031305675744465, |
|
"grad_norm": 10.05156135559082, |
|
"learning_rate": 9.614774462458573e-05, |
|
"loss": 2.1554, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.13133112751336218, |
|
"grad_norm": 10.230545043945312, |
|
"learning_rate": 9.608568534460936e-05, |
|
"loss": 2.572, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.1323491982692797, |
|
"grad_norm": 7.820633411407471, |
|
"learning_rate": 9.602315052391115e-05, |
|
"loss": 2.2316, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.13336726902519724, |
|
"grad_norm": 7.196948528289795, |
|
"learning_rate": 9.596014080776423e-05, |
|
"loss": 2.276, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.13438533978111478, |
|
"grad_norm": 10.125378608703613, |
|
"learning_rate": 9.589665684634196e-05, |
|
"loss": 3.6436, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.13540341053703234, |
|
"grad_norm": 8.542695045471191, |
|
"learning_rate": 9.583269929471128e-05, |
|
"loss": 2.8726, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.13642148129294987, |
|
"grad_norm": 8.097149848937988, |
|
"learning_rate": 9.576826881282594e-05, |
|
"loss": 2.3483, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.1374395520488674, |
|
"grad_norm": 8.922883987426758, |
|
"learning_rate": 9.570336606551967e-05, |
|
"loss": 2.5365, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.13845762280478494, |
|
"grad_norm": 9.18602180480957, |
|
"learning_rate": 9.56379917224993e-05, |
|
"loss": 2.7464, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.13947569356070247, |
|
"grad_norm": 8.929719924926758, |
|
"learning_rate": 9.557214645833792e-05, |
|
"loss": 2.8074, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.14049376431662, |
|
"grad_norm": 10.157453536987305, |
|
"learning_rate": 9.550583095246786e-05, |
|
"loss": 2.6313, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.14151183507253753, |
|
"grad_norm": 8.677960395812988, |
|
"learning_rate": 9.543904588917367e-05, |
|
"loss": 2.7515, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.14252990582845507, |
|
"grad_norm": 8.684197425842285, |
|
"learning_rate": 9.537179195758512e-05, |
|
"loss": 2.5564, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.14354797658437263, |
|
"grad_norm": 8.283134460449219, |
|
"learning_rate": 9.530406985167004e-05, |
|
"loss": 2.3474, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.14456604734029016, |
|
"grad_norm": 7.090147018432617, |
|
"learning_rate": 9.523588027022721e-05, |
|
"loss": 2.0495, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1455841180962077, |
|
"grad_norm": 9.59614086151123, |
|
"learning_rate": 9.516722391687902e-05, |
|
"loss": 2.4563, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.14660218885212523, |
|
"grad_norm": 7.75164270401001, |
|
"learning_rate": 9.50981015000644e-05, |
|
"loss": 2.0795, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.14762025960804276, |
|
"grad_norm": 9.117147445678711, |
|
"learning_rate": 9.502851373303136e-05, |
|
"loss": 2.519, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1486383303639603, |
|
"grad_norm": 9.871448516845703, |
|
"learning_rate": 9.495846133382973e-05, |
|
"loss": 2.6371, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.14965640111987782, |
|
"grad_norm": 8.246638298034668, |
|
"learning_rate": 9.488794502530362e-05, |
|
"loss": 2.3142, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.15067447187579536, |
|
"grad_norm": 11.579840660095215, |
|
"learning_rate": 9.48169655350841e-05, |
|
"loss": 2.8947, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.15169254263171292, |
|
"grad_norm": 13.307292938232422, |
|
"learning_rate": 9.474552359558166e-05, |
|
"loss": 2.9942, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.15271061338763045, |
|
"grad_norm": 10.210186958312988, |
|
"learning_rate": 9.467361994397859e-05, |
|
"loss": 2.0216, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.15372868414354798, |
|
"grad_norm": 7.870486259460449, |
|
"learning_rate": 9.460125532222141e-05, |
|
"loss": 2.6203, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.15474675489946552, |
|
"grad_norm": 13.753894805908203, |
|
"learning_rate": 9.452843047701323e-05, |
|
"loss": 4.1998, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.15576482565538305, |
|
"grad_norm": 10.677061080932617, |
|
"learning_rate": 9.445514615980604e-05, |
|
"loss": 3.9647, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.15678289641130058, |
|
"grad_norm": 11.903203010559082, |
|
"learning_rate": 9.438140312679291e-05, |
|
"loss": 4.2215, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.15780096716721811, |
|
"grad_norm": 12.882353782653809, |
|
"learning_rate": 9.43072021389003e-05, |
|
"loss": 5.0153, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.15881903792313565, |
|
"grad_norm": 13.99023151397705, |
|
"learning_rate": 9.423254396178003e-05, |
|
"loss": 5.5362, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.1598371086790532, |
|
"grad_norm": 16.683727264404297, |
|
"learning_rate": 9.415742936580157e-05, |
|
"loss": 5.1149, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.16085517943497074, |
|
"grad_norm": 17.32396125793457, |
|
"learning_rate": 9.408185912604394e-05, |
|
"loss": 4.8563, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.16187325019088827, |
|
"grad_norm": 14.138668060302734, |
|
"learning_rate": 9.400583402228784e-05, |
|
"loss": 3.4698, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1628913209468058, |
|
"grad_norm": 6.4397430419921875, |
|
"learning_rate": 9.392935483900749e-05, |
|
"loss": 1.8856, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.16390939170272334, |
|
"grad_norm": 4.72169303894043, |
|
"learning_rate": 9.38524223653626e-05, |
|
"loss": 1.3027, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.16492746245864087, |
|
"grad_norm": 7.877247333526611, |
|
"learning_rate": 9.377503739519019e-05, |
|
"loss": 1.9129, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.1659455332145584, |
|
"grad_norm": 8.524123191833496, |
|
"learning_rate": 9.369720072699647e-05, |
|
"loss": 1.5605, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.16696360397047594, |
|
"grad_norm": 9.966007232666016, |
|
"learning_rate": 9.361891316394851e-05, |
|
"loss": 2.5458, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.16798167472639347, |
|
"grad_norm": 9.061026573181152, |
|
"learning_rate": 9.354017551386599e-05, |
|
"loss": 1.8415, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.16899974548231103, |
|
"grad_norm": 7.912156581878662, |
|
"learning_rate": 9.346098858921291e-05, |
|
"loss": 1.9514, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.17001781623822856, |
|
"grad_norm": 6.926218509674072, |
|
"learning_rate": 9.338135320708911e-05, |
|
"loss": 2.1861, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.1710358869941461, |
|
"grad_norm": 7.546460151672363, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 1.7336, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.17205395775006363, |
|
"grad_norm": 6.780023097991943, |
|
"learning_rate": 9.322074036195769e-05, |
|
"loss": 1.766, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.17307202850598116, |
|
"grad_norm": 8.207006454467773, |
|
"learning_rate": 9.313976455625315e-05, |
|
"loss": 1.937, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1740900992618987, |
|
"grad_norm": 10.892253875732422, |
|
"learning_rate": 9.305834360766695e-05, |
|
"loss": 2.6682, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.17510817001781623, |
|
"grad_norm": 8.318902015686035, |
|
"learning_rate": 9.297647835635102e-05, |
|
"loss": 2.0102, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.17612624077373376, |
|
"grad_norm": 7.727786540985107, |
|
"learning_rate": 9.289416964704185e-05, |
|
"loss": 1.9714, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.17714431152965132, |
|
"grad_norm": 9.250336647033691, |
|
"learning_rate": 9.281141832905185e-05, |
|
"loss": 2.3855, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.17816238228556885, |
|
"grad_norm": 7.347965717315674, |
|
"learning_rate": 9.272822525626046e-05, |
|
"loss": 1.8475, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1791804530414864, |
|
"grad_norm": 7.1732354164123535, |
|
"learning_rate": 9.26445912871055e-05, |
|
"loss": 1.9938, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.18019852379740392, |
|
"grad_norm": 11.556361198425293, |
|
"learning_rate": 9.25605172845742e-05, |
|
"loss": 3.3699, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.18121659455332145, |
|
"grad_norm": 9.626664161682129, |
|
"learning_rate": 9.247600411619434e-05, |
|
"loss": 2.7054, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.18223466530923899, |
|
"grad_norm": 7.422823429107666, |
|
"learning_rate": 9.239105265402525e-05, |
|
"loss": 2.3665, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.18325273606515652, |
|
"grad_norm": 8.812822341918945, |
|
"learning_rate": 9.23056637746489e-05, |
|
"loss": 2.4336, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.18427080682107405, |
|
"grad_norm": 12.493931770324707, |
|
"learning_rate": 9.221983835916074e-05, |
|
"loss": 2.4446, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1852888775769916, |
|
"grad_norm": 9.533077239990234, |
|
"learning_rate": 9.213357729316076e-05, |
|
"loss": 2.5195, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.18630694833290914, |
|
"grad_norm": 7.195649147033691, |
|
"learning_rate": 9.204688146674418e-05, |
|
"loss": 1.5695, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.18732501908882668, |
|
"grad_norm": 10.850951194763184, |
|
"learning_rate": 9.195975177449238e-05, |
|
"loss": 3.3308, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1883430898447442, |
|
"grad_norm": 9.36767578125, |
|
"learning_rate": 9.187218911546362e-05, |
|
"loss": 2.8146, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.18936116060066174, |
|
"grad_norm": 14.791803359985352, |
|
"learning_rate": 9.178419439318382e-05, |
|
"loss": 3.5093, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.19037923135657928, |
|
"grad_norm": 10.107565879821777, |
|
"learning_rate": 9.169576851563715e-05, |
|
"loss": 2.4756, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1913973021124968, |
|
"grad_norm": 8.8936128616333, |
|
"learning_rate": 9.160691239525674e-05, |
|
"loss": 2.4272, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.19241537286841434, |
|
"grad_norm": 8.861714363098145, |
|
"learning_rate": 9.151762694891521e-05, |
|
"loss": 2.1092, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1934334436243319, |
|
"grad_norm": 9.74419116973877, |
|
"learning_rate": 9.142791309791528e-05, |
|
"loss": 3.1339, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.19445151438024944, |
|
"grad_norm": 10.207488059997559, |
|
"learning_rate": 9.133777176798013e-05, |
|
"loss": 2.5119, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.19546958513616697, |
|
"grad_norm": 9.463604927062988, |
|
"learning_rate": 9.124720388924403e-05, |
|
"loss": 2.669, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.1964876558920845, |
|
"grad_norm": 11.191435813903809, |
|
"learning_rate": 9.115621039624256e-05, |
|
"loss": 3.134, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.19750572664800203, |
|
"grad_norm": 8.744293212890625, |
|
"learning_rate": 9.10647922279031e-05, |
|
"loss": 2.8205, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.19852379740391957, |
|
"grad_norm": 9.338461875915527, |
|
"learning_rate": 9.09729503275351e-05, |
|
"loss": 2.2502, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1995418681598371, |
|
"grad_norm": 8.457433700561523, |
|
"learning_rate": 9.088068564282031e-05, |
|
"loss": 2.1407, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.20055993891575463, |
|
"grad_norm": 11.790545463562012, |
|
"learning_rate": 9.078799912580304e-05, |
|
"loss": 3.0246, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2015780096716722, |
|
"grad_norm": 10.485797882080078, |
|
"learning_rate": 9.069489173288038e-05, |
|
"loss": 2.7989, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.20259608042758973, |
|
"grad_norm": 10.064512252807617, |
|
"learning_rate": 9.060136442479215e-05, |
|
"loss": 2.3104, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.20361415118350726, |
|
"grad_norm": 11.273386001586914, |
|
"learning_rate": 9.050741816661128e-05, |
|
"loss": 2.1308, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2046322219394248, |
|
"grad_norm": 7.872629642486572, |
|
"learning_rate": 9.041305392773354e-05, |
|
"loss": 3.2454, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.20565029269534232, |
|
"grad_norm": 10.097418785095215, |
|
"learning_rate": 9.031827268186779e-05, |
|
"loss": 3.8778, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.20666836345125986, |
|
"grad_norm": 9.544397354125977, |
|
"learning_rate": 9.022307540702576e-05, |
|
"loss": 3.5354, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.2076864342071774, |
|
"grad_norm": 13.447309494018555, |
|
"learning_rate": 9.012746308551208e-05, |
|
"loss": 5.3594, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.20870450496309492, |
|
"grad_norm": 12.501740455627441, |
|
"learning_rate": 9.003143670391403e-05, |
|
"loss": 3.5315, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.20972257571901248, |
|
"grad_norm": 13.571687698364258, |
|
"learning_rate": 8.993499725309148e-05, |
|
"loss": 4.0421, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.21074064647493002, |
|
"grad_norm": 14.879913330078125, |
|
"learning_rate": 8.983814572816656e-05, |
|
"loss": 4.1594, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.21175871723084755, |
|
"grad_norm": 17.623329162597656, |
|
"learning_rate": 8.974088312851345e-05, |
|
"loss": 4.9946, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.21277678798676508, |
|
"grad_norm": 6.669205665588379, |
|
"learning_rate": 8.964321045774807e-05, |
|
"loss": 1.5305, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.21379485874268261, |
|
"grad_norm": 9.656936645507812, |
|
"learning_rate": 8.954512872371769e-05, |
|
"loss": 2.7299, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.21481292949860015, |
|
"grad_norm": 7.008784770965576, |
|
"learning_rate": 8.944663893849052e-05, |
|
"loss": 1.4462, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.21583100025451768, |
|
"grad_norm": 6.301548004150391, |
|
"learning_rate": 8.934774211834538e-05, |
|
"loss": 1.4093, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.2168490710104352, |
|
"grad_norm": 7.544199466705322, |
|
"learning_rate": 8.924843928376104e-05, |
|
"loss": 1.6221, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.21786714176635277, |
|
"grad_norm": 9.308175086975098, |
|
"learning_rate": 8.914873145940584e-05, |
|
"loss": 2.1724, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.2188852125222703, |
|
"grad_norm": 8.202116012573242, |
|
"learning_rate": 8.904861967412703e-05, |
|
"loss": 1.7294, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.21990328327818784, |
|
"grad_norm": 9.309891700744629, |
|
"learning_rate": 8.894810496094016e-05, |
|
"loss": 2.1319, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.22092135403410537, |
|
"grad_norm": 7.8817925453186035, |
|
"learning_rate": 8.884718835701848e-05, |
|
"loss": 2.0479, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.2219394247900229, |
|
"grad_norm": 7.9436116218566895, |
|
"learning_rate": 8.874587090368221e-05, |
|
"loss": 1.9141, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.22295749554594044, |
|
"grad_norm": 9.188081741333008, |
|
"learning_rate": 8.86441536463877e-05, |
|
"loss": 2.5944, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.22397556630185797, |
|
"grad_norm": 9.442697525024414, |
|
"learning_rate": 8.85420376347168e-05, |
|
"loss": 2.616, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2249936370577755, |
|
"grad_norm": 7.059047222137451, |
|
"learning_rate": 8.843952392236594e-05, |
|
"loss": 1.8199, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.22601170781369306, |
|
"grad_norm": 9.448399543762207, |
|
"learning_rate": 8.833661356713528e-05, |
|
"loss": 2.2707, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.2270297785696106, |
|
"grad_norm": 7.232347011566162, |
|
"learning_rate": 8.823330763091775e-05, |
|
"loss": 2.2834, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.22804784932552813, |
|
"grad_norm": 7.126833438873291, |
|
"learning_rate": 8.812960717968818e-05, |
|
"loss": 2.2613, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.22906592008144566, |
|
"grad_norm": 7.250087261199951, |
|
"learning_rate": 8.802551328349222e-05, |
|
"loss": 2.0233, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2300839908373632, |
|
"grad_norm": 9.801566123962402, |
|
"learning_rate": 8.792102701643531e-05, |
|
"loss": 2.6283, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.23110206159328073, |
|
"grad_norm": 8.86218547821045, |
|
"learning_rate": 8.781614945667169e-05, |
|
"loss": 2.7821, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.23212013234919826, |
|
"grad_norm": 7.009481430053711, |
|
"learning_rate": 8.771088168639312e-05, |
|
"loss": 2.187, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.2331382031051158, |
|
"grad_norm": 7.643123149871826, |
|
"learning_rate": 8.760522479181784e-05, |
|
"loss": 2.0065, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.23415627386103335, |
|
"grad_norm": 6.573335647583008, |
|
"learning_rate": 8.749917986317928e-05, |
|
"loss": 1.939, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2351743446169509, |
|
"grad_norm": 9.001991271972656, |
|
"learning_rate": 8.73927479947149e-05, |
|
"loss": 2.8534, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.23619241537286842, |
|
"grad_norm": 9.186355590820312, |
|
"learning_rate": 8.72859302846548e-05, |
|
"loss": 3.112, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.23721048612878595, |
|
"grad_norm": 9.961040496826172, |
|
"learning_rate": 8.717872783521047e-05, |
|
"loss": 3.2593, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.23822855688470349, |
|
"grad_norm": 8.34619426727295, |
|
"learning_rate": 8.707114175256335e-05, |
|
"loss": 2.2664, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.23924662764062102, |
|
"grad_norm": 7.473055839538574, |
|
"learning_rate": 8.696317314685341e-05, |
|
"loss": 2.8765, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.24026469839653855, |
|
"grad_norm": 6.791398048400879, |
|
"learning_rate": 8.685482313216783e-05, |
|
"loss": 2.098, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.24128276915245608, |
|
"grad_norm": 9.765985488891602, |
|
"learning_rate": 8.674609282652934e-05, |
|
"loss": 3.2374, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.24230083990837364, |
|
"grad_norm": 7.459610462188721, |
|
"learning_rate": 8.663698335188477e-05, |
|
"loss": 2.456, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.24331891066429118, |
|
"grad_norm": 8.42564868927002, |
|
"learning_rate": 8.65274958340934e-05, |
|
"loss": 2.3464, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.2443369814202087, |
|
"grad_norm": 7.114076137542725, |
|
"learning_rate": 8.641763140291545e-05, |
|
"loss": 2.1128, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.24535505217612624, |
|
"grad_norm": 9.573076248168945, |
|
"learning_rate": 8.630739119200035e-05, |
|
"loss": 2.4448, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.24637312293204378, |
|
"grad_norm": 7.850905895233154, |
|
"learning_rate": 8.619677633887509e-05, |
|
"loss": 2.446, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.2473911936879613, |
|
"grad_norm": 9.630354881286621, |
|
"learning_rate": 8.608578798493236e-05, |
|
"loss": 2.3875, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.24840926444387884, |
|
"grad_norm": 7.196229457855225, |
|
"learning_rate": 8.597442727541897e-05, |
|
"loss": 1.6186, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.24942733519979637, |
|
"grad_norm": 11.08008098602295, |
|
"learning_rate": 8.586269535942385e-05, |
|
"loss": 2.839, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2504454059557139, |
|
"grad_norm": 8.258538246154785, |
|
"learning_rate": 8.575059338986633e-05, |
|
"loss": 2.2807, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.25146347671163144, |
|
"grad_norm": 9.670249938964844, |
|
"learning_rate": 8.563812252348411e-05, |
|
"loss": 2.2475, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.252481547467549, |
|
"grad_norm": 9.350224494934082, |
|
"learning_rate": 8.552528392082147e-05, |
|
"loss": 2.5073, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.2534996182234665, |
|
"grad_norm": 9.628990173339844, |
|
"learning_rate": 8.541207874621718e-05, |
|
"loss": 2.0659, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.2545176889793841, |
|
"grad_norm": 9.84993839263916, |
|
"learning_rate": 8.529850816779251e-05, |
|
"loss": 2.2365, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.2555357597353016, |
|
"grad_norm": 8.845046997070312, |
|
"learning_rate": 8.518457335743926e-05, |
|
"loss": 3.1796, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.25655383049121916, |
|
"grad_norm": 10.79970932006836, |
|
"learning_rate": 8.507027549080753e-05, |
|
"loss": 3.8036, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.2575719012471367, |
|
"grad_norm": 8.892024993896484, |
|
"learning_rate": 8.495561574729369e-05, |
|
"loss": 2.9368, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.2585899720030542, |
|
"grad_norm": 11.658991813659668, |
|
"learning_rate": 8.484059531002821e-05, |
|
"loss": 3.7456, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.25960804275897176, |
|
"grad_norm": 11.338571548461914, |
|
"learning_rate": 8.472521536586335e-05, |
|
"loss": 3.9418, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2606261135148893, |
|
"grad_norm": 14.362560272216797, |
|
"learning_rate": 8.460947710536107e-05, |
|
"loss": 4.6011, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.2616441842708068, |
|
"grad_norm": 13.662555694580078, |
|
"learning_rate": 8.449338172278059e-05, |
|
"loss": 5.049, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.26266225502672436, |
|
"grad_norm": 15.036532402038574, |
|
"learning_rate": 8.437693041606618e-05, |
|
"loss": 4.0385, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.2636803257826419, |
|
"grad_norm": 12.57422161102295, |
|
"learning_rate": 8.426012438683473e-05, |
|
"loss": 3.1101, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.2646983965385594, |
|
"grad_norm": 8.874217987060547, |
|
"learning_rate": 8.414296484036339e-05, |
|
"loss": 2.3986, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.26571646729447695, |
|
"grad_norm": 5.300018787384033, |
|
"learning_rate": 8.402545298557712e-05, |
|
"loss": 0.9408, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.2667345380503945, |
|
"grad_norm": 6.752171039581299, |
|
"learning_rate": 8.390759003503623e-05, |
|
"loss": 1.8722, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.267752608806312, |
|
"grad_norm": 7.508800029754639, |
|
"learning_rate": 8.378937720492384e-05, |
|
"loss": 1.7145, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.26877067956222955, |
|
"grad_norm": 6.305329322814941, |
|
"learning_rate": 8.367081571503332e-05, |
|
"loss": 1.6567, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.2697887503181471, |
|
"grad_norm": 9.371475219726562, |
|
"learning_rate": 8.355190678875578e-05, |
|
"loss": 2.4242, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2708068210740647, |
|
"grad_norm": 5.403337001800537, |
|
"learning_rate": 8.343265165306735e-05, |
|
"loss": 1.3716, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.2718248918299822, |
|
"grad_norm": 8.240038871765137, |
|
"learning_rate": 8.331305153851658e-05, |
|
"loss": 2.2134, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.27284296258589974, |
|
"grad_norm": 5.857060432434082, |
|
"learning_rate": 8.319310767921174e-05, |
|
"loss": 1.2823, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.2738610333418173, |
|
"grad_norm": 6.11975622177124, |
|
"learning_rate": 8.307282131280804e-05, |
|
"loss": 1.7163, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.2748791040977348, |
|
"grad_norm": 10.722909927368164, |
|
"learning_rate": 8.295219368049494e-05, |
|
"loss": 2.2343, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.27589717485365234, |
|
"grad_norm": 6.3688507080078125, |
|
"learning_rate": 8.283122602698323e-05, |
|
"loss": 1.253, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.27691524560956987, |
|
"grad_norm": 8.231119155883789, |
|
"learning_rate": 8.27099196004923e-05, |
|
"loss": 2.4005, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.2779333163654874, |
|
"grad_norm": 6.623697757720947, |
|
"learning_rate": 8.258827565273718e-05, |
|
"loss": 1.5276, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.27895138712140494, |
|
"grad_norm": 8.357768058776855, |
|
"learning_rate": 8.246629543891569e-05, |
|
"loss": 2.5312, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.27996945787732247, |
|
"grad_norm": 7.037582874298096, |
|
"learning_rate": 8.23439802176954e-05, |
|
"loss": 2.6555, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.28098752863324, |
|
"grad_norm": 8.760072708129883, |
|
"learning_rate": 8.222133125120076e-05, |
|
"loss": 2.051, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.28200559938915754, |
|
"grad_norm": 10.407620429992676, |
|
"learning_rate": 8.209834980499995e-05, |
|
"loss": 2.7866, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.28302367014507507, |
|
"grad_norm": 10.065641403198242, |
|
"learning_rate": 8.197503714809191e-05, |
|
"loss": 2.7393, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.2840417409009926, |
|
"grad_norm": 8.072295188903809, |
|
"learning_rate": 8.185139455289322e-05, |
|
"loss": 2.1416, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.28505981165691013, |
|
"grad_norm": 10.837465286254883, |
|
"learning_rate": 8.172742329522493e-05, |
|
"loss": 3.0516, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.28607788241282767, |
|
"grad_norm": 8.73945426940918, |
|
"learning_rate": 8.160312465429952e-05, |
|
"loss": 2.6516, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.28709595316874525, |
|
"grad_norm": 9.408519744873047, |
|
"learning_rate": 8.147849991270752e-05, |
|
"loss": 2.4367, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.2881140239246628, |
|
"grad_norm": 8.221115112304688, |
|
"learning_rate": 8.135355035640444e-05, |
|
"loss": 2.4484, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.2891320946805803, |
|
"grad_norm": 11.46916389465332, |
|
"learning_rate": 8.122827727469737e-05, |
|
"loss": 3.5208, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.29015016543649785, |
|
"grad_norm": 8.721375465393066, |
|
"learning_rate": 8.110268196023179e-05, |
|
"loss": 2.3896, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2911682361924154, |
|
"grad_norm": 9.857149124145508, |
|
"learning_rate": 8.097676570897814e-05, |
|
"loss": 2.8248, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.2921863069483329, |
|
"grad_norm": 7.732857704162598, |
|
"learning_rate": 8.085052982021847e-05, |
|
"loss": 2.0455, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.29320437770425045, |
|
"grad_norm": 9.654264450073242, |
|
"learning_rate": 8.072397559653313e-05, |
|
"loss": 2.3959, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.294222448460168, |
|
"grad_norm": 10.697869300842285, |
|
"learning_rate": 8.059710434378715e-05, |
|
"loss": 3.314, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.2952405192160855, |
|
"grad_norm": 8.84398078918457, |
|
"learning_rate": 8.046991737111696e-05, |
|
"loss": 2.0514, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.29625858997200305, |
|
"grad_norm": 7.673434257507324, |
|
"learning_rate": 8.034241599091665e-05, |
|
"loss": 2.165, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.2972766607279206, |
|
"grad_norm": 10.299299240112305, |
|
"learning_rate": 8.021460151882471e-05, |
|
"loss": 3.0283, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.2982947314838381, |
|
"grad_norm": 6.935961723327637, |
|
"learning_rate": 8.008647527371023e-05, |
|
"loss": 1.9187, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.29931280223975565, |
|
"grad_norm": 9.410109519958496, |
|
"learning_rate": 7.995803857765933e-05, |
|
"loss": 2.4798, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3003308729956732, |
|
"grad_norm": 9.035164833068848, |
|
"learning_rate": 7.982929275596166e-05, |
|
"loss": 2.8312, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.3013489437515907, |
|
"grad_norm": 8.214160919189453, |
|
"learning_rate": 7.970023913709652e-05, |
|
"loss": 2.4572, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.30236701450750825, |
|
"grad_norm": 9.095396041870117, |
|
"learning_rate": 7.957087905271934e-05, |
|
"loss": 2.4834, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.30338508526342584, |
|
"grad_norm": 9.806940078735352, |
|
"learning_rate": 7.944121383764776e-05, |
|
"loss": 2.6364, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.30440315601934337, |
|
"grad_norm": 10.004327774047852, |
|
"learning_rate": 7.931124482984802e-05, |
|
"loss": 2.4236, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.3054212267752609, |
|
"grad_norm": 12.964902877807617, |
|
"learning_rate": 7.918097337042105e-05, |
|
"loss": 2.542, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.30643929753117843, |
|
"grad_norm": 7.418375015258789, |
|
"learning_rate": 7.905040080358868e-05, |
|
"loss": 2.7417, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.30745736828709597, |
|
"grad_norm": 8.499213218688965, |
|
"learning_rate": 7.891952847667973e-05, |
|
"loss": 3.6269, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.3084754390430135, |
|
"grad_norm": 8.255233764648438, |
|
"learning_rate": 7.878835774011615e-05, |
|
"loss": 2.7048, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.30949350979893103, |
|
"grad_norm": 9.611371040344238, |
|
"learning_rate": 7.865688994739907e-05, |
|
"loss": 3.9859, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.31051158055484857, |
|
"grad_norm": 9.825111389160156, |
|
"learning_rate": 7.85251264550948e-05, |
|
"loss": 3.2304, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.3115296513107661, |
|
"grad_norm": 13.28979778289795, |
|
"learning_rate": 7.839306862282089e-05, |
|
"loss": 3.8321, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.31254772206668363, |
|
"grad_norm": 16.412532806396484, |
|
"learning_rate": 7.826071781323207e-05, |
|
"loss": 5.4082, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.31356579282260116, |
|
"grad_norm": 9.727624893188477, |
|
"learning_rate": 7.812807539200622e-05, |
|
"loss": 2.5533, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.3145838635785187, |
|
"grad_norm": 8.238031387329102, |
|
"learning_rate": 7.799514272783014e-05, |
|
"loss": 2.2857, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.31560193433443623, |
|
"grad_norm": 6.882323741912842, |
|
"learning_rate": 7.786192119238567e-05, |
|
"loss": 2.1371, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.31662000509035376, |
|
"grad_norm": 5.3293280601501465, |
|
"learning_rate": 7.772841216033533e-05, |
|
"loss": 1.1834, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.3176380758462713, |
|
"grad_norm": 5.593384265899658, |
|
"learning_rate": 7.759461700930823e-05, |
|
"loss": 1.4746, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.31865614660218883, |
|
"grad_norm": 5.281317234039307, |
|
"learning_rate": 7.746053711988583e-05, |
|
"loss": 1.1387, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.3196742173581064, |
|
"grad_norm": 6.735507965087891, |
|
"learning_rate": 7.73261738755877e-05, |
|
"loss": 1.9801, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.32069228811402395, |
|
"grad_norm": 6.708221912384033, |
|
"learning_rate": 7.719152866285721e-05, |
|
"loss": 1.863, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.3217103588699415, |
|
"grad_norm": 8.238001823425293, |
|
"learning_rate": 7.70566028710473e-05, |
|
"loss": 2.16, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.322728429625859, |
|
"grad_norm": 6.396310329437256, |
|
"learning_rate": 7.692139789240611e-05, |
|
"loss": 2.1722, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.32374650038177655, |
|
"grad_norm": 8.0552396774292, |
|
"learning_rate": 7.678591512206255e-05, |
|
"loss": 2.2866, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.3247645711376941, |
|
"grad_norm": 5.051511287689209, |
|
"learning_rate": 7.665015595801197e-05, |
|
"loss": 1.0846, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.3257826418936116, |
|
"grad_norm": 7.378931045532227, |
|
"learning_rate": 7.651412180110176e-05, |
|
"loss": 1.7923, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.32680071264952915, |
|
"grad_norm": 7.6839118003845215, |
|
"learning_rate": 7.637781405501681e-05, |
|
"loss": 1.3504, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.3278187834054467, |
|
"grad_norm": 7.594010829925537, |
|
"learning_rate": 7.624123412626512e-05, |
|
"loss": 2.5312, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.3288368541613642, |
|
"grad_norm": 7.310485363006592, |
|
"learning_rate": 7.610438342416319e-05, |
|
"loss": 1.8773, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.32985492491728174, |
|
"grad_norm": 10.614038467407227, |
|
"learning_rate": 7.596726336082158e-05, |
|
"loss": 2.8128, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.3308729956731993, |
|
"grad_norm": 7.768847465515137, |
|
"learning_rate": 7.582987535113023e-05, |
|
"loss": 2.2407, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.3318910664291168, |
|
"grad_norm": 9.793490409851074, |
|
"learning_rate": 7.569222081274395e-05, |
|
"loss": 2.3751, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.33290913718503434, |
|
"grad_norm": 9.077165603637695, |
|
"learning_rate": 7.555430116606778e-05, |
|
"loss": 2.3227, |
|
"step": 327 |
|
}, |
|
    {
      "epoch": 0.3339272079409519,
      "grad_norm": 8.083662986755371,
      "learning_rate": 7.541611783424225e-05,
      "loss": 2.4783,
      "step": 328
    },
    {
      "epoch": 0.3349452786968694,
      "grad_norm": 8.170014381408691,
      "learning_rate": 7.527767224312883e-05,
      "loss": 2.5388,
      "step": 329
    },
    {
      "epoch": 0.33596334945278694,
      "grad_norm": 7.519252777099609,
      "learning_rate": 7.513896582129508e-05,
      "loss": 2.384,
      "step": 330
    },
    {
      "epoch": 0.33698142020870453,
      "grad_norm": 9.94649887084961,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.7797,
      "step": 331
    },
    {
      "epoch": 0.33799949096462206,
      "grad_norm": 8.595416069030762,
      "learning_rate": 7.486077621317926e-05,
      "loss": 2.7035,
      "step": 332
    },
    {
      "epoch": 0.3390175617205396,
      "grad_norm": 7.753424644470215,
      "learning_rate": 7.472129589743033e-05,
      "loss": 2.0287,
      "step": 333
    },
    {
      "epoch": 0.34003563247645713,
      "grad_norm": 9.271249771118164,
      "learning_rate": 7.458156049199775e-05,
      "loss": 2.1601,
      "step": 334
    },
    {
      "epoch": 0.34105370323237466,
      "grad_norm": 8.564419746398926,
      "learning_rate": 7.44415714387582e-05,
      "loss": 2.9013,
      "step": 335
    },
    {
      "epoch": 0.3420717739882922,
      "grad_norm": 8.431052207946777,
      "learning_rate": 7.430133018220567e-05,
      "loss": 2.4643,
      "step": 336
    },
    {
      "epoch": 0.3430898447442097,
      "grad_norm": 6.7154436111450195,
      "learning_rate": 7.416083816943653e-05,
      "loss": 2.271,
      "step": 337
    },
    {
      "epoch": 0.34410791550012726,
      "grad_norm": 9.483381271362305,
      "learning_rate": 7.402009685013463e-05,
      "loss": 2.489,
      "step": 338
    },
    {
      "epoch": 0.3451259862560448,
      "grad_norm": 7.885382175445557,
      "learning_rate": 7.38791076765563e-05,
      "loss": 3.1875,
      "step": 339
    },
    {
      "epoch": 0.3461440570119623,
      "grad_norm": 7.622438430786133,
      "learning_rate": 7.373787210351541e-05,
      "loss": 2.0865,
      "step": 340
    },
    {
      "epoch": 0.34716212776787986,
      "grad_norm": 7.785037517547607,
      "learning_rate": 7.359639158836828e-05,
      "loss": 2.0806,
      "step": 341
    },
    {
      "epoch": 0.3481801985237974,
      "grad_norm": 7.861757755279541,
      "learning_rate": 7.345466759099875e-05,
      "loss": 2.4029,
      "step": 342
    },
    {
      "epoch": 0.3491982692797149,
      "grad_norm": 8.165399551391602,
      "learning_rate": 7.331270157380303e-05,
      "loss": 2.1438,
      "step": 343
    },
    {
      "epoch": 0.35021634003563246,
      "grad_norm": 8.010607719421387,
      "learning_rate": 7.317049500167465e-05,
      "loss": 1.8633,
      "step": 344
    },
    {
      "epoch": 0.35123441079155,
      "grad_norm": 8.535947799682617,
      "learning_rate": 7.302804934198936e-05,
      "loss": 2.2122,
      "step": 345
    },
    {
      "epoch": 0.3522524815474675,
      "grad_norm": 8.71279239654541,
      "learning_rate": 7.28853660645899e-05,
      "loss": 2.4223,
      "step": 346
    },
    {
      "epoch": 0.3532705523033851,
      "grad_norm": 9.44794750213623,
      "learning_rate": 7.274244664177097e-05,
      "loss": 2.5881,
      "step": 347
    },
    {
      "epoch": 0.35428862305930264,
      "grad_norm": 10.474303245544434,
      "learning_rate": 7.259929254826392e-05,
      "loss": 1.9121,
      "step": 348
    },
    {
      "epoch": 0.3553066938152202,
      "grad_norm": 9.976726531982422,
      "learning_rate": 7.245590526122159e-05,
      "loss": 2.3289,
      "step": 349
    },
    {
      "epoch": 0.3563247645711377,
      "grad_norm": 10.359407424926758,
      "learning_rate": 7.231228626020304e-05,
      "loss": 2.0781,
      "step": 350
    },
    {
      "epoch": 0.35734283532705524,
      "grad_norm": 10.392163276672363,
      "learning_rate": 7.216843702715831e-05,
      "loss": 4.3803,
      "step": 351
    },
    {
      "epoch": 0.3583609060829728,
      "grad_norm": 8.603669166564941,
      "learning_rate": 7.202435904641315e-05,
      "loss": 3.3045,
      "step": 352
    },
    {
      "epoch": 0.3593789768388903,
      "grad_norm": 11.44635009765625,
      "learning_rate": 7.188005380465364e-05,
      "loss": 5.3026,
      "step": 353
    },
    {
      "epoch": 0.36039704759480784,
      "grad_norm": 11.560089111328125,
      "learning_rate": 7.173552279091087e-05,
      "loss": 4.0946,
      "step": 354
    },
    {
      "epoch": 0.3614151183507254,
      "grad_norm": 12.211684226989746,
      "learning_rate": 7.159076749654559e-05,
      "loss": 3.7029,
      "step": 355
    },
    {
      "epoch": 0.3624331891066429,
      "grad_norm": 13.772133827209473,
      "learning_rate": 7.144578941523284e-05,
      "loss": 4.3069,
      "step": 356
    },
    {
      "epoch": 0.36345125986256044,
      "grad_norm": 11.917257308959961,
      "learning_rate": 7.130059004294647e-05,
      "loss": 3.3222,
      "step": 357
    },
    {
      "epoch": 0.36446933061847797,
      "grad_norm": 15.86817741394043,
      "learning_rate": 7.115517087794381e-05,
      "loss": 3.5182,
      "step": 358
    },
    {
      "epoch": 0.3654874013743955,
      "grad_norm": 6.566526412963867,
      "learning_rate": 7.10095334207501e-05,
      "loss": 1.7594,
      "step": 359
    },
    {
      "epoch": 0.36650547213031304,
      "grad_norm": 6.9844560623168945,
      "learning_rate": 7.086367917414306e-05,
      "loss": 1.6552,
      "step": 360
    },
    {
      "epoch": 0.36752354288623057,
      "grad_norm": 9.259458541870117,
      "learning_rate": 7.07176096431374e-05,
      "loss": 2.2072,
      "step": 361
    },
    {
      "epoch": 0.3685416136421481,
      "grad_norm": 8.759178161621094,
      "learning_rate": 7.057132633496923e-05,
      "loss": 1.7327,
      "step": 362
    },
    {
      "epoch": 0.3695596843980657,
      "grad_norm": 7.46987247467041,
      "learning_rate": 7.042483075908062e-05,
      "loss": 1.6727,
      "step": 363
    },
    {
      "epoch": 0.3705777551539832,
      "grad_norm": 7.011016368865967,
      "learning_rate": 7.027812442710385e-05,
      "loss": 1.643,
      "step": 364
    },
    {
      "epoch": 0.37159582590990076,
      "grad_norm": 8.475665092468262,
      "learning_rate": 7.013120885284598e-05,
      "loss": 2.4295,
      "step": 365
    },
    {
      "epoch": 0.3726138966658183,
      "grad_norm": 5.769803047180176,
      "learning_rate": 6.998408555227314e-05,
      "loss": 1.3708,
      "step": 366
    },
    {
      "epoch": 0.3736319674217358,
      "grad_norm": 6.653828144073486,
      "learning_rate": 6.983675604349493e-05,
      "loss": 2.111,
      "step": 367
    },
    {
      "epoch": 0.37465003817765336,
      "grad_norm": 8.172953605651855,
      "learning_rate": 6.968922184674867e-05,
      "loss": 2.3177,
      "step": 368
    },
    {
      "epoch": 0.3756681089335709,
      "grad_norm": 6.391868591308594,
      "learning_rate": 6.954148448438389e-05,
      "loss": 1.2711,
      "step": 369
    },
    {
      "epoch": 0.3766861796894884,
      "grad_norm": 8.03226375579834,
      "learning_rate": 6.93935454808464e-05,
      "loss": 1.7306,
      "step": 370
    },
    {
      "epoch": 0.37770425044540595,
      "grad_norm": 5.273244857788086,
      "learning_rate": 6.924540636266272e-05,
      "loss": 1.3542,
      "step": 371
    },
    {
      "epoch": 0.3787223212013235,
      "grad_norm": 9.628256797790527,
      "learning_rate": 6.909706865842429e-05,
      "loss": 2.7357,
      "step": 372
    },
    {
      "epoch": 0.379740391957241,
      "grad_norm": 11.36279582977295,
      "learning_rate": 6.894853389877163e-05,
      "loss": 2.2367,
      "step": 373
    },
    {
      "epoch": 0.38075846271315855,
      "grad_norm": 8.72659969329834,
      "learning_rate": 6.879980361637866e-05,
      "loss": 2.2005,
      "step": 374
    },
    {
      "epoch": 0.3817765334690761,
      "grad_norm": 7.1913042068481445,
      "learning_rate": 6.86508793459368e-05,
      "loss": 2.1034,
      "step": 375
    },
    {
      "epoch": 0.3827946042249936,
      "grad_norm": 8.96323299407959,
      "learning_rate": 6.850176262413912e-05,
      "loss": 2.8465,
      "step": 376
    },
    {
      "epoch": 0.38381267498091115,
      "grad_norm": 6.918330192565918,
      "learning_rate": 6.835245498966461e-05,
      "loss": 1.8181,
      "step": 377
    },
    {
      "epoch": 0.3848307457368287,
      "grad_norm": 9.063780784606934,
      "learning_rate": 6.820295798316214e-05,
      "loss": 2.4932,
      "step": 378
    },
    {
      "epoch": 0.38584881649274627,
      "grad_norm": 13.343623161315918,
      "learning_rate": 6.805327314723468e-05,
      "loss": 3.0215,
      "step": 379
    },
    {
      "epoch": 0.3868668872486638,
      "grad_norm": 7.741687774658203,
      "learning_rate": 6.790340202642332e-05,
      "loss": 2.1827,
      "step": 380
    },
    {
      "epoch": 0.38788495800458134,
      "grad_norm": 5.362514495849609,
      "learning_rate": 6.775334616719136e-05,
      "loss": 1.7059,
      "step": 381
    },
    {
      "epoch": 0.38890302876049887,
      "grad_norm": 10.59506893157959,
      "learning_rate": 6.760310711790832e-05,
      "loss": 2.6664,
      "step": 382
    },
    {
      "epoch": 0.3899210995164164,
      "grad_norm": 9.036832809448242,
      "learning_rate": 6.745268642883404e-05,
      "loss": 2.5482,
      "step": 383
    },
    {
      "epoch": 0.39093917027233394,
      "grad_norm": 7.859615802764893,
      "learning_rate": 6.73020856521026e-05,
      "loss": 2.1062,
      "step": 384
    },
    {
      "epoch": 0.39195724102825147,
      "grad_norm": 9.639580726623535,
      "learning_rate": 6.715130634170635e-05,
      "loss": 3.0521,
      "step": 385
    },
    {
      "epoch": 0.392975311784169,
      "grad_norm": 8.098716735839844,
      "learning_rate": 6.700035005347983e-05,
      "loss": 2.6295,
      "step": 386
    },
    {
      "epoch": 0.39399338254008653,
      "grad_norm": 11.05691146850586,
      "learning_rate": 6.684921834508379e-05,
      "loss": 2.6122,
      "step": 387
    },
    {
      "epoch": 0.39501145329600407,
      "grad_norm": 9.071178436279297,
      "learning_rate": 6.669791277598904e-05,
      "loss": 2.3797,
      "step": 388
    },
    {
      "epoch": 0.3960295240519216,
      "grad_norm": 9.826159477233887,
      "learning_rate": 6.654643490746042e-05,
      "loss": 2.4635,
      "step": 389
    },
    {
      "epoch": 0.39704759480783913,
      "grad_norm": 7.310181140899658,
      "learning_rate": 6.639478630254064e-05,
      "loss": 2.06,
      "step": 390
    },
    {
      "epoch": 0.39806566556375667,
      "grad_norm": 9.507575035095215,
      "learning_rate": 6.624296852603419e-05,
      "loss": 2.9877,
      "step": 391
    },
    {
      "epoch": 0.3990837363196742,
      "grad_norm": 7.882664680480957,
      "learning_rate": 6.609098314449116e-05,
      "loss": 2.1182,
      "step": 392
    },
    {
      "epoch": 0.40010180707559173,
      "grad_norm": 9.889808654785156,
      "learning_rate": 6.593883172619111e-05,
      "loss": 3.1559,
      "step": 393
    },
    {
      "epoch": 0.40111987783150926,
      "grad_norm": 9.180678367614746,
      "learning_rate": 6.578651584112686e-05,
      "loss": 2.6704,
      "step": 394
    },
    {
      "epoch": 0.40213794858742685,
      "grad_norm": 8.127158164978027,
      "learning_rate": 6.563403706098833e-05,
      "loss": 2.1218,
      "step": 395
    },
    {
      "epoch": 0.4031560193433444,
      "grad_norm": 9.298659324645996,
      "learning_rate": 6.548139695914622e-05,
      "loss": 3.3196,
      "step": 396
    },
    {
      "epoch": 0.4041740900992619,
      "grad_norm": 7.540867328643799,
      "learning_rate": 6.532859711063594e-05,
      "loss": 1.9205,
      "step": 397
    },
    {
      "epoch": 0.40519216085517945,
      "grad_norm": 8.0338716506958,
      "learning_rate": 6.51756390921412e-05,
      "loss": 2.2858,
      "step": 398
    },
    {
      "epoch": 0.406210231611097,
      "grad_norm": 8.012909889221191,
      "learning_rate": 6.502252448197782e-05,
      "loss": 2.3761,
      "step": 399
    },
    {
      "epoch": 0.4072283023670145,
      "grad_norm": 7.803510665893555,
      "learning_rate": 6.486925486007742e-05,
      "loss": 2.0418,
      "step": 400
    },
    {
      "epoch": 0.40824637312293205,
      "grad_norm": 6.121700286865234,
      "learning_rate": 6.471583180797121e-05,
      "loss": 2.1481,
      "step": 401
    },
    {
      "epoch": 0.4092644438788496,
      "grad_norm": 8.690316200256348,
      "learning_rate": 6.456225690877344e-05,
      "loss": 3.0496,
      "step": 402
    },
    {
      "epoch": 0.4102825146347671,
      "grad_norm": 8.961786270141602,
      "learning_rate": 6.440853174716534e-05,
      "loss": 3.4188,
      "step": 403
    },
    {
      "epoch": 0.41130058539068465,
      "grad_norm": 9.848220825195312,
      "learning_rate": 6.425465790937861e-05,
      "loss": 3.948,
      "step": 404
    },
    {
      "epoch": 0.4123186561466022,
      "grad_norm": 11.758772850036621,
      "learning_rate": 6.410063698317901e-05,
      "loss": 4.4288,
      "step": 405
    },
    {
      "epoch": 0.4133367269025197,
      "grad_norm": 10.132964134216309,
      "learning_rate": 6.394647055785017e-05,
      "loss": 3.4126,
      "step": 406
    },
    {
      "epoch": 0.41435479765843725,
      "grad_norm": 9.949785232543945,
      "learning_rate": 6.379216022417696e-05,
      "loss": 2.584,
      "step": 407
    },
    {
      "epoch": 0.4153728684143548,
      "grad_norm": 13.347235679626465,
      "learning_rate": 6.363770757442927e-05,
      "loss": 2.8766,
      "step": 408
    },
    {
      "epoch": 0.4163909391702723,
      "grad_norm": 7.714400768280029,
      "learning_rate": 6.348311420234542e-05,
      "loss": 2.4595,
      "step": 409
    },
    {
      "epoch": 0.41740900992618984,
      "grad_norm": 5.146259307861328,
      "learning_rate": 6.332838170311585e-05,
      "loss": 1.4562,
      "step": 410
    },
    {
      "epoch": 0.41842708068210743,
      "grad_norm": 5.76894474029541,
      "learning_rate": 6.31735116733666e-05,
      "loss": 1.3719,
      "step": 411
    },
    {
      "epoch": 0.41944515143802497,
      "grad_norm": 5.680314540863037,
      "learning_rate": 6.301850571114281e-05,
      "loss": 1.173,
      "step": 412
    },
    {
      "epoch": 0.4204632221939425,
      "grad_norm": 8.60592269897461,
      "learning_rate": 6.286336541589224e-05,
      "loss": 2.2157,
      "step": 413
    },
    {
      "epoch": 0.42148129294986003,
      "grad_norm": 6.760054588317871,
      "learning_rate": 6.27080923884488e-05,
      "loss": 1.6651,
      "step": 414
    },
    {
      "epoch": 0.42249936370577756,
      "grad_norm": 6.813849925994873,
      "learning_rate": 6.255268823101605e-05,
      "loss": 1.6109,
      "step": 415
    },
    {
      "epoch": 0.4235174344616951,
      "grad_norm": 7.729984760284424,
      "learning_rate": 6.239715454715054e-05,
      "loss": 2.0043,
      "step": 416
    },
    {
      "epoch": 0.42453550521761263,
      "grad_norm": 6.723374366760254,
      "learning_rate": 6.224149294174548e-05,
      "loss": 1.7516,
      "step": 417
    },
    {
      "epoch": 0.42555357597353016,
      "grad_norm": 7.92853307723999,
      "learning_rate": 6.208570502101393e-05,
      "loss": 2.0667,
      "step": 418
    },
    {
      "epoch": 0.4265716467294477,
      "grad_norm": 8.561442375183105,
      "learning_rate": 6.192979239247243e-05,
      "loss": 2.3514,
      "step": 419
    },
    {
      "epoch": 0.42758971748536523,
      "grad_norm": 6.065650463104248,
      "learning_rate": 6.177375666492431e-05,
      "loss": 1.6079,
      "step": 420
    },
    {
      "epoch": 0.42860778824128276,
      "grad_norm": 7.547060489654541,
      "learning_rate": 6.161759944844308e-05,
      "loss": 1.6165,
      "step": 421
    },
    {
      "epoch": 0.4296258589972003,
      "grad_norm": 7.750328540802002,
      "learning_rate": 6.146132235435591e-05,
      "loss": 2.6017,
      "step": 422
    },
    {
      "epoch": 0.4306439297531178,
      "grad_norm": 8.826605796813965,
      "learning_rate": 6.13049269952269e-05,
      "loss": 2.8712,
      "step": 423
    },
    {
      "epoch": 0.43166200050903536,
      "grad_norm": 9.169108390808105,
      "learning_rate": 6.114841498484048e-05,
      "loss": 3.1703,
      "step": 424
    },
    {
      "epoch": 0.4326800712649529,
      "grad_norm": 10.452327728271484,
      "learning_rate": 6.0991787938184784e-05,
      "loss": 2.8737,
      "step": 425
    },
    {
      "epoch": 0.4336981420208704,
      "grad_norm": 6.921334743499756,
      "learning_rate": 6.0835047471434955e-05,
      "loss": 2.1521,
      "step": 426
    },
    {
      "epoch": 0.434716212776788,
      "grad_norm": 7.088666915893555,
      "learning_rate": 6.067819520193645e-05,
      "loss": 1.833,
      "step": 427
    },
    {
      "epoch": 0.43573428353270555,
      "grad_norm": 7.354036331176758,
      "learning_rate": 6.052123274818842e-05,
      "loss": 2.0779,
      "step": 428
    },
    {
      "epoch": 0.4367523542886231,
      "grad_norm": 6.732209205627441,
      "learning_rate": 6.0364161729826905e-05,
      "loss": 1.8935,
      "step": 429
    },
    {
      "epoch": 0.4377704250445406,
      "grad_norm": 7.724286079406738,
      "learning_rate": 6.020698376760824e-05,
      "loss": 2.334,
      "step": 430
    },
    {
      "epoch": 0.43878849580045814,
      "grad_norm": 7.147849082946777,
      "learning_rate": 6.004970048339226e-05,
      "loss": 1.9002,
      "step": 431
    },
    {
      "epoch": 0.4398065665563757,
      "grad_norm": 10.327398300170898,
      "learning_rate": 5.989231350012554e-05,
      "loss": 3.3501,
      "step": 432
    },
    {
      "epoch": 0.4408246373122932,
      "grad_norm": 8.666500091552734,
      "learning_rate": 5.973482444182475e-05,
      "loss": 2.4769,
      "step": 433
    },
    {
      "epoch": 0.44184270806821074,
      "grad_norm": 10.457250595092773,
      "learning_rate": 5.9577234933559764e-05,
      "loss": 3.03,
      "step": 434
    },
    {
      "epoch": 0.4428607788241283,
      "grad_norm": 11.054841995239258,
      "learning_rate": 5.941954660143703e-05,
      "loss": 2.6938,
      "step": 435
    },
    {
      "epoch": 0.4438788495800458,
      "grad_norm": 8.820106506347656,
      "learning_rate": 5.9261761072582655e-05,
      "loss": 2.6282,
      "step": 436
    },
    {
      "epoch": 0.44489692033596334,
      "grad_norm": 9.201048851013184,
      "learning_rate": 5.910387997512573e-05,
      "loss": 3.0258,
      "step": 437
    },
    {
      "epoch": 0.4459149910918809,
      "grad_norm": 8.373756408691406,
      "learning_rate": 5.8945904938181484e-05,
      "loss": 2.0512,
      "step": 438
    },
    {
      "epoch": 0.4469330618477984,
      "grad_norm": 7.493674278259277,
      "learning_rate": 5.878783759183442e-05,
      "loss": 2.3767,
      "step": 439
    },
    {
      "epoch": 0.44795113260371594,
      "grad_norm": 8.552105903625488,
      "learning_rate": 5.86296795671216e-05,
      "loss": 2.4706,
      "step": 440
    },
    {
      "epoch": 0.4489692033596335,
      "grad_norm": 7.708653450012207,
      "learning_rate": 5.847143249601574e-05,
      "loss": 2.2514,
      "step": 441
    },
    {
      "epoch": 0.449987274115551,
      "grad_norm": 9.070602416992188,
      "learning_rate": 5.8313098011408406e-05,
      "loss": 2.322,
      "step": 442
    },
    {
      "epoch": 0.4510053448714686,
      "grad_norm": 7.668588638305664,
      "learning_rate": 5.8154677747093134e-05,
      "loss": 1.8555,
      "step": 443
    },
    {
      "epoch": 0.4520234156273861,
      "grad_norm": 9.315807342529297,
      "learning_rate": 5.7996173337748606e-05,
      "loss": 2.1384,
      "step": 444
    },
    {
      "epoch": 0.45304148638330366,
      "grad_norm": 10.211068153381348,
      "learning_rate": 5.783758641892172e-05,
      "loss": 2.8774,
      "step": 445
    },
    {
      "epoch": 0.4540595571392212,
      "grad_norm": 9.461262702941895,
      "learning_rate": 5.767891862701082e-05,
      "loss": 2.5156,
      "step": 446
    },
    {
      "epoch": 0.4550776278951387,
      "grad_norm": 9.720065116882324,
      "learning_rate": 5.7520171599248704e-05,
      "loss": 2.6157,
      "step": 447
    },
    {
      "epoch": 0.45609569865105626,
      "grad_norm": 8.700965881347656,
      "learning_rate": 5.7361346973685794e-05,
      "loss": 2.2904,
      "step": 448
    },
    {
      "epoch": 0.4571137694069738,
      "grad_norm": 7.846927642822266,
      "learning_rate": 5.7202446389173223e-05,
      "loss": 1.968,
      "step": 449
    },
    {
      "epoch": 0.4581318401628913,
      "grad_norm": 11.256888389587402,
      "learning_rate": 5.704347148534589e-05,
      "loss": 2.0931,
      "step": 450
    },
    {
      "epoch": 0.45914991091880886,
      "grad_norm": 8.149317741394043,
      "learning_rate": 5.688442390260559e-05,
      "loss": 3.8319,
      "step": 451
    },
    {
      "epoch": 0.4601679816747264,
      "grad_norm": 10.932345390319824,
      "learning_rate": 5.672530528210405e-05,
      "loss": 3.0198,
      "step": 452
    },
    {
      "epoch": 0.4611860524306439,
      "grad_norm": 9.099678993225098,
      "learning_rate": 5.6566117265726006e-05,
      "loss": 3.3294,
      "step": 453
    },
    {
      "epoch": 0.46220412318656146,
      "grad_norm": 11.72851848602295,
      "learning_rate": 5.640686149607228e-05,
      "loss": 4.2732,
      "step": 454
    },
    {
      "epoch": 0.463222193942479,
      "grad_norm": 11.902469635009766,
      "learning_rate": 5.624753961644281e-05,
      "loss": 3.6108,
      "step": 455
    },
    {
      "epoch": 0.4642402646983965,
      "grad_norm": 10.879993438720703,
      "learning_rate": 5.608815327081969e-05,
      "loss": 2.6874,
      "step": 456
    },
    {
      "epoch": 0.46525833545431405,
      "grad_norm": 14.943718910217285,
      "learning_rate": 5.5928704103850206e-05,
      "loss": 4.6303,
      "step": 457
    },
    {
      "epoch": 0.4662764062102316,
      "grad_norm": 10.216432571411133,
      "learning_rate": 5.57691937608299e-05,
      "loss": 2.4846,
      "step": 458
    },
    {
      "epoch": 0.4672944769661492,
      "grad_norm": 8.506292343139648,
      "learning_rate": 5.5609623887685535e-05,
      "loss": 2.476,
      "step": 459
    },
    {
      "epoch": 0.4683125477220667,
      "grad_norm": 8.514628410339355,
      "learning_rate": 5.544999613095818e-05,
      "loss": 2.377,
      "step": 460
    },
    {
      "epoch": 0.46933061847798424,
      "grad_norm": 5.901019096374512,
      "learning_rate": 5.5290312137786146e-05,
      "loss": 1.5461,
      "step": 461
    },
    {
      "epoch": 0.4703486892339018,
      "grad_norm": 5.75610876083374,
      "learning_rate": 5.513057355588804e-05,
      "loss": 1.4872,
      "step": 462
    },
    {
      "epoch": 0.4713667599898193,
      "grad_norm": 5.7381272315979,
      "learning_rate": 5.4970782033545774e-05,
      "loss": 1.4357,
      "step": 463
    },
    {
      "epoch": 0.47238483074573684,
      "grad_norm": 7.901957035064697,
      "learning_rate": 5.4810939219587485e-05,
      "loss": 1.8938,
      "step": 464
    },
    {
      "epoch": 0.47340290150165437,
      "grad_norm": 5.663755893707275,
      "learning_rate": 5.465104676337062e-05,
      "loss": 1.2289,
      "step": 465
    },
    {
      "epoch": 0.4744209722575719,
      "grad_norm": 7.933018684387207,
      "learning_rate": 5.44911063147648e-05,
      "loss": 2.2704,
      "step": 466
    },
    {
      "epoch": 0.47543904301348944,
      "grad_norm": 7.943240165710449,
      "learning_rate": 5.433111952413495e-05,
      "loss": 2.3279,
      "step": 467
    },
    {
      "epoch": 0.47645711376940697,
      "grad_norm": 6.650790691375732,
      "learning_rate": 5.417108804232409e-05,
      "loss": 2.2172,
      "step": 468
    },
    {
      "epoch": 0.4774751845253245,
      "grad_norm": 7.444812774658203,
      "learning_rate": 5.401101352063647e-05,
      "loss": 2.2142,
      "step": 469
    },
    {
      "epoch": 0.47849325528124204,
      "grad_norm": 7.4753642082214355,
      "learning_rate": 5.3850897610820396e-05,
      "loss": 1.757,
      "step": 470
    },
    {
      "epoch": 0.47951132603715957,
      "grad_norm": 5.919788360595703,
      "learning_rate": 5.369074196505125e-05,
      "loss": 1.6481,
      "step": 471
    },
    {
      "epoch": 0.4805293967930771,
      "grad_norm": 8.422210693359375,
      "learning_rate": 5.3530548235914454e-05,
      "loss": 2.157,
      "step": 472
    },
    {
      "epoch": 0.48154746754899463,
      "grad_norm": 6.410617351531982,
      "learning_rate": 5.33703180763884e-05,
      "loss": 1.8347,
      "step": 473
    },
    {
      "epoch": 0.48256553830491217,
      "grad_norm": 6.645679473876953,
      "learning_rate": 5.321005313982738e-05,
      "loss": 1.9199,
      "step": 474
    },
    {
      "epoch": 0.4835836090608297,
      "grad_norm": 8.815511703491211,
      "learning_rate": 5.3049755079944527e-05,
      "loss": 1.9625,
      "step": 475
    },
    {
      "epoch": 0.4846016798167473,
      "grad_norm": 6.998859882354736,
      "learning_rate": 5.288942555079479e-05,
      "loss": 1.9826,
      "step": 476
    },
    {
      "epoch": 0.4856197505726648,
      "grad_norm": 9.812437057495117,
      "learning_rate": 5.272906620675779e-05,
      "loss": 2.8627,
      "step": 477
    },
    {
      "epoch": 0.48663782132858235,
      "grad_norm": 6.514684677124023,
      "learning_rate": 5.256867870252087e-05,
      "loss": 1.8711,
      "step": 478
    },
    {
      "epoch": 0.4876558920844999,
      "grad_norm": 8.101947784423828,
      "learning_rate": 5.240826469306187e-05,
      "loss": 2.2444,
      "step": 479
    },
    {
      "epoch": 0.4886739628404174,
      "grad_norm": 7.826203346252441,
      "learning_rate": 5.224782583363215e-05,
      "loss": 2.3479,
      "step": 480
    },
    {
      "epoch": 0.48969203359633495,
      "grad_norm": 7.306244850158691,
      "learning_rate": 5.208736377973954e-05,
      "loss": 2.2877,
      "step": 481
    },
    {
      "epoch": 0.4907101043522525,
      "grad_norm": 10.923294067382812,
      "learning_rate": 5.192688018713113e-05,
      "loss": 3.4528,
      "step": 482
    },
    {
      "epoch": 0.49172817510817,
      "grad_norm": 7.382264614105225,
      "learning_rate": 5.176637671177631e-05,
      "loss": 2.1969,
      "step": 483
    },
    {
      "epoch": 0.49274624586408755,
      "grad_norm": 10.03930377960205,
      "learning_rate": 5.1605855009849614e-05,
      "loss": 3.5883,
      "step": 484
    },
    {
      "epoch": 0.4937643166200051,
      "grad_norm": 7.927920818328857,
      "learning_rate": 5.144531673771363e-05,
      "loss": 2.3655,
      "step": 485
    },
    {
      "epoch": 0.4947823873759226,
      "grad_norm": 8.499062538146973,
      "learning_rate": 5.1284763551901995e-05,
      "loss": 2.5173,
      "step": 486
    },
    {
      "epoch": 0.49580045813184015,
      "grad_norm": 6.909058094024658,
      "learning_rate": 5.112419710910213e-05,
      "loss": 2.0323,
      "step": 487
    },
    {
      "epoch": 0.4968185288877577,
      "grad_norm": 8.88456916809082,
      "learning_rate": 5.096361906613836e-05,
      "loss": 2.6987,
      "step": 488
    },
    {
      "epoch": 0.4978365996436752,
      "grad_norm": 7.5175580978393555,
      "learning_rate": 5.080303107995461e-05,
      "loss": 2.1691,
      "step": 489
    },
    {
      "epoch": 0.49885467039959275,
      "grad_norm": 7.615448951721191,
      "learning_rate": 5.064243480759748e-05,
      "loss": 2.0365,
      "step": 490
    },
    {
      "epoch": 0.4998727411555103,
      "grad_norm": 9.251805305480957,
      "learning_rate": 5.048183190619904e-05,
      "loss": 2.2146,
      "step": 491
    },
    {
      "epoch": 0.5008908119114278,
      "grad_norm": 7.876194477081299,
      "learning_rate": 5.032122403295977e-05,
      "loss": 2.3439,
      "step": 492
    }
  ],
  "logging_steps": 1,
  "max_steps": 983,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 246,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.032897809991598e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}