{ |
|
"best_metric": 0.38096651434898376, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.33222591362126247, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0016611295681063123, |
|
"grad_norm": 1.2991793155670166, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6513, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0016611295681063123, |
|
"eval_loss": 0.8317100405693054, |
|
"eval_runtime": 132.9593, |
|
"eval_samples_per_second": 7.626, |
|
"eval_steps_per_second": 1.91, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0033222591362126247, |
|
"grad_norm": 1.2579532861709595, |
|
"learning_rate": 2e-05, |
|
"loss": 0.6749, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0049833887043189366, |
|
"grad_norm": 1.0699290037155151, |
|
"learning_rate": 3e-05, |
|
"loss": 0.6399, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.006644518272425249, |
|
"grad_norm": 0.7915657162666321, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6214, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.008305647840531562, |
|
"grad_norm": 0.5864650011062622, |
|
"learning_rate": 5e-05, |
|
"loss": 0.5893, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009966777408637873, |
|
"grad_norm": 0.4930439591407776, |
|
"learning_rate": 6e-05, |
|
"loss": 0.5539, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.011627906976744186, |
|
"grad_norm": 0.8789781928062439, |
|
"learning_rate": 7e-05, |
|
"loss": 0.5859, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.013289036544850499, |
|
"grad_norm": 0.6858446598052979, |
|
"learning_rate": 8e-05, |
|
"loss": 0.5161, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.014950166112956811, |
|
"grad_norm": 0.4820753037929535, |
|
"learning_rate": 9e-05, |
|
"loss": 0.4945, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.016611295681063124, |
|
"grad_norm": 0.35846245288848877, |
|
"learning_rate": 0.0001, |
|
"loss": 0.4702, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.018272425249169437, |
|
"grad_norm": 0.3399806022644043, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 0.4192, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.019933554817275746, |
|
"grad_norm": 0.32994091510772705, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 0.4455, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02159468438538206, |
|
"grad_norm": 0.2916202247142792, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 0.4491, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.023255813953488372, |
|
"grad_norm": 0.2513728439807892, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 0.4192, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.024916943521594685, |
|
"grad_norm": 0.20601803064346313, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 0.4135, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.026578073089700997, |
|
"grad_norm": 0.20515939593315125, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 0.4149, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02823920265780731, |
|
"grad_norm": 0.24935221672058105, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 0.4144, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.029900332225913623, |
|
"grad_norm": 0.22809065878391266, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 0.4117, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03156146179401993, |
|
"grad_norm": 0.2064090371131897, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 0.3975, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03322259136212625, |
|
"grad_norm": 0.20076528191566467, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 0.3937, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03488372093023256, |
|
"grad_norm": 0.20907463133335114, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 0.3936, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.036544850498338874, |
|
"grad_norm": 0.20472639799118042, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 0.3875, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03820598006644518, |
|
"grad_norm": 0.17747552692890167, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 0.3996, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03986710963455149, |
|
"grad_norm": 0.18577048182487488, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 0.4622, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04152823920265781, |
|
"grad_norm": 0.18465018272399902, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 0.4465, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04318936877076412, |
|
"grad_norm": 0.36926233768463135, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 0.6891, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.044850498338870434, |
|
"grad_norm": 0.3326234817504883, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 0.6926, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.046511627906976744, |
|
"grad_norm": 0.24597014486789703, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 0.6492, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04817275747508306, |
|
"grad_norm": 0.22998689115047455, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.6338, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04983388704318937, |
|
"grad_norm": 0.2597779631614685, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 0.6451, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05149501661129568, |
|
"grad_norm": 0.30236122012138367, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 0.622, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.053156146179401995, |
|
"grad_norm": 0.2684450149536133, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 0.6151, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.054817275747508304, |
|
"grad_norm": 0.25038909912109375, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 0.593, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.05647840531561462, |
|
"grad_norm": 0.25587284564971924, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 0.5875, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05813953488372093, |
|
"grad_norm": 0.19960953295230865, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 0.5488, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.059800664451827246, |
|
"grad_norm": 0.19124135375022888, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 0.5632, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.061461794019933555, |
|
"grad_norm": 0.1789243221282959, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 0.5588, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06312292358803986, |
|
"grad_norm": 0.17890439927577972, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 0.5598, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06478405315614617, |
|
"grad_norm": 0.1927339732646942, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 0.5882, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0664451827242525, |
|
"grad_norm": 0.20116066932678223, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 0.5741, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0681063122923588, |
|
"grad_norm": 0.17227494716644287, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 0.5436, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.06976744186046512, |
|
"grad_norm": 0.18728812038898468, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 0.5824, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07142857142857142, |
|
"grad_norm": 0.1641688197851181, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 0.5773, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07308970099667775, |
|
"grad_norm": 0.16669844090938568, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 0.5692, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.07475083056478406, |
|
"grad_norm": 0.16173191368579865, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 0.5706, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07641196013289037, |
|
"grad_norm": 0.16865822672843933, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 0.5794, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07807308970099668, |
|
"grad_norm": 0.1681707352399826, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 0.5831, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.07973421926910298, |
|
"grad_norm": 0.18740896880626678, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.5991, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08139534883720931, |
|
"grad_norm": 0.18814262747764587, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 0.6662, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08305647840531562, |
|
"grad_norm": 0.24064107239246368, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 0.7061, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08305647840531562, |
|
"eval_loss": 0.45007362961769104, |
|
"eval_runtime": 134.6502, |
|
"eval_samples_per_second": 7.531, |
|
"eval_steps_per_second": 1.886, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08471760797342193, |
|
"grad_norm": 0.18804551661014557, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 0.3224, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.08637873754152824, |
|
"grad_norm": 0.16869483888149261, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 0.3237, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08803986710963455, |
|
"grad_norm": 0.16511651873588562, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 0.353, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.08970099667774087, |
|
"grad_norm": 0.1492200344800949, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 0.3538, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09136212624584718, |
|
"grad_norm": 0.13702312111854553, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 0.3423, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09302325581395349, |
|
"grad_norm": 0.14534348249435425, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 0.331, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.0946843853820598, |
|
"grad_norm": 0.1291942298412323, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 0.3115, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.09634551495016612, |
|
"grad_norm": 0.13093352317810059, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 0.3066, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.09800664451827243, |
|
"grad_norm": 0.1452917754650116, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 0.3485, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.09966777408637874, |
|
"grad_norm": 0.13886800408363342, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 0.3127, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.10132890365448505, |
|
"grad_norm": 0.14664413034915924, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 0.3349, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.10299003322259136, |
|
"grad_norm": 0.13130126893520355, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 0.3031, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.10465116279069768, |
|
"grad_norm": 0.13619501888751984, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 0.3359, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.10631229235880399, |
|
"grad_norm": 0.14423972368240356, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 0.3169, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.1079734219269103, |
|
"grad_norm": 0.13670726120471954, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 0.3358, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.10963455149501661, |
|
"grad_norm": 0.13185586035251617, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 0.3096, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11129568106312292, |
|
"grad_norm": 0.13396374881267548, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.3091, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.11295681063122924, |
|
"grad_norm": 0.1417226344347, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 0.3102, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.11461794019933555, |
|
"grad_norm": 0.1466892659664154, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 0.3403, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.11627906976744186, |
|
"grad_norm": 0.1433144360780716, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 0.324, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11794019933554817, |
|
"grad_norm": 0.1436891257762909, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 0.3434, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.11960132890365449, |
|
"grad_norm": 0.15285548567771912, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 0.3344, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.1212624584717608, |
|
"grad_norm": 0.14362435042858124, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 0.3191, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.12292358803986711, |
|
"grad_norm": 0.15305458009243011, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 0.3776, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.12458471760797342, |
|
"grad_norm": 0.1848021298646927, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 0.4502, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.12624584717607973, |
|
"grad_norm": 0.18864738941192627, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 0.5257, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.12790697674418605, |
|
"grad_norm": 0.16458578407764435, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 0.526, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.12956810631229235, |
|
"grad_norm": 0.14648108184337616, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 0.5084, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.13122923588039867, |
|
"grad_norm": 0.13501055538654327, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 0.4777, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.132890365448505, |
|
"grad_norm": 0.15276943147182465, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 0.5236, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1345514950166113, |
|
"grad_norm": 0.1511184722185135, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 0.5364, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1362126245847176, |
|
"grad_norm": 0.14624027907848358, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 0.5027, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1378737541528239, |
|
"grad_norm": 0.13957354426383972, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 0.4793, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.13953488372093023, |
|
"grad_norm": 0.13981389999389648, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 0.4919, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.14119601328903655, |
|
"grad_norm": 0.15372055768966675, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 0.5226, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.14285714285714285, |
|
"grad_norm": 0.1438971310853958, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.5232, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.14451827242524917, |
|
"grad_norm": 0.1383737474679947, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 0.5007, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1461794019933555, |
|
"grad_norm": 0.14018723368644714, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 0.5328, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1478405315614618, |
|
"grad_norm": 0.1336921900510788, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 0.5123, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.14950166112956811, |
|
"grad_norm": 0.14490532875061035, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 0.4996, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1511627906976744, |
|
"grad_norm": 0.14218592643737793, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 0.5103, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.15282392026578073, |
|
"grad_norm": 0.13371700048446655, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 0.4695, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.15448504983388706, |
|
"grad_norm": 0.14268216490745544, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 0.4996, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.15614617940199335, |
|
"grad_norm": 0.15761351585388184, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 0.5142, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.15780730897009967, |
|
"grad_norm": 0.14862477779388428, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 0.4953, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.15946843853820597, |
|
"grad_norm": 0.15104886889457703, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 0.5656, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1611295681063123, |
|
"grad_norm": 0.1583997756242752, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 0.538, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.16279069767441862, |
|
"grad_norm": 0.16727805137634277, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 0.5618, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1644518272425249, |
|
"grad_norm": 0.18140842020511627, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 0.5916, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.16611295681063123, |
|
"grad_norm": 0.2026122659444809, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 0.677, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16611295681063123, |
|
"eval_loss": 0.409039705991745, |
|
"eval_runtime": 134.7555, |
|
"eval_samples_per_second": 7.525, |
|
"eval_steps_per_second": 1.885, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16777408637873753, |
|
"grad_norm": 0.1708502620458603, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 0.2821, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.16943521594684385, |
|
"grad_norm": 0.1842401623725891, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 0.2969, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.17109634551495018, |
|
"grad_norm": 0.15198273956775665, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 0.2685, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.17275747508305647, |
|
"grad_norm": 0.14956019818782806, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 0.2899, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1744186046511628, |
|
"grad_norm": 0.1505981981754303, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2836, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1760797342192691, |
|
"grad_norm": 0.15199552476406097, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 0.2911, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1777408637873754, |
|
"grad_norm": 0.15069696307182312, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 0.2961, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.17940199335548174, |
|
"grad_norm": 0.14875832200050354, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 0.287, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.18106312292358803, |
|
"grad_norm": 0.1654970794916153, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 0.3096, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.18272425249169436, |
|
"grad_norm": 0.14778447151184082, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 0.2612, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.18438538205980065, |
|
"grad_norm": 0.14741088449954987, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 0.2852, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.18604651162790697, |
|
"grad_norm": 0.1445004642009735, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 0.2849, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1877076411960133, |
|
"grad_norm": 0.16333213448524475, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 0.2977, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.1893687707641196, |
|
"grad_norm": 0.1434721201658249, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 0.2925, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.19102990033222592, |
|
"grad_norm": 0.13791555166244507, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 0.2619, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.19269102990033224, |
|
"grad_norm": 0.14811094105243683, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 0.2951, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.19435215946843853, |
|
"grad_norm": 0.13586348295211792, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 0.2758, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.19601328903654486, |
|
"grad_norm": 0.15308637917041779, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 0.2889, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.19767441860465115, |
|
"grad_norm": 0.1554800420999527, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 0.2985, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.19933554817275748, |
|
"grad_norm": 0.1462528556585312, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 0.293, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2009966777408638, |
|
"grad_norm": 0.15194445848464966, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 0.3062, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2026578073089701, |
|
"grad_norm": 0.15283334255218506, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 0.3033, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.20431893687707642, |
|
"grad_norm": 0.1533212512731552, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 0.3403, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2059800664451827, |
|
"grad_norm": 0.17399577796459198, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.415, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.20764119601328904, |
|
"grad_norm": 0.1844712793827057, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 0.4619, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.20930232558139536, |
|
"grad_norm": 0.18232126533985138, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 0.4879, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.21096345514950166, |
|
"grad_norm": 0.17209911346435547, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 0.4918, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.21262458471760798, |
|
"grad_norm": 0.15540632605552673, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 0.4778, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.21428571428571427, |
|
"grad_norm": 0.14485326409339905, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 0.498, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2159468438538206, |
|
"grad_norm": 0.1500222533941269, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 0.4784, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.21760797342192692, |
|
"grad_norm": 0.1539774239063263, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 0.4763, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.21926910299003322, |
|
"grad_norm": 0.1498161256313324, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 0.4736, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.22093023255813954, |
|
"grad_norm": 0.15067896246910095, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 0.4791, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.22259136212624583, |
|
"grad_norm": 0.1555660367012024, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 0.5062, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.22425249169435216, |
|
"grad_norm": 0.15103194117546082, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 0.4651, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.22591362126245848, |
|
"grad_norm": 0.15316762030124664, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 0.4843, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.22757475083056478, |
|
"grad_norm": 0.15561173856258392, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 0.5106, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.2292358803986711, |
|
"grad_norm": 0.15198399126529694, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 0.4786, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.23089700996677742, |
|
"grad_norm": 0.15727408230304718, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 0.4604, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.23255813953488372, |
|
"grad_norm": 0.15432614088058472, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 0.4822, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.23421926910299004, |
|
"grad_norm": 0.14581052958965302, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 0.4861, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.23588039867109634, |
|
"grad_norm": 0.14340969920158386, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 0.4584, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.23754152823920266, |
|
"grad_norm": 0.15534572303295135, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.4786, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.23920265780730898, |
|
"grad_norm": 0.1554005742073059, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 0.5156, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.24086378737541528, |
|
"grad_norm": 0.15117494761943817, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 0.5163, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2425249169435216, |
|
"grad_norm": 0.1590317040681839, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 0.5077, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2441860465116279, |
|
"grad_norm": 0.15675310790538788, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 0.485, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.24584717607973422, |
|
"grad_norm": 0.16706211864948273, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 0.5485, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.24750830564784054, |
|
"grad_norm": 0.18405000865459442, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 0.568, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.24916943521594684, |
|
"grad_norm": 0.232390895485878, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 0.6994, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.24916943521594684, |
|
"eval_loss": 0.3880166709423065, |
|
"eval_runtime": 135.2855, |
|
"eval_samples_per_second": 7.495, |
|
"eval_steps_per_second": 1.878, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.25083056478405313, |
|
"grad_norm": 0.1730739325284958, |
|
"learning_rate": 1.553232954407171e-05, |
|
"loss": 0.2892, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.25249169435215946, |
|
"grad_norm": 0.15850678086280823, |
|
"learning_rate": 1.4938160786375572e-05, |
|
"loss": 0.2542, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2541528239202658, |
|
"grad_norm": 0.1620084047317505, |
|
"learning_rate": 1.435357758543015e-05, |
|
"loss": 0.259, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.2558139534883721, |
|
"grad_norm": 0.16261513531208038, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 0.2655, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2574750830564784, |
|
"grad_norm": 0.15043796598911285, |
|
"learning_rate": 1.3213804466343421e-05, |
|
"loss": 0.2809, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2591362126245847, |
|
"grad_norm": 0.1493295282125473, |
|
"learning_rate": 1.2658926150792322e-05, |
|
"loss": 0.2721, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.260797342192691, |
|
"grad_norm": 0.1420331448316574, |
|
"learning_rate": 1.2114256511983274e-05, |
|
"loss": 0.254, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.26245847176079734, |
|
"grad_norm": 0.13924731314182281, |
|
"learning_rate": 1.157994445715706e-05, |
|
"loss": 0.2624, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.26411960132890366, |
|
"grad_norm": 0.1446397304534912, |
|
"learning_rate": 1.1056136061894384e-05, |
|
"loss": 0.2683, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.26578073089701, |
|
"grad_norm": 0.13315437734127045, |
|
"learning_rate": 1.0542974530180327e-05, |
|
"loss": 0.2467, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.26744186046511625, |
|
"grad_norm": 0.13552388548851013, |
|
"learning_rate": 1.0040600155253765e-05, |
|
"loss": 0.2508, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2691029900332226, |
|
"grad_norm": 0.13076385855674744, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.2516, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2707641196013289, |
|
"grad_norm": 0.13947907090187073, |
|
"learning_rate": 9.068759265665384e-06, |
|
"loss": 0.2672, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2724252491694352, |
|
"grad_norm": 0.14179465174674988, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 0.2657, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.27408637873754155, |
|
"grad_norm": 0.1354580968618393, |
|
"learning_rate": 8.141676086873572e-06, |
|
"loss": 0.2633, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2757475083056478, |
|
"grad_norm": 0.143931046128273, |
|
"learning_rate": 7.695237378953223e-06, |
|
"loss": 0.2623, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.27740863787375414, |
|
"grad_norm": 0.14093278348445892, |
|
"learning_rate": 7.260364370723044e-06, |
|
"loss": 0.2587, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.27906976744186046, |
|
"grad_norm": 0.13632744550704956, |
|
"learning_rate": 6.837175952121306e-06, |
|
"loss": 0.2625, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2807308970099668, |
|
"grad_norm": 0.14669150114059448, |
|
"learning_rate": 6.425787818636131e-06, |
|
"loss": 0.2895, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.2823920265780731, |
|
"grad_norm": 0.14640481770038605, |
|
"learning_rate": 6.026312439675552e-06, |
|
"loss": 0.2644, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2840531561461794, |
|
"grad_norm": 0.1421024203300476, |
|
"learning_rate": 5.6388590278194096e-06, |
|
"loss": 0.2917, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 0.14989648759365082, |
|
"learning_rate": 5.263533508961827e-06, |
|
"loss": 0.3455, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.287375415282392, |
|
"grad_norm": 0.14961613714694977, |
|
"learning_rate": 4.900438493352055e-06, |
|
"loss": 0.4074, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.28903654485049834, |
|
"grad_norm": 0.1498543918132782, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 0.4368, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.29069767441860467, |
|
"grad_norm": 0.15873299539089203, |
|
"learning_rate": 4.2113336672471245e-06, |
|
"loss": 0.462, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.292358803986711, |
|
"grad_norm": 0.15671579539775848, |
|
"learning_rate": 3.885512251130763e-06, |
|
"loss": 0.4607, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.29401993355481726, |
|
"grad_norm": 0.16347020864486694, |
|
"learning_rate": 3.5722980755146517e-06, |
|
"loss": 0.4631, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.2956810631229236, |
|
"grad_norm": 0.15467208623886108, |
|
"learning_rate": 3.271776770026963e-06, |
|
"loss": 0.4825, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.2973421926910299, |
|
"grad_norm": 0.15377561748027802, |
|
"learning_rate": 2.9840304941919415e-06, |
|
"loss": 0.4624, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.29900332225913623, |
|
"grad_norm": 0.160777747631073, |
|
"learning_rate": 2.7091379149682685e-06, |
|
"loss": 0.48, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.30066445182724255, |
|
"grad_norm": 0.15274566411972046, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.4978, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3023255813953488, |
|
"grad_norm": 0.15474535524845123, |
|
"learning_rate": 2.1982109232821178e-06, |
|
"loss": 0.4641, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.30398671096345514, |
|
"grad_norm": 0.15535560250282288, |
|
"learning_rate": 1.962316193157593e-06, |
|
"loss": 0.4961, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.30564784053156147, |
|
"grad_norm": 0.15338659286499023, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 0.4865, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.3073089700996678, |
|
"grad_norm": 0.15274019539356232, |
|
"learning_rate": 1.5299867030334814e-06, |
|
"loss": 0.4978, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3089700996677741, |
|
"grad_norm": 0.14997823536396027, |
|
"learning_rate": 1.333670137599713e-06, |
|
"loss": 0.4733, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3106312292358804, |
|
"grad_norm": 0.14499768614768982, |
|
"learning_rate": 1.1506584608200367e-06, |
|
"loss": 0.4833, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.3122923588039867, |
|
"grad_norm": 0.14702832698822021, |
|
"learning_rate": 9.810017062595322e-07, |
|
"loss": 0.4618, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.313953488372093, |
|
"grad_norm": 0.14408934116363525, |
|
"learning_rate": 8.247462563808817e-07, |
|
"loss": 0.4748, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.31561461794019935, |
|
"grad_norm": 0.14656466245651245, |
|
"learning_rate": 6.819348298638839e-07, |
|
"loss": 0.4584, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.31727574750830567, |
|
"grad_norm": 0.14636141061782837, |
|
"learning_rate": 5.526064699265753e-07, |
|
"loss": 0.4915, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.31893687707641194, |
|
"grad_norm": 0.14926789700984955, |
|
"learning_rate": 4.367965336512403e-07, |
|
"loss": 0.4958, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.32059800664451826, |
|
"grad_norm": 0.1512245237827301, |
|
"learning_rate": 3.3453668231809286e-07, |
|
"loss": 0.4979, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.3222591362126246, |
|
"grad_norm": 0.1518077850341797, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 0.4767, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3239202657807309, |
|
"grad_norm": 0.15790244936943054, |
|
"learning_rate": 1.7077534966650766e-07, |
|
"loss": 0.5174, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.32558139534883723, |
|
"grad_norm": 0.16024570167064667, |
|
"learning_rate": 1.0931863906127327e-07, |
|
"loss": 0.5091, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3272425249169435, |
|
"grad_norm": 0.15498998761177063, |
|
"learning_rate": 6.150154258476315e-08, |
|
"loss": 0.5075, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.3289036544850498, |
|
"grad_norm": 0.15918047726154327, |
|
"learning_rate": 2.7337132953697554e-08, |
|
"loss": 0.5357, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.33056478405315615, |
|
"grad_norm": 0.18461520969867706, |
|
"learning_rate": 6.834750376549792e-09, |
|
"loss": 0.5895, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.33222591362126247, |
|
"grad_norm": 0.2483561933040619, |
|
"learning_rate": 0.0, |
|
"loss": 0.6524, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.33222591362126247, |
|
"eval_loss": 0.38096651434898376, |
|
"eval_runtime": 135.189, |
|
"eval_samples_per_second": 7.501, |
|
"eval_steps_per_second": 1.879, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.91878059343872e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |