{
  "best_metric": 1.3613554239273071,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.09857072449482504,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004928536224741252,
      "grad_norm": 16.37380599975586,
      "learning_rate": 1e-05,
      "loss": 6.995,
      "step": 1
    },
    {
      "epoch": 0.0004928536224741252,
      "eval_loss": 1.744011402130127,
      "eval_runtime": 259.1339,
      "eval_samples_per_second": 13.186,
      "eval_steps_per_second": 3.299,
      "step": 1
    },
    {
      "epoch": 0.0009857072449482504,
      "grad_norm": 16.33266830444336,
      "learning_rate": 2e-05,
      "loss": 7.1083,
      "step": 2
    },
    {
      "epoch": 0.0014785608674223755,
      "grad_norm": 15.080232620239258,
      "learning_rate": 3e-05,
      "loss": 6.7867,
      "step": 3
    },
    {
      "epoch": 0.001971414489896501,
      "grad_norm": 10.568120956420898,
      "learning_rate": 4e-05,
      "loss": 6.3311,
      "step": 4
    },
    {
      "epoch": 0.0024642681123706258,
      "grad_norm": 10.395201683044434,
      "learning_rate": 5e-05,
      "loss": 6.2016,
      "step": 5
    },
    {
      "epoch": 0.002957121734844751,
      "grad_norm": 11.448528289794922,
      "learning_rate": 6e-05,
      "loss": 6.5881,
      "step": 6
    },
    {
      "epoch": 0.0034499753573188764,
      "grad_norm": 8.285737037658691,
      "learning_rate": 7e-05,
      "loss": 6.178,
      "step": 7
    },
    {
      "epoch": 0.003942828979793002,
      "grad_norm": 6.383825302124023,
      "learning_rate": 8e-05,
      "loss": 5.9585,
      "step": 8
    },
    {
      "epoch": 0.004435682602267127,
      "grad_norm": 7.123228073120117,
      "learning_rate": 9e-05,
      "loss": 6.3599,
      "step": 9
    },
    {
      "epoch": 0.0049285362247412515,
      "grad_norm": 6.691042423248291,
      "learning_rate": 0.0001,
      "loss": 5.9879,
      "step": 10
    },
    {
      "epoch": 0.005421389847215377,
      "grad_norm": 6.677425861358643,
      "learning_rate": 9.999316524962345e-05,
      "loss": 6.0902,
      "step": 11
    },
    {
      "epoch": 0.005914243469689502,
      "grad_norm": 6.691093921661377,
      "learning_rate": 9.997266286704631e-05,
      "loss": 6.13,
      "step": 12
    },
    {
      "epoch": 0.006407097092163627,
      "grad_norm": 6.412749290466309,
      "learning_rate": 9.993849845741524e-05,
      "loss": 5.9985,
      "step": 13
    },
    {
      "epoch": 0.006899950714637753,
      "grad_norm": 6.398421287536621,
      "learning_rate": 9.989068136093873e-05,
      "loss": 6.0254,
      "step": 14
    },
    {
      "epoch": 0.007392804337111878,
      "grad_norm": 6.084516525268555,
      "learning_rate": 9.98292246503335e-05,
      "loss": 6.1664,
      "step": 15
    },
    {
      "epoch": 0.007885657959586003,
      "grad_norm": 5.95521354675293,
      "learning_rate": 9.975414512725057e-05,
      "loss": 5.8991,
      "step": 16
    },
    {
      "epoch": 0.008378511582060127,
      "grad_norm": 5.627572536468506,
      "learning_rate": 9.966546331768191e-05,
      "loss": 6.0787,
      "step": 17
    },
    {
      "epoch": 0.008871365204534253,
      "grad_norm": 5.7911248207092285,
      "learning_rate": 9.956320346634876e-05,
      "loss": 6.2035,
      "step": 18
    },
    {
      "epoch": 0.009364218827008379,
      "grad_norm": 6.241641044616699,
      "learning_rate": 9.944739353007344e-05,
      "loss": 6.0726,
      "step": 19
    },
    {
      "epoch": 0.009857072449482503,
      "grad_norm": 5.327273845672607,
      "learning_rate": 9.931806517013612e-05,
      "loss": 6.2332,
      "step": 20
    },
    {
      "epoch": 0.010349926071956629,
      "grad_norm": 5.308168411254883,
      "learning_rate": 9.917525374361912e-05,
      "loss": 6.0171,
      "step": 21
    },
    {
      "epoch": 0.010842779694430755,
      "grad_norm": 5.032196998596191,
      "learning_rate": 9.901899829374047e-05,
      "loss": 5.8416,
      "step": 22
    },
    {
      "epoch": 0.011335633316904879,
      "grad_norm": 4.981048583984375,
      "learning_rate": 9.884934153917997e-05,
      "loss": 5.5985,
      "step": 23
    },
    {
      "epoch": 0.011828486939379004,
      "grad_norm": 5.130867004394531,
      "learning_rate": 9.86663298624003e-05,
      "loss": 6.1288,
      "step": 24
    },
    {
      "epoch": 0.01232134056185313,
      "grad_norm": 4.848458766937256,
      "learning_rate": 9.847001329696653e-05,
      "loss": 5.8318,
      "step": 25
    },
    {
      "epoch": 0.012814194184327254,
      "grad_norm": 5.0001397132873535,
      "learning_rate": 9.826044551386744e-05,
      "loss": 5.9925,
      "step": 26
    },
    {
      "epoch": 0.01330704780680138,
      "grad_norm": 5.232586860656738,
      "learning_rate": 9.803768380684242e-05,
      "loss": 6.2745,
      "step": 27
    },
    {
      "epoch": 0.013799901429275506,
      "grad_norm": 5.037934303283691,
      "learning_rate": 9.780178907671789e-05,
      "loss": 5.8795,
      "step": 28
    },
    {
      "epoch": 0.01429275505174963,
      "grad_norm": 5.175755500793457,
      "learning_rate": 9.755282581475769e-05,
      "loss": 6.1939,
      "step": 29
    },
    {
      "epoch": 0.014785608674223755,
      "grad_norm": 5.293409824371338,
      "learning_rate": 9.729086208503174e-05,
      "loss": 5.9304,
      "step": 30
    },
    {
      "epoch": 0.015278462296697881,
      "grad_norm": 4.89537239074707,
      "learning_rate": 9.701596950580806e-05,
      "loss": 5.7866,
      "step": 31
    },
    {
      "epoch": 0.015771315919172007,
      "grad_norm": 5.026119232177734,
      "learning_rate": 9.672822322997305e-05,
      "loss": 5.9492,
      "step": 32
    },
    {
      "epoch": 0.016264169541646133,
      "grad_norm": 5.800846576690674,
      "learning_rate": 9.642770192448536e-05,
      "loss": 5.5961,
      "step": 33
    },
    {
      "epoch": 0.016757023164120255,
      "grad_norm": 4.967105388641357,
      "learning_rate": 9.611448774886924e-05,
      "loss": 5.9876,
      "step": 34
    },
    {
      "epoch": 0.01724987678659438,
      "grad_norm": 4.971159934997559,
      "learning_rate": 9.578866633275288e-05,
      "loss": 5.9039,
      "step": 35
    },
    {
      "epoch": 0.017742730409068506,
      "grad_norm": 5.209476470947266,
      "learning_rate": 9.545032675245813e-05,
      "loss": 5.9231,
      "step": 36
    },
    {
      "epoch": 0.018235584031542632,
      "grad_norm": 5.320252418518066,
      "learning_rate": 9.509956150664796e-05,
      "loss": 5.8169,
      "step": 37
    },
    {
      "epoch": 0.018728437654016758,
      "grad_norm": 5.2738142013549805,
      "learning_rate": 9.473646649103818e-05,
      "loss": 6.002,
      "step": 38
    },
    {
      "epoch": 0.019221291276490884,
      "grad_norm": 5.119503021240234,
      "learning_rate": 9.43611409721806e-05,
      "loss": 5.931,
      "step": 39
    },
    {
      "epoch": 0.019714144898965006,
      "grad_norm": 5.318321704864502,
      "learning_rate": 9.397368756032445e-05,
      "loss": 5.9097,
      "step": 40
    },
    {
      "epoch": 0.020206998521439132,
      "grad_norm": 5.050459861755371,
      "learning_rate": 9.357421218136386e-05,
      "loss": 6.1257,
      "step": 41
    },
    {
      "epoch": 0.020699852143913258,
      "grad_norm": 5.593949794769287,
      "learning_rate": 9.316282404787871e-05,
      "loss": 6.2968,
      "step": 42
    },
    {
      "epoch": 0.021192705766387383,
      "grad_norm": 5.589847087860107,
      "learning_rate": 9.273963562927695e-05,
      "loss": 6.233,
      "step": 43
    },
    {
      "epoch": 0.02168555938886151,
      "grad_norm": 5.743232727050781,
      "learning_rate": 9.230476262104677e-05,
      "loss": 5.9828,
      "step": 44
    },
    {
      "epoch": 0.022178413011335635,
      "grad_norm": 5.6376848220825195,
      "learning_rate": 9.185832391312644e-05,
      "loss": 6.1437,
      "step": 45
    },
    {
      "epoch": 0.022671266633809757,
      "grad_norm": 11.450542449951172,
      "learning_rate": 9.140044155740101e-05,
      "loss": 6.2194,
      "step": 46
    },
    {
      "epoch": 0.023164120256283883,
      "grad_norm": 6.774994373321533,
      "learning_rate": 9.093124073433463e-05,
      "loss": 6.1023,
      "step": 47
    },
    {
      "epoch": 0.02365697387875801,
      "grad_norm": 5.768123626708984,
      "learning_rate": 9.045084971874738e-05,
      "loss": 6.2419,
      "step": 48
    },
    {
      "epoch": 0.024149827501232134,
      "grad_norm": 7.388202667236328,
      "learning_rate": 8.995939984474624e-05,
      "loss": 6.5133,
      "step": 49
    },
    {
      "epoch": 0.02464268112370626,
      "grad_norm": 7.236172676086426,
      "learning_rate": 8.945702546981969e-05,
      "loss": 6.2176,
      "step": 50
    },
    {
      "epoch": 0.02464268112370626,
      "eval_loss": 1.732652187347412,
      "eval_runtime": 260.9809,
      "eval_samples_per_second": 13.093,
      "eval_steps_per_second": 3.276,
      "step": 50
    },
    {
      "epoch": 0.025135534746180386,
      "grad_norm": 35.06370544433594,
      "learning_rate": 8.894386393810563e-05,
      "loss": 7.3535,
      "step": 51
    },
    {
      "epoch": 0.025628388368654508,
      "grad_norm": 7.263309001922607,
      "learning_rate": 8.842005554284296e-05,
      "loss": 6.6987,
      "step": 52
    },
    {
      "epoch": 0.026121241991128634,
      "grad_norm": 4.144477844238281,
      "learning_rate": 8.788574348801675e-05,
      "loss": 5.9729,
      "step": 53
    },
    {
      "epoch": 0.02661409561360276,
      "grad_norm": 3.6691770553588867,
      "learning_rate": 8.73410738492077e-05,
      "loss": 6.0169,
      "step": 54
    },
    {
      "epoch": 0.027106949236076885,
      "grad_norm": 3.6410415172576904,
      "learning_rate": 8.678619553365659e-05,
      "loss": 5.9244,
      "step": 55
    },
    {
      "epoch": 0.02759980285855101,
      "grad_norm": 3.8702380657196045,
      "learning_rate": 8.622126023955446e-05,
      "loss": 6.2104,
      "step": 56
    },
    {
      "epoch": 0.028092656481025137,
      "grad_norm": 3.817110776901245,
      "learning_rate": 8.564642241456986e-05,
      "loss": 5.8548,
      "step": 57
    },
    {
      "epoch": 0.02858551010349926,
      "grad_norm": 7.407201290130615,
      "learning_rate": 8.506183921362443e-05,
      "loss": 5.5015,
      "step": 58
    },
    {
      "epoch": 0.029078363725973385,
      "grad_norm": 5.8004984855651855,
      "learning_rate": 8.44676704559283e-05,
      "loss": 5.5889,
      "step": 59
    },
    {
      "epoch": 0.02957121734844751,
      "grad_norm": 4.282701015472412,
      "learning_rate": 8.386407858128706e-05,
      "loss": 5.8837,
      "step": 60
    },
    {
      "epoch": 0.030064070970921637,
      "grad_norm": 4.238832950592041,
      "learning_rate": 8.32512286056924e-05,
      "loss": 5.9101,
      "step": 61
    },
    {
      "epoch": 0.030556924593395762,
      "grad_norm": 4.113567352294922,
      "learning_rate": 8.262928807620843e-05,
      "loss": 5.9194,
      "step": 62
    },
    {
      "epoch": 0.031049778215869888,
      "grad_norm": 4.071282386779785,
      "learning_rate": 8.199842702516583e-05,
      "loss": 5.705,
      "step": 63
    },
    {
      "epoch": 0.031542631838344014,
      "grad_norm": 3.9348578453063965,
      "learning_rate": 8.135881792367686e-05,
      "loss": 5.9225,
      "step": 64
    },
    {
      "epoch": 0.032035485460818136,
      "grad_norm": 6.435613632202148,
      "learning_rate": 8.07106356344834e-05,
      "loss": 5.9364,
      "step": 65
    },
    {
      "epoch": 0.032528339083292265,
      "grad_norm": 5.112850666046143,
      "learning_rate": 8.005405736415126e-05,
      "loss": 5.9065,
      "step": 66
    },
    {
      "epoch": 0.03302119270576639,
      "grad_norm": 4.052131175994873,
      "learning_rate": 7.938926261462366e-05,
      "loss": 5.6007,
      "step": 67
    },
    {
      "epoch": 0.03351404632824051,
      "grad_norm": 3.9013149738311768,
      "learning_rate": 7.871643313414718e-05,
      "loss": 5.6739,
      "step": 68
    },
    {
      "epoch": 0.03400689995071464,
      "grad_norm": 4.061436653137207,
      "learning_rate": 7.803575286758364e-05,
      "loss": 5.5508,
      "step": 69
    },
    {
      "epoch": 0.03449975357318876,
      "grad_norm": 4.149789333343506,
      "learning_rate": 7.734740790612136e-05,
      "loss": 5.8792,
      "step": 70
    },
    {
      "epoch": 0.03499260719566289,
      "grad_norm": 3.755802869796753,
      "learning_rate": 7.66515864363997e-05,
      "loss": 5.2883,
      "step": 71
    },
    {
      "epoch": 0.03548546081813701,
      "grad_norm": 3.9959685802459717,
      "learning_rate": 7.594847868906076e-05,
      "loss": 5.8289,
      "step": 72
    },
    {
      "epoch": 0.035978314440611135,
      "grad_norm": 4.5482072830200195,
      "learning_rate": 7.52382768867422e-05,
      "loss": 5.5595,
      "step": 73
    },
    {
      "epoch": 0.036471168063085264,
      "grad_norm": 4.273440361022949,
      "learning_rate": 7.452117519152542e-05,
      "loss": 5.7877,
      "step": 74
    },
    {
      "epoch": 0.03696402168555939,
      "grad_norm": 4.279988765716553,
      "learning_rate": 7.379736965185368e-05,
      "loss": 5.7015,
      "step": 75
    },
    {
      "epoch": 0.037456875308033516,
      "grad_norm": 3.8421711921691895,
      "learning_rate": 7.30670581489344e-05,
      "loss": 5.6129,
      "step": 76
    },
    {
      "epoch": 0.03794972893050764,
      "grad_norm": 4.161666393280029,
      "learning_rate": 7.233044034264034e-05,
      "loss": 5.6966,
      "step": 77
    },
    {
      "epoch": 0.03844258255298177,
      "grad_norm": 4.275053977966309,
      "learning_rate": 7.158771761692464e-05,
      "loss": 5.7704,
      "step": 78
    },
    {
      "epoch": 0.03893543617545589,
      "grad_norm": 4.343554496765137,
      "learning_rate": 7.083909302476453e-05,
      "loss": 5.9121,
      "step": 79
    },
    {
      "epoch": 0.03942828979793001,
      "grad_norm": 4.524814128875732,
      "learning_rate": 7.008477123264848e-05,
      "loss": 5.6133,
      "step": 80
    },
    {
      "epoch": 0.03992114342040414,
      "grad_norm": 4.053195953369141,
      "learning_rate": 6.932495846462261e-05,
      "loss": 5.8539,
      "step": 81
    },
    {
      "epoch": 0.040413997042878264,
      "grad_norm": 4.116884231567383,
      "learning_rate": 6.855986244591104e-05,
      "loss": 5.6884,
      "step": 82
    },
    {
      "epoch": 0.04090685066535239,
      "grad_norm": 4.242743492126465,
      "learning_rate": 6.778969234612584e-05,
      "loss": 5.6097,
      "step": 83
    },
    {
      "epoch": 0.041399704287826515,
      "grad_norm": 4.237097263336182,
      "learning_rate": 6.701465872208216e-05,
      "loss": 5.7156,
      "step": 84
    },
    {
      "epoch": 0.04189255791030064,
      "grad_norm": 4.75575590133667,
      "learning_rate": 6.623497346023418e-05,
      "loss": 5.6946,
      "step": 85
    },
    {
      "epoch": 0.04238541153277477,
      "grad_norm": 4.682205677032471,
      "learning_rate": 6.545084971874738e-05,
      "loss": 5.7993,
      "step": 86
    },
    {
      "epoch": 0.04287826515524889,
      "grad_norm": 4.896819591522217,
      "learning_rate": 6.466250186922325e-05,
      "loss": 6.1002,
      "step": 87
    },
    {
      "epoch": 0.04337111877772302,
      "grad_norm": 4.500687599182129,
      "learning_rate": 6.387014543809223e-05,
      "loss": 5.5547,
      "step": 88
    },
    {
      "epoch": 0.04386397240019714,
      "grad_norm": 4.486171722412109,
      "learning_rate": 6.307399704769099e-05,
      "loss": 5.5062,
      "step": 89
    },
    {
      "epoch": 0.04435682602267127,
      "grad_norm": 4.3436737060546875,
      "learning_rate": 6.227427435703997e-05,
      "loss": 5.5787,
      "step": 90
    },
    {
      "epoch": 0.04484967964514539,
      "grad_norm": 4.709849834442139,
      "learning_rate": 6.147119600233758e-05,
      "loss": 5.8669,
      "step": 91
    },
    {
      "epoch": 0.045342533267619514,
      "grad_norm": 4.942005634307861,
      "learning_rate": 6.066498153718735e-05,
      "loss": 5.7457,
      "step": 92
    },
    {
      "epoch": 0.04583538689009364,
      "grad_norm": 4.71359920501709,
      "learning_rate": 5.985585137257401e-05,
      "loss": 5.8917,
      "step": 93
    },
    {
      "epoch": 0.046328240512567766,
      "grad_norm": 4.906127452850342,
      "learning_rate": 5.90440267166055e-05,
      "loss": 6.1047,
      "step": 94
    },
    {
      "epoch": 0.046821094135041895,
      "grad_norm": 4.9009504318237305,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 5.6463,
      "step": 95
    },
    {
      "epoch": 0.04731394775751602,
      "grad_norm": 5.0316853523254395,
      "learning_rate": 5.74131823855921e-05,
      "loss": 5.982,
      "step": 96
    },
    {
      "epoch": 0.04780680137999014,
      "grad_norm": 5.0484299659729,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 5.7041,
      "step": 97
    },
    {
      "epoch": 0.04829965500246427,
      "grad_norm": 5.215059280395508,
      "learning_rate": 5.577423184847932e-05,
      "loss": 5.9011,
      "step": 98
    },
    {
      "epoch": 0.04879250862493839,
      "grad_norm": 5.8646955490112305,
      "learning_rate": 5.495227651252315e-05,
      "loss": 6.112,
      "step": 99
    },
    {
      "epoch": 0.04928536224741252,
      "grad_norm": 6.156351089477539,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 5.9004,
      "step": 100
    },
    {
      "epoch": 0.04928536224741252,
      "eval_loss": 1.431005597114563,
      "eval_runtime": 261.684,
      "eval_samples_per_second": 13.058,
      "eval_steps_per_second": 3.267,
      "step": 100
    },
    {
      "epoch": 0.04977821586988664,
      "grad_norm": 4.2226433753967285,
      "learning_rate": 5.330452921628497e-05,
      "loss": 5.885,
      "step": 101
    },
    {
      "epoch": 0.05027106949236077,
      "grad_norm": 4.225105285644531,
      "learning_rate": 5.247918773366112e-05,
      "loss": 6.0265,
      "step": 102
    },
    {
      "epoch": 0.050763923114834894,
      "grad_norm": 3.3986520767211914,
      "learning_rate": 5.165316846586541e-05,
      "loss": 5.7708,
      "step": 103
    },
    {
      "epoch": 0.051256776737309016,
      "grad_norm": 3.3197200298309326,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 5.5425,
      "step": 104
    },
    {
      "epoch": 0.051749630359783146,
      "grad_norm": 3.359449863433838,
      "learning_rate": 5e-05,
      "loss": 5.6861,
      "step": 105
    },
    {
      "epoch": 0.05224248398225727,
      "grad_norm": 3.6861770153045654,
      "learning_rate": 4.917330276168208e-05,
      "loss": 5.8957,
      "step": 106
    },
    {
      "epoch": 0.0527353376047314,
      "grad_norm": 3.3729681968688965,
      "learning_rate": 4.834683153413459e-05,
      "loss": 5.6209,
      "step": 107
    },
    {
      "epoch": 0.05322819122720552,
      "grad_norm": 3.5027880668640137,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 5.3333,
      "step": 108
    },
    {
      "epoch": 0.05372104484967965,
      "grad_norm": 3.4611856937408447,
      "learning_rate": 4.669547078371504e-05,
      "loss": 5.7229,
      "step": 109
    },
    {
      "epoch": 0.05421389847215377,
      "grad_norm": 3.641321897506714,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 6.0581,
      "step": 110
    },
    {
      "epoch": 0.05470675209462789,
      "grad_norm": 3.8468551635742188,
      "learning_rate": 4.504772348747687e-05,
      "loss": 5.5633,
      "step": 111
    },
    {
      "epoch": 0.05519960571710202,
      "grad_norm": 3.6477551460266113,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 5.3808,
      "step": 112
    },
    {
      "epoch": 0.055692459339576145,
      "grad_norm": 3.6040382385253906,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 5.1802,
      "step": 113
    },
    {
      "epoch": 0.056185312962050274,
      "grad_norm": 3.5388293266296387,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 5.441,
      "step": 114
    },
    {
      "epoch": 0.056678166584524396,
      "grad_norm": 3.9764630794525146,
      "learning_rate": 4.17702704859633e-05,
      "loss": 5.9069,
      "step": 115
    },
    {
      "epoch": 0.05717102020699852,
      "grad_norm": 3.8530142307281494,
      "learning_rate": 4.095597328339452e-05,
      "loss": 5.4141,
      "step": 116
    },
    {
      "epoch": 0.05766387382947265,
      "grad_norm": 3.6980135440826416,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 5.2554,
      "step": 117
    },
    {
      "epoch": 0.05815672745194677,
      "grad_norm": 3.621671676635742,
      "learning_rate": 3.933501846281267e-05,
      "loss": 5.2911,
      "step": 118
    },
    {
      "epoch": 0.0586495810744209,
      "grad_norm": 3.8117148876190186,
      "learning_rate": 3.852880399766243e-05,
      "loss": 5.4398,
      "step": 119
    },
    {
      "epoch": 0.05914243469689502,
      "grad_norm": 3.9566729068756104,
      "learning_rate": 3.772572564296005e-05,
      "loss": 5.6742,
      "step": 120
    },
    {
      "epoch": 0.05963528831936915,
      "grad_norm": 3.656689405441284,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 5.3545,
      "step": 121
    },
    {
      "epoch": 0.06012814194184327,
      "grad_norm": 3.7543258666992188,
      "learning_rate": 3.612985456190778e-05,
      "loss": 5.2922,
      "step": 122
    },
    {
      "epoch": 0.060620995564317395,
      "grad_norm": 3.839445114135742,
      "learning_rate": 3.533749813077677e-05,
      "loss": 5.4221,
      "step": 123
    },
    {
      "epoch": 0.061113849186791525,
      "grad_norm": 4.027908802032471,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 5.3428,
      "step": 124
    },
    {
      "epoch": 0.06160670280926565,
      "grad_norm": 3.913569927215576,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 5.5121,
      "step": 125
    },
    {
      "epoch": 0.062099556431739776,
      "grad_norm": 3.884404420852661,
      "learning_rate": 3.298534127791785e-05,
      "loss": 5.4622,
      "step": 126
    },
    {
      "epoch": 0.0625924100542139,
      "grad_norm": 4.357018947601318,
      "learning_rate": 3.221030765387417e-05,
      "loss": 5.842,
      "step": 127
    },
    {
      "epoch": 0.06308526367668803,
      "grad_norm": 4.205898761749268,
      "learning_rate": 3.144013755408895e-05,
      "loss": 5.4984,
      "step": 128
    },
    {
      "epoch": 0.06357811729916214,
      "grad_norm": 3.913008213043213,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 5.5454,
      "step": 129
    },
    {
      "epoch": 0.06407097092163627,
      "grad_norm": 4.038344383239746,
      "learning_rate": 2.991522876735154e-05,
      "loss": 5.681,
      "step": 130
    },
    {
      "epoch": 0.0645638245441104,
      "grad_norm": 3.8500421047210693,
      "learning_rate": 2.916090697523549e-05,
      "loss": 5.316,
      "step": 131
    },
    {
      "epoch": 0.06505667816658453,
      "grad_norm": 4.233459949493408,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 5.6574,
      "step": 132
    },
    {
      "epoch": 0.06554953178905865,
      "grad_norm": 4.028703689575195,
      "learning_rate": 2.766955965735968e-05,
      "loss": 5.219,
      "step": 133
    },
    {
      "epoch": 0.06604238541153278,
      "grad_norm": 4.3093109130859375,
      "learning_rate": 2.693294185106562e-05,
      "loss": 5.5462,
      "step": 134
    },
    {
      "epoch": 0.0665352390340069,
      "grad_norm": 4.2733378410339355,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 5.4858,
      "step": 135
    },
    {
      "epoch": 0.06702809265648102,
      "grad_norm": 4.1830878257751465,
      "learning_rate": 2.547882480847461e-05,
      "loss": 5.1703,
      "step": 136
    },
    {
      "epoch": 0.06752094627895515,
      "grad_norm": 4.2492170333862305,
      "learning_rate": 2.476172311325783e-05,
      "loss": 5.4769,
      "step": 137
    },
    {
      "epoch": 0.06801379990142928,
      "grad_norm": 4.3830060958862305,
      "learning_rate": 2.405152131093926e-05,
      "loss": 5.5719,
      "step": 138
    },
    {
      "epoch": 0.06850665352390341,
      "grad_norm": 4.348109722137451,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 5.3788,
      "step": 139
    },
    {
      "epoch": 0.06899950714637752,
      "grad_norm": 4.3019232749938965,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 5.5993,
      "step": 140
    },
    {
      "epoch": 0.06949236076885165,
      "grad_norm": 4.5521559715271,
      "learning_rate": 2.196424713241637e-05,
      "loss": 5.8097,
      "step": 141
    },
    {
      "epoch": 0.06998521439132578,
      "grad_norm": 4.503178119659424,
      "learning_rate": 2.128356686585282e-05,
      "loss": 5.5559,
      "step": 142
    },
    {
      "epoch": 0.0704780680137999,
      "grad_norm": 4.683341979980469,
      "learning_rate": 2.061073738537635e-05,
      "loss": 5.5462,
      "step": 143
    },
    {
      "epoch": 0.07097092163627403,
      "grad_norm": 4.591628074645996,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 5.4505,
      "step": 144
    },
    {
      "epoch": 0.07146377525874816,
      "grad_norm": 4.827720642089844,
      "learning_rate": 1.928936436551661e-05,
      "loss": 5.8953,
      "step": 145
    },
    {
      "epoch": 0.07195662888122227,
      "grad_norm": 4.654827117919922,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 5.7021,
      "step": 146
    },
    {
      "epoch": 0.0724494825036964,
      "grad_norm": 5.047876358032227,
      "learning_rate": 1.800157297483417e-05,
      "loss": 5.9535,
      "step": 147
    },
    {
      "epoch": 0.07294233612617053,
      "grad_norm": 5.1422014236450195,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 5.7735,
      "step": 148
    },
    {
      "epoch": 0.07343518974864466,
      "grad_norm": 5.423142433166504,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 5.9309,
      "step": 149
    },
    {
      "epoch": 0.07392804337111877,
      "grad_norm": 5.620732307434082,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 5.6295,
      "step": 150
    },
    {
      "epoch": 0.07392804337111877,
      "eval_loss": 1.3822078704833984,
      "eval_runtime": 261.5296,
      "eval_samples_per_second": 13.065,
      "eval_steps_per_second": 3.269,
      "step": 150
    },
    {
      "epoch": 0.0744208969935929,
      "grad_norm": 3.530040740966797,
      "learning_rate": 1.553232954407171e-05,
      "loss": 6.0692,
      "step": 151
    },
    {
      "epoch": 0.07491375061606703,
      "grad_norm": 3.790463447570801,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 6.0955,
      "step": 152
    },
    {
      "epoch": 0.07540660423854115,
      "grad_norm": 3.6937966346740723,
      "learning_rate": 1.435357758543015e-05,
      "loss": 5.5648,
      "step": 153
    },
    {
      "epoch": 0.07589945786101528,
      "grad_norm": 3.5604262351989746,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 5.6857,
      "step": 154
    },
    {
      "epoch": 0.0763923114834894,
      "grad_norm": 3.4493308067321777,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 5.5411,
      "step": 155
    },
    {
      "epoch": 0.07688516510596353,
      "grad_norm": 3.3345842361450195,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 5.4493,
      "step": 156
    },
    {
      "epoch": 0.07737801872843765,
      "grad_norm": 3.241334915161133,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 5.1193,
      "step": 157
    },
    {
      "epoch": 0.07787087235091178,
      "grad_norm": 3.2870683670043945,
      "learning_rate": 1.157994445715706e-05,
      "loss": 5.3904,
      "step": 158
    },
    {
      "epoch": 0.07836372597338591,
      "grad_norm": 3.2689359188079834,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 5.351,
      "step": 159
    },
    {
      "epoch": 0.07885657959586002,
      "grad_norm": 3.798901081085205,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 5.2776,
      "step": 160
    },
    {
      "epoch": 0.07934943321833415,
      "grad_norm": 3.6019339561462402,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 5.1261,
      "step": 161
    },
    {
      "epoch": 0.07984228684080828,
      "grad_norm": 3.4421374797821045,
      "learning_rate": 9.549150281252633e-06,
      "loss": 5.5591,
      "step": 162
    },
    {
      "epoch": 0.08033514046328241,
      "grad_norm": 3.6133437156677246,
      "learning_rate": 9.068759265665384e-06,
      "loss": 5.5354,
      "step": 163
    },
    {
      "epoch": 0.08082799408575653,
      "grad_norm": 3.55804705619812,
      "learning_rate": 8.599558442598998e-06,
      "loss": 5.2061,
      "step": 164
    },
    {
      "epoch": 0.08132084770823066,
      "grad_norm": 3.50337290763855,
      "learning_rate": 8.141676086873572e-06,
      "loss": 5.2636,
      "step": 165
    },
    {
      "epoch": 0.08181370133070479,
      "grad_norm": 3.4068167209625244,
      "learning_rate": 7.695237378953223e-06,
      "loss": 5.2175,
      "step": 166
    },
    {
      "epoch": 0.0823065549531789,
      "grad_norm": 3.619082450866699,
      "learning_rate": 7.260364370723044e-06,
      "loss": 5.7874,
      "step": 167
    },
    {
      "epoch": 0.08279940857565303,
      "grad_norm": 3.40138840675354,
      "learning_rate": 6.837175952121306e-06,
      "loss": 4.8733,
      "step": 168
    },
    {
      "epoch": 0.08329226219812716,
      "grad_norm": 3.7839810848236084,
      "learning_rate": 6.425787818636131e-06,
      "loss": 5.1912,
      "step": 169
    },
    {
      "epoch": 0.08378511582060127,
      "grad_norm": 3.7684295177459717,
      "learning_rate": 6.026312439675552e-06,
      "loss": 5.6102,
      "step": 170
    },
    {
      "epoch": 0.0842779694430754,
      "grad_norm": 3.9470818042755127,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 5.6098,
      "step": 171
    },
    {
      "epoch": 0.08477082306554953,
      "grad_norm": 3.850346326828003,
      "learning_rate": 5.263533508961827e-06,
      "loss": 5.4293,
      "step": 172
    },
    {
      "epoch": 0.08526367668802366,
      "grad_norm": 3.6885390281677246,
      "learning_rate": 4.900438493352055e-06,
      "loss": 5.4378,
      "step": 173
    },
    {
      "epoch": 0.08575653031049778,
      "grad_norm": 3.6200008392333984,
      "learning_rate": 4.549673247541875e-06,
      "loss": 5.0055,
      "step": 174
    },
    {
      "epoch": 0.08624938393297191,
      "grad_norm": 3.9811460971832275,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 5.5862,
      "step": 175
    },
    {
      "epoch": 0.08674223755544604,
      "grad_norm": 4.14628267288208,
      "learning_rate": 3.885512251130763e-06,
      "loss": 5.6571,
      "step": 176
    },
    {
      "epoch": 0.08723509117792015,
      "grad_norm": 3.7174313068389893,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 5.0116,
      "step": 177
    },
    {
      "epoch": 0.08772794480039428,
      "grad_norm": 4.070563316345215,
      "learning_rate": 3.271776770026963e-06,
      "loss": 5.6555,
      "step": 178
    },
    {
      "epoch": 0.08822079842286841,
      "grad_norm": 3.9737911224365234,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 5.3808,
      "step": 179
    },
    {
      "epoch": 0.08871365204534254,
      "grad_norm": 4.051774024963379,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 5.2126,
      "step": 180
    },
    {
      "epoch": 0.08920650566781665,
      "grad_norm": 3.877704381942749,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 5.2849,
      "step": 181
    },
    {
      "epoch": 0.08969935929029078,
      "grad_norm": 4.159682750701904,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 5.5821,
      "step": 182
    },
    {
      "epoch": 0.09019221291276491,
      "grad_norm": 4.183592796325684,
      "learning_rate": 1.962316193157593e-06,
      "loss": 5.3883,
      "step": 183
    },
    {
      "epoch": 0.09068506653523903,
      "grad_norm": 4.481199741363525,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 5.5933,
      "step": 184
    },
    {
      "epoch": 0.09117792015771316,
      "grad_norm": 4.184470176696777,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 5.7266,
      "step": 185
    },
    {
      "epoch": 0.09167077378018729,
      "grad_norm": 4.305405139923096,
      "learning_rate": 1.333670137599713e-06,
      "loss": 5.5935,
      "step": 186
    },
    {
      "epoch": 0.09216362740266142,
      "grad_norm": 4.299315452575684,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 5.686,
      "step": 187
    },
    {
      "epoch": 0.09265648102513553,
      "grad_norm": 4.34365701675415,
      "learning_rate": 9.810017062595322e-07,
      "loss": 5.3935,
      "step": 188
    },
    {
      "epoch": 0.09314933464760966,
      "grad_norm": 4.203657150268555,
      "learning_rate": 8.247462563808817e-07,
      "loss": 5.4036,
      "step": 189
    },
    {
      "epoch": 0.09364218827008379,
      "grad_norm": 4.4854841232299805,
      "learning_rate": 6.819348298638839e-07,
      "loss": 5.5909,
      "step": 190
    },
    {
      "epoch": 0.0941350418925579,
      "grad_norm": 4.49975061416626,
      "learning_rate": 5.526064699265753e-07,
      "loss": 5.4727,
      "step": 191
    },
    {
      "epoch": 0.09462789551503203,
      "grad_norm": 4.444101333618164,
      "learning_rate": 4.367965336512403e-07,
      "loss": 5.2179,
      "step": 192
    },
    {
      "epoch": 0.09512074913750616,
      "grad_norm": 4.851204872131348,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 5.3875,
      "step": 193
    },
    {
      "epoch": 0.09561360275998028,
      "grad_norm": 4.588776111602783,
      "learning_rate": 2.458548727494292e-07,
      "loss": 5.6425,
      "step": 194
    },
    {
      "epoch": 0.09610645638245441,
      "grad_norm": 4.847256660461426,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 5.6931,
      "step": 195
    },
    {
      "epoch": 0.09659931000492854,
      "grad_norm": 4.965179920196533,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 5.7267,
      "step": 196
    },
    {
      "epoch": 0.09709216362740267,
      "grad_norm": 5.139113903045654,
      "learning_rate": 6.150154258476315e-08,
      "loss": 5.9983,
      "step": 197
    },
    {
      "epoch": 0.09758501724987678,
      "grad_norm": 4.971620559692383,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 5.6054,
      "step": 198
    },
    {
      "epoch": 0.09807787087235091,
      "grad_norm": 5.485710144042969,
      "learning_rate": 6.834750376549792e-09,
      "loss": 5.6522,
      "step": 199
    },
    {
      "epoch": 0.09857072449482504,
      "grad_norm": 5.633842468261719,
      "learning_rate": 0.0,
      "loss": 5.3277,
      "step": 200
    },
    {
      "epoch": 0.09857072449482504,
      "eval_loss": 1.3613554239273071,
      "eval_runtime": 261.7351,
      "eval_samples_per_second": 13.055,
      "eval_steps_per_second": 3.267,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.883464233494774e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}