{ |
|
"best_metric": 0.34753015637397766, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.3006388575723412, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.001503194287861706, |
|
"grad_norm": 0.5596352219581604, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3511, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.001503194287861706, |
|
"eval_loss": 0.624755859375, |
|
"eval_runtime": 54.3198, |
|
"eval_samples_per_second": 20.637, |
|
"eval_steps_per_second": 5.173, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003006388575723412, |
|
"grad_norm": 0.7194361090660095, |
|
"learning_rate": 2e-05, |
|
"loss": 0.4613, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.004509582863585118, |
|
"grad_norm": 0.7623833417892456, |
|
"learning_rate": 3e-05, |
|
"loss": 0.5343, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.006012777151446824, |
|
"grad_norm": 0.6368038654327393, |
|
"learning_rate": 4e-05, |
|
"loss": 0.4856, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00751597143930853, |
|
"grad_norm": 0.519997775554657, |
|
"learning_rate": 5e-05, |
|
"loss": 0.4821, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009019165727170236, |
|
"grad_norm": 0.5378677248954773, |
|
"learning_rate": 6e-05, |
|
"loss": 0.4381, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.010522360015031942, |
|
"grad_norm": 0.6729313731193542, |
|
"learning_rate": 7e-05, |
|
"loss": 0.4567, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.012025554302893648, |
|
"grad_norm": 0.621289074420929, |
|
"learning_rate": 8e-05, |
|
"loss": 0.51, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.013528748590755355, |
|
"grad_norm": 0.5438550114631653, |
|
"learning_rate": 9e-05, |
|
"loss": 0.4581, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01503194287861706, |
|
"grad_norm": 0.5436606407165527, |
|
"learning_rate": 0.0001, |
|
"loss": 0.4384, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.016535137166478767, |
|
"grad_norm": 0.48347610235214233, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 0.4526, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.018038331454340473, |
|
"grad_norm": 0.4883898198604584, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 0.4152, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01954152574220218, |
|
"grad_norm": 0.4870653450489044, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 0.3998, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.021044720030063885, |
|
"grad_norm": 0.48256585001945496, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 0.4152, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02254791431792559, |
|
"grad_norm": 0.44895902276039124, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 0.4247, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.024051108605787297, |
|
"grad_norm": 0.3959610164165497, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 0.3762, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.025554302893649003, |
|
"grad_norm": 0.43920475244522095, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 0.4128, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02705749718151071, |
|
"grad_norm": 0.4507409334182739, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 0.4574, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.028560691469372415, |
|
"grad_norm": 0.44753408432006836, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 0.47, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03006388575723412, |
|
"grad_norm": 0.42001616954803467, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 0.4492, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03156708004509583, |
|
"grad_norm": 0.45444992184638977, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 0.4415, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03307027433295753, |
|
"grad_norm": 0.45968878269195557, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 0.4587, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03457346862081924, |
|
"grad_norm": 0.3964838981628418, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 0.4046, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.036076662908680945, |
|
"grad_norm": 0.4261048436164856, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 0.3905, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03757985719654265, |
|
"grad_norm": 0.4289442300796509, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 0.4308, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03908305148440436, |
|
"grad_norm": 0.435476154088974, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 0.4735, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.040586245772266064, |
|
"grad_norm": 0.42117980122566223, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 0.4119, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04208944006012777, |
|
"grad_norm": 0.44847437739372253, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 0.4121, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.043592634347989476, |
|
"grad_norm": 0.422210693359375, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.4041, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04509582863585118, |
|
"grad_norm": 0.4026918113231659, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 0.3983, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04659902292371289, |
|
"grad_norm": 0.45083874464035034, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 0.3989, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.048102217211574594, |
|
"grad_norm": 0.42770618200302124, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 0.4144, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0496054114994363, |
|
"grad_norm": 0.4121634364128113, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 0.3996, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.051108605787298006, |
|
"grad_norm": 0.4511989653110504, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 0.3884, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05261180007515971, |
|
"grad_norm": 0.44728589057922363, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 0.4136, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05411499436302142, |
|
"grad_norm": 0.4282330274581909, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 0.3864, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.055618188650883124, |
|
"grad_norm": 0.46324941515922546, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 0.4413, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05712138293874483, |
|
"grad_norm": 0.4187595546245575, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 0.3852, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.058624577226606536, |
|
"grad_norm": 0.43247416615486145, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 0.3865, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.06012777151446824, |
|
"grad_norm": 0.39086800813674927, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 0.336, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06163096580232995, |
|
"grad_norm": 0.4751962125301361, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 0.4333, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.06313416009019165, |
|
"grad_norm": 0.4536871314048767, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 0.3693, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.06463735437805336, |
|
"grad_norm": 0.5023728013038635, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 0.3467, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06614054866591507, |
|
"grad_norm": 0.4579885005950928, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 0.3726, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06764374295377677, |
|
"grad_norm": 0.5028191208839417, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 0.3774, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06914693724163848, |
|
"grad_norm": 0.5278530120849609, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 0.4052, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07065013152950018, |
|
"grad_norm": 0.5628510117530823, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 0.4262, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.07215332581736189, |
|
"grad_norm": 0.6105784773826599, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.415, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.0736565201052236, |
|
"grad_norm": 0.5921081304550171, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 0.3439, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.0751597143930853, |
|
"grad_norm": 0.7702072858810425, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 0.4604, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0751597143930853, |
|
"eval_loss": 0.39259007573127747, |
|
"eval_runtime": 54.842, |
|
"eval_samples_per_second": 20.441, |
|
"eval_steps_per_second": 5.124, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.07666290868094701, |
|
"grad_norm": 0.32153835892677307, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 0.3509, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07816610296880872, |
|
"grad_norm": 0.29875338077545166, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 0.4147, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07966929725667042, |
|
"grad_norm": 0.308470219373703, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 0.4196, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.08117249154453213, |
|
"grad_norm": 0.31666040420532227, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 0.4143, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.08267568583239383, |
|
"grad_norm": 0.3026100993156433, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 0.4456, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.08417888012025554, |
|
"grad_norm": 0.29371339082717896, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 0.3559, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.08568207440811725, |
|
"grad_norm": 0.3148328363895416, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 0.3927, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.08718526869597895, |
|
"grad_norm": 0.30351120233535767, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 0.3777, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.08868846298384066, |
|
"grad_norm": 0.34232327342033386, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 0.3867, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.09019165727170236, |
|
"grad_norm": 0.3512055277824402, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 0.4172, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09169485155956407, |
|
"grad_norm": 0.3395233750343323, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 0.3771, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.09319804584742578, |
|
"grad_norm": 0.33125266432762146, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 0.3987, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.09470124013528748, |
|
"grad_norm": 0.40485480427742004, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 0.384, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.09620443442314919, |
|
"grad_norm": 0.36481529474258423, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 0.401, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.0977076287110109, |
|
"grad_norm": 0.33970266580581665, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 0.385, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0992108229988726, |
|
"grad_norm": 0.34393301606178284, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 0.393, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1007140172867343, |
|
"grad_norm": 0.36804962158203125, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.3822, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.10221721157459601, |
|
"grad_norm": 0.3514614701271057, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 0.405, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.10372040586245772, |
|
"grad_norm": 0.37134188413619995, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 0.4007, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.10522360015031942, |
|
"grad_norm": 0.36093929409980774, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 0.3821, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.10672679443818113, |
|
"grad_norm": 0.36206376552581787, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 0.3791, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.10822998872604284, |
|
"grad_norm": 0.3646642565727234, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 0.3992, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.10973318301390454, |
|
"grad_norm": 0.38398653268814087, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 0.3709, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.11123637730176625, |
|
"grad_norm": 0.41967472434043884, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 0.44, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.11273957158962795, |
|
"grad_norm": 0.410259485244751, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 0.389, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.11424276587748966, |
|
"grad_norm": 0.3982982635498047, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 0.3772, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.11574596016535137, |
|
"grad_norm": 0.38881319761276245, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 0.3725, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.11724915445321307, |
|
"grad_norm": 0.39276328682899475, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 0.3715, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.11875234874107478, |
|
"grad_norm": 0.3920992612838745, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 0.3714, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.12025554302893648, |
|
"grad_norm": 0.3906751871109009, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 0.3625, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.12175873731679819, |
|
"grad_norm": 0.40682023763656616, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 0.3693, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1232619316046599, |
|
"grad_norm": 0.3880535960197449, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 0.3502, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1247651258925216, |
|
"grad_norm": 0.423728346824646, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 0.365, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1262683201803833, |
|
"grad_norm": 0.41527223587036133, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 0.3402, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.12777151446824503, |
|
"grad_norm": 0.4017101228237152, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 0.3697, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.12927470875610672, |
|
"grad_norm": 0.4082302451133728, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.3597, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.13077790304396844, |
|
"grad_norm": 0.4285735785961151, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 0.37, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.13228109733183013, |
|
"grad_norm": 0.47520652413368225, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 0.3604, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.13378429161969185, |
|
"grad_norm": 0.4193938970565796, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 0.3669, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.13528748590755355, |
|
"grad_norm": 0.45612838864326477, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 0.3725, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.13679068019541527, |
|
"grad_norm": 0.4199351370334625, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 0.3249, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.13829387448327696, |
|
"grad_norm": 0.4824385643005371, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 0.3901, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.13979706877113868, |
|
"grad_norm": 0.45574283599853516, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 0.3575, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.14130026305900037, |
|
"grad_norm": 0.46389901638031006, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 0.3558, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.1428034573468621, |
|
"grad_norm": 0.44589194655418396, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 0.315, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.14430665163472378, |
|
"grad_norm": 0.4838874042034149, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 0.3692, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1458098459225855, |
|
"grad_norm": 0.5774380564689636, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 0.4069, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.1473130402104472, |
|
"grad_norm": 0.5149058103561401, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 0.3545, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1488162344983089, |
|
"grad_norm": 0.5392869710922241, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 0.3157, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.1503194287861706, |
|
"grad_norm": 0.7136433720588684, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 0.3207, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1503194287861706, |
|
"eval_loss": 0.3683274984359741, |
|
"eval_runtime": 54.6677, |
|
"eval_samples_per_second": 20.506, |
|
"eval_steps_per_second": 5.14, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.15182262307403233, |
|
"grad_norm": 0.26212289929389954, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 0.2755, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.15332581736189402, |
|
"grad_norm": 0.3127160370349884, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 0.4129, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.15482901164975574, |
|
"grad_norm": 0.3296348452568054, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 0.419, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.15633220593761743, |
|
"grad_norm": 0.30321285128593445, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 0.3779, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.15783540022547915, |
|
"grad_norm": 0.301196813583374, |
|
"learning_rate": 5e-05, |
|
"loss": 0.3451, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.15933859451334084, |
|
"grad_norm": 0.3540099859237671, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 0.362, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.16084178880120256, |
|
"grad_norm": 0.31483331322669983, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 0.3439, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.16234498308906425, |
|
"grad_norm": 0.3202732503414154, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 0.373, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.16384817737692597, |
|
"grad_norm": 0.2974686920642853, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 0.3569, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.16535137166478767, |
|
"grad_norm": 0.30358532071113586, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 0.3645, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1668545659526494, |
|
"grad_norm": 0.34163451194763184, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 0.4031, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.16835776024051108, |
|
"grad_norm": 0.34163618087768555, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 0.4144, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1698609545283728, |
|
"grad_norm": 0.3094636797904968, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 0.3709, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.1713641488162345, |
|
"grad_norm": 0.33215829730033875, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 0.3527, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1728673431040962, |
|
"grad_norm": 0.33634069561958313, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 0.3784, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1743705373919579, |
|
"grad_norm": 0.3472643792629242, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 0.4201, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.17587373167981962, |
|
"grad_norm": 0.344387948513031, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 0.3877, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.17737692596768131, |
|
"grad_norm": 0.34439584612846375, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 0.3717, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.17888012025554303, |
|
"grad_norm": 0.35252517461776733, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 0.3812, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.18038331454340473, |
|
"grad_norm": 0.3847930431365967, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 0.3554, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.18188650883126645, |
|
"grad_norm": 0.35014215111732483, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 0.3915, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.18338970311912814, |
|
"grad_norm": 0.35064560174942017, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 0.3526, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.18489289740698986, |
|
"grad_norm": 0.40358299016952515, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 0.3984, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.18639609169485155, |
|
"grad_norm": 0.3744370639324188, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.3496, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.18789928598271327, |
|
"grad_norm": 0.3636492192745209, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 0.3926, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.18940248027057496, |
|
"grad_norm": 0.385662317276001, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 0.369, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.19090567455843668, |
|
"grad_norm": 0.364361047744751, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 0.3635, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.19240886884629838, |
|
"grad_norm": 0.40278124809265137, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 0.3716, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.1939120631341601, |
|
"grad_norm": 0.3735451400279999, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 0.3252, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.1954152574220218, |
|
"grad_norm": 0.38422390818595886, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 0.3615, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1969184517098835, |
|
"grad_norm": 0.3829570710659027, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 0.3807, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1984216459977452, |
|
"grad_norm": 0.3987874686717987, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 0.3772, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.19992484028560692, |
|
"grad_norm": 0.3898436427116394, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 0.3396, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.2014280345734686, |
|
"grad_norm": 0.4023337960243225, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 0.3481, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.20293122886133033, |
|
"grad_norm": 0.3817198574542999, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 0.3441, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.20443442314919202, |
|
"grad_norm": 0.38800162076950073, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 0.3319, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.20593761743705374, |
|
"grad_norm": 0.40661248564720154, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 0.3798, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.20744081172491544, |
|
"grad_norm": 0.4218497574329376, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 0.3564, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.20894400601277716, |
|
"grad_norm": 0.42600011825561523, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 0.3439, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.21044720030063885, |
|
"grad_norm": 0.422944575548172, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 0.3458, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.21195039458850057, |
|
"grad_norm": 0.4048214554786682, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 0.3239, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.21345358887636226, |
|
"grad_norm": 0.4112938940525055, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 0.3048, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.21495678316422398, |
|
"grad_norm": 0.4650246500968933, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.3565, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.21645997745208567, |
|
"grad_norm": 0.5013602375984192, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 0.3867, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2179631717399474, |
|
"grad_norm": 0.45937857031822205, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 0.3557, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.21946636602780908, |
|
"grad_norm": 0.5174309015274048, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 0.3477, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2209695603156708, |
|
"grad_norm": 0.476762592792511, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 0.3355, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2224727546035325, |
|
"grad_norm": 0.4924473464488983, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 0.2744, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.22397594889139422, |
|
"grad_norm": 0.6597243547439575, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 0.3453, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.2254791431792559, |
|
"grad_norm": 0.6664057970046997, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 0.3596, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2254791431792559, |
|
"eval_loss": 0.3501320481300354, |
|
"eval_runtime": 54.7026, |
|
"eval_samples_per_second": 20.493, |
|
"eval_steps_per_second": 5.137, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.22698233746711763, |
|
"grad_norm": 0.22929313778877258, |
|
"learning_rate": 1.553232954407171e-05, |
|
"loss": 0.2784, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.22848553175497932, |
|
"grad_norm": 0.27377232909202576, |
|
"learning_rate": 1.4938160786375572e-05, |
|
"loss": 0.4036, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.22998872604284104, |
|
"grad_norm": 0.26963168382644653, |
|
"learning_rate": 1.435357758543015e-05, |
|
"loss": 0.3675, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.23149192033070273, |
|
"grad_norm": 0.3080262839794159, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 0.4166, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.23299511461856445, |
|
"grad_norm": 0.2926326394081116, |
|
"learning_rate": 1.3213804466343421e-05, |
|
"loss": 0.363, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.23449830890642615, |
|
"grad_norm": 0.29955020546913147, |
|
"learning_rate": 1.2658926150792322e-05, |
|
"loss": 0.3161, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.23600150319428786, |
|
"grad_norm": 0.3129074275493622, |
|
"learning_rate": 1.2114256511983274e-05, |
|
"loss": 0.3622, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.23750469748214956, |
|
"grad_norm": 0.3651416599750519, |
|
"learning_rate": 1.157994445715706e-05, |
|
"loss": 0.4296, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.23900789177001128, |
|
"grad_norm": 0.31064069271087646, |
|
"learning_rate": 1.1056136061894384e-05, |
|
"loss": 0.3492, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.24051108605787297, |
|
"grad_norm": 0.3591364026069641, |
|
"learning_rate": 1.0542974530180327e-05, |
|
"loss": 0.3917, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2420142803457347, |
|
"grad_norm": 0.3335144519805908, |
|
"learning_rate": 1.0040600155253765e-05, |
|
"loss": 0.3776, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.24351747463359638, |
|
"grad_norm": 0.30554336309432983, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.3455, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2450206689214581, |
|
"grad_norm": 0.3222895860671997, |
|
"learning_rate": 9.068759265665384e-06, |
|
"loss": 0.3355, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2465238632093198, |
|
"grad_norm": 0.3238259255886078, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 0.3454, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2480270574971815, |
|
"grad_norm": 0.3491290509700775, |
|
"learning_rate": 8.141676086873572e-06, |
|
"loss": 0.3474, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2495302517850432, |
|
"grad_norm": 0.3277914822101593, |
|
"learning_rate": 7.695237378953223e-06, |
|
"loss": 0.3651, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2510334460729049, |
|
"grad_norm": 0.33758988976478577, |
|
"learning_rate": 7.260364370723044e-06, |
|
"loss": 0.3741, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2525366403607666, |
|
"grad_norm": 0.3350788354873657, |
|
"learning_rate": 6.837175952121306e-06, |
|
"loss": 0.3499, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.25403983464862834, |
|
"grad_norm": 0.3243398666381836, |
|
"learning_rate": 6.425787818636131e-06, |
|
"loss": 0.347, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.25554302893649006, |
|
"grad_norm": 0.34355881810188293, |
|
"learning_rate": 6.026312439675552e-06, |
|
"loss": 0.3526, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2570462232243517, |
|
"grad_norm": 0.3813783824443817, |
|
"learning_rate": 5.6388590278194096e-06, |
|
"loss": 0.3835, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.25854941751221344, |
|
"grad_norm": 0.388657808303833, |
|
"learning_rate": 5.263533508961827e-06, |
|
"loss": 0.3828, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.26005261180007516, |
|
"grad_norm": 0.3673814833164215, |
|
"learning_rate": 4.900438493352055e-06, |
|
"loss": 0.3438, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2615558060879369, |
|
"grad_norm": 0.32948142290115356, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 0.3048, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.26305900037579855, |
|
"grad_norm": 0.3570793569087982, |
|
"learning_rate": 4.2113336672471245e-06, |
|
"loss": 0.3447, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.26456219466366027, |
|
"grad_norm": 0.35363292694091797, |
|
"learning_rate": 3.885512251130763e-06, |
|
"loss": 0.3631, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.266065388951522, |
|
"grad_norm": 0.37539130449295044, |
|
"learning_rate": 3.5722980755146517e-06, |
|
"loss": 0.3643, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.2675685832393837, |
|
"grad_norm": 0.38558998703956604, |
|
"learning_rate": 3.271776770026963e-06, |
|
"loss": 0.3681, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.26907177752724537, |
|
"grad_norm": 0.3716927170753479, |
|
"learning_rate": 2.9840304941919415e-06, |
|
"loss": 0.3671, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.2705749718151071, |
|
"grad_norm": 0.3771487772464752, |
|
"learning_rate": 2.7091379149682685e-06, |
|
"loss": 0.3309, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2720781661029688, |
|
"grad_norm": 0.3573903441429138, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.3246, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.27358136039083053, |
|
"grad_norm": 0.39243030548095703, |
|
"learning_rate": 2.1982109232821178e-06, |
|
"loss": 0.3374, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.2750845546786922, |
|
"grad_norm": 0.424135684967041, |
|
"learning_rate": 1.962316193157593e-06, |
|
"loss": 0.3857, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.2765877489665539, |
|
"grad_norm": 0.3713597357273102, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 0.3332, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.27809094325441563, |
|
"grad_norm": 0.39152365922927856, |
|
"learning_rate": 1.5299867030334814e-06, |
|
"loss": 0.398, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.27959413754227735, |
|
"grad_norm": 0.3714972138404846, |
|
"learning_rate": 1.333670137599713e-06, |
|
"loss": 0.3134, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.281097331830139, |
|
"grad_norm": 0.3927851915359497, |
|
"learning_rate": 1.1506584608200367e-06, |
|
"loss": 0.3585, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.28260052611800074, |
|
"grad_norm": 0.4404532313346863, |
|
"learning_rate": 9.810017062595322e-07, |
|
"loss": 0.362, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.28410372040586246, |
|
"grad_norm": 0.4045756757259369, |
|
"learning_rate": 8.247462563808817e-07, |
|
"loss": 0.3221, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.2856069146937242, |
|
"grad_norm": 0.42240893840789795, |
|
"learning_rate": 6.819348298638839e-07, |
|
"loss": 0.3702, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.28711010898158584, |
|
"grad_norm": 0.4267599880695343, |
|
"learning_rate": 5.526064699265753e-07, |
|
"loss": 0.3191, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.28861330326944756, |
|
"grad_norm": 0.4583100974559784, |
|
"learning_rate": 4.367965336512403e-07, |
|
"loss": 0.3849, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.2901164975573093, |
|
"grad_norm": 0.4387661814689636, |
|
"learning_rate": 3.3453668231809286e-07, |
|
"loss": 0.2992, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.291619691845171, |
|
"grad_norm": 0.43879735469818115, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 0.3221, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.29312288613303267, |
|
"grad_norm": 0.4648761451244354, |
|
"learning_rate": 1.7077534966650766e-07, |
|
"loss": 0.3139, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.2946260804208944, |
|
"grad_norm": 0.4891892373561859, |
|
"learning_rate": 1.0931863906127327e-07, |
|
"loss": 0.3178, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.2961292747087561, |
|
"grad_norm": 0.47990643978118896, |
|
"learning_rate": 6.150154258476315e-08, |
|
"loss": 0.3578, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2976324689966178, |
|
"grad_norm": 0.5234610438346863, |
|
"learning_rate": 2.7337132953697554e-08, |
|
"loss": 0.305, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.2991356632844795, |
|
"grad_norm": 0.5420705080032349, |
|
"learning_rate": 6.834750376549792e-09, |
|
"loss": 0.2801, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.3006388575723412, |
|
"grad_norm": 0.6559951305389404, |
|
"learning_rate": 0.0, |
|
"loss": 0.3526, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3006388575723412, |
|
"eval_loss": 0.34753015637397766, |
|
"eval_runtime": 54.7235, |
|
"eval_samples_per_second": 20.485, |
|
"eval_steps_per_second": 5.135, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8.826038955225907e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |