{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.2946593001841621,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014732965009208103,
      "grad_norm": 3.1541404724121094,
      "learning_rate": 2e-05,
      "loss": 8.9189,
      "step": 1
    },
    {
      "epoch": 0.0014732965009208103,
      "eval_loss": 3.2565059661865234,
      "eval_runtime": 9.4566,
      "eval_samples_per_second": 30.244,
      "eval_steps_per_second": 15.122,
      "step": 1
    },
    {
      "epoch": 0.0029465930018416206,
      "grad_norm": 1.7702511548995972,
      "learning_rate": 4e-05,
      "loss": 9.741,
      "step": 2
    },
    {
      "epoch": 0.004419889502762431,
      "grad_norm": 2.489617109298706,
      "learning_rate": 6e-05,
      "loss": 12.075,
      "step": 3
    },
    {
      "epoch": 0.005893186003683241,
      "grad_norm": 6.4327898025512695,
      "learning_rate": 8e-05,
      "loss": 13.4888,
      "step": 4
    },
    {
      "epoch": 0.007366482504604052,
      "grad_norm": 3.635437250137329,
      "learning_rate": 0.0001,
      "loss": 15.0644,
      "step": 5
    },
    {
      "epoch": 0.008839779005524863,
      "grad_norm": 5.8415045738220215,
      "learning_rate": 0.00012,
      "loss": 14.4951,
      "step": 6
    },
    {
      "epoch": 0.010313075506445672,
      "grad_norm": 6.604334831237793,
      "learning_rate": 0.00014,
      "loss": 15.9952,
      "step": 7
    },
    {
      "epoch": 0.011786372007366482,
      "grad_norm": 4.3601579666137695,
      "learning_rate": 0.00016,
      "loss": 11.8052,
      "step": 8
    },
    {
      "epoch": 0.013259668508287293,
      "grad_norm": 4.953864097595215,
      "learning_rate": 0.00018,
      "loss": 12.0071,
      "step": 9
    },
    {
      "epoch": 0.014732965009208104,
      "grad_norm": 7.948404312133789,
      "learning_rate": 0.0002,
      "loss": 11.1713,
      "step": 10
    },
    {
      "epoch": 0.016206261510128914,
      "grad_norm": 6.616621971130371,
      "learning_rate": 0.0001999863304992469,
      "loss": 9.3546,
      "step": 11
    },
    {
      "epoch": 0.017679558011049725,
      "grad_norm": 3.6598548889160156,
      "learning_rate": 0.00019994532573409262,
      "loss": 9.7632,
      "step": 12
    },
    {
      "epoch": 0.019152854511970532,
      "grad_norm": 5.915988445281982,
      "learning_rate": 0.00019987699691483048,
      "loss": 14.6365,
      "step": 13
    },
    {
      "epoch": 0.020626151012891343,
      "grad_norm": 6.100218772888184,
      "learning_rate": 0.00019978136272187747,
      "loss": 10.4504,
      "step": 14
    },
    {
      "epoch": 0.022099447513812154,
      "grad_norm": 3.99997615814209,
      "learning_rate": 0.000199658449300667,
      "loss": 9.436,
      "step": 15
    },
    {
      "epoch": 0.023572744014732964,
      "grad_norm": 4.781832695007324,
      "learning_rate": 0.00019950829025450114,
      "loss": 9.5615,
      "step": 16
    },
    {
      "epoch": 0.025046040515653775,
      "grad_norm": 4.316699504852295,
      "learning_rate": 0.00019933092663536382,
      "loss": 9.8006,
      "step": 17
    },
    {
      "epoch": 0.026519337016574586,
      "grad_norm": 5.100368976593018,
      "learning_rate": 0.00019912640693269752,
      "loss": 10.0697,
      "step": 18
    },
    {
      "epoch": 0.027992633517495397,
      "grad_norm": 11.279586791992188,
      "learning_rate": 0.00019889478706014687,
      "loss": 9.8463,
      "step": 19
    },
    {
      "epoch": 0.029465930018416207,
      "grad_norm": 5.626986026763916,
      "learning_rate": 0.00019863613034027224,
      "loss": 9.7286,
      "step": 20
    },
    {
      "epoch": 0.030939226519337018,
      "grad_norm": 11.165328025817871,
      "learning_rate": 0.00019835050748723824,
      "loss": 10.1978,
      "step": 21
    },
    {
      "epoch": 0.03241252302025783,
      "grad_norm": 9.0336275100708,
      "learning_rate": 0.00019803799658748094,
      "loss": 7.924,
      "step": 22
    },
    {
      "epoch": 0.03388581952117864,
      "grad_norm": 7.770285129547119,
      "learning_rate": 0.00019769868307835994,
      "loss": 11.2543,
      "step": 23
    },
    {
      "epoch": 0.03535911602209945,
      "grad_norm": 9.521552085876465,
      "learning_rate": 0.0001973326597248006,
      "loss": 12.0127,
      "step": 24
    },
    {
      "epoch": 0.03683241252302026,
      "grad_norm": 11.501378059387207,
      "learning_rate": 0.00019694002659393305,
      "loss": 9.5224,
      "step": 25
    },
    {
      "epoch": 0.038305709023941065,
      "grad_norm": 4.7648515701293945,
      "learning_rate": 0.00019652089102773488,
      "loss": 9.2757,
      "step": 26
    },
    {
      "epoch": 0.039779005524861875,
      "grad_norm": 10.825995445251465,
      "learning_rate": 0.00019607536761368484,
      "loss": 10.0724,
      "step": 27
    },
    {
      "epoch": 0.041252302025782686,
      "grad_norm": 7.209076404571533,
      "learning_rate": 0.00019560357815343577,
      "loss": 8.9072,
      "step": 28
    },
    {
      "epoch": 0.0427255985267035,
      "grad_norm": 10.064898490905762,
      "learning_rate": 0.00019510565162951537,
      "loss": 11.1213,
      "step": 29
    },
    {
      "epoch": 0.04419889502762431,
      "grad_norm": 10.006102561950684,
      "learning_rate": 0.00019458172417006347,
      "loss": 9.3537,
      "step": 30
    },
    {
      "epoch": 0.04567219152854512,
      "grad_norm": 2.8803489208221436,
      "learning_rate": 0.00019403193901161613,
      "loss": 6.3467,
      "step": 31
    },
    {
      "epoch": 0.04714548802946593,
      "grad_norm": 9.666797637939453,
      "learning_rate": 0.0001934564464599461,
      "loss": 9.9114,
      "step": 32
    },
    {
      "epoch": 0.04861878453038674,
      "grad_norm": 7.411388874053955,
      "learning_rate": 0.00019285540384897073,
      "loss": 12.8765,
      "step": 33
    },
    {
      "epoch": 0.05009208103130755,
      "grad_norm": 5.952574729919434,
      "learning_rate": 0.00019222897549773848,
      "loss": 8.1026,
      "step": 34
    },
    {
      "epoch": 0.05156537753222836,
      "grad_norm": 9.048133850097656,
      "learning_rate": 0.00019157733266550575,
      "loss": 9.8412,
      "step": 35
    },
    {
      "epoch": 0.05303867403314917,
      "grad_norm": 8.200292587280273,
      "learning_rate": 0.00019090065350491626,
      "loss": 8.7816,
      "step": 36
    },
    {
      "epoch": 0.05451197053406998,
      "grad_norm": 9.098681449890137,
      "learning_rate": 0.00019019912301329592,
      "loss": 11.8605,
      "step": 37
    },
    {
      "epoch": 0.05598526703499079,
      "grad_norm": 3.642021656036377,
      "learning_rate": 0.00018947293298207635,
      "loss": 7.211,
      "step": 38
    },
    {
      "epoch": 0.057458563535911604,
      "grad_norm": 5.9573974609375,
      "learning_rate": 0.0001887222819443612,
      "loss": 8.0583,
      "step": 39
    },
    {
      "epoch": 0.058931860036832415,
      "grad_norm": 5.869434833526611,
      "learning_rate": 0.0001879473751206489,
      "loss": 10.2234,
      "step": 40
    },
    {
      "epoch": 0.060405156537753225,
      "grad_norm": 8.367606163024902,
      "learning_rate": 0.00018714842436272773,
      "loss": 9.9171,
      "step": 41
    },
    {
      "epoch": 0.061878453038674036,
      "grad_norm": 3.8573427200317383,
      "learning_rate": 0.00018632564809575742,
      "loss": 6.9693,
      "step": 42
    },
    {
      "epoch": 0.06335174953959484,
      "grad_norm": 5.08005952835083,
      "learning_rate": 0.0001854792712585539,
      "loss": 8.545,
      "step": 43
    },
    {
      "epoch": 0.06482504604051566,
      "grad_norm": 5.412344455718994,
      "learning_rate": 0.00018460952524209355,
      "loss": 6.0708,
      "step": 44
    },
    {
      "epoch": 0.06629834254143646,
      "grad_norm": 5.560658931732178,
      "learning_rate": 0.00018371664782625287,
      "loss": 8.6937,
      "step": 45
    },
    {
      "epoch": 0.06777163904235728,
      "grad_norm": 5.4355149269104,
      "learning_rate": 0.00018280088311480201,
      "loss": 7.2302,
      "step": 46
    },
    {
      "epoch": 0.06924493554327808,
      "grad_norm": 5.770148277282715,
      "learning_rate": 0.00018186248146866927,
      "loss": 10.6754,
      "step": 47
    },
    {
      "epoch": 0.0707182320441989,
      "grad_norm": 6.869553565979004,
      "learning_rate": 0.00018090169943749476,
      "loss": 9.2739,
      "step": 48
    },
    {
      "epoch": 0.0721915285451197,
      "grad_norm": 8.691353797912598,
      "learning_rate": 0.0001799187996894925,
      "loss": 9.0793,
      "step": 49
    },
    {
      "epoch": 0.07366482504604052,
      "grad_norm": 8.788375854492188,
      "learning_rate": 0.00017891405093963938,
      "loss": 9.0745,
      "step": 50
    },
    {
      "epoch": 0.07366482504604052,
      "eval_loss": 2.2372069358825684,
      "eval_runtime": 9.4565,
      "eval_samples_per_second": 30.244,
      "eval_steps_per_second": 15.122,
      "step": 50
    },
    {
      "epoch": 0.07513812154696133,
      "grad_norm": 7.550640106201172,
      "learning_rate": 0.00017788772787621126,
      "loss": 12.7868,
      "step": 51
    },
    {
      "epoch": 0.07661141804788213,
      "grad_norm": 7.088727951049805,
      "learning_rate": 0.00017684011108568592,
      "loss": 6.6456,
      "step": 52
    },
    {
      "epoch": 0.07808471454880295,
      "grad_norm": 6.3660101890563965,
      "learning_rate": 0.0001757714869760335,
      "loss": 9.7407,
      "step": 53
    },
    {
      "epoch": 0.07955801104972375,
      "grad_norm": 6.493173599243164,
      "learning_rate": 0.0001746821476984154,
      "loss": 8.9581,
      "step": 54
    },
    {
      "epoch": 0.08103130755064457,
      "grad_norm": 13.058594703674316,
      "learning_rate": 0.00017357239106731317,
      "loss": 7.4741,
      "step": 55
    },
    {
      "epoch": 0.08250460405156537,
      "grad_norm": 9.218979835510254,
      "learning_rate": 0.00017244252047910892,
      "loss": 10.1216,
      "step": 56
    },
    {
      "epoch": 0.08397790055248619,
      "grad_norm": 8.803467750549316,
      "learning_rate": 0.00017129284482913972,
      "loss": 10.4821,
      "step": 57
    },
    {
      "epoch": 0.085451197053407,
      "grad_norm": 7.895155429840088,
      "learning_rate": 0.00017012367842724887,
      "loss": 8.5694,
      "step": 58
    },
    {
      "epoch": 0.08692449355432781,
      "grad_norm": 8.486394882202148,
      "learning_rate": 0.0001689353409118566,
      "loss": 10.1866,
      "step": 59
    },
    {
      "epoch": 0.08839779005524862,
      "grad_norm": 8.857569694519043,
      "learning_rate": 0.00016772815716257412,
      "loss": 8.1711,
      "step": 60
    },
    {
      "epoch": 0.08987108655616943,
      "grad_norm": 5.87221622467041,
      "learning_rate": 0.0001665024572113848,
      "loss": 8.2002,
      "step": 61
    },
    {
      "epoch": 0.09134438305709024,
      "grad_norm": 7.224503993988037,
      "learning_rate": 0.00016525857615241687,
      "loss": 9.4081,
      "step": 62
    },
    {
      "epoch": 0.09281767955801105,
      "grad_norm": 9.808675765991211,
      "learning_rate": 0.00016399685405033167,
      "loss": 7.1058,
      "step": 63
    },
    {
      "epoch": 0.09429097605893186,
      "grad_norm": 12.092679977416992,
      "learning_rate": 0.0001627176358473537,
      "loss": 10.0414,
      "step": 64
    },
    {
      "epoch": 0.09576427255985268,
      "grad_norm": 8.735736846923828,
      "learning_rate": 0.0001614212712689668,
      "loss": 10.7128,
      "step": 65
    },
    {
      "epoch": 0.09723756906077348,
      "grad_norm": 9.30666446685791,
      "learning_rate": 0.00016010811472830252,
      "loss": 8.9495,
      "step": 66
    },
    {
      "epoch": 0.0987108655616943,
      "grad_norm": 8.964729309082031,
      "learning_rate": 0.00015877852522924732,
      "loss": 6.9074,
      "step": 67
    },
    {
      "epoch": 0.1001841620626151,
      "grad_norm": 8.615819931030273,
      "learning_rate": 0.00015743286626829437,
      "loss": 7.6531,
      "step": 68
    },
    {
      "epoch": 0.10165745856353592,
      "grad_norm": 9.311439514160156,
      "learning_rate": 0.0001560715057351673,
      "loss": 10.6691,
      "step": 69
    },
    {
      "epoch": 0.10313075506445672,
      "grad_norm": 7.247087478637695,
      "learning_rate": 0.00015469481581224272,
      "loss": 7.229,
      "step": 70
    },
    {
      "epoch": 0.10460405156537753,
      "grad_norm": 6.454756259918213,
      "learning_rate": 0.0001533031728727994,
      "loss": 7.7774,
      "step": 71
    },
    {
      "epoch": 0.10607734806629834,
      "grad_norm": 6.882237911224365,
      "learning_rate": 0.00015189695737812152,
      "loss": 7.7409,
      "step": 72
    },
    {
      "epoch": 0.10755064456721915,
      "grad_norm": 6.786222457885742,
      "learning_rate": 0.0001504765537734844,
      "loss": 7.0605,
      "step": 73
    },
    {
      "epoch": 0.10902394106813997,
      "grad_norm": 7.709743499755859,
      "learning_rate": 0.00014904235038305083,
      "loss": 9.9043,
      "step": 74
    },
    {
      "epoch": 0.11049723756906077,
      "grad_norm": 6.459442138671875,
      "learning_rate": 0.00014759473930370736,
      "loss": 8.6428,
      "step": 75
    },
    {
      "epoch": 0.11197053406998159,
      "grad_norm": 9.24147891998291,
      "learning_rate": 0.0001461341162978688,
      "loss": 9.9,
      "step": 76
    },
    {
      "epoch": 0.11344383057090239,
      "grad_norm": 6.457151889801025,
      "learning_rate": 0.00014466088068528068,
      "loss": 10.7196,
      "step": 77
    },
    {
      "epoch": 0.11491712707182321,
      "grad_norm": 3.3985164165496826,
      "learning_rate": 0.00014317543523384928,
      "loss": 4.1652,
      "step": 78
    },
    {
      "epoch": 0.11639042357274401,
      "grad_norm": 12.46141242980957,
      "learning_rate": 0.00014167818604952906,
      "loss": 7.9478,
      "step": 79
    },
    {
      "epoch": 0.11786372007366483,
      "grad_norm": 8.607696533203125,
      "learning_rate": 0.00014016954246529696,
      "loss": 8.3695,
      "step": 80
    },
    {
      "epoch": 0.11933701657458563,
      "grad_norm": 8.640053749084473,
      "learning_rate": 0.00013864991692924523,
      "loss": 6.5266,
      "step": 81
    },
    {
      "epoch": 0.12081031307550645,
      "grad_norm": 8.049505233764648,
      "learning_rate": 0.00013711972489182208,
      "loss": 9.0633,
      "step": 82
    },
    {
      "epoch": 0.12228360957642725,
      "grad_norm": 8.616392135620117,
      "learning_rate": 0.00013557938469225167,
      "loss": 5.9323,
      "step": 83
    },
    {
      "epoch": 0.12375690607734807,
      "grad_norm": 5.472959995269775,
      "learning_rate": 0.00013402931744416433,
      "loss": 6.0568,
      "step": 84
    },
    {
      "epoch": 0.1252302025782689,
      "grad_norm": 7.378502368927002,
      "learning_rate": 0.00013246994692046836,
      "loss": 6.6528,
      "step": 85
    },
    {
      "epoch": 0.12670349907918968,
      "grad_norm": 8.463314056396484,
      "learning_rate": 0.00013090169943749476,
      "loss": 7.2891,
      "step": 86
    },
    {
      "epoch": 0.1281767955801105,
      "grad_norm": 6.927599906921387,
      "learning_rate": 0.0001293250037384465,
      "loss": 8.9668,
      "step": 87
    },
    {
      "epoch": 0.12965009208103132,
      "grad_norm": 7.007180213928223,
      "learning_rate": 0.00012774029087618446,
      "loss": 6.3727,
      "step": 88
    },
    {
      "epoch": 0.1311233885819521,
      "grad_norm": 8.22607135772705,
      "learning_rate": 0.00012614799409538198,
      "loss": 7.8068,
      "step": 89
    },
    {
      "epoch": 0.13259668508287292,
      "grad_norm": 5.844438552856445,
      "learning_rate": 0.00012454854871407994,
      "loss": 5.3604,
      "step": 90
    },
    {
      "epoch": 0.13406998158379374,
      "grad_norm": 11.601090431213379,
      "learning_rate": 0.00012294239200467516,
      "loss": 10.1369,
      "step": 91
    },
    {
      "epoch": 0.13554327808471456,
      "grad_norm": 7.081054210662842,
      "learning_rate": 0.0001213299630743747,
      "loss": 8.1575,
      "step": 92
    },
    {
      "epoch": 0.13701657458563535,
      "grad_norm": 11.263205528259277,
      "learning_rate": 0.00011971170274514802,
      "loss": 9.0753,
      "step": 93
    },
    {
      "epoch": 0.13848987108655617,
      "grad_norm": 8.47836685180664,
      "learning_rate": 0.000118088053433211,
      "loss": 7.837,
      "step": 94
    },
    {
      "epoch": 0.13996316758747698,
      "grad_norm": 7.94907283782959,
      "learning_rate": 0.00011645945902807341,
      "loss": 6.2531,
      "step": 95
    },
    {
      "epoch": 0.1414364640883978,
      "grad_norm": 14.959242820739746,
      "learning_rate": 0.0001148263647711842,
      "loss": 9.7477,
      "step": 96
    },
    {
      "epoch": 0.1429097605893186,
      "grad_norm": 9.983634948730469,
      "learning_rate": 0.00011318921713420691,
      "loss": 6.9798,
      "step": 97
    },
    {
      "epoch": 0.1443830570902394,
      "grad_norm": 10.852173805236816,
      "learning_rate": 0.00011154846369695863,
      "loss": 7.873,
      "step": 98
    },
    {
      "epoch": 0.14585635359116023,
      "grad_norm": 8.277898788452148,
      "learning_rate": 0.0001099045530250463,
      "loss": 6.3715,
      "step": 99
    },
    {
      "epoch": 0.14732965009208104,
      "grad_norm": 6.592026710510254,
      "learning_rate": 0.00010825793454723325,
      "loss": 8.1482,
      "step": 100
    },
    {
      "epoch": 0.14732965009208104,
      "eval_loss": 1.9596363306045532,
      "eval_runtime": 9.4182,
      "eval_samples_per_second": 30.367,
      "eval_steps_per_second": 15.183,
      "step": 100
    },
    {
      "epoch": 0.14880294659300183,
      "grad_norm": 7.7546186447143555,
      "learning_rate": 0.00010660905843256994,
      "loss": 7.1284,
      "step": 101
    },
    {
      "epoch": 0.15027624309392265,
      "grad_norm": 8.040283203125,
      "learning_rate": 0.00010495837546732224,
      "loss": 7.7712,
      "step": 102
    },
    {
      "epoch": 0.15174953959484347,
      "grad_norm": 5.6713547706604,
      "learning_rate": 0.00010330633693173082,
      "loss": 5.3149,
      "step": 103
    },
    {
      "epoch": 0.15322283609576426,
      "grad_norm": 7.573396682739258,
      "learning_rate": 0.00010165339447663587,
      "loss": 8.7858,
      "step": 104
    },
    {
      "epoch": 0.15469613259668508,
      "grad_norm": 9.673140525817871,
      "learning_rate": 0.0001,
      "loss": 7.4904,
      "step": 105
    },
    {
      "epoch": 0.1561694290976059,
      "grad_norm": 13.609859466552734,
      "learning_rate": 9.834660552336415e-05,
      "loss": 9.5067,
      "step": 106
    },
    {
      "epoch": 0.1576427255985267,
      "grad_norm": 9.656366348266602,
      "learning_rate": 9.669366306826919e-05,
      "loss": 10.8401,
      "step": 107
    },
    {
      "epoch": 0.1591160220994475,
      "grad_norm": 12.622513771057129,
      "learning_rate": 9.504162453267777e-05,
      "loss": 7.4235,
      "step": 108
    },
    {
      "epoch": 0.16058931860036832,
      "grad_norm": 6.947592258453369,
      "learning_rate": 9.339094156743007e-05,
      "loss": 8.8876,
      "step": 109
    },
    {
      "epoch": 0.16206261510128914,
      "grad_norm": 7.011374473571777,
      "learning_rate": 9.174206545276677e-05,
      "loss": 5.4014,
      "step": 110
    },
    {
      "epoch": 0.16353591160220995,
      "grad_norm": 4.517526626586914,
      "learning_rate": 9.009544697495374e-05,
      "loss": 5.814,
      "step": 111
    },
    {
      "epoch": 0.16500920810313074,
      "grad_norm": 6.089804172515869,
      "learning_rate": 8.845153630304139e-05,
      "loss": 6.8508,
      "step": 112
    },
    {
      "epoch": 0.16648250460405156,
      "grad_norm": 10.510293006896973,
      "learning_rate": 8.681078286579311e-05,
      "loss": 9.7658,
      "step": 113
    },
    {
      "epoch": 0.16795580110497238,
      "grad_norm": 9.174927711486816,
      "learning_rate": 8.517363522881579e-05,
      "loss": 7.1456,
      "step": 114
    },
    {
      "epoch": 0.1694290976058932,
      "grad_norm": 7.33340311050415,
      "learning_rate": 8.35405409719266e-05,
      "loss": 6.902,
      "step": 115
    },
    {
      "epoch": 0.170902394106814,
      "grad_norm": 6.4531779289245605,
      "learning_rate": 8.191194656678904e-05,
      "loss": 6.6226,
      "step": 116
    },
    {
      "epoch": 0.1723756906077348,
      "grad_norm": 9.962913513183594,
      "learning_rate": 8.028829725485199e-05,
      "loss": 9.6444,
      "step": 117
    },
    {
      "epoch": 0.17384898710865562,
      "grad_norm": 5.590907573699951,
      "learning_rate": 7.867003692562534e-05,
      "loss": 7.2986,
      "step": 118
    },
    {
      "epoch": 0.17532228360957644,
      "grad_norm": 10.6129150390625,
      "learning_rate": 7.705760799532485e-05,
      "loss": 7.5536,
      "step": 119
    },
    {
      "epoch": 0.17679558011049723,
      "grad_norm": 14.160097122192383,
      "learning_rate": 7.54514512859201e-05,
      "loss": 12.2425,
      "step": 120
    },
    {
      "epoch": 0.17826887661141805,
      "grad_norm": 10.54971694946289,
      "learning_rate": 7.385200590461803e-05,
      "loss": 6.7272,
      "step": 121
    },
    {
      "epoch": 0.17974217311233887,
      "grad_norm": 6.65580415725708,
      "learning_rate": 7.225970912381556e-05,
      "loss": 6.8475,
      "step": 122
    },
    {
      "epoch": 0.18121546961325966,
      "grad_norm": 10.194478034973145,
      "learning_rate": 7.067499626155354e-05,
      "loss": 8.6189,
      "step": 123
    },
    {
      "epoch": 0.18268876611418047,
      "grad_norm": 11.780241012573242,
      "learning_rate": 6.909830056250527e-05,
      "loss": 8.392,
      "step": 124
    },
    {
      "epoch": 0.1841620626151013,
      "grad_norm": 6.544031143188477,
      "learning_rate": 6.753005307953167e-05,
      "loss": 7.9356,
      "step": 125
    },
    {
      "epoch": 0.1856353591160221,
      "grad_norm": 5.778688430786133,
      "learning_rate": 6.59706825558357e-05,
      "loss": 4.8296,
      "step": 126
    },
    {
      "epoch": 0.1871086556169429,
      "grad_norm": 13.133682250976562,
      "learning_rate": 6.442061530774834e-05,
      "loss": 8.4469,
      "step": 127
    },
    {
      "epoch": 0.18858195211786372,
      "grad_norm": 5.862144947052002,
      "learning_rate": 6.28802751081779e-05,
      "loss": 5.7135,
      "step": 128
    },
    {
      "epoch": 0.19005524861878453,
      "grad_norm": 5.824400901794434,
      "learning_rate": 6.135008307075481e-05,
      "loss": 7.4102,
      "step": 129
    },
    {
      "epoch": 0.19152854511970535,
      "grad_norm": 7.972049713134766,
      "learning_rate": 5.983045753470308e-05,
      "loss": 7.9903,
      "step": 130
    },
    {
      "epoch": 0.19300184162062614,
      "grad_norm": 7.148801803588867,
      "learning_rate": 5.832181395047098e-05,
      "loss": 4.9407,
      "step": 131
    },
    {
      "epoch": 0.19447513812154696,
      "grad_norm": 4.160274028778076,
      "learning_rate": 5.6824564766150726e-05,
      "loss": 7.0311,
      "step": 132
    },
    {
      "epoch": 0.19594843462246778,
      "grad_norm": 4.829346656799316,
      "learning_rate": 5.533911931471936e-05,
      "loss": 5.5874,
      "step": 133
    },
    {
      "epoch": 0.1974217311233886,
      "grad_norm": 8.297733306884766,
      "learning_rate": 5.386588370213124e-05,
      "loss": 6.3769,
      "step": 134
    },
    {
      "epoch": 0.19889502762430938,
      "grad_norm": 5.839636325836182,
      "learning_rate": 5.240526069629265e-05,
      "loss": 6.8749,
      "step": 135
    },
    {
      "epoch": 0.2003683241252302,
      "grad_norm": 7.078559398651123,
      "learning_rate": 5.095764961694922e-05,
      "loss": 6.4378,
      "step": 136
    },
    {
      "epoch": 0.20184162062615102,
      "grad_norm": 5.329637050628662,
      "learning_rate": 4.952344622651566e-05,
      "loss": 6.1027,
      "step": 137
    },
    {
      "epoch": 0.20331491712707184,
      "grad_norm": 10.604938507080078,
      "learning_rate": 4.810304262187852e-05,
      "loss": 9.6319,
      "step": 138
    },
    {
      "epoch": 0.20478821362799263,
      "grad_norm": 10.23065185546875,
      "learning_rate": 4.669682712720065e-05,
      "loss": 10.1313,
      "step": 139
    },
    {
      "epoch": 0.20626151012891344,
      "grad_norm": 14.528596878051758,
      "learning_rate": 4.530518418775733e-05,
      "loss": 8.1347,
      "step": 140
    },
    {
      "epoch": 0.20773480662983426,
      "grad_norm": 5.988021373748779,
      "learning_rate": 4.392849426483274e-05,
      "loss": 8.9566,
      "step": 141
    },
    {
      "epoch": 0.20920810313075505,
      "grad_norm": 13.86946964263916,
      "learning_rate": 4.256713373170564e-05,
      "loss": 7.2571,
      "step": 142
    },
    {
      "epoch": 0.21068139963167587,
      "grad_norm": 8.190982818603516,
      "learning_rate": 4.12214747707527e-05,
      "loss": 6.508,
      "step": 143
    },
    {
      "epoch": 0.2121546961325967,
      "grad_norm": 8.50158405303955,
      "learning_rate": 3.9891885271697496e-05,
      "loss": 5.1829,
      "step": 144
    },
    {
      "epoch": 0.2136279926335175,
      "grad_norm": 9.470782279968262,
      "learning_rate": 3.857872873103322e-05,
      "loss": 6.3869,
      "step": 145
    },
    {
      "epoch": 0.2151012891344383,
      "grad_norm": 9.785574913024902,
      "learning_rate": 3.7282364152646297e-05,
      "loss": 7.6541,
      "step": 146
    },
    {
      "epoch": 0.2165745856353591,
      "grad_norm": 5.911442756652832,
      "learning_rate": 3.600314594966834e-05,
      "loss": 5.7766,
      "step": 147
    },
    {
      "epoch": 0.21804788213627993,
      "grad_norm": 5.854257583618164,
      "learning_rate": 3.4741423847583134e-05,
      "loss": 4.5367,
      "step": 148
    },
    {
      "epoch": 0.21952117863720075,
      "grad_norm": 12.969705581665039,
      "learning_rate": 3.349754278861517e-05,
      "loss": 8.9383,
      "step": 149
    },
    {
      "epoch": 0.22099447513812154,
      "grad_norm": 9.376482963562012,
      "learning_rate": 3.227184283742591e-05,
      "loss": 9.8813,
      "step": 150
    },
    {
      "epoch": 0.22099447513812154,
      "eval_loss": 1.8338268995285034,
      "eval_runtime": 9.4611,
      "eval_samples_per_second": 30.229,
      "eval_steps_per_second": 15.115,
      "step": 150
    },
    {
      "epoch": 0.22246777163904236,
      "grad_norm": 7.644078731536865,
      "learning_rate": 3.106465908814342e-05,
      "loss": 6.0862,
      "step": 151
    },
    {
      "epoch": 0.22394106813996317,
      "grad_norm": 10.181632995605469,
      "learning_rate": 2.9876321572751144e-05,
      "loss": 11.6419,
      "step": 152
    },
    {
      "epoch": 0.225414364640884,
      "grad_norm": 10.395967483520508,
      "learning_rate": 2.87071551708603e-05,
      "loss": 7.4106,
      "step": 153
    },
    {
      "epoch": 0.22688766114180478,
      "grad_norm": 9.284590721130371,
      "learning_rate": 2.7557479520891104e-05,
      "loss": 9.6296,
      "step": 154
    },
    {
      "epoch": 0.2283609576427256,
      "grad_norm": 9.491522789001465,
      "learning_rate": 2.6427608932686843e-05,
      "loss": 7.7744,
      "step": 155
    },
    {
      "epoch": 0.22983425414364642,
      "grad_norm": 14.384906768798828,
      "learning_rate": 2.5317852301584643e-05,
      "loss": 8.2422,
      "step": 156
    },
    {
      "epoch": 0.2313075506445672,
      "grad_norm": 9.905705451965332,
      "learning_rate": 2.422851302396655e-05,
      "loss": 9.2604,
      "step": 157
    },
    {
      "epoch": 0.23278084714548802,
      "grad_norm": 4.367447853088379,
      "learning_rate": 2.315988891431412e-05,
      "loss": 4.1067,
      "step": 158
    },
    {
      "epoch": 0.23425414364640884,
      "grad_norm": 11.153772354125977,
      "learning_rate": 2.2112272123788768e-05,
      "loss": 7.3027,
      "step": 159
    },
    {
      "epoch": 0.23572744014732966,
      "grad_norm": 8.646809577941895,
      "learning_rate": 2.1085949060360654e-05,
      "loss": 6.6389,
      "step": 160
    },
    {
      "epoch": 0.23720073664825045,
      "grad_norm": 15.441944122314453,
      "learning_rate": 2.008120031050753e-05,
      "loss": 7.037,
      "step": 161
    },
    {
      "epoch": 0.23867403314917127,
      "grad_norm": 10.072053909301758,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 7.1396,
      "step": 162
    },
    {
      "epoch": 0.24014732965009208,
      "grad_norm": 6.161656379699707,
      "learning_rate": 1.8137518531330767e-05,
      "loss": 5.8051,
      "step": 163
    },
    {
      "epoch": 0.2416206261510129,
      "grad_norm": 9.943297386169434,
      "learning_rate": 1.7199116885197995e-05,
      "loss": 8.8631,
      "step": 164
    },
    {
      "epoch": 0.2430939226519337,
      "grad_norm": 12.956791877746582,
      "learning_rate": 1.6283352173747145e-05,
      "loss": 5.8573,
      "step": 165
    },
    {
      "epoch": 0.2445672191528545,
      "grad_norm": 6.923314571380615,
      "learning_rate": 1.5390474757906446e-05,
      "loss": 7.4056,
      "step": 166
    },
    {
      "epoch": 0.24604051565377533,
      "grad_norm": 14.149099349975586,
      "learning_rate": 1.4520728741446089e-05,
      "loss": 10.0937,
      "step": 167
    },
    {
      "epoch": 0.24751381215469614,
      "grad_norm": 9.165054321289062,
      "learning_rate": 1.3674351904242611e-05,
      "loss": 8.1644,
      "step": 168
    },
    {
      "epoch": 0.24898710865561693,
      "grad_norm": 9.420945167541504,
      "learning_rate": 1.2851575637272262e-05,
      "loss": 7.6371,
      "step": 169
    },
    {
      "epoch": 0.2504604051565378,
      "grad_norm": 6.907212734222412,
      "learning_rate": 1.2052624879351104e-05,
      "loss": 6.2545,
      "step": 170
    },
    {
      "epoch": 0.25193370165745854,
      "grad_norm": 6.471442699432373,
      "learning_rate": 1.1277718055638819e-05,
      "loss": 7.0496,
      "step": 171
    },
    {
      "epoch": 0.25340699815837936,
      "grad_norm": 7.961968421936035,
      "learning_rate": 1.0527067017923654e-05,
      "loss": 7.9685,
      "step": 172
    },
    {
      "epoch": 0.2548802946593002,
      "grad_norm": 6.750203609466553,
      "learning_rate": 9.80087698670411e-06,
      "loss": 4.4272,
      "step": 173
    },
    {
      "epoch": 0.256353591160221,
      "grad_norm": 7.331230163574219,
      "learning_rate": 9.09934649508375e-06,
      "loss": 4.6844,
      "step": 174
    },
    {
      "epoch": 0.2578268876611418,
      "grad_norm": 8.612319946289062,
      "learning_rate": 8.422667334494249e-06,
      "loss": 6.4878,
      "step": 175
    },
    {
      "epoch": 0.25930018416206263,
      "grad_norm": 15.962565422058105,
      "learning_rate": 7.771024502261526e-06,
      "loss": 10.4547,
      "step": 176
    },
    {
      "epoch": 0.26077348066298345,
      "grad_norm": 11.007623672485352,
      "learning_rate": 7.144596151029303e-06,
      "loss": 8.9079,
      "step": 177
    },
    {
      "epoch": 0.2622467771639042,
      "grad_norm": 5.049574375152588,
      "learning_rate": 6.543553540053926e-06,
      "loss": 5.1004,
      "step": 178
    },
    {
      "epoch": 0.263720073664825,
      "grad_norm": 7.689352035522461,
      "learning_rate": 5.968060988383883e-06,
      "loss": 6.4234,
      "step": 179
    },
    {
      "epoch": 0.26519337016574585,
      "grad_norm": 12.746678352355957,
      "learning_rate": 5.418275829936537e-06,
      "loss": 10.7555,
      "step": 180
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 8.217164039611816,
      "learning_rate": 4.8943483704846475e-06,
      "loss": 6.3151,
      "step": 181
    },
    {
      "epoch": 0.2681399631675875,
      "grad_norm": 8.921655654907227,
      "learning_rate": 4.3964218465642355e-06,
      "loss": 8.7948,
      "step": 182
    },
    {
      "epoch": 0.2696132596685083,
      "grad_norm": 7.391983509063721,
      "learning_rate": 3.924632386315186e-06,
      "loss": 5.6443,
      "step": 183
    },
    {
      "epoch": 0.2710865561694291,
      "grad_norm": 6.958240032196045,
      "learning_rate": 3.4791089722651436e-06,
      "loss": 6.8103,
      "step": 184
    },
    {
      "epoch": 0.27255985267034993,
      "grad_norm": 9.752053260803223,
      "learning_rate": 3.059973406066963e-06,
      "loss": 5.7348,
      "step": 185
    },
    {
      "epoch": 0.2740331491712707,
      "grad_norm": 7.452392578125,
      "learning_rate": 2.667340275199426e-06,
      "loss": 8.5893,
      "step": 186
    },
    {
      "epoch": 0.2755064456721915,
      "grad_norm": 8.648473739624023,
      "learning_rate": 2.3013169216400733e-06,
      "loss": 8.2165,
      "step": 187
    },
    {
      "epoch": 0.27697974217311233,
      "grad_norm": 10.441420555114746,
      "learning_rate": 1.9620034125190644e-06,
      "loss": 6.9184,
      "step": 188
    },
    {
      "epoch": 0.27845303867403315,
      "grad_norm": 9.087239265441895,
      "learning_rate": 1.6494925127617634e-06,
      "loss": 9.28,
      "step": 189
    },
    {
      "epoch": 0.27992633517495397,
      "grad_norm": 7.3535542488098145,
      "learning_rate": 1.3638696597277679e-06,
      "loss": 6.3341,
      "step": 190
    },
    {
      "epoch": 0.2813996316758748,
      "grad_norm": 8.444154739379883,
      "learning_rate": 1.1052129398531507e-06,
      "loss": 7.9693,
      "step": 191
    },
    {
      "epoch": 0.2828729281767956,
      "grad_norm": 9.900870323181152,
      "learning_rate": 8.735930673024806e-07,
      "loss": 5.7817,
      "step": 192
    },
    {
      "epoch": 0.28434622467771636,
      "grad_norm": 14.024637222290039,
      "learning_rate": 6.690733646361857e-07,
      "loss": 9.6444,
      "step": 193
    },
    {
      "epoch": 0.2858195211786372,
      "grad_norm": 5.964371204376221,
      "learning_rate": 4.917097454988584e-07,
      "loss": 5.2127,
      "step": 194
    },
    {
      "epoch": 0.287292817679558,
      "grad_norm": 8.721577644348145,
      "learning_rate": 3.415506993330153e-07,
      "loss": 9.1063,
      "step": 195
    },
    {
      "epoch": 0.2887661141804788,
      "grad_norm": 6.445766925811768,
      "learning_rate": 2.1863727812254653e-07,
      "loss": 5.1139,
      "step": 196
    },
    {
      "epoch": 0.29023941068139963,
      "grad_norm": 6.205690860748291,
      "learning_rate": 1.230030851695263e-07,
      "loss": 4.9742,
      "step": 197
    },
    {
      "epoch": 0.29171270718232045,
      "grad_norm": 8.137059211730957,
      "learning_rate": 5.467426590739511e-08,
      "loss": 8.3932,
      "step": 198
    },
    {
      "epoch": 0.29318600368324127,
      "grad_norm": 5.232216835021973,
      "learning_rate": 1.3669500753099585e-08,
      "loss": 6.5716,
      "step": 199
    },
    {
      "epoch": 0.2946593001841621,
      "grad_norm": 7.499634265899658,
      "learning_rate": 0.0,
      "loss": 6.5585,
      "step": 200
    },
    {
      "epoch": 0.2946593001841621,
      "eval_loss": 1.8100769519805908,
      "eval_runtime": 9.4494,
      "eval_samples_per_second": 30.266,
      "eval_steps_per_second": 15.133,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.2900734689214464e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}