|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.955555555555556,
  "eval_steps": 500,
  "global_step": 560,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.017777777777777778, "grad_norm": 1.7422537803649902, "learning_rate": 1.7857142857142857e-06, "loss": 0.7247, "step": 1 },
    { "epoch": 0.035555555555555556, "grad_norm": 1.5862261056900024, "learning_rate": 3.5714285714285714e-06, "loss": 0.6432, "step": 2 },
    { "epoch": 0.05333333333333334, "grad_norm": 1.3740944862365723, "learning_rate": 5.357142857142857e-06, "loss": 0.6158, "step": 3 },
    { "epoch": 0.07111111111111111, "grad_norm": 1.5012167692184448, "learning_rate": 7.142857142857143e-06, "loss": 0.6322, "step": 4 },
    { "epoch": 0.08888888888888889, "grad_norm": 1.4912959337234497, "learning_rate": 8.92857142857143e-06, "loss": 0.5955, "step": 5 },
    { "epoch": 0.10666666666666667, "grad_norm": 1.462948203086853, "learning_rate": 1.0714285714285714e-05, "loss": 0.6794, "step": 6 },
    { "epoch": 0.12444444444444444, "grad_norm": 1.226659893989563, "learning_rate": 1.25e-05, "loss": 0.5716, "step": 7 },
    { "epoch": 0.14222222222222222, "grad_norm": 1.1903551816940308, "learning_rate": 1.4285714285714285e-05, "loss": 0.6785, "step": 8 },
    { "epoch": 0.16, "grad_norm": 1.4021542072296143, "learning_rate": 1.6071428571428572e-05, "loss": 0.5768, "step": 9 },
    { "epoch": 0.17777777777777778, "grad_norm": 1.4458407163619995, "learning_rate": 1.785714285714286e-05, "loss": 0.653, "step": 10 },
    { "epoch": 0.19555555555555557, "grad_norm": 1.669067144393921, "learning_rate": 1.9642857142857145e-05, "loss": 0.608, "step": 11 },
    { "epoch": 0.21333333333333335, "grad_norm": 1.5338914394378662, "learning_rate": 2.1428571428571428e-05, "loss": 0.6273, "step": 12 },
    { "epoch": 0.2311111111111111, "grad_norm": 1.299443006515503, "learning_rate": 2.3214285714285715e-05, "loss": 0.5894, "step": 13 },
    { "epoch": 0.24888888888888888, "grad_norm": 1.3132046461105347, "learning_rate": 2.5e-05, "loss": 0.6952, "step": 14 },
    { "epoch": 0.26666666666666666, "grad_norm": 1.780645728111267, "learning_rate": 2.6785714285714288e-05, "loss": 0.695, "step": 15 },
    { "epoch": 0.28444444444444444, "grad_norm": 1.391330361366272, "learning_rate": 2.857142857142857e-05, "loss": 0.5933, "step": 16 },
    { "epoch": 0.3022222222222222, "grad_norm": 1.3170968294143677, "learning_rate": 3.0357142857142857e-05, "loss": 0.5422, "step": 17 },
    { "epoch": 0.32, "grad_norm": 1.0980342626571655, "learning_rate": 3.2142857142857144e-05, "loss": 0.5283, "step": 18 },
    { "epoch": 0.3377777777777778, "grad_norm": 1.122798204421997, "learning_rate": 3.392857142857143e-05, "loss": 0.5295, "step": 19 },
    { "epoch": 0.35555555555555557, "grad_norm": 1.741687297821045, "learning_rate": 3.571428571428572e-05, "loss": 0.4891, "step": 20 },
    { "epoch": 0.37333333333333335, "grad_norm": 1.3390421867370605, "learning_rate": 3.7500000000000003e-05, "loss": 0.5169, "step": 21 },
    { "epoch": 0.39111111111111113, "grad_norm": 1.5962032079696655, "learning_rate": 3.928571428571429e-05, "loss": 0.5211, "step": 22 },
    { "epoch": 0.4088888888888889, "grad_norm": 1.4163485765457153, "learning_rate": 4.107142857142857e-05, "loss": 0.4661, "step": 23 },
    { "epoch": 0.4266666666666667, "grad_norm": 1.2807841300964355, "learning_rate": 4.2857142857142856e-05, "loss": 0.4794, "step": 24 },
    { "epoch": 0.4444444444444444, "grad_norm": 1.7780894041061401, "learning_rate": 4.464285714285715e-05, "loss": 0.4965, "step": 25 },
    { "epoch": 0.4622222222222222, "grad_norm": 3.661797285079956, "learning_rate": 4.642857142857143e-05, "loss": 0.8358, "step": 26 },
    { "epoch": 0.48, "grad_norm": 1.6696407794952393, "learning_rate": 4.8214285714285716e-05, "loss": 0.4957, "step": 27 },
    { "epoch": 0.49777777777777776, "grad_norm": 1.1838289499282837, "learning_rate": 5e-05, "loss": 0.5393, "step": 28 },
    { "epoch": 0.5155555555555555, "grad_norm": 1.532225489616394, "learning_rate": 5.1785714285714296e-05, "loss": 0.5784, "step": 29 },
    { "epoch": 0.5333333333333333, "grad_norm": 2.2218728065490723, "learning_rate": 5.3571428571428575e-05, "loss": 0.4446, "step": 30 },
    { "epoch": 0.5511111111111111, "grad_norm": 1.4392008781433105, "learning_rate": 5.535714285714286e-05, "loss": 0.3995, "step": 31 },
    { "epoch": 0.5688888888888889, "grad_norm": 0.8953332901000977, "learning_rate": 5.714285714285714e-05, "loss": 0.3399, "step": 32 },
    { "epoch": 0.5866666666666667, "grad_norm": 1.1794021129608154, "learning_rate": 5.8928571428571435e-05, "loss": 0.3983, "step": 33 },
    { "epoch": 0.6044444444444445, "grad_norm": 0.9146949052810669, "learning_rate": 6.0714285714285715e-05, "loss": 0.3831, "step": 34 },
    { "epoch": 0.6222222222222222, "grad_norm": 0.4634794294834137, "learning_rate": 6.25e-05, "loss": 0.3359, "step": 35 },
    { "epoch": 0.64, "grad_norm": 0.5568346977233887, "learning_rate": 6.428571428571429e-05, "loss": 0.3597, "step": 36 },
    { "epoch": 0.6577777777777778, "grad_norm": 0.6084932684898376, "learning_rate": 6.607142857142857e-05, "loss": 0.4061, "step": 37 },
    { "epoch": 0.6755555555555556, "grad_norm": 0.6706041693687439, "learning_rate": 6.785714285714286e-05, "loss": 0.3533, "step": 38 },
    { "epoch": 0.6933333333333334, "grad_norm": 0.9489718079566956, "learning_rate": 6.964285714285715e-05, "loss": 0.3598, "step": 39 },
    { "epoch": 0.7111111111111111, "grad_norm": 1.2667860984802246, "learning_rate": 7.142857142857143e-05, "loss": 0.3796, "step": 40 },
    { "epoch": 0.7288888888888889, "grad_norm": 1.1869858503341675, "learning_rate": 7.321428571428571e-05, "loss": 0.4386, "step": 41 },
    { "epoch": 0.7466666666666667, "grad_norm": 0.5942434668540955, "learning_rate": 7.500000000000001e-05, "loss": 0.3265, "step": 42 },
    { "epoch": 0.7644444444444445, "grad_norm": 0.7414906620979309, "learning_rate": 7.67857142857143e-05, "loss": 0.3344, "step": 43 },
    { "epoch": 0.7822222222222223, "grad_norm": 0.6775667071342468, "learning_rate": 7.857142857142858e-05, "loss": 0.3514, "step": 44 },
    { "epoch": 0.8, "grad_norm": 0.5995815992355347, "learning_rate": 8.035714285714287e-05, "loss": 0.3258, "step": 45 },
    { "epoch": 0.8177777777777778, "grad_norm": 0.42830514907836914, "learning_rate": 8.214285714285714e-05, "loss": 0.3228, "step": 46 },
    { "epoch": 0.8355555555555556, "grad_norm": 0.5796249508857727, "learning_rate": 8.392857142857144e-05, "loss": 0.3752, "step": 47 },
    { "epoch": 0.8533333333333334, "grad_norm": 0.5575869083404541, "learning_rate": 8.571428571428571e-05, "loss": 0.3656, "step": 48 },
    { "epoch": 0.8711111111111111, "grad_norm": 0.3702499270439148, "learning_rate": 8.75e-05, "loss": 0.3254, "step": 49 },
    { "epoch": 0.8888888888888888, "grad_norm": 0.3036387860774994, "learning_rate": 8.92857142857143e-05, "loss": 0.2911, "step": 50 },
    { "epoch": 0.9066666666666666, "grad_norm": 0.6176546216011047, "learning_rate": 9.107142857142857e-05, "loss": 0.3641, "step": 51 },
    { "epoch": 0.9244444444444444, "grad_norm": 0.46132686734199524, "learning_rate": 9.285714285714286e-05, "loss": 0.3164, "step": 52 },
    { "epoch": 0.9422222222222222, "grad_norm": 0.42897599935531616, "learning_rate": 9.464285714285715e-05, "loss": 0.3216, "step": 53 },
    { "epoch": 0.96, "grad_norm": 1.1321483850479126, "learning_rate": 9.642857142857143e-05, "loss": 0.4645, "step": 54 },
    { "epoch": 0.9777777777777777, "grad_norm": 0.345939576625824, "learning_rate": 9.821428571428572e-05, "loss": 0.341, "step": 55 },
    { "epoch": 0.9955555555555555, "grad_norm": 0.2705654203891754, "learning_rate": 0.0001, "loss": 0.3029, "step": 56 },
    { "epoch": 1.0133333333333334, "grad_norm": 0.38632267713546753, "learning_rate": 9.999902864657691e-05, "loss": 0.3005, "step": 57 },
    { "epoch": 1.031111111111111, "grad_norm": 0.3401973247528076, "learning_rate": 9.999611462404875e-05, "loss": 0.3182, "step": 58 },
    { "epoch": 1.048888888888889, "grad_norm": 1.9517568349838257, "learning_rate": 9.999125804563732e-05, "loss": 0.4438, "step": 59 },
    { "epoch": 1.0666666666666667, "grad_norm": 0.3414149582386017, "learning_rate": 9.998445910004082e-05, "loss": 0.3115, "step": 60 },
    { "epoch": 1.0844444444444445, "grad_norm": 0.655548095703125, "learning_rate": 9.997571805142639e-05, "loss": 0.4094, "step": 61 },
    { "epoch": 1.1022222222222222, "grad_norm": 0.49635574221611023, "learning_rate": 9.996503523941994e-05, "loss": 0.3135, "step": 62 },
    { "epoch": 1.12, "grad_norm": 0.34827786684036255, "learning_rate": 9.99524110790929e-05, "loss": 0.2901, "step": 63 },
    { "epoch": 1.1377777777777778, "grad_norm": 0.4313528835773468, "learning_rate": 9.993784606094612e-05, "loss": 0.3178, "step": 64 },
    { "epoch": 1.1555555555555554, "grad_norm": 0.5291451215744019, "learning_rate": 9.992134075089084e-05, "loss": 0.3534, "step": 65 },
    { "epoch": 1.1733333333333333, "grad_norm": 0.4702828526496887, "learning_rate": 9.99028957902266e-05, "loss": 0.3155, "step": 66 },
    { "epoch": 1.1911111111111112, "grad_norm": 0.5662183165550232, "learning_rate": 9.988251189561645e-05, "loss": 0.3555, "step": 67 },
    { "epoch": 1.208888888888889, "grad_norm": 0.3226587772369385, "learning_rate": 9.986018985905901e-05, "loss": 0.33, "step": 68 },
    { "epoch": 1.2266666666666666, "grad_norm": 0.7809450030326843, "learning_rate": 9.983593054785776e-05, "loss": 0.3499, "step": 69 },
    { "epoch": 1.2444444444444445, "grad_norm": 0.6240183115005493, "learning_rate": 9.980973490458728e-05, "loss": 0.353, "step": 70 },
    { "epoch": 1.2622222222222224, "grad_norm": 0.34356218576431274, "learning_rate": 9.978160394705668e-05, "loss": 0.2993, "step": 71 },
    { "epoch": 1.28, "grad_norm": 0.33020302653312683, "learning_rate": 9.975153876827008e-05, "loss": 0.3122, "step": 72 },
    { "epoch": 1.2977777777777777, "grad_norm": 0.4314161241054535, "learning_rate": 9.971954053638399e-05, "loss": 0.3258, "step": 73 },
    { "epoch": 1.3155555555555556, "grad_norm": 0.5286882519721985, "learning_rate": 9.968561049466214e-05, "loss": 0.3835, "step": 74 },
    { "epoch": 1.3333333333333333, "grad_norm": 0.2787860929965973, "learning_rate": 9.964974996142698e-05, "loss": 0.2817, "step": 75 },
    { "epoch": 1.3511111111111112, "grad_norm": 0.29439663887023926, "learning_rate": 9.961196033000861e-05, "loss": 0.2828, "step": 76 },
    { "epoch": 1.3688888888888888, "grad_norm": 0.27987906336784363, "learning_rate": 9.957224306869053e-05, "loss": 0.3253, "step": 77 },
    { "epoch": 1.3866666666666667, "grad_norm": 0.3439163863658905, "learning_rate": 9.953059972065265e-05, "loss": 0.3215, "step": 78 },
    { "epoch": 1.4044444444444444, "grad_norm": 0.32061126828193665, "learning_rate": 9.948703190391131e-05, "loss": 0.323, "step": 79 },
    { "epoch": 1.4222222222222223, "grad_norm": 0.4793407917022705, "learning_rate": 9.944154131125642e-05, "loss": 0.2962, "step": 80 },
    { "epoch": 1.44, "grad_norm": 0.2514803111553192, "learning_rate": 9.939412971018574e-05, "loss": 0.314, "step": 81 },
    { "epoch": 1.4577777777777778, "grad_norm": 0.4879082143306732, "learning_rate": 9.934479894283606e-05, "loss": 0.2828, "step": 82 },
    { "epoch": 1.4755555555555555, "grad_norm": 0.41817083954811096, "learning_rate": 9.92935509259118e-05, "loss": 0.3124, "step": 83 },
    { "epoch": 1.4933333333333334, "grad_norm": 0.3143192529678345, "learning_rate": 9.924038765061042e-05, "loss": 0.2861, "step": 84 },
    { "epoch": 1.511111111111111, "grad_norm": 0.33786436915397644, "learning_rate": 9.918531118254507e-05, "loss": 0.286, "step": 85 },
    { "epoch": 1.528888888888889, "grad_norm": 0.3116290867328644, "learning_rate": 9.912832366166442e-05, "loss": 0.307, "step": 86 },
    { "epoch": 1.5466666666666666, "grad_norm": 0.5235384106636047, "learning_rate": 9.906942730216939e-05, "loss": 0.3053, "step": 87 },
    { "epoch": 1.5644444444444443, "grad_norm": 0.3879358172416687, "learning_rate": 9.900862439242719e-05, "loss": 0.3108, "step": 88 },
    { "epoch": 1.5822222222222222, "grad_norm": 0.4028690755367279, "learning_rate": 9.894591729488242e-05, "loss": 0.3178, "step": 89 },
    { "epoch": 1.6, "grad_norm": 0.3827480375766754, "learning_rate": 9.888130844596524e-05, "loss": 0.303, "step": 90 },
    { "epoch": 1.6177777777777778, "grad_norm": 0.48203060030937195, "learning_rate": 9.881480035599667e-05, "loss": 0.3503, "step": 91 },
    { "epoch": 1.6355555555555554, "grad_norm": 0.30467236042022705, "learning_rate": 9.874639560909117e-05, "loss": 0.2876, "step": 92 },
    { "epoch": 1.6533333333333333, "grad_norm": 0.4495410621166229, "learning_rate": 9.867609686305617e-05, "loss": 0.2937, "step": 93 },
    { "epoch": 1.6711111111111112, "grad_norm": 0.29854950308799744, "learning_rate": 9.860390684928873e-05, "loss": 0.3155, "step": 94 },
    { "epoch": 1.6888888888888889, "grad_norm": 0.4602304995059967, "learning_rate": 9.852982837266955e-05, "loss": 0.276, "step": 95 },
    { "epoch": 1.7066666666666666, "grad_norm": 0.34135401248931885, "learning_rate": 9.84538643114539e-05, "loss": 0.2831, "step": 96 },
    { "epoch": 1.7244444444444444, "grad_norm": 0.26776087284088135, "learning_rate": 9.837601761715983e-05, "loss": 0.2964, "step": 97 },
    { "epoch": 1.7422222222222223, "grad_norm": 0.3476792573928833, "learning_rate": 9.829629131445342e-05, "loss": 0.2814, "step": 98 },
    { "epoch": 1.76, "grad_norm": 0.2479284703731537, "learning_rate": 9.82146885010314e-05, "loss": 0.2806, "step": 99 },
    { "epoch": 1.7777777777777777, "grad_norm": 0.32680875062942505, "learning_rate": 9.81312123475006e-05, "loss": 0.3065, "step": 100 },
    { "epoch": 1.7955555555555556, "grad_norm": 0.28494346141815186, "learning_rate": 9.804586609725499e-05, "loss": 0.311, "step": 101 },
    { "epoch": 1.8133333333333335, "grad_norm": 0.444627583026886, "learning_rate": 9.79586530663494e-05, "loss": 0.3003, "step": 102 },
    { "epoch": 1.8311111111111111, "grad_norm": 0.35969412326812744, "learning_rate": 9.78695766433709e-05, "loss": 0.2727, "step": 103 },
    { "epoch": 1.8488888888888888, "grad_norm": 0.378616064786911, "learning_rate": 9.777864028930705e-05, "loss": 0.2834, "step": 104 },
    { "epoch": 1.8666666666666667, "grad_norm": 0.2953629493713379, "learning_rate": 9.768584753741134e-05, "loss": 0.2806, "step": 105 },
    { "epoch": 1.8844444444444446, "grad_norm": 0.44163236021995544, "learning_rate": 9.759120199306613e-05, "loss": 0.2951, "step": 106 },
    { "epoch": 1.9022222222222223, "grad_norm": 0.26908189058303833, "learning_rate": 9.74947073336423e-05, "loss": 0.2975, "step": 107 },
    { "epoch": 1.92, "grad_norm": 0.2417159080505371, "learning_rate": 9.73963673083566e-05, "loss": 0.2677, "step": 108 },
    { "epoch": 1.9377777777777778, "grad_norm": 0.3327369689941406, "learning_rate": 9.72961857381258e-05, "loss": 0.3219, "step": 109 },
    { "epoch": 1.9555555555555557, "grad_norm": 0.27677884697914124, "learning_rate": 9.719416651541839e-05, "loss": 0.2631, "step": 110 },
    { "epoch": 1.9733333333333334, "grad_norm": 0.41239938139915466, "learning_rate": 9.709031360410318e-05, "loss": 0.2788, "step": 111 },
    { "epoch": 1.991111111111111, "grad_norm": 0.28369349241256714, "learning_rate": 9.698463103929542e-05, "loss": 0.2723, "step": 112 },
    { "epoch": 2.008888888888889, "grad_norm": 0.3098125457763672, "learning_rate": 9.687712292719997e-05, "loss": 0.3279, "step": 113 },
    { "epoch": 2.026666666666667, "grad_norm": 0.35817790031433105, "learning_rate": 9.67677934449517e-05, "loss": 0.2708, "step": 114 },
    { "epoch": 2.0444444444444443, "grad_norm": 0.32206666469573975, "learning_rate": 9.665664684045333e-05, "loss": 0.2801, "step": 115 },
    { "epoch": 2.062222222222222, "grad_norm": 0.2292087823152542, "learning_rate": 9.654368743221022e-05, "loss": 0.218, "step": 116 },
    { "epoch": 2.08, "grad_norm": 0.30288153886795044, "learning_rate": 9.642891960916268e-05, "loss": 0.2865, "step": 117 },
    { "epoch": 2.097777777777778, "grad_norm": 0.33361321687698364, "learning_rate": 9.631234783051544e-05, "loss": 0.2866, "step": 118 },
    { "epoch": 2.1155555555555554, "grad_norm": 0.2744784951210022, "learning_rate": 9.619397662556435e-05, "loss": 0.2776, "step": 119 },
    { "epoch": 2.1333333333333333, "grad_norm": 0.29272395372390747, "learning_rate": 9.607381059352038e-05, "loss": 0.2655, "step": 120 },
    { "epoch": 2.151111111111111, "grad_norm": 1.0778076648712158, "learning_rate": 9.595185440333103e-05, "loss": 0.3282, "step": 121 },
    { "epoch": 2.168888888888889, "grad_norm": 0.48110413551330566, "learning_rate": 9.582811279349882e-05, "loss": 0.3087, "step": 122 },
    { "epoch": 2.1866666666666665, "grad_norm": 0.28637051582336426, "learning_rate": 9.570259057189717e-05, "loss": 0.2924, "step": 123 },
    { "epoch": 2.2044444444444444, "grad_norm": 0.27911806106567383, "learning_rate": 9.557529261558367e-05, "loss": 0.2613, "step": 124 },
    { "epoch": 2.2222222222222223, "grad_norm": 0.34397801756858826, "learning_rate": 9.544622387061055e-05, "loss": 0.2574, "step": 125 },
    { "epoch": 2.24, "grad_norm": 0.2947402000427246, "learning_rate": 9.53153893518325e-05, "loss": 0.2883, "step": 126 },
    { "epoch": 2.2577777777777777, "grad_norm": 0.39530470967292786, "learning_rate": 9.518279414271183e-05, "loss": 0.3131, "step": 127 },
    { "epoch": 2.2755555555555556, "grad_norm": 0.24501116573810577, "learning_rate": 9.504844339512095e-05, "loss": 0.3184, "step": 128 },
    { "epoch": 2.2933333333333334, "grad_norm": 0.25899410247802734, "learning_rate": 9.491234232914221e-05, "loss": 0.2794, "step": 129 },
    { "epoch": 2.311111111111111, "grad_norm": 0.41601303219795227, "learning_rate": 9.477449623286505e-05, "loss": 0.2884, "step": 130 },
    { "epoch": 2.328888888888889, "grad_norm": 0.3597092628479004, "learning_rate": 9.463491046218058e-05, "loss": 0.2655, "step": 131 },
    { "epoch": 2.3466666666666667, "grad_norm": 0.361904501914978, "learning_rate": 9.449359044057345e-05, "loss": 0.2611, "step": 132 },
    { "epoch": 2.3644444444444446, "grad_norm": 0.4677833020687103, "learning_rate": 9.435054165891109e-05, "loss": 0.2589, "step": 133 },
    { "epoch": 2.3822222222222225, "grad_norm": 0.5052748918533325, "learning_rate": 9.420576967523049e-05, "loss": 0.3022, "step": 134 },
    { "epoch": 2.4, "grad_norm": 0.2593027651309967, "learning_rate": 9.405928011452211e-05, "loss": 0.2498, "step": 135 },
    { "epoch": 2.417777777777778, "grad_norm": 0.2840777337551117, "learning_rate": 9.391107866851143e-05, "loss": 0.2636, "step": 136 },
    { "epoch": 2.4355555555555557, "grad_norm": 0.3062927722930908, "learning_rate": 9.376117109543769e-05, "loss": 0.25, "step": 137 },
    { "epoch": 2.453333333333333, "grad_norm": 0.31112948060035706, "learning_rate": 9.360956321983028e-05, "loss": 0.2435, "step": 138 },
    { "epoch": 2.471111111111111, "grad_norm": 0.4025621712207794, "learning_rate": 9.345626093228233e-05, "loss": 0.2427, "step": 139 },
    { "epoch": 2.488888888888889, "grad_norm": 0.39693212509155273, "learning_rate": 9.330127018922194e-05, "loss": 0.2897, "step": 140 },
    { "epoch": 2.506666666666667, "grad_norm": 0.30706965923309326, "learning_rate": 9.314459701268065e-05, "loss": 0.2833, "step": 141 },
    { "epoch": 2.5244444444444447, "grad_norm": 0.45809707045555115, "learning_rate": 9.298624749005951e-05, "loss": 0.2698, "step": 142 },
    { "epoch": 2.542222222222222, "grad_norm": 0.295108824968338, "learning_rate": 9.282622777389258e-05, "loss": 0.2372, "step": 143 },
    { "epoch": 2.56, "grad_norm": 0.35757213830947876, "learning_rate": 9.266454408160779e-05, "loss": 0.2756, "step": 144 },
    { "epoch": 2.5777777777777775, "grad_norm": 0.2958184778690338, "learning_rate": 9.250120269528546e-05, "loss": 0.2661, "step": 145 },
    { "epoch": 2.5955555555555554, "grad_norm": 0.36897847056388855, "learning_rate": 9.233620996141421e-05, "loss": 0.2945, "step": 146 },
    { "epoch": 2.6133333333333333, "grad_norm": 0.2986817955970764, "learning_rate": 9.21695722906443e-05, "loss": 0.2391, "step": 147 },
    { "epoch": 2.631111111111111, "grad_norm": 0.28577160835266113, "learning_rate": 9.200129615753859e-05, "loss": 0.2664, "step": 148 },
    { "epoch": 2.648888888888889, "grad_norm": 0.363800585269928, "learning_rate": 9.183138810032099e-05, "loss": 0.2371, "step": 149 },
    { "epoch": 2.6666666666666665, "grad_norm": 0.34674468636512756, "learning_rate": 9.165985472062246e-05, "loss": 0.2883, "step": 150 },
    { "epoch": 2.6844444444444444, "grad_norm": 0.35801827907562256, "learning_rate": 9.148670268322438e-05, "loss": 0.2572, "step": 151 },
    { "epoch": 2.7022222222222223, "grad_norm": 0.24792705476284027, "learning_rate": 9.131193871579975e-05, "loss": 0.2423, "step": 152 },
    { "epoch": 2.7199999999999998, "grad_norm": 0.31966882944107056, "learning_rate": 9.113556960865167e-05, "loss": 0.2453, "step": 153 },
    { "epoch": 2.7377777777777776, "grad_norm": 0.27944958209991455, "learning_rate": 9.09576022144496e-05, "loss": 0.2297, "step": 154 },
    { "epoch": 2.7555555555555555, "grad_norm": 0.34474286437034607, "learning_rate": 9.077804344796302e-05, "loss": 0.2865, "step": 155 },
    { "epoch": 2.7733333333333334, "grad_norm": 0.36895522475242615, "learning_rate": 9.059690028579284e-05, "loss": 0.2567, "step": 156 },
    { "epoch": 2.7911111111111113, "grad_norm": 0.26620030403137207, "learning_rate": 9.041417976610027e-05, "loss": 0.2537, "step": 157 },
    { "epoch": 2.8088888888888888, "grad_norm": 0.24772542715072632, "learning_rate": 9.022988898833342e-05, "loss": 0.2212, "step": 158 },
    { "epoch": 2.8266666666666667, "grad_norm": 0.3265734016895294, "learning_rate": 9.004403511295141e-05, "loss": 0.2514, "step": 159 },
    { "epoch": 2.8444444444444446, "grad_norm": 0.44243156909942627, "learning_rate": 8.985662536114613e-05, "loss": 0.2857, "step": 160 },
    { "epoch": 2.862222222222222, "grad_norm": 0.27827122807502747, "learning_rate": 8.966766701456177e-05, "loss": 0.2507, "step": 161 },
    { "epoch": 2.88, "grad_norm": 0.47080013155937195, "learning_rate": 8.947716741501177e-05, "loss": 0.2875, "step": 162 },
    { "epoch": 2.897777777777778, "grad_norm": 0.37070050835609436, "learning_rate": 8.928513396419368e-05, "loss": 0.2601, "step": 163 },
    { "epoch": 2.9155555555555557, "grad_norm": 0.3937135338783264, "learning_rate": 8.90915741234015e-05, "loss": 0.2624, "step": 164 },
    { "epoch": 2.9333333333333336, "grad_norm": 0.3232908844947815, "learning_rate": 8.889649541323574e-05, "loss": 0.2578, "step": 165 },
    { "epoch": 2.951111111111111, "grad_norm": 0.29915979504585266, "learning_rate": 8.869990541331138e-05, "loss": 0.2354, "step": 166 },
    { "epoch": 2.968888888888889, "grad_norm": 0.4688418209552765, "learning_rate": 8.850181176196315e-05, "loss": 0.3058, "step": 167 },
    { "epoch": 2.986666666666667, "grad_norm": 0.21794776618480682, "learning_rate": 8.83022221559489e-05, "loss": 0.2424, "step": 168 },
    { "epoch": 3.0044444444444443, "grad_norm": 0.20859044790267944, "learning_rate": 8.810114435015054e-05, "loss": 0.2527, "step": 169 },
    { "epoch": 3.022222222222222, "grad_norm": 0.329470157623291, "learning_rate": 8.789858615727265e-05, "loss": 0.2644, "step": 170 },
    { "epoch": 3.04, "grad_norm": 0.2836751341819763, "learning_rate": 8.7694555447539e-05, "loss": 0.2419, "step": 171 },
    { "epoch": 3.057777777777778, "grad_norm": 0.2949230968952179, "learning_rate": 8.748906014838672e-05, "loss": 0.2201, "step": 172 },
    { "epoch": 3.0755555555555554, "grad_norm": 0.4008142054080963, "learning_rate": 8.728210824415827e-05, "loss": 0.2479, "step": 173 },
    { "epoch": 3.0933333333333333, "grad_norm": 0.25974419713020325, "learning_rate": 8.707370777579133e-05, "loss": 0.2311, "step": 174 },
    { "epoch": 3.111111111111111, "grad_norm": 0.2546006143093109, "learning_rate": 8.68638668405062e-05, "loss": 0.2239, "step": 175 },
    { "epoch": 3.128888888888889, "grad_norm": 0.5272888541221619, "learning_rate": 8.665259359149132e-05, "loss": 0.2798, "step": 176 },
    { "epoch": 3.1466666666666665, "grad_norm": 0.4749833047389984, "learning_rate": 8.643989623758643e-05, "loss": 0.2376, "step": 177 },
    { "epoch": 3.1644444444444444, "grad_norm": 0.3421812653541565, "learning_rate": 8.622578304296364e-05, "loss": 0.2672, "step": 178 },
    { "epoch": 3.1822222222222223, "grad_norm": 0.3668532073497772, "learning_rate": 8.601026232680634e-05, "loss": 0.2463, "step": 179 },
    { "epoch": 3.2, "grad_norm": 0.2934477925300598, "learning_rate": 8.579334246298593e-05, "loss": 0.2367, "step": 180 },
    { "epoch": 3.2177777777777776, "grad_norm": 0.26318293809890747, "learning_rate": 8.557503187973651e-05, "loss": 0.2362, "step": 181 },
    { "epoch": 3.2355555555555555, "grad_norm": 0.3571169972419739, "learning_rate": 8.535533905932738e-05, "loss": 0.2331, "step": 182 },
    { "epoch": 3.2533333333333334, "grad_norm": 0.39406391978263855, "learning_rate": 8.513427253773346e-05, "loss": 0.2775, "step": 183 },
    { "epoch": 3.2711111111111113, "grad_norm": 0.39618298411369324, "learning_rate": 8.491184090430364e-05, "loss": 0.2239, "step": 184 },
    { "epoch": 3.2888888888888888, "grad_norm": 0.30980485677719116, "learning_rate": 8.468805280142709e-05, "loss": 0.2395, "step": 185 },
    { "epoch": 3.3066666666666666, "grad_norm": 0.3459194004535675, "learning_rate": 8.446291692419736e-05, "loss": 0.2222, "step": 186 },
    { "epoch": 3.3244444444444445, "grad_norm": 0.2490205019712448, "learning_rate": 8.423644202007467e-05, "loss": 0.2151, "step": 187 },
    { "epoch": 3.3422222222222224, "grad_norm": 0.4106178879737854, "learning_rate": 8.400863688854597e-05, "loss": 0.2501, "step": 188 },
    { "epoch": 3.36, "grad_norm": 0.5236695408821106, "learning_rate": 8.377951038078302e-05, "loss": 0.2801, "step": 189 },
    { "epoch": 3.3777777777777778, "grad_norm": 0.39424973726272583, "learning_rate": 8.354907139929851e-05, "loss": 0.2216, "step": 190 },
    { "epoch": 3.3955555555555557, "grad_norm": 0.38971391320228577, "learning_rate": 8.33173288976002e-05, "loss": 0.228, "step": 191 },
    { "epoch": 3.413333333333333, "grad_norm": 0.48786965012550354, "learning_rate": 8.308429187984297e-05, "loss": 0.2521, "step": 192 },
    { "epoch": 3.431111111111111, "grad_norm": 0.37168943881988525, "learning_rate": 8.284996940047903e-05, "loss": 0.2397, "step": 193 },
    { "epoch": 3.448888888888889, "grad_norm": 0.36831504106521606, "learning_rate": 8.261437056390606e-05, "loss": 0.2204, "step": 194 },
    { "epoch": 3.466666666666667, "grad_norm": 0.32543519139289856, "learning_rate": 8.237750452411353e-05, "loss": 0.2291, "step": 195 },
    { "epoch": 3.4844444444444447, "grad_norm": 0.3227090835571289, "learning_rate": 8.213938048432697e-05, "loss": 0.2218, "step": 196 },
    { "epoch": 3.502222222222222, "grad_norm": 0.623389720916748, "learning_rate": 8.190000769665044e-05, "loss": 0.249, "step": 197 },
    { "epoch": 3.52, "grad_norm": 0.35425952076911926, "learning_rate": 8.1659395461707e-05, "loss": 0.2749, "step": 198 },
    { "epoch": 3.537777777777778, "grad_norm": 0.33658289909362793, "learning_rate": 8.141755312827736e-05, "loss": 0.2134, "step": 199 },
    { "epoch": 3.5555555555555554, "grad_norm": 0.34356260299682617, "learning_rate": 8.117449009293668e-05, "loss": 0.2216, "step": 200 },
    { "epoch": 3.5733333333333333, "grad_norm": 0.32880356907844543, "learning_rate": 8.093021579968941e-05, "loss": 0.2369, "step": 201 },
    { "epoch": 3.591111111111111, "grad_norm": 0.47223803400993347, "learning_rate": 8.068473973960238e-05, "loss": 0.2433, "step": 202 },
    { "epoch": 3.608888888888889, "grad_norm": 0.37927350401878357, "learning_rate": 8.043807145043604e-05, "loss": 0.2405, "step": 203 },
    { "epoch": 3.626666666666667, "grad_norm": 0.28793418407440186, "learning_rate": 8.019022051627388e-05, "loss": 0.2345, "step": 204 },
    { "epoch": 3.6444444444444444, "grad_norm": 0.3489110767841339, "learning_rate": 7.994119656715002e-05, "loss": 0.2574, "step": 205 },
    { "epoch": 3.6622222222222223, "grad_norm": 0.3693983554840088, "learning_rate": 7.969100927867507e-05, "loss": 0.2445, "step": 206 },
    { "epoch": 3.68, "grad_norm": 0.3646757900714874, "learning_rate": 7.943966837166023e-05, "loss": 0.2099, "step": 207 },
    { "epoch": 3.6977777777777776, "grad_norm": 0.31038224697113037, "learning_rate": 7.91871836117395e-05, "loss": 0.2051, "step": 208 },
    { "epoch": 3.7155555555555555, "grad_norm": 0.2764589786529541, "learning_rate": 7.89335648089903e-05, "loss": 0.2132, "step": 209 },
    { "epoch": 3.7333333333333334, "grad_norm": 0.3864257037639618, "learning_rate": 7.86788218175523e-05, "loss": 0.2284, "step": 210 },
    { "epoch": 3.7511111111111113, "grad_norm": 0.8009142279624939, "learning_rate": 7.842296453524463e-05, "loss": 0.3221, "step": 211 },
    { "epoch": 3.7688888888888887, "grad_norm": 0.3626723885536194, "learning_rate": 7.81660029031811e-05, "loss": 0.2558, "step": 212 },
    { "epoch": 3.7866666666666666, "grad_norm": 0.4381621778011322, "learning_rate": 7.79079469053842e-05, "loss": 0.228, "step": 213 },
    { "epoch": 3.8044444444444445, "grad_norm": 0.38796478509902954, "learning_rate": 7.764880656839696e-05, "loss": 0.2025, "step": 214 },
    { "epoch": 3.822222222222222, "grad_norm": 0.27901729941368103, "learning_rate": 7.738859196089358e-05, "loss": 0.2142, "step": 215 },
    { "epoch": 3.84, "grad_norm": 0.4677048623561859, "learning_rate": 7.712731319328798e-05, "loss": 0.2614, "step": 216 },
    { "epoch": 3.8577777777777778, "grad_norm": 0.2969120740890503, "learning_rate": 7.68649804173412e-05, "loss": 0.24, "step": 217 },
    { "epoch": 3.8755555555555556, "grad_norm": 0.42577284574508667, "learning_rate": 7.660160382576683e-05, "loss": 0.2054, "step": 218 },
    { "epoch": 3.8933333333333335, "grad_norm": 0.40695130825042725, "learning_rate": 7.633719365183504e-05, "loss": 0.2464, "step": 219 },
    { "epoch": 3.911111111111111, "grad_norm": 0.5424155592918396, "learning_rate": 7.60717601689749e-05, "loss": 0.2583, "step": 220 },
    { "epoch": 3.928888888888889, "grad_norm": 0.683966338634491, "learning_rate": 7.580531369037533e-05, "loss": 0.2387, "step": 221 },
    { "epoch": 3.9466666666666668, "grad_norm": 0.37036415934562683, "learning_rate": 7.553786456858429e-05, "loss": 0.2333, "step": 222 },
    { "epoch": 3.964444444444444, "grad_norm": 0.5287876129150391, "learning_rate": 7.526942319510655e-05, "loss": 0.2353, "step": 223 },
    { "epoch": 3.982222222222222, "grad_norm": 0.31925442814826965, "learning_rate": 7.500000000000001e-05, "loss": 0.2411, "step": 224 },
    { "epoch": 4.0, "grad_norm": 0.5316102504730225, "learning_rate": 7.472960545147038e-05, "loss": 0.2472, "step": 225 },
    { "epoch": 4.017777777777778, "grad_norm": 0.24691040813922882, "learning_rate": 7.445825005546448e-05, "loss": 0.217, "step": 226 },
    { "epoch": 4.035555555555556, "grad_norm": 0.3662887215614319, "learning_rate": 7.4185944355262e-05, "loss": 0.2026, "step": 227 },
    { "epoch": 4.053333333333334, "grad_norm": 0.36757993698120117, "learning_rate": 7.391269893106592e-05, "loss": 0.2364, "step": 228 },
    { "epoch": 4.071111111111111, "grad_norm": 0.29429471492767334, "learning_rate": 7.363852439959135e-05, "loss": 0.1774, "step": 229 },
    { "epoch": 4.088888888888889, "grad_norm": 0.48565641045570374, "learning_rate": 7.33634314136531e-05, "loss": 0.2156, "step": 230 },
    { "epoch": 4.1066666666666665, "grad_norm": 0.3768898844718933, "learning_rate": 7.308743066175172e-05, "loss": 0.2006, "step": 231 },
    { "epoch": 4.124444444444444, "grad_norm": 0.32797563076019287, "learning_rate": 7.281053286765815e-05, "loss": 0.2195, "step": 232 },
    { "epoch": 4.142222222222222, "grad_norm": 0.3106316030025482, "learning_rate": 7.253274878999727e-05, "loss": 0.2268, "step": 233 },
    { "epoch": 4.16, "grad_norm": 0.43699216842651367, "learning_rate": 7.225408922182961e-05, "loss": 0.2186, "step": 234 },
    { "epoch": 4.177777777777778, "grad_norm": 0.569455623626709, "learning_rate": 7.197456499023225e-05, "loss": 0.2039, "step": 235 },
    { "epoch": 4.195555555555556, "grad_norm": 0.49764353036880493, "learning_rate": 7.169418695587791e-05, "loss": 0.2107, "step": 236 },
    { "epoch": 4.213333333333333, "grad_norm": 0.5107737183570862, "learning_rate": 7.141296601261314e-05, "loss": 0.2387, "step": 237 },
    { "epoch": 4.231111111111111, "grad_norm": 0.531856119632721, "learning_rate": 7.113091308703498e-05, "loss": 0.1914, "step": 238 },
    { "epoch": 4.248888888888889, "grad_norm": 0.3557555377483368, "learning_rate": 7.084803913806641e-05, "loss": 0.2192, "step": 239 },
    { "epoch": 4.266666666666667, "grad_norm": 0.520222544670105, "learning_rate": 7.056435515653059e-05, "loss": 0.2406, "step": 240 },
    { "epoch": 4.2844444444444445, "grad_norm": 0.5725359916687012, "learning_rate": 7.027987216472377e-05, "loss": 0.2102, "step": 241 },
    { "epoch": 4.302222222222222, "grad_norm": 0.39855217933654785, "learning_rate": 6.999460121598704e-05, "loss": 0.1943, "step": 242 },
    { "epoch": 4.32, "grad_norm": 0.47654294967651367, "learning_rate": 6.970855339427698e-05, "loss": 0.1955, "step": 243 },
    { "epoch": 4.337777777777778, "grad_norm": 0.6176052093505859, "learning_rate": 6.942173981373474e-05, "loss": 0.2316, "step": 244 },
    { "epoch": 4.355555555555555, "grad_norm": 0.446646511554718, "learning_rate": 6.91341716182545e-05, "loss": 0.2038, "step": 245 },
    { "epoch": 4.373333333333333, "grad_norm": 0.4111061692237854, "learning_rate": 6.884585998105026e-05, "loss": 0.2137, "step": 246 },
    { "epoch": 4.391111111111111, "grad_norm": 0.5625485777854919, "learning_rate": 6.855681610422189e-05, "loss": 0.2059, "step": 247 },
    { "epoch": 4.408888888888889, "grad_norm": 0.30589085817337036, "learning_rate": 6.826705121831976e-05, "loss": 0.2213, "step": 248 },
    { "epoch": 4.426666666666667, "grad_norm": 0.766379177570343, "learning_rate": 6.797657658190839e-05, "loss": 0.2061, "step": 249 },
    { "epoch": 4.444444444444445, "grad_norm": 0.3027375340461731, "learning_rate": 6.768540348112907e-05, "loss": 0.2185, "step": 250 },
    { "epoch": 4.4622222222222225, "grad_norm": 0.49446240067481995, "learning_rate": 6.739354322926136e-05, "loss": 0.1946, "step": 251 },
    { "epoch": 4.48, "grad_norm": 0.5550069212913513, "learning_rate": 6.710100716628344e-05, "loss": 0.2716, "step": 252 },
    { "epoch": 4.497777777777777, "grad_norm": 0.4387371838092804, "learning_rate": 6.680780665843155e-05, "loss": 0.1893, "step": 253 },
    { "epoch": 4.515555555555555, "grad_norm": 0.3026201128959656, "learning_rate": 6.651395309775837e-05, "loss": 0.216, "step": 254 },
    { "epoch": 4.533333333333333, "grad_norm": 0.4486079216003418, "learning_rate": 6.621945790169036e-05, "loss": 0.1849, "step": 255 },
    { "epoch": 4.551111111111111, "grad_norm": 0.4118870496749878, "learning_rate": 6.592433251258423e-05, "loss": 0.1977, "step": 256 },
    { "epoch": 4.568888888888889, "grad_norm": 0.5137110352516174, "learning_rate": 6.562858839728223e-05, "loss": 0.2167, "step": 257 },
    { "epoch": 4.586666666666667, "grad_norm": 0.34046244621276855, "learning_rate": 6.533223704666672e-05, "loss": 0.2128, "step": 258 },
    { "epoch": 4.604444444444445, "grad_norm": 0.44516146183013916, "learning_rate": 6.503528997521366e-05, "loss": 0.1932, "step": 259 },
    { "epoch": 4.622222222222222, "grad_norm": 0.2885585427284241, "learning_rate": 6.473775872054521e-05, "loss": 0.1991, "step": 260 },
    { "epoch": 4.64, "grad_norm": 0.37257564067840576, "learning_rate": 6.44396548429815e-05, "loss": 0.179, "step": 261 },
    { "epoch": 4.657777777777778, "grad_norm": 0.43277639150619507, "learning_rate": 6.414098992509138e-05, "loss": 0.2154, "step": 262 },
    { "epoch": 4.6755555555555555, "grad_norm": 0.4736541509628296, "learning_rate": 6.384177557124247e-05, "loss": 0.2024, "step": 263 },
    { "epoch": 4.693333333333333, "grad_norm": 0.3058260381221771, "learning_rate": 6.354202340715026e-05, "loss": 0.2264, "step": 264 },
    { "epoch": 4.711111111111111, "grad_norm": 0.33973225951194763, "learning_rate": 6.324174507942637e-05, "loss": 0.2175, "step": 265 },
    { "epoch": 4.728888888888889, "grad_norm": 0.370286762714386, "learning_rate": 6.294095225512603e-05, "loss": 0.1969, "step": 266 },
    { "epoch": 4.746666666666667, "grad_norm": 0.5065796375274658, "learning_rate": 6.263965662129487e-05, "loss": 0.2322, "step": 267 },
    { "epoch": 4.764444444444445, "grad_norm": 0.2974378764629364, "learning_rate": 6.233786988451468e-05, "loss": 0.1617, "step": 268 },
    { "epoch": 4.782222222222222, "grad_norm": 0.37753137946128845, "learning_rate": 6.203560377044866e-05, "loss": 0.2324, "step": 269 },
    { "epoch": 4.8, "grad_norm": 0.5713666677474976, "learning_rate": 6.173287002338577e-05, "loss": 0.2213, "step": 270 },
    { "epoch": 4.817777777777778, "grad_norm": 0.5500757694244385, "learning_rate": 6.142968040578449e-05, "loss": 0.2163, "step": 271 },
    { "epoch": 4.835555555555556, "grad_norm": 0.3870888948440552, "learning_rate": 6.112604669781572e-05, "loss": 0.2014, "step": 272 },
    { "epoch": 4.8533333333333335, "grad_norm": 0.4181479811668396, "learning_rate": 6.0821980696905146e-05, "loss": 0.2123, "step": 273 },
    { "epoch": 4.871111111111111, "grad_norm": 0.3824799656867981, "learning_rate": 6.0517494217274794e-05, "loss": 0.2008, "step": 274 },
    { "epoch": 4.888888888888889, "grad_norm": 0.3899323642253876, "learning_rate": 6.021259908948402e-05, "loss": 0.2011, "step": 275 },
    { "epoch": 4.906666666666666, "grad_norm": 0.4690476059913635, "learning_rate": 5.9907307159969884e-05, "loss": 0.2215, "step": 276 },
    { "epoch": 4.924444444444444, "grad_norm": 0.3852761387825012, "learning_rate": 5.960163029058682e-05, "loss": 0.1943, "step": 277 },
    { "epoch": 4.942222222222222, "grad_norm": 0.40581774711608887, "learning_rate": 5.9295580358145744e-05, "loss": 0.2132, "step": 278 },
    { "epoch": 4.96, "grad_norm": 0.7496148943901062, "learning_rate": 5.898916925395264e-05, "loss": 0.2227, "step": 279 },
    { "epoch": 4.977777777777778, "grad_norm": 0.3892797529697418, "learning_rate": 5.868240888334653e-05, "loss": 0.2165, "step": 280 },
    { "epoch": 4.995555555555556, "grad_norm": 0.284811794757843, "learning_rate": 5.837531116523682e-05, "loss": 0.1669, "step": 281 },
    { "epoch": 5.013333333333334, "grad_norm": 0.416891872882843, "learning_rate": 5.806788803164034e-05, "loss": 0.1828, "step": 282 },
    { "epoch": 5.0311111111111115, "grad_norm": 0.4431915283203125, "learning_rate": 5.7760151427217576e-05, "loss": 0.1957, "step": 283 },
    { "epoch": 5.0488888888888885, "grad_norm": 0.5158615708351135, "learning_rate": 5.745211330880872e-05, "loss": 0.2331, "step": 284 },
    { "epoch": 5.066666666666666, "grad_norm": 0.3418184220790863, "learning_rate": 5.714378564496901e-05, "loss": 0.2065, "step": 285 },
    { "epoch": 5.084444444444444, "grad_norm": 0.5028918981552124, "learning_rate": 5.683518041550368e-05, "loss": 0.1657, "step": 286 },
    { "epoch": 5.102222222222222, "grad_norm": 0.4428432285785675, "learning_rate": 5.6526309611002594e-05, "loss": 0.1948, "step": 287 },
    { "epoch": 5.12, "grad_norm": 0.3536894917488098, "learning_rate": 5.621718523237427e-05, "loss": 0.1709, "step": 288 },
    { "epoch": 5.137777777777778, "grad_norm": 0.4637707769870758, "learning_rate": 5.590781929037965e-05, "loss": 0.1662, "step": 289 },
    { "epoch": 5.155555555555556, "grad_norm": 0.5988882780075073, "learning_rate": 5.559822380516539e-05, "loss": 0.1477, "step": 290 },
    { "epoch": 5.173333333333334, "grad_norm": 0.45593711733818054, "learning_rate": 5.5288410805796895e-05, "loss": 0.1802, "step": 291 },
    { "epoch": 5.191111111111111, "grad_norm": 0.46239393949508667, "learning_rate": 5.497839232979084e-05, "loss": 0.178, "step": 292 },
    { "epoch": 5.208888888888889, "grad_norm": 0.5151268839836121, "learning_rate": 5.466818042264753e-05, "loss": 0.1453, "step": 293 },
    { "epoch": 5.226666666666667, "grad_norm": 0.5723825097084045, "learning_rate": 5.435778713738292e-05, "loss": 0.212, "step": 294 },
    { "epoch": 5.2444444444444445, "grad_norm": 0.5398524403572083, "learning_rate": 5.404722453406017e-05, "loss": 0.1773, "step": 295 },
    { "epoch": 5.262222222222222, "grad_norm": 0.5362580418586731, "learning_rate": 5.373650467932122e-05, "loss": 0.1816, "step": 296 },
    { "epoch": 5.28, "grad_norm": 0.5614521503448486, "learning_rate": 5.3425639645917834e-05, "loss": 0.1544, "step": 297 },
    { "epoch": 5.297777777777778, "grad_norm": 0.44896313548088074, "learning_rate": 5.311464151224261e-05, "loss": 0.1381, "step": 298 },
    { "epoch": 5.315555555555555, "grad_norm": 0.35840409994125366, "learning_rate": 5.2803522361859594e-05, "loss": 0.1653, "step": 299 },
    { "epoch": 5.333333333333333, "grad_norm": 0.6827304363250732, "learning_rate": 5.249229428303486e-05, "loss": 0.167, "step": 300 },
    { "epoch": 5.351111111111111, "grad_norm": 0.3499818444252014, "learning_rate": 5.218096936826681e-05, "loss": 0.2221, "step": 301 },
    { "epoch": 5.368888888888889, "grad_norm": 0.48628711700439453, "learning_rate": 5.18695597138163e-05, "loss": 0.1932, "step": 302 },
    { "epoch": 5.386666666666667, "grad_norm": 0.5208820104598999, "learning_rate": 5.155807741923666e-05, "loss": 0.1818, "step": 303 },
    { "epoch": 5.404444444444445, "grad_norm": 0.4346122741699219, "learning_rate": 5.124653458690365e-05, "loss": 0.1924, "step": 304 },
    { "epoch": 5.4222222222222225, "grad_norm": 0.45820167660713196, "learning_rate": 5.0934943321545115e-05, "loss": 0.1922, "step": 305 },
    { "epoch": 5.44, "grad_norm": 0.5644346475601196, "learning_rate": 5.062331572977076e-05, "loss": 0.1828, "step": 306 },
    { "epoch": 5.457777777777777, "grad_norm": 0.507893979549408, "learning_rate": 5.031166391960168e-05, "loss": 0.2173, "step": 307 },
    { "epoch": 5.475555555555555, "grad_norm": 0.4621982276439667, "learning_rate": 5e-05, "loss": 0.1703, "step": 308 },
    { "epoch": 5.493333333333333, "grad_norm": 0.4117422103881836, "learning_rate": 4.968833608039832e-05, "loss": 0.1751, "step": 309 },
    { "epoch": 5.511111111111111, "grad_norm": 0.4004817605018616, "learning_rate": 4.9376684270229254e-05, "loss": 0.1835, "step": 310 },
    { "epoch": 5.528888888888889, "grad_norm": 0.4063107669353485, "learning_rate": 4.9065056678454904e-05, "loss": 0.1892, "step": 311 },
    { "epoch": 5.546666666666667, "grad_norm": 0.45984384417533875, "learning_rate": 4.875346541309637e-05, "loss": 0.181, "step": 312 },
    { "epoch": 5.564444444444445, "grad_norm": 0.48440927267074585, "learning_rate": 4.844192258076336e-05, "loss": 0.1723, "step": 313 },
    { "epoch": 5.582222222222223, "grad_norm": 0.6084842681884766, "learning_rate": 4.813044028618373e-05, "loss": 0.1875, "step": 314 },
    { "epoch": 5.6, "grad_norm": 0.5340481996536255, "learning_rate": 4.781903063173321e-05, "loss": 0.1903, "step": 315 },
    { "epoch": 5.6177777777777775, "grad_norm": 0.3680734634399414, "learning_rate": 4.750770571696514e-05, "loss": 0.1742, "step": 316 },
    { "epoch": 5.635555555555555, "grad_norm": 0.4389323592185974, "learning_rate": 4.7196477638140404e-05, "loss": 0.1604, "step": 317 },
    { "epoch": 5.653333333333333, "grad_norm": 0.5682416558265686, "learning_rate": 4.68853584877574e-05, "loss": 0.2142, "step": 318 },
    { "epoch": 5.671111111111111, "grad_norm": 0.5514141321182251, "learning_rate": 4.657436035408217e-05, "loss": 0.1902, "step": 319 },
    { "epoch": 5.688888888888889, "grad_norm": 0.5815438032150269, "learning_rate": 4.626349532067879e-05, "loss": 0.1604, "step": 320 },
    { "epoch": 5.706666666666667, "grad_norm": 0.3311176598072052, "learning_rate": 4.595277546593984e-05, "loss": 0.2049, "step": 321 },
    { "epoch": 5.724444444444444, "grad_norm": 0.4205529987812042, "learning_rate": 4.564221286261709e-05, "loss": 0.1715, "step": 322 },
    { "epoch": 5.742222222222222, "grad_norm": 0.6624743342399597, "learning_rate": 4.5331819577352474e-05, "loss": 0.1601, "step": 323 },
    { "epoch": 5.76, "grad_norm": 0.5203994512557983, "learning_rate": 4.502160767020918e-05, "loss": 0.1589, "step": 324 },
    { "epoch": 5.777777777777778, "grad_norm": 0.4620148241519928, "learning_rate": 4.471158919420312e-05, "loss": 0.1656, "step": 325 },
    { "epoch": 5.795555555555556, "grad_norm": 0.5596708655357361, "learning_rate": 4.4401776194834613e-05, "loss": 0.1821, "step": 326 },
    { "epoch": 5.8133333333333335, "grad_norm": 0.4360620975494385, "learning_rate": 4.409218070962036e-05, "loss": 0.1752, "step": 327 },
    { "epoch": 5.831111111111111, "grad_norm": 0.4674508571624756, "learning_rate": 4.378281476762576e-05, "loss": 0.204, "step": 328 },
    { "epoch": 5.848888888888889, "grad_norm": 0.9109951853752136, "learning_rate": 4.347369038899744e-05, "loss": 0.1562, "step": 329 },
    { "epoch": 5.866666666666667, "grad_norm": 0.8667967915534973, "learning_rate": 4.316481958449634e-05, "loss": 0.2376, "step": 330 },
    { "epoch": 5.884444444444444, "grad_norm": 0.6173791289329529, "learning_rate": 4.285621435503101e-05, "loss": 0.1699, "step": 331 },
    { "epoch": 5.902222222222222, "grad_norm": 0.6599533557891846, "learning_rate": 4.254788669119127e-05, "loss": 0.1652, "step": 332 },
    { "epoch": 5.92, "grad_norm": 0.39456817507743835, "learning_rate": 4.223984857278242e-05, "loss": 0.1695, "step": 333 },
    { "epoch": 5.937777777777778, "grad_norm": 0.7024866342544556, "learning_rate": 4.1932111968359664e-05, "loss": 0.1673, "step": 334 },
    { "epoch": 5.955555555555556, "grad_norm": 0.5477570295333862, "learning_rate": 4.162468883476319e-05, "loss": 0.1852, "step": 335 },
    { "epoch": 5.973333333333334, "grad_norm": 0.5014763474464417, "learning_rate": 4.131759111665349e-05, "loss": 0.1847, "step": 336 },
    { "epoch": 5.9911111111111115, "grad_norm": 0.334262877702713, "learning_rate": 4.101083074604737e-05, "loss": 0.1709, "step": 337 },
    { "epoch": 6.0088888888888885, "grad_norm": 0.5552971959114075, "learning_rate": 4.0704419641854274e-05, "loss": 0.1476, "step": 338 },
    { "epoch": 6.026666666666666, "grad_norm": 0.35900405049324036, "learning_rate": 4.03983697094132e-05, "loss": 0.1731, "step": 339 },
    { "epoch": 6.044444444444444, "grad_norm": 0.3748185634613037, "learning_rate": 4.0092692840030134e-05, "loss": 0.1625, "step": 340 },
    { "epoch": 6.062222222222222, "grad_norm": 0.5626404285430908, "learning_rate": 3.978740091051599e-05, "loss": 0.1718, "step": 341 },
    { "epoch": 6.08, "grad_norm": 0.5533756017684937, "learning_rate": 3.9482505782725224e-05, "loss": 0.1793, "step": 342 },
    { "epoch": 6.097777777777778, "grad_norm": 0.45061609148979187, "learning_rate": 3.917801930309486e-05, "loss": 0.1675, "step": 343 },
    { "epoch": 6.115555555555556, "grad_norm": 0.6766936182975769, "learning_rate": 3.887395330218429e-05, "loss": 0.1528, "step": 344 },
    { "epoch": 6.133333333333334, "grad_norm": 0.4249931573867798, "learning_rate": 3.857031959421553e-05, "loss": 0.1437, "step": 345 },
    { "epoch": 6.151111111111111, "grad_norm": 0.4025160074234009, "learning_rate": 3.8267129976614254e-05, "loss": 0.1647, "step": 346 },
    { "epoch": 6.168888888888889, "grad_norm": 0.42490458488464355, "learning_rate": 3.7964396229551364e-05, "loss": 0.1436, "step": 347 },
    { "epoch": 6.1866666666666665, "grad_norm": 0.6905245184898376, "learning_rate": 3.7662130115485314e-05, "loss": 0.148, "step": 348 },
    { "epoch": 6.204444444444444, "grad_norm": 0.608650267124176, "learning_rate": 3.7360343378705124e-05, "loss": 0.1413, "step": 349 },
    { "epoch": 6.222222222222222, "grad_norm": 0.560066819190979, "learning_rate": 3.705904774487396e-05, "loss": 0.1528, "step": 350 },
    { "epoch": 6.24, "grad_norm": 0.5189685821533203, "learning_rate": 3.675825492057364e-05, "loss": 0.1639, "step": 351 },
    { "epoch": 6.257777777777778, "grad_norm": 0.92945796251297, "learning_rate": 3.6457976592849754e-05, "loss": 0.2024, "step": 352 },
    { "epoch": 6.275555555555556, "grad_norm": 0.544842541217804, "learning_rate": 3.6158224428757535e-05, "loss": 0.1443, "step": 353 },
    { "epoch": 6.293333333333333, "grad_norm": 0.4329567849636078, "learning_rate": 3.585901007490863e-05, "loss": 0.1314, "step": 354 },
    { "epoch": 6.311111111111111, "grad_norm": 0.5567479729652405, "learning_rate": 3.556034515701852e-05, "loss": 0.1495, "step": 355 },
    { "epoch": 6.328888888888889, "grad_norm": 0.45176929235458374, "learning_rate": 3.5262241279454785e-05, "loss": 0.1571, "step": 356 },
    { "epoch": 6.346666666666667, "grad_norm": 0.4304347634315491, "learning_rate": 3.4964710024786354e-05, "loss": 0.1413, "step": 357 },
    { "epoch": 6.364444444444445, "grad_norm": 0.5478505492210388, "learning_rate": 3.4667762953333295e-05, "loss": 0.1643, "step": 358 },
    { "epoch": 6.3822222222222225, "grad_norm": 0.5922195911407471, "learning_rate": 3.4371411602717784e-05, "loss": 0.1716, "step": 359 },
    { "epoch": 6.4, "grad_norm": 0.4263589084148407, "learning_rate": 3.4075667487415785e-05, "loss": 0.1671, "step": 360 },
    { "epoch": 6.417777777777777, "grad_norm": 0.44783204793930054, "learning_rate": 3.3780542098309654e-05, "loss": 0.167, "step": 361 },
    { "epoch": 6.435555555555555, "grad_norm": 0.5761044025421143, "learning_rate": 3.3486046902241664e-05, "loss": 0.1518, "step": 362 },
    { "epoch": 6.453333333333333, "grad_norm": 0.4984096884727478, "learning_rate": 3.319219334156847e-05, "loss": 0.1888, "step": 363 },
    { "epoch": 6.471111111111111, "grad_norm": 0.5709213614463806, "learning_rate": 3.289899283371657e-05, "loss": 0.1374, "step": 364 },
    { "epoch": 6.488888888888889, "grad_norm": 0.45197322964668274, "learning_rate": 3.2606456770738636e-05, "loss": 0.1823, "step": 365 },
    { "epoch": 6.506666666666667, "grad_norm": 0.3853418231010437, "learning_rate": 3.231459651887093e-05, "loss": 0.1424, "step": 366 },
    { "epoch": 6.524444444444445, "grad_norm": 0.6892653107643127, "learning_rate": 3.2023423418091626e-05, "loss": 0.1354, "step": 367 },
    { "epoch": 6.542222222222223, "grad_norm": 0.8659518957138062, "learning_rate": 3.173294878168025e-05, "loss": 0.1658, "step": 368 },
    { "epoch": 6.5600000000000005, "grad_norm": 0.6673440933227539, "learning_rate": 3.1443183895778105e-05, "loss": 0.1341, "step": 369 },
    { "epoch": 6.5777777777777775, "grad_norm": 0.49865254759788513, "learning_rate": 3.115414001894974e-05, "loss": 0.1746, "step": 370 },
    { "epoch": 6.595555555555555, "grad_norm": 0.5364950299263, "learning_rate": 3.086582838174551e-05, "loss": 0.1655, "step": 371 },
    { "epoch": 6.613333333333333, "grad_norm": 0.6427706480026245, "learning_rate": 3.0578260186265265e-05, "loss": 0.1489, "step": 372 },
    { "epoch": 6.631111111111111, "grad_norm": 0.5947421193122864, "learning_rate": 3.029144660572304e-05, "loss": 0.1327, "step": 373 },
    { "epoch": 6.648888888888889, "grad_norm": 0.5604392886161804, "learning_rate": 3.000539878401296e-05, "loss": 0.1593, "step": 374 },
    { "epoch": 6.666666666666667, "grad_norm": 0.4848502576351166,
|
"learning_rate": 2.9720127835276256e-05, |
|
"loss": 0.1551, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 6.684444444444445, |
|
"grad_norm": 0.5442904829978943, |
|
"learning_rate": 2.9435644843469436e-05, |
|
"loss": 0.1526, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 6.702222222222222, |
|
"grad_norm": 0.4855183959007263, |
|
"learning_rate": 2.9151960861933614e-05, |
|
"loss": 0.183, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 0.8890539407730103, |
|
"learning_rate": 2.886908691296504e-05, |
|
"loss": 0.1624, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 6.737777777777778, |
|
"grad_norm": 0.47631746530532837, |
|
"learning_rate": 2.858703398738686e-05, |
|
"loss": 0.1373, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 6.7555555555555555, |
|
"grad_norm": 0.46250614523887634, |
|
"learning_rate": 2.8305813044122097e-05, |
|
"loss": 0.1786, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 6.773333333333333, |
|
"grad_norm": 0.5734685063362122, |
|
"learning_rate": 2.8025435009767747e-05, |
|
"loss": 0.1432, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 6.791111111111111, |
|
"grad_norm": 0.7250308394432068, |
|
"learning_rate": 2.774591077817038e-05, |
|
"loss": 0.1483, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 6.808888888888889, |
|
"grad_norm": 0.4598230719566345, |
|
"learning_rate": 2.746725121000273e-05, |
|
"loss": 0.1709, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 6.826666666666666, |
|
"grad_norm": 0.5403770208358765, |
|
"learning_rate": 2.718946713234185e-05, |
|
"loss": 0.1944, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 6.844444444444444, |
|
"grad_norm": 0.43435898423194885, |
|
"learning_rate": 2.6912569338248315e-05, |
|
"loss": 0.1471, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 6.862222222222222, |
|
"grad_norm": 0.7569040656089783, |
|
"learning_rate": 2.66365685863469e-05, |
|
"loss": 0.1755, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"grad_norm": 0.4488949775695801, |
|
"learning_rate": 2.636147560040866e-05, |
|
"loss": 0.1354, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 6.897777777777778, |
|
"grad_norm": 0.43409401178359985, |
|
"learning_rate": 2.6087301068934106e-05, |
|
"loss": 0.1803, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 6.915555555555556, |
|
"grad_norm": 0.38693767786026, |
|
"learning_rate": 2.581405564473801e-05, |
|
"loss": 0.1762, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 6.933333333333334, |
|
"grad_norm": 0.3770747780799866, |
|
"learning_rate": 2.5541749944535554e-05, |
|
"loss": 0.1644, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 6.9511111111111115, |
|
"grad_norm": 0.5996742844581604, |
|
"learning_rate": 2.527039454852963e-05, |
|
"loss": 0.1719, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 6.968888888888889, |
|
"grad_norm": 0.54173743724823, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.1366, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 6.986666666666666, |
|
"grad_norm": 0.5965537428855896, |
|
"learning_rate": 2.473057680489348e-05, |
|
"loss": 0.155, |
|
"step": 393 |
|
}, |
|
    {
      "epoch": 7.004444444444444,
      "grad_norm": 0.3956046998500824,
      "learning_rate": 2.4462135431415733e-05,
      "loss": 0.1627,
      "step": 394
    },
    {
      "epoch": 7.022222222222222,
      "grad_norm": 0.530720055103302,
      "learning_rate": 2.4194686309624663e-05,
      "loss": 0.1384,
      "step": 395
    },
    {
      "epoch": 7.04,
      "grad_norm": 0.4579175114631653,
      "learning_rate": 2.39282398310251e-05,
      "loss": 0.1731,
      "step": 396
    },
    {
      "epoch": 7.057777777777778,
      "grad_norm": 0.807518720626831,
      "learning_rate": 2.366280634816496e-05,
      "loss": 0.1471,
      "step": 397
    },
    {
      "epoch": 7.075555555555556,
      "grad_norm": 0.5390831232070923,
      "learning_rate": 2.3398396174233178e-05,
      "loss": 0.1066,
      "step": 398
    },
    {
      "epoch": 7.093333333333334,
      "grad_norm": 0.4245457649230957,
      "learning_rate": 2.3135019582658802e-05,
      "loss": 0.1201,
      "step": 399
    },
    {
      "epoch": 7.111111111111111,
      "grad_norm": 0.40093037486076355,
      "learning_rate": 2.2872686806712035e-05,
      "loss": 0.1401,
      "step": 400
    },
    {
      "epoch": 7.128888888888889,
      "grad_norm": 0.8866329789161682,
      "learning_rate": 2.261140803910644e-05,
      "loss": 0.1096,
      "step": 401
    },
    {
      "epoch": 7.1466666666666665,
      "grad_norm": 0.45840370655059814,
      "learning_rate": 2.235119343160303e-05,
      "loss": 0.1555,
      "step": 402
    },
    {
      "epoch": 7.164444444444444,
      "grad_norm": 0.478550523519516,
      "learning_rate": 2.2092053094615813e-05,
      "loss": 0.1301,
      "step": 403
    },
    {
      "epoch": 7.182222222222222,
      "grad_norm": 0.4727082848548889,
      "learning_rate": 2.1833997096818898e-05,
      "loss": 0.1345,
      "step": 404
    },
    {
      "epoch": 7.2,
      "grad_norm": 0.5048395991325378,
      "learning_rate": 2.157703546475539e-05,
      "loss": 0.1733,
      "step": 405
    },
    {
      "epoch": 7.217777777777778,
      "grad_norm": 0.40645188093185425,
      "learning_rate": 2.132117818244771e-05,
      "loss": 0.1491,
      "step": 406
    },
    {
      "epoch": 7.235555555555556,
      "grad_norm": 0.560034990310669,
      "learning_rate": 2.1066435191009715e-05,
      "loss": 0.1226,
      "step": 407
    },
    {
      "epoch": 7.253333333333333,
      "grad_norm": 0.7475597262382507,
      "learning_rate": 2.0812816388260518e-05,
      "loss": 0.1455,
      "step": 408
    },
    {
      "epoch": 7.271111111111111,
      "grad_norm": 0.4967871904373169,
      "learning_rate": 2.056033162833977e-05,
      "loss": 0.1176,
      "step": 409
    },
    {
      "epoch": 7.288888888888889,
      "grad_norm": 1.1265000104904175,
      "learning_rate": 2.0308990721324927e-05,
      "loss": 0.1654,
      "step": 410
    },
    {
      "epoch": 7.306666666666667,
      "grad_norm": 1.0787440538406372,
      "learning_rate": 2.0058803432849987e-05,
      "loss": 0.1741,
      "step": 411
    },
    {
      "epoch": 7.3244444444444445,
      "grad_norm": 0.7019418478012085,
      "learning_rate": 1.980977948372612e-05,
      "loss": 0.1198,
      "step": 412
    },
    {
      "epoch": 7.342222222222222,
      "grad_norm": 0.5232211947441101,
      "learning_rate": 1.9561928549563968e-05,
      "loss": 0.1176,
      "step": 413
    },
    {
      "epoch": 7.36,
      "grad_norm": 0.4455171823501587,
      "learning_rate": 1.931526026039764e-05,
      "loss": 0.1487,
      "step": 414
    },
    {
      "epoch": 7.377777777777778,
      "grad_norm": 0.8757272362709045,
      "learning_rate": 1.906978420031059e-05,
      "loss": 0.1809,
      "step": 415
    },
    {
      "epoch": 7.395555555555555,
      "grad_norm": 0.4900935888290405,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 0.1134,
      "step": 416
    },
    {
      "epoch": 7.413333333333333,
      "grad_norm": 0.4308149218559265,
      "learning_rate": 1.8582446871722636e-05,
      "loss": 0.1467,
      "step": 417
    },
    {
      "epoch": 7.431111111111111,
      "grad_norm": 0.42540425062179565,
      "learning_rate": 1.8340604538293015e-05,
      "loss": 0.1813,
      "step": 418
    },
    {
      "epoch": 7.448888888888889,
      "grad_norm": 0.9641832113265991,
      "learning_rate": 1.8099992303349577e-05,
      "loss": 0.1514,
      "step": 419
    },
    {
      "epoch": 7.466666666666667,
      "grad_norm": 0.47594931721687317,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 0.1348,
      "step": 420
    },
    {
      "epoch": 7.484444444444445,
      "grad_norm": 0.7819913625717163,
      "learning_rate": 1.7622495475886487e-05,
      "loss": 0.127,
      "step": 421
    },
    {
      "epoch": 7.502222222222223,
      "grad_norm": 0.4303249418735504,
      "learning_rate": 1.738562943609396e-05,
      "loss": 0.1081,
      "step": 422
    },
    {
      "epoch": 7.52,
      "grad_norm": 0.8481171131134033,
      "learning_rate": 1.7150030599520984e-05,
      "loss": 0.1875,
      "step": 423
    },
    {
      "epoch": 7.5377777777777775,
      "grad_norm": 0.5441702008247375,
      "learning_rate": 1.691570812015704e-05,
      "loss": 0.1278,
      "step": 424
    },
    {
      "epoch": 7.555555555555555,
      "grad_norm": 0.6104997992515564,
      "learning_rate": 1.6682671102399805e-05,
      "loss": 0.1391,
      "step": 425
    },
    {
      "epoch": 7.573333333333333,
      "grad_norm": 0.4054010212421417,
      "learning_rate": 1.6450928600701504e-05,
      "loss": 0.1568,
      "step": 426
    },
    {
      "epoch": 7.591111111111111,
      "grad_norm": 0.4448254108428955,
      "learning_rate": 1.622048961921699e-05,
      "loss": 0.1352,
      "step": 427
    },
    {
      "epoch": 7.608888888888889,
      "grad_norm": 0.5302364826202393,
      "learning_rate": 1.599136311145402e-05,
      "loss": 0.1635,
      "step": 428
    },
    {
      "epoch": 7.626666666666667,
      "grad_norm": 0.42252257466316223,
      "learning_rate": 1.5763557979925324e-05,
      "loss": 0.1637,
      "step": 429
    },
    {
      "epoch": 7.644444444444445,
      "grad_norm": 0.508930504322052,
      "learning_rate": 1.553708307580265e-05,
      "loss": 0.1537,
      "step": 430
    },
    {
      "epoch": 7.662222222222223,
      "grad_norm": 0.6009820103645325,
      "learning_rate": 1.531194719857292e-05,
      "loss": 0.1205,
      "step": 431
    },
    {
      "epoch": 7.68,
      "grad_norm": 0.4471452236175537,
      "learning_rate": 1.5088159095696363e-05,
      "loss": 0.128,
      "step": 432
    },
    {
      "epoch": 7.697777777777778,
      "grad_norm": 0.6378930807113647,
      "learning_rate": 1.4865727462266543e-05,
      "loss": 0.1254,
      "step": 433
    },
    {
      "epoch": 7.7155555555555555,
      "grad_norm": 0.627171516418457,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.1451,
      "step": 434
    },
    {
      "epoch": 7.733333333333333,
      "grad_norm": 0.4755552411079407,
      "learning_rate": 1.4424968120263504e-05,
      "loss": 0.1285,
      "step": 435
    },
    {
      "epoch": 7.751111111111111,
      "grad_norm": 0.6819527745246887,
      "learning_rate": 1.4206657537014079e-05,
      "loss": 0.1817,
      "step": 436
    },
    {
      "epoch": 7.768888888888889,
      "grad_norm": 0.5436893701553345,
      "learning_rate": 1.398973767319368e-05,
      "loss": 0.1569,
      "step": 437
    },
    {
      "epoch": 7.786666666666667,
      "grad_norm": 0.506430447101593,
      "learning_rate": 1.3774216957036367e-05,
      "loss": 0.1493,
      "step": 438
    },
    {
      "epoch": 7.804444444444444,
      "grad_norm": 0.8908332586288452,
      "learning_rate": 1.3560103762413584e-05,
      "loss": 0.1215,
      "step": 439
    },
    {
      "epoch": 7.822222222222222,
      "grad_norm": 0.38750067353248596,
      "learning_rate": 1.3347406408508695e-05,
      "loss": 0.1389,
      "step": 440
    },
    {
      "epoch": 7.84,
      "grad_norm": 0.44923001527786255,
      "learning_rate": 1.3136133159493802e-05,
      "loss": 0.113,
      "step": 441
    },
    {
      "epoch": 7.857777777777778,
      "grad_norm": 0.4674062132835388,
      "learning_rate": 1.2926292224208664e-05,
      "loss": 0.1344,
      "step": 442
    },
    {
      "epoch": 7.875555555555556,
      "grad_norm": 0.47324058413505554,
      "learning_rate": 1.2717891755841722e-05,
      "loss": 0.1363,
      "step": 443
    },
    {
      "epoch": 7.8933333333333335,
      "grad_norm": 0.6032904386520386,
      "learning_rate": 1.2510939851613285e-05,
      "loss": 0.1293,
      "step": 444
    },
    {
      "epoch": 7.911111111111111,
      "grad_norm": 0.4771234393119812,
      "learning_rate": 1.230544455246101e-05,
      "loss": 0.161,
      "step": 445
    },
    {
      "epoch": 7.928888888888888,
      "grad_norm": 0.48323342204093933,
      "learning_rate": 1.2101413842727345e-05,
      "loss": 0.1336,
      "step": 446
    },
    {
      "epoch": 7.946666666666666,
      "grad_norm": 0.34565791487693787,
      "learning_rate": 1.1898855649849461e-05,
      "loss": 0.177,
      "step": 447
    },
    {
      "epoch": 7.964444444444444,
      "grad_norm": 0.39549532532691956,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 0.1431,
      "step": 448
    },
    {
      "epoch": 7.982222222222222,
      "grad_norm": 0.45379385352134705,
      "learning_rate": 1.1498188238036861e-05,
      "loss": 0.1503,
      "step": 449
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.5633464455604553,
      "learning_rate": 1.130009458668863e-05,
      "loss": 0.1328,
      "step": 450
    },
    {
      "epoch": 8.017777777777777,
      "grad_norm": 0.42473697662353516,
      "learning_rate": 1.1103504586764263e-05,
      "loss": 0.1182,
      "step": 451
    },
    {
      "epoch": 8.035555555555556,
      "grad_norm": 0.34909459948539734,
      "learning_rate": 1.090842587659851e-05,
      "loss": 0.1369,
      "step": 452
    },
    {
      "epoch": 8.053333333333333,
      "grad_norm": 0.49491363763809204,
      "learning_rate": 1.0714866035806326e-05,
      "loss": 0.1346,
      "step": 453
    },
    {
      "epoch": 8.071111111111112,
      "grad_norm": 0.39719170331954956,
      "learning_rate": 1.0522832584988234e-05,
      "loss": 0.144,
      "step": 454
    },
    {
      "epoch": 8.088888888888889,
      "grad_norm": 0.530826210975647,
      "learning_rate": 1.0332332985438248e-05,
      "loss": 0.1522,
      "step": 455
    },
    {
      "epoch": 8.106666666666667,
      "grad_norm": 0.44326362013816833,
      "learning_rate": 1.0143374638853891e-05,
      "loss": 0.1347,
      "step": 456
    },
    {
      "epoch": 8.124444444444444,
      "grad_norm": 0.47626104950904846,
      "learning_rate": 9.955964887048607e-06,
      "loss": 0.1371,
      "step": 457
    },
    {
      "epoch": 8.142222222222221,
      "grad_norm": 0.532647430896759,
      "learning_rate": 9.770111011666583e-06,
      "loss": 0.1247,
      "step": 458
    },
    {
      "epoch": 8.16,
      "grad_norm": 0.6383976340293884,
      "learning_rate": 9.58582023389974e-06,
      "loss": 0.1328,
      "step": 459
    },
    {
      "epoch": 8.177777777777777,
      "grad_norm": 0.6057294011116028,
      "learning_rate": 9.403099714207175e-06,
      "loss": 0.1485,
      "step": 460
    },
    {
      "epoch": 8.195555555555556,
      "grad_norm": 0.3655204474925995,
      "learning_rate": 9.221956552036992e-06,
      "loss": 0.1226,
      "step": 461
    },
    {
      "epoch": 8.213333333333333,
      "grad_norm": 0.3895055055618286,
      "learning_rate": 9.042397785550405e-06,
      "loss": 0.1297,
      "step": 462
    },
    {
      "epoch": 8.231111111111112,
      "grad_norm": 0.42906251549720764,
      "learning_rate": 8.864430391348332e-06,
      "loss": 0.1209,
      "step": 463
    },
    {
      "epoch": 8.248888888888889,
      "grad_norm": 0.6001704335212708,
      "learning_rate": 8.688061284200266e-06,
      "loss": 0.0914,
      "step": 464
    },
    {
      "epoch": 8.266666666666667,
      "grad_norm": 0.5638251304626465,
      "learning_rate": 8.513297316775625e-06,
      "loss": 0.1514,
      "step": 465
    },
    {
      "epoch": 8.284444444444444,
      "grad_norm": 0.6026332378387451,
      "learning_rate": 8.34014527937756e-06,
      "loss": 0.1336,
      "step": 466
    },
    {
      "epoch": 8.302222222222222,
      "grad_norm": 0.4549403190612793,
      "learning_rate": 8.168611899679013e-06,
      "loss": 0.1287,
      "step": 467
    },
    {
      "epoch": 8.32,
      "grad_norm": 0.48877498507499695,
      "learning_rate": 7.998703842461431e-06,
      "loss": 0.1336,
      "step": 468
    },
    {
      "epoch": 8.337777777777777,
      "grad_norm": 0.7382359504699707,
      "learning_rate": 7.830427709355725e-06,
      "loss": 0.1321,
      "step": 469
    },
    {
      "epoch": 8.355555555555556,
      "grad_norm": 0.6301836371421814,
      "learning_rate": 7.663790038585793e-06,
      "loss": 0.1072,
      "step": 470
    },
    {
      "epoch": 8.373333333333333,
      "grad_norm": 0.45454826951026917,
      "learning_rate": 7.498797304714544e-06,
      "loss": 0.1336,
      "step": 471
    },
    {
      "epoch": 8.391111111111112,
      "grad_norm": 0.5664876103401184,
      "learning_rate": 7.33545591839222e-06,
      "loss": 0.1141,
      "step": 472
    },
    {
      "epoch": 8.408888888888889,
      "grad_norm": 0.45692479610443115,
      "learning_rate": 7.173772226107434e-06,
      "loss": 0.1143,
      "step": 473
    },
    {
      "epoch": 8.426666666666666,
      "grad_norm": 0.3842525780200958,
      "learning_rate": 7.013752509940485e-06,
      "loss": 0.1242,
      "step": 474
    },
    {
      "epoch": 8.444444444444445,
      "grad_norm": 0.5034791827201843,
      "learning_rate": 6.855402987319348e-06,
      "loss": 0.1302,
      "step": 475
    },
    {
      "epoch": 8.462222222222222,
      "grad_norm": 1.0054627656936646,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.1245,
      "step": 476
    },
    {
      "epoch": 8.48,
      "grad_norm": 0.5008876919746399,
      "learning_rate": 6.54373906771768e-06,
      "loss": 0.134,
      "step": 477
    },
    {
      "epoch": 8.497777777777777,
      "grad_norm": 0.6585814356803894,
      "learning_rate": 6.390436780169734e-06,
      "loss": 0.1106,
      "step": 478
    },
    {
      "epoch": 8.515555555555556,
      "grad_norm": 0.40638384222984314,
      "learning_rate": 6.238828904562316e-06,
      "loss": 0.1309,
      "step": 479
    },
    {
      "epoch": 8.533333333333333,
      "grad_norm": 0.6327247619628906,
      "learning_rate": 6.088921331488568e-06,
      "loss": 0.1263,
      "step": 480
    },
    {
      "epoch": 8.551111111111112,
      "grad_norm": 0.4367387592792511,
      "learning_rate": 5.94071988547788e-06,
      "loss": 0.1018,
      "step": 481
    },
    {
      "epoch": 8.568888888888889,
      "grad_norm": 0.5796259641647339,
      "learning_rate": 5.794230324769517e-06,
      "loss": 0.1568,
      "step": 482
    },
    {
      "epoch": 8.586666666666666,
      "grad_norm": 0.485882043838501,
      "learning_rate": 5.649458341088915e-06,
      "loss": 0.1537,
      "step": 483
    },
    {
      "epoch": 8.604444444444445,
      "grad_norm": 0.38468489050865173,
      "learning_rate": 5.506409559426573e-06,
      "loss": 0.1364,
      "step": 484
    },
    {
      "epoch": 8.622222222222222,
      "grad_norm": 2.784595012664795,
      "learning_rate": 5.365089537819434e-06,
      "loss": 0.1214,
      "step": 485
    },
    {
      "epoch": 8.64,
      "grad_norm": 0.40455055236816406,
      "learning_rate": 5.2255037671349535e-06,
      "loss": 0.1029,
      "step": 486
    },
    {
      "epoch": 8.657777777777778,
      "grad_norm": 0.5025741457939148,
      "learning_rate": 5.087657670857798e-06,
      "loss": 0.1279,
      "step": 487
    },
    {
      "epoch": 8.675555555555556,
      "grad_norm": 0.5535502433776855,
      "learning_rate": 4.951556604879048e-06,
      "loss": 0.1055,
      "step": 488
    },
    {
      "epoch": 8.693333333333333,
      "grad_norm": 0.47892096638679504,
      "learning_rate": 4.8172058572881765e-06,
      "loss": 0.1115,
      "step": 489
    },
    {
      "epoch": 8.71111111111111,
      "grad_norm": 0.6920176148414612,
      "learning_rate": 4.684610648167503e-06,
      "loss": 0.1337,
      "step": 490
    },
    {
      "epoch": 8.72888888888889,
      "grad_norm": 0.41214314103126526,
      "learning_rate": 4.5537761293894535e-06,
      "loss": 0.1677,
      "step": 491
    },
    {
      "epoch": 8.746666666666666,
      "grad_norm": 0.4349137544631958,
      "learning_rate": 4.424707384416344e-06,
      "loss": 0.1281,
      "step": 492
    },
    {
      "epoch": 8.764444444444445,
      "grad_norm": 0.5308365225791931,
      "learning_rate": 4.29740942810285e-06,
      "loss": 0.1464,
      "step": 493
    },
    {
      "epoch": 8.782222222222222,
      "grad_norm": 0.5176928043365479,
      "learning_rate": 4.1718872065011904e-06,
      "loss": 0.1266,
      "step": 494
    },
    {
      "epoch": 8.8,
      "grad_norm": 0.4264501631259918,
      "learning_rate": 4.048145596668967e-06,
      "loss": 0.1355,
      "step": 495
    },
    {
      "epoch": 8.817777777777778,
      "grad_norm": 0.5254343152046204,
      "learning_rate": 3.9261894064796135e-06,
      "loss": 0.144,
      "step": 496
    },
    {
      "epoch": 8.835555555555555,
      "grad_norm": 0.400968998670578,
      "learning_rate": 3.8060233744356633e-06,
      "loss": 0.1422,
      "step": 497
    },
    {
      "epoch": 8.853333333333333,
      "grad_norm": 0.7004261016845703,
      "learning_rate": 3.687652169484568e-06,
      "loss": 0.1472,
      "step": 498
    },
    {
      "epoch": 8.87111111111111,
      "grad_norm": 0.42059558629989624,
      "learning_rate": 3.5710803908373224e-06,
      "loss": 0.1145,
      "step": 499
    },
    {
      "epoch": 8.88888888888889,
      "grad_norm": 0.5794507265090942,
      "learning_rate": 3.4563125677897932e-06,
      "loss": 0.1145,
      "step": 500
    },
    {
      "epoch": 8.88888888888889,
      "eval_loss": 0.3872971832752228,
      "eval_runtime": 44.5374,
      "eval_samples_per_second": 2.245,
      "eval_steps_per_second": 2.245,
      "step": 500
    },
    {
      "epoch": 8.906666666666666,
      "grad_norm": 0.4100549817085266,
      "learning_rate": 3.343353159546675e-06,
      "loss": 0.1311,
      "step": 501
    },
    {
      "epoch": 8.924444444444445,
      "grad_norm": 0.5439295172691345,
      "learning_rate": 3.2322065550483007e-06,
      "loss": 0.1346,
      "step": 502
    },
    {
      "epoch": 8.942222222222222,
      "grad_norm": 0.5619916319847107,
      "learning_rate": 3.1228770728000455e-06,
      "loss": 0.1145,
      "step": 503
    },
    {
      "epoch": 8.96,
      "grad_norm": 0.6183579564094543,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 0.1122,
      "step": 504
    },
    {
      "epoch": 8.977777777777778,
      "grad_norm": 0.39990657567977905,
      "learning_rate": 2.9096863958968268e-06,
      "loss": 0.155,
      "step": 505
    },
    {
      "epoch": 8.995555555555555,
      "grad_norm": 0.3883807063102722,
      "learning_rate": 2.8058334845816213e-06,
      "loss": 0.1568,
      "step": 506
    },
    {
      "epoch": 9.013333333333334,
      "grad_norm": 0.4355883300304413,
      "learning_rate": 2.7038142618741992e-06,
      "loss": 0.129,
      "step": 507
    },
    {
      "epoch": 9.03111111111111,
      "grad_norm": 0.505295991897583,
      "learning_rate": 2.603632691643415e-06,
      "loss": 0.1233,
      "step": 508
    },
    {
      "epoch": 9.04888888888889,
      "grad_norm": 0.5392528176307678,
      "learning_rate": 2.5052926663577e-06,
      "loss": 0.1235,
      "step": 509
    },
    {
      "epoch": 9.066666666666666,
      "grad_norm": 0.4447910785675049,
      "learning_rate": 2.408798006933882e-06,
      "loss": 0.1068,
      "step": 510
    },
    {
      "epoch": 9.084444444444445,
      "grad_norm": 0.5352734923362732,
      "learning_rate": 2.314152462588659e-06,
      "loss": 0.1364,
      "step": 511
    },
    {
      "epoch": 9.102222222222222,
      "grad_norm": 0.5564258694648743,
      "learning_rate": 2.221359710692961e-06,
      "loss": 0.1492,
      "step": 512
    },
    {
      "epoch": 9.12,
      "grad_norm": 0.3572767972946167,
      "learning_rate": 2.1304233566290964e-06,
      "loss": 0.1079,
      "step": 513
    },
    {
      "epoch": 9.137777777777778,
      "grad_norm": 0.5538711547851562,
      "learning_rate": 2.041346933650612e-06,
      "loss": 0.1298,
      "step": 514
    },
    {
      "epoch": 9.155555555555555,
      "grad_norm": 0.5351611375808716,
      "learning_rate": 1.9541339027450256e-06,
      "loss": 0.1046,
      "step": 515
    },
    {
      "epoch": 9.173333333333334,
      "grad_norm": 0.449429988861084,
      "learning_rate": 1.8687876524993987e-06,
      "loss": 0.1129,
      "step": 516
    },
    {
      "epoch": 9.19111111111111,
      "grad_norm": 0.4684661328792572,
      "learning_rate": 1.785311498968617e-06,
      "loss": 0.1036,
      "step": 517
    },
    {
      "epoch": 9.20888888888889,
      "grad_norm": 0.46259376406669617,
      "learning_rate": 1.70370868554659e-06,
      "loss": 0.1266,
      "step": 518
    },
    {
      "epoch": 9.226666666666667,
      "grad_norm": 0.5819494128227234,
      "learning_rate": 1.6239823828401945e-06,
      "loss": 0.1162,
      "step": 519
    },
    {
      "epoch": 9.244444444444444,
      "grad_norm": 0.3985779583454132,
      "learning_rate": 1.5461356885461075e-06,
      "loss": 0.1179,
      "step": 520
    },
    {
      "epoch": 9.262222222222222,
      "grad_norm": 0.4249963164329529,
      "learning_rate": 1.4701716273304521e-06,
      "loss": 0.1342,
      "step": 521
    },
    {
      "epoch": 9.28,
      "grad_norm": 0.3580509424209595,
      "learning_rate": 1.3960931507112752e-06,
      "loss": 0.1196,
      "step": 522
    },
    {
      "epoch": 9.297777777777778,
      "grad_norm": 0.6348000764846802,
      "learning_rate": 1.3239031369438326e-06,
      "loss": 0.1205,
      "step": 523
    },
    {
      "epoch": 9.315555555555555,
      "grad_norm": 0.4758366644382477,
      "learning_rate": 1.2536043909088191e-06,
      "loss": 0.1159,
      "step": 524
    },
    {
      "epoch": 9.333333333333334,
      "grad_norm": 0.39275023341178894,
      "learning_rate": 1.1851996440033319e-06,
      "loss": 0.1529,
      "step": 525
    },
    {
      "epoch": 9.351111111111111,
      "grad_norm": 0.40188562870025635,
      "learning_rate": 1.118691554034773e-06,
      "loss": 0.1398,
      "step": 526
    },
    {
      "epoch": 9.36888888888889,
      "grad_norm": 0.4341106712818146,
      "learning_rate": 1.0540827051175818e-06,
      "loss": 0.1054,
      "step": 527
    },
    {
      "epoch": 9.386666666666667,
      "grad_norm": 0.4394477307796478,
      "learning_rate": 9.913756075728087e-07,
      "loss": 0.1361,
      "step": 528
    },
    {
      "epoch": 9.404444444444444,
      "grad_norm": 0.42253348231315613,
      "learning_rate": 9.305726978306173e-07,
      "loss": 0.0994,
      "step": 529
    },
    {
      "epoch": 9.422222222222222,
      "grad_norm": 0.5073143243789673,
      "learning_rate": 8.716763383355864e-07,
      "loss": 0.1426,
      "step": 530
    },
    {
      "epoch": 9.44,
      "grad_norm": 0.5548211336135864,
      "learning_rate": 8.146888174549339e-07,
      "loss": 0.1543,
      "step": 531
    },
    {
      "epoch": 9.457777777777778,
      "grad_norm": 0.4399406909942627,
      "learning_rate": 7.596123493895991e-07,
      "loss": 0.124,
      "step": 532
    },
    {
      "epoch": 9.475555555555555,
      "grad_norm": 0.46776387095451355,
      "learning_rate": 7.064490740882057e-07,
      "loss": 0.1438,
      "step": 533
    },
    {
      "epoch": 9.493333333333334,
      "grad_norm": 0.466458261013031,
      "learning_rate": 6.552010571639456e-07,
      "loss": 0.1099,
      "step": 534
    },
    {
      "epoch": 9.511111111111111,
      "grad_norm": 0.3930005133152008,
      "learning_rate": 6.058702898142643e-07,
      "loss": 0.1444,
      "step": 535
    },
    {
      "epoch": 9.528888888888888,
      "grad_norm": 0.36770400404930115,
      "learning_rate": 5.584586887435739e-07,
      "loss": 0.1354,
      "step": 536
    },
    {
      "epoch": 9.546666666666667,
      "grad_norm": 0.49141502380371094,
      "learning_rate": 5.129680960887007e-07,
      "loss": 0.1367,
      "step": 537
    },
    {
      "epoch": 9.564444444444444,
      "grad_norm": 0.47367358207702637,
      "learning_rate": 4.6940027934735954e-07,
      "loss": 0.1215,
      "step": 538
    },
    {
      "epoch": 9.582222222222223,
      "grad_norm": 0.4035596251487732,
      "learning_rate": 4.277569313094809e-07,
      "loss": 0.0906,
      "step": 539
    },
    {
      "epoch": 9.6,
      "grad_norm": 0.43266692757606506,
      "learning_rate": 3.8803966999139684e-07,
      "loss": 0.0995,
      "step": 540
    },
    {
      "epoch": 9.617777777777778,
      "grad_norm": 0.4476134181022644,
      "learning_rate": 3.50250038573019e-07,
      "loss": 0.1209,
      "step": 541
    },
    {
      "epoch": 9.635555555555555,
      "grad_norm": 0.3680909276008606,
      "learning_rate": 3.143895053378698e-07,
      "loss": 0.1201,
      "step": 542
    },
    {
      "epoch": 9.653333333333332,
      "grad_norm": 0.3905773162841797,
      "learning_rate": 2.8045946361601183e-07,
      "loss": 0.1151,
      "step": 543
    },
    {
      "epoch": 9.671111111111111,
      "grad_norm": 0.4270147979259491,
      "learning_rate": 2.4846123172992954e-07,
      "loss": 0.1356,
      "step": 544
    },
    {
      "epoch": 9.688888888888888,
      "grad_norm": 0.47167471051216125,
      "learning_rate": 2.1839605294330933e-07,
      "loss": 0.1461,
      "step": 545
    },
    {
      "epoch": 9.706666666666667,
      "grad_norm": 0.3621659576892853,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 0.1065,
      "step": 546
    },
    {
      "epoch": 9.724444444444444,
      "grad_norm": 0.5602201819419861,
      "learning_rate": 1.640694521422459e-07,
      "loss": 0.1043,
      "step": 547
    },
    {
      "epoch": 9.742222222222223,
      "grad_norm": 0.5394431948661804,
      "learning_rate": 1.3981014094099353e-07,
      "loss": 0.1214,
      "step": 548
    },
    {
      "epoch": 9.76,
      "grad_norm": 0.509139895439148,
      "learning_rate": 1.1748810438355628e-07,
      "loss": 0.1368,
      "step": 549
    },
    {
      "epoch": 9.777777777777779,
      "grad_norm": 0.48587632179260254,
      "learning_rate": 9.710420977340762e-08,
      "loss": 0.136,
      "step": 550
    },
    {
      "epoch": 9.795555555555556,
      "grad_norm": 0.38901224732398987,
      "learning_rate": 7.865924910916977e-08,
      "loss": 0.126,
      "step": 551
    },
    {
      "epoch": 9.813333333333333,
      "grad_norm": 0.4409494996070862,
      "learning_rate": 6.215393905388278e-08,
      "loss": 0.1457,
      "step": 552
    },
    {
      "epoch": 9.831111111111111,
      "grad_norm": 0.5387219190597534,
      "learning_rate": 4.7588920907110094e-08,
      "loss": 0.1103,
      "step": 553
    },
    {
      "epoch": 9.848888888888888,
      "grad_norm": 0.48696452379226685,
      "learning_rate": 3.496476058006959e-08,
      "loss": 0.1487,
      "step": 554
    },
    {
      "epoch": 9.866666666666667,
      "grad_norm": 0.4282010495662689,
      "learning_rate": 2.4281948573617874e-08,
      "loss": 0.1124,
      "step": 555
    },
    {
      "epoch": 9.884444444444444,
      "grad_norm": 0.449815034866333,
      "learning_rate": 1.5540899959187727e-08,
      "loss": 0.1004,
      "step": 556
    },
    {
      "epoch": 9.902222222222223,
      "grad_norm": 0.5384798049926758,
      "learning_rate": 8.741954362678772e-09,
      "loss": 0.1479,
      "step": 557
    },
    {
      "epoch": 9.92,
      "grad_norm": 0.47130730748176575,
      "learning_rate": 3.885375951256931e-09,
      "loss": 0.1138,
      "step": 558
    },
    {
      "epoch": 9.937777777777779,
      "grad_norm": 0.6720632910728455,
      "learning_rate": 9.713534230904041e-10,
      "loss": 0.1207,
      "step": 559
    },
    {
      "epoch": 9.955555555555556,
      "grad_norm": 0.4333189129829407,
      "learning_rate": 0.0,
      "loss": 0.1334,
      "step": 560
    },
    {
      "epoch": 9.955555555555556,
      "step": 560,
      "total_flos": 9.116617776876749e+17,
      "train_loss": 0.2247792588014688,
      "train_runtime": 17934.4708,
      "train_samples_per_second": 0.502,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 1,
  "max_steps": 560,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.116617776876749e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}