{
  "best_metric": 2.4987945556640625,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 3.011764705882353,
  "eval_steps": 50,
  "global_step": 192,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01568627450980392,
      "grad_norm": 0.9535642266273499,
      "learning_rate": 1e-05,
      "loss": 2.8736,
      "step": 1
    },
    {
      "epoch": 0.01568627450980392,
      "eval_loss": 3.324666738510132,
      "eval_runtime": 7.9996,
      "eval_samples_per_second": 13.501,
      "eval_steps_per_second": 3.375,
      "step": 1
    },
    {
      "epoch": 0.03137254901960784,
      "grad_norm": 1.0703654289245605,
      "learning_rate": 2e-05,
      "loss": 2.8739,
      "step": 2
    },
    {
      "epoch": 0.047058823529411764,
      "grad_norm": 1.0498156547546387,
      "learning_rate": 3e-05,
      "loss": 2.9074,
      "step": 3
    },
    {
      "epoch": 0.06274509803921569,
      "grad_norm": 0.9599528312683105,
      "learning_rate": 4e-05,
      "loss": 2.9035,
      "step": 4
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 0.9831591248512268,
      "learning_rate": 5e-05,
      "loss": 2.8063,
      "step": 5
    },
    {
      "epoch": 0.09411764705882353,
      "grad_norm": 0.9782329201698303,
      "learning_rate": 6e-05,
      "loss": 2.9952,
      "step": 6
    },
    {
      "epoch": 0.10980392156862745,
      "grad_norm": 0.859535813331604,
      "learning_rate": 7e-05,
      "loss": 2.9002,
      "step": 7
    },
    {
      "epoch": 0.12549019607843137,
      "grad_norm": 0.9193878769874573,
      "learning_rate": 8e-05,
      "loss": 2.7132,
      "step": 8
    },
    {
      "epoch": 0.1411764705882353,
      "grad_norm": 0.8184499740600586,
      "learning_rate": 9e-05,
      "loss": 2.7317,
      "step": 9
    },
    {
      "epoch": 0.1568627450980392,
      "grad_norm": 0.8140602707862854,
      "learning_rate": 0.0001,
      "loss": 2.6723,
      "step": 10
    },
    {
      "epoch": 0.17254901960784313,
      "grad_norm": 0.9426817893981934,
      "learning_rate": 9.999255120204248e-05,
      "loss": 2.6702,
      "step": 11
    },
    {
      "epoch": 0.18823529411764706,
      "grad_norm": 1.0090997219085693,
      "learning_rate": 9.997020702755353e-05,
      "loss": 2.7346,
      "step": 12
    },
    {
      "epoch": 0.20392156862745098,
      "grad_norm": 1.0597916841506958,
      "learning_rate": 9.99329741340228e-05,
      "loss": 2.678,
      "step": 13
    },
    {
      "epoch": 0.2196078431372549,
      "grad_norm": 1.2650080919265747,
      "learning_rate": 9.98808636150624e-05,
      "loss": 2.6331,
      "step": 14
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 1.5050358772277832,
      "learning_rate": 9.981389099710132e-05,
      "loss": 3.0748,
      "step": 15
    },
    {
      "epoch": 0.25098039215686274,
      "grad_norm": 0.516484797000885,
      "learning_rate": 9.973207623475965e-05,
      "loss": 2.4093,
      "step": 16
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.6125854849815369,
      "learning_rate": 9.96354437049027e-05,
      "loss": 2.5081,
      "step": 17
    },
    {
      "epoch": 0.2823529411764706,
      "grad_norm": 0.5779858827590942,
      "learning_rate": 9.952402219937816e-05,
      "loss": 2.5021,
      "step": 18
    },
    {
      "epoch": 0.2980392156862745,
      "grad_norm": 0.5545258522033691,
      "learning_rate": 9.939784491643734e-05,
      "loss": 2.4604,
      "step": 19
    },
    {
      "epoch": 0.3137254901960784,
      "grad_norm": 0.5096269249916077,
      "learning_rate": 9.92569494508437e-05,
      "loss": 2.626,
      "step": 20
    },
    {
      "epoch": 0.32941176470588235,
      "grad_norm": 0.4844023287296295,
      "learning_rate": 9.910137778267152e-05,
      "loss": 2.5395,
      "step": 21
    },
    {
      "epoch": 0.34509803921568627,
      "grad_norm": 0.4641485810279846,
      "learning_rate": 9.893117626479777e-05,
      "loss": 2.5221,
      "step": 22
    },
    {
      "epoch": 0.3607843137254902,
      "grad_norm": 0.47447431087493896,
      "learning_rate": 9.874639560909117e-05,
      "loss": 2.4512,
      "step": 23
    },
    {
      "epoch": 0.3764705882352941,
      "grad_norm": 0.6421774625778198,
      "learning_rate": 9.85470908713026e-05,
      "loss": 2.4789,
      "step": 24
    },
    {
      "epoch": 0.39215686274509803,
      "grad_norm": 0.692217230796814,
      "learning_rate": 9.833332143466099e-05,
      "loss": 2.5332,
      "step": 25
    },
    {
      "epoch": 0.40784313725490196,
      "grad_norm": 0.5684048533439636,
      "learning_rate": 9.810515099218003e-05,
      "loss": 2.7136,
      "step": 26
    },
    {
      "epoch": 0.4235294117647059,
      "grad_norm": 0.6432080864906311,
      "learning_rate": 9.78626475276808e-05,
      "loss": 2.5727,
      "step": 27
    },
    {
      "epoch": 0.4392156862745098,
      "grad_norm": 0.7890809774398804,
      "learning_rate": 9.760588329553571e-05,
      "loss": 2.6341,
      "step": 28
    },
    {
      "epoch": 0.4549019607843137,
      "grad_norm": 0.8166062235832214,
      "learning_rate": 9.73349347991403e-05,
      "loss": 2.5955,
      "step": 29
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 1.1982145309448242,
      "learning_rate": 9.704988276811883e-05,
      "loss": 2.9488,
      "step": 30
    },
    {
      "epoch": 0.48627450980392156,
      "grad_norm": 0.30004727840423584,
      "learning_rate": 9.675081213427076e-05,
      "loss": 2.3561,
      "step": 31
    },
    {
      "epoch": 0.5019607843137255,
      "grad_norm": 0.41050592064857483,
      "learning_rate": 9.643781200626511e-05,
      "loss": 2.425,
      "step": 32
    },
    {
      "epoch": 0.5176470588235295,
      "grad_norm": 0.42230165004730225,
      "learning_rate": 9.611097564309053e-05,
      "loss": 2.5036,
      "step": 33
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.4193224608898163,
      "learning_rate": 9.577040042626833e-05,
      "loss": 2.3797,
      "step": 34
    },
    {
      "epoch": 0.5490196078431373,
      "grad_norm": 0.4057847857475281,
      "learning_rate": 9.54161878308377e-05,
      "loss": 2.4552,
      "step": 35
    },
    {
      "epoch": 0.5647058823529412,
      "grad_norm": 0.43488991260528564,
      "learning_rate": 9.504844339512095e-05,
      "loss": 2.4372,
      "step": 36
    },
    {
      "epoch": 0.5803921568627451,
      "grad_norm": 0.4593192934989929,
      "learning_rate": 9.466727668927816e-05,
      "loss": 2.5748,
      "step": 37
    },
    {
      "epoch": 0.596078431372549,
      "grad_norm": 0.4425160884857178,
      "learning_rate": 9.42728012826605e-05,
      "loss": 2.404,
      "step": 38
    },
    {
      "epoch": 0.611764705882353,
      "grad_norm": 0.43048760294914246,
      "learning_rate": 9.38651347099721e-05,
      "loss": 2.3847,
      "step": 39
    },
    {
      "epoch": 0.6274509803921569,
      "grad_norm": 0.4540267884731293,
      "learning_rate": 9.344439843625034e-05,
      "loss": 2.4953,
      "step": 40
    },
    {
      "epoch": 0.6431372549019608,
      "grad_norm": 0.4934435486793518,
      "learning_rate": 9.301071782067504e-05,
      "loss": 2.479,
      "step": 41
    },
    {
      "epoch": 0.6588235294117647,
      "grad_norm": 0.6172159910202026,
      "learning_rate": 9.256422207921757e-05,
      "loss": 2.5236,
      "step": 42
    },
    {
      "epoch": 0.6745098039215687,
      "grad_norm": 0.5735173225402832,
      "learning_rate": 9.210504424614059e-05,
      "loss": 2.452,
      "step": 43
    },
    {
      "epoch": 0.6901960784313725,
      "grad_norm": 0.692180335521698,
      "learning_rate": 9.163332113436032e-05,
      "loss": 2.5363,
      "step": 44
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 1.1689708232879639,
      "learning_rate": 9.114919329468282e-05,
      "loss": 2.8587,
      "step": 45
    },
    {
      "epoch": 0.7215686274509804,
      "grad_norm": 0.3823428750038147,
      "learning_rate": 9.065280497392663e-05,
      "loss": 2.4599,
      "step": 46
    },
    {
      "epoch": 0.7372549019607844,
      "grad_norm": 0.37304720282554626,
      "learning_rate": 9.014430407194413e-05,
      "loss": 2.4085,
      "step": 47
    },
    {
      "epoch": 0.7529411764705882,
      "grad_norm": 0.354853093624115,
      "learning_rate": 8.962384209755452e-05,
      "loss": 2.5411,
      "step": 48
    },
    {
      "epoch": 0.7686274509803922,
      "grad_norm": 0.3872363269329071,
      "learning_rate": 8.90915741234015e-05,
      "loss": 2.4203,
      "step": 49
    },
    {
      "epoch": 0.7843137254901961,
      "grad_norm": 0.40209847688674927,
      "learning_rate": 8.854765873974898e-05,
      "loss": 2.5972,
      "step": 50
    },
    {
      "epoch": 0.7843137254901961,
      "eval_loss": 2.5110836029052734,
      "eval_runtime": 8.1325,
      "eval_samples_per_second": 13.28,
      "eval_steps_per_second": 3.32,
      "step": 50
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.38388505578041077,
      "learning_rate": 8.799225800722895e-05,
      "loss": 2.4428,
      "step": 51
    },
    {
      "epoch": 0.8156862745098039,
      "grad_norm": 0.4438699185848236,
      "learning_rate": 8.742553740855506e-05,
      "loss": 2.4849,
      "step": 52
    },
    {
      "epoch": 0.8313725490196079,
      "grad_norm": 0.4089072644710541,
      "learning_rate": 8.684766579921684e-05,
      "loss": 2.4612,
      "step": 53
    },
    {
      "epoch": 0.8470588235294118,
      "grad_norm": 0.45029300451278687,
      "learning_rate": 8.625881535716883e-05,
      "loss": 2.4742,
      "step": 54
    },
    {
      "epoch": 0.8627450980392157,
      "grad_norm": 0.4340762197971344,
      "learning_rate": 8.565916153152983e-05,
      "loss": 2.4731,
      "step": 55
    },
    {
      "epoch": 0.8784313725490196,
      "grad_norm": 0.4764128625392914,
      "learning_rate": 8.504888299030747e-05,
      "loss": 2.6086,
      "step": 56
    },
    {
      "epoch": 0.8941176470588236,
      "grad_norm": 0.5077619552612305,
      "learning_rate": 8.442816156716385e-05,
      "loss": 2.5386,
      "step": 57
    },
    {
      "epoch": 0.9098039215686274,
      "grad_norm": 0.5658111572265625,
      "learning_rate": 8.379718220723773e-05,
      "loss": 2.5659,
      "step": 58
    },
    {
      "epoch": 0.9254901960784314,
      "grad_norm": 0.6542505621910095,
      "learning_rate": 8.315613291203976e-05,
      "loss": 2.5815,
      "step": 59
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.9077744483947754,
      "learning_rate": 8.250520468343722e-05,
      "loss": 2.9127,
      "step": 60
    },
    {
      "epoch": 0.9568627450980393,
      "grad_norm": 0.3028375208377838,
      "learning_rate": 8.184459146674446e-05,
      "loss": 2.3544,
      "step": 61
    },
    {
      "epoch": 0.9725490196078431,
      "grad_norm": 0.37583601474761963,
      "learning_rate": 8.117449009293668e-05,
      "loss": 2.3619,
      "step": 62
    },
    {
      "epoch": 0.9882352941176471,
      "grad_norm": 0.5018683075904846,
      "learning_rate": 8.049510022000364e-05,
      "loss": 2.5821,
      "step": 63
    },
    {
      "epoch": 1.003921568627451,
      "grad_norm": 0.824658989906311,
      "learning_rate": 7.980662427346127e-05,
      "loss": 3.6075,
      "step": 64
    },
    {
      "epoch": 1.0196078431372548,
      "grad_norm": 0.2513439357280731,
      "learning_rate": 7.910926738603854e-05,
      "loss": 2.1654,
      "step": 65
    },
    {
      "epoch": 1.035294117647059,
      "grad_norm": 0.2941010296344757,
      "learning_rate": 7.840323733655778e-05,
      "loss": 2.2546,
      "step": 66
    },
    {
      "epoch": 1.0509803921568628,
      "grad_norm": 0.3366384506225586,
      "learning_rate": 7.768874448802665e-05,
      "loss": 2.41,
      "step": 67
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 0.32259663939476013,
      "learning_rate": 7.696600172495997e-05,
      "loss": 2.2745,
      "step": 68
    },
    {
      "epoch": 1.0823529411764705,
      "grad_norm": 0.34342584013938904,
      "learning_rate": 7.62352243899504e-05,
      "loss": 2.3813,
      "step": 69
    },
    {
      "epoch": 1.0980392156862746,
      "grad_norm": 0.3747313916683197,
      "learning_rate": 7.54966302195068e-05,
      "loss": 2.4037,
      "step": 70
    },
    {
      "epoch": 1.1137254901960785,
      "grad_norm": 0.3675015866756439,
      "learning_rate": 7.475043927917907e-05,
      "loss": 2.3497,
      "step": 71
    },
    {
      "epoch": 1.1294117647058823,
      "grad_norm": 0.36925432085990906,
      "learning_rate": 7.399687389798933e-05,
      "loss": 2.2603,
      "step": 72
    },
    {
      "epoch": 1.1450980392156862,
      "grad_norm": 0.4180779457092285,
      "learning_rate": 7.323615860218843e-05,
      "loss": 2.3582,
      "step": 73
    },
    {
      "epoch": 1.1607843137254903,
      "grad_norm": 0.47406819462776184,
      "learning_rate": 7.246852004835807e-05,
      "loss": 2.3733,
      "step": 74
    },
    {
      "epoch": 1.1764705882352942,
      "grad_norm": 0.485302597284317,
      "learning_rate": 7.169418695587791e-05,
      "loss": 2.3337,
      "step": 75
    },
    {
      "epoch": 1.192156862745098,
      "grad_norm": 0.539759635925293,
      "learning_rate": 7.091339003877826e-05,
      "loss": 2.3197,
      "step": 76
    },
    {
      "epoch": 1.2078431372549019,
      "grad_norm": 0.6110227108001709,
      "learning_rate": 7.012636193699837e-05,
      "loss": 2.109,
      "step": 77
    },
    {
      "epoch": 1.223529411764706,
      "grad_norm": 0.7330251336097717,
      "learning_rate": 6.933333714707094e-05,
      "loss": 2.3911,
      "step": 78
    },
    {
      "epoch": 1.2392156862745098,
      "grad_norm": 0.8158701062202454,
      "learning_rate": 6.853455195225338e-05,
      "loss": 2.6754,
      "step": 79
    },
    {
      "epoch": 1.2549019607843137,
      "grad_norm": 0.3269573748111725,
      "learning_rate": 6.773024435212678e-05,
      "loss": 2.1136,
      "step": 80
    },
    {
      "epoch": 1.2705882352941176,
      "grad_norm": 0.4158848226070404,
      "learning_rate": 6.692065399168352e-05,
      "loss": 2.3167,
      "step": 81
    },
    {
      "epoch": 1.2862745098039214,
      "grad_norm": 0.4102843999862671,
      "learning_rate": 6.610602208992454e-05,
      "loss": 2.2767,
      "step": 82
    },
    {
      "epoch": 1.3019607843137255,
      "grad_norm": 0.41636908054351807,
      "learning_rate": 6.528659136798764e-05,
      "loss": 2.2742,
      "step": 83
    },
    {
      "epoch": 1.3176470588235294,
      "grad_norm": 0.4526170492172241,
      "learning_rate": 6.446260597682839e-05,
      "loss": 2.3794,
      "step": 84
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.4637283384799957,
      "learning_rate": 6.363431142447469e-05,
      "loss": 2.3691,
      "step": 85
    },
    {
      "epoch": 1.3490196078431373,
      "grad_norm": 0.48547840118408203,
      "learning_rate": 6.280195450287736e-05,
      "loss": 2.344,
      "step": 86
    },
    {
      "epoch": 1.3647058823529412,
      "grad_norm": 0.48103803396224976,
      "learning_rate": 6.19657832143779e-05,
      "loss": 2.435,
      "step": 87
    },
    {
      "epoch": 1.380392156862745,
      "grad_norm": 0.4679640531539917,
      "learning_rate": 6.112604669781572e-05,
      "loss": 2.2715,
      "step": 88
    },
    {
      "epoch": 1.396078431372549,
      "grad_norm": 0.5256341695785522,
      "learning_rate": 6.028299515429683e-05,
      "loss": 2.3161,
      "step": 89
    },
    {
      "epoch": 1.4117647058823528,
      "grad_norm": 0.5306299328804016,
      "learning_rate": 5.943687977264584e-05,
      "loss": 2.3116,
      "step": 90
    },
    {
      "epoch": 1.427450980392157,
      "grad_norm": 0.6107631921768188,
      "learning_rate": 5.8587952654563817e-05,
      "loss": 2.3921,
      "step": 91
    },
    {
      "epoch": 1.4431372549019608,
      "grad_norm": 0.6549443006515503,
      "learning_rate": 5.773646673951406e-05,
      "loss": 2.2533,
      "step": 92
    },
    {
      "epoch": 1.4588235294117646,
      "grad_norm": 0.8716562986373901,
      "learning_rate": 5.688267572935842e-05,
      "loss": 2.5249,
      "step": 93
    },
    {
      "epoch": 1.4745098039215687,
      "grad_norm": 0.8429364562034607,
      "learning_rate": 5.602683401276615e-05,
      "loss": 2.5472,
      "step": 94
    },
    {
      "epoch": 1.4901960784313726,
      "grad_norm": 0.3356582224369049,
      "learning_rate": 5.5169196589418504e-05,
      "loss": 2.0268,
      "step": 95
    },
    {
      "epoch": 1.5058823529411764,
      "grad_norm": 0.4084802269935608,
      "learning_rate": 5.431001899403098e-05,
      "loss": 2.2999,
      "step": 96
    },
    {
      "epoch": 1.5215686274509803,
      "grad_norm": 0.4267803430557251,
      "learning_rate": 5.344955722021624e-05,
      "loss": 2.3227,
      "step": 97
    },
    {
      "epoch": 1.5372549019607842,
      "grad_norm": 0.4320634603500366,
      "learning_rate": 5.258806764421048e-05,
      "loss": 2.246,
      "step": 98
    },
    {
      "epoch": 1.5529411764705883,
      "grad_norm": 0.46851688623428345,
      "learning_rate": 5.172580694848541e-05,
      "loss": 2.3576,
      "step": 99
    },
    {
      "epoch": 1.5686274509803921,
      "grad_norm": 0.4941369593143463,
      "learning_rate": 5.086303204526943e-05,
      "loss": 2.3498,
      "step": 100
    },
    {
      "epoch": 1.5686274509803921,
      "eval_loss": 2.4987945556640625,
      "eval_runtime": 8.1314,
      "eval_samples_per_second": 13.282,
      "eval_steps_per_second": 3.32,
      "step": 100
    },
    {
      "epoch": 1.5843137254901962,
      "grad_norm": 0.5022868514060974,
      "learning_rate": 5e-05,
      "loss": 2.2742,
      "step": 101
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.5015295147895813,
      "learning_rate": 4.913696795473058e-05,
      "loss": 2.2551,
      "step": 102
    },
    {
      "epoch": 1.615686274509804,
      "grad_norm": 0.5870837569236755,
      "learning_rate": 4.827419305151461e-05,
      "loss": 2.2968,
      "step": 103
    },
    {
      "epoch": 1.6313725490196078,
      "grad_norm": 0.5984936356544495,
      "learning_rate": 4.741193235578952e-05,
      "loss": 2.4352,
      "step": 104
    },
    {
      "epoch": 1.6470588235294117,
      "grad_norm": 0.6345235705375671,
      "learning_rate": 4.655044277978375e-05,
      "loss": 2.3359,
      "step": 105
    },
    {
      "epoch": 1.6627450980392156,
      "grad_norm": 0.6582222580909729,
      "learning_rate": 4.568998100596903e-05,
      "loss": 2.3348,
      "step": 106
    },
    {
      "epoch": 1.6784313725490196,
      "grad_norm": 0.7701143622398376,
      "learning_rate": 4.48308034105815e-05,
      "loss": 2.201,
      "step": 107
    },
    {
      "epoch": 1.6941176470588235,
      "grad_norm": 0.884581983089447,
      "learning_rate": 4.397316598723385e-05,
      "loss": 2.4985,
      "step": 108
    },
    {
      "epoch": 1.7098039215686276,
      "grad_norm": 0.8130044937133789,
      "learning_rate": 4.31173242706416e-05,
      "loss": 2.6248,
      "step": 109
    },
    {
      "epoch": 1.7254901960784315,
      "grad_norm": 0.328084796667099,
      "learning_rate": 4.226353326048593e-05,
      "loss": 1.9045,
      "step": 110
    },
    {
      "epoch": 1.7411764705882353,
      "grad_norm": 0.4180457890033722,
      "learning_rate": 4.1412047345436195e-05,
      "loss": 2.2426,
      "step": 111
    },
    {
      "epoch": 1.7568627450980392,
      "grad_norm": 0.44295573234558105,
      "learning_rate": 4.056312022735417e-05,
      "loss": 2.3269,
      "step": 112
    },
    {
      "epoch": 1.772549019607843,
      "grad_norm": 0.4977704882621765,
      "learning_rate": 3.971700484570318e-05,
      "loss": 2.3613,
      "step": 113
    },
    {
      "epoch": 1.788235294117647,
      "grad_norm": 0.4835895597934723,
      "learning_rate": 3.887395330218429e-05,
      "loss": 2.3462,
      "step": 114
    },
    {
      "epoch": 1.803921568627451,
      "grad_norm": 0.5100789666175842,
      "learning_rate": 3.803421678562213e-05,
      "loss": 2.2091,
      "step": 115
    },
    {
      "epoch": 1.8196078431372549,
      "grad_norm": 0.5331653356552124,
      "learning_rate": 3.719804549712265e-05,
      "loss": 2.2373,
      "step": 116
    },
    {
      "epoch": 1.835294117647059,
      "grad_norm": 0.5414100885391235,
      "learning_rate": 3.6365688575525315e-05,
      "loss": 2.2153,
      "step": 117
    },
    {
      "epoch": 1.8509803921568628,
      "grad_norm": 0.5721606612205505,
      "learning_rate": 3.553739402317162e-05,
      "loss": 2.3138,
      "step": 118
    },
    {
      "epoch": 1.8666666666666667,
      "grad_norm": 0.5930068492889404,
      "learning_rate": 3.471340863201237e-05,
      "loss": 2.244,
      "step": 119
    },
    {
      "epoch": 1.8823529411764706,
      "grad_norm": 0.7018293738365173,
      "learning_rate": 3.389397791007548e-05,
      "loss": 2.2488,
      "step": 120
    },
    {
      "epoch": 1.8980392156862744,
      "grad_norm": 0.7131605744361877,
      "learning_rate": 3.307934600831648e-05,
      "loss": 2.1945,
      "step": 121
    },
    {
      "epoch": 1.9137254901960783,
      "grad_norm": 0.812116265296936,
      "learning_rate": 3.226975564787322e-05,
      "loss": 2.3625,
      "step": 122
    },
    {
      "epoch": 1.9294117647058824,
      "grad_norm": 0.9558323621749878,
      "learning_rate": 3.146544804774663e-05,
      "loss": 2.2682,
      "step": 123
    },
    {
      "epoch": 1.9450980392156862,
      "grad_norm": 0.9968075156211853,
      "learning_rate": 3.066666285292906e-05,
      "loss": 2.5473,
      "step": 124
    },
    {
      "epoch": 1.9607843137254903,
      "grad_norm": 0.38400799036026,
      "learning_rate": 2.9873638063001628e-05,
      "loss": 1.9606,
      "step": 125
    },
    {
      "epoch": 1.9764705882352942,
      "grad_norm": 0.5562489032745361,
      "learning_rate": 2.9086609961221755e-05,
      "loss": 2.3365,
      "step": 126
    },
    {
      "epoch": 1.992156862745098,
      "grad_norm": 0.9361739158630371,
      "learning_rate": 2.8305813044122097e-05,
      "loss": 2.5897,
      "step": 127
    },
    {
      "epoch": 2.007843137254902,
      "grad_norm": 0.9938849210739136,
      "learning_rate": 2.7531479951641924e-05,
      "loss": 2.6321,
      "step": 128
    },
    {
      "epoch": 2.023529411764706,
      "grad_norm": 0.39070141315460205,
      "learning_rate": 2.6763841397811573e-05,
      "loss": 2.0195,
      "step": 129
    },
    {
      "epoch": 2.0392156862745097,
      "grad_norm": 0.4302551746368408,
      "learning_rate": 2.6003126102010695e-05,
      "loss": 2.1712,
      "step": 130
    },
    {
      "epoch": 2.0549019607843135,
      "grad_norm": 0.44673267006874084,
      "learning_rate": 2.5249560720820932e-05,
      "loss": 2.2957,
      "step": 131
    },
    {
      "epoch": 2.070588235294118,
      "grad_norm": 0.4669826030731201,
      "learning_rate": 2.450336978049322e-05,
      "loss": 2.1386,
      "step": 132
    },
    {
      "epoch": 2.0862745098039217,
      "grad_norm": 0.48010262846946716,
      "learning_rate": 2.37647756100496e-05,
      "loss": 2.1393,
      "step": 133
    },
    {
      "epoch": 2.1019607843137256,
      "grad_norm": 0.5177429914474487,
      "learning_rate": 2.3033998275040046e-05,
      "loss": 2.2392,
      "step": 134
    },
    {
      "epoch": 2.1176470588235294,
      "grad_norm": 0.5121442675590515,
      "learning_rate": 2.2311255511973345e-05,
      "loss": 2.0218,
      "step": 135
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 0.5644603371620178,
      "learning_rate": 2.1596762663442218e-05,
      "loss": 2.2426,
      "step": 136
    },
    {
      "epoch": 2.149019607843137,
      "grad_norm": 0.5926538705825806,
      "learning_rate": 2.0890732613961478e-05,
      "loss": 2.1036,
      "step": 137
    },
    {
      "epoch": 2.164705882352941,
      "grad_norm": 0.6267763376235962,
      "learning_rate": 2.0193375726538737e-05,
      "loss": 2.0515,
      "step": 138
    },
    {
      "epoch": 2.180392156862745,
      "grad_norm": 0.7685708403587341,
      "learning_rate": 1.9504899779996355e-05,
      "loss": 2.0115,
      "step": 139
    },
    {
      "epoch": 2.196078431372549,
      "grad_norm": 0.7948710918426514,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 1.9578,
      "step": 140
    },
    {
      "epoch": 2.211764705882353,
      "grad_norm": 0.9403231739997864,
      "learning_rate": 1.8155408533255553e-05,
      "loss": 2.1181,
      "step": 141
    },
    {
      "epoch": 2.227450980392157,
      "grad_norm": 1.1127105951309204,
      "learning_rate": 1.749479531656279e-05,
      "loss": 2.0001,
      "step": 142
    },
    {
      "epoch": 2.243137254901961,
      "grad_norm": 0.771511435508728,
      "learning_rate": 1.684386708796025e-05,
      "loss": 2.255,
      "step": 143
    },
    {
      "epoch": 2.2588235294117647,
      "grad_norm": 0.44342976808547974,
      "learning_rate": 1.6202817792762282e-05,
      "loss": 2.1462,
      "step": 144
    },
    {
      "epoch": 2.2745098039215685,
      "grad_norm": 0.4897748827934265,
      "learning_rate": 1.557183843283614e-05,
      "loss": 2.2906,
      "step": 145
    },
    {
      "epoch": 2.2901960784313724,
      "grad_norm": 0.5105887651443481,
      "learning_rate": 1.4951117009692528e-05,
      "loss": 2.2308,
      "step": 146
    },
    {
      "epoch": 2.3058823529411763,
      "grad_norm": 0.5891292691230774,
      "learning_rate": 1.4340838468470197e-05,
      "loss": 2.193,
      "step": 147
    },
    {
      "epoch": 2.3215686274509806,
      "grad_norm": 0.6128895282745361,
      "learning_rate": 1.3741184642831189e-05,
      "loss": 2.3101,
      "step": 148
    },
    {
      "epoch": 2.3372549019607844,
      "grad_norm": 0.6382685303688049,
      "learning_rate": 1.3152334200783167e-05,
      "loss": 2.2268,
      "step": 149
    },
    {
      "epoch": 2.3529411764705883,
      "grad_norm": 0.661220133304596,
      "learning_rate": 1.257446259144494e-05,
      "loss": 2.1702,
      "step": 150
    },
    {
      "epoch": 2.3529411764705883,
      "eval_loss": 2.535630941390991,
      "eval_runtime": 8.1303,
      "eval_samples_per_second": 13.284,
      "eval_steps_per_second": 3.321,
      "step": 150
    },
    {
      "epoch": 2.368627450980392,
      "grad_norm": 0.6427918076515198,
      "learning_rate": 1.2007741992771065e-05,
      "loss": 2.0535,
      "step": 151
    },
    {
      "epoch": 2.384313725490196,
      "grad_norm": 0.7129402160644531,
      "learning_rate": 1.145234126025102e-05,
      "loss": 2.108,
      "step": 152
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.7248530983924866,
      "learning_rate": 1.090842587659851e-05,
      "loss": 2.0189,
      "step": 153
    },
    {
      "epoch": 2.4156862745098038,
      "grad_norm": 0.8135029673576355,
      "learning_rate": 1.0376157902445488e-05,
      "loss": 1.9979,
      "step": 154
    },
    {
      "epoch": 2.431372549019608,
      "grad_norm": 0.8778344392776489,
      "learning_rate": 9.85569592805588e-06,
      "loss": 2.0551,
      "step": 155
    },
    {
      "epoch": 2.447058823529412,
      "grad_norm": 0.9533079266548157,
      "learning_rate": 9.347195026073369e-06,
      "loss": 1.9452,
      "step": 156
    },
    {
      "epoch": 2.462745098039216,
      "grad_norm": 1.3244249820709229,
      "learning_rate": 8.850806705317183e-06,
      "loss": 2.0356,
      "step": 157
    },
    {
      "epoch": 2.4784313725490197,
      "grad_norm": 0.899782657623291,
      "learning_rate": 8.366678865639688e-06,
      "loss": 2.1336,
      "step": 158
    },
    {
      "epoch": 2.4941176470588236,
      "grad_norm": 0.44793930649757385,
      "learning_rate": 7.894955753859413e-06,
      "loss": 1.9492,
      "step": 159
    },
    {
      "epoch": 2.5098039215686274,
      "grad_norm": 0.5112513899803162,
      "learning_rate": 7.435777920782444e-06,
      "loss": 2.1618,
      "step": 160
    },
    {
      "epoch": 2.5254901960784313,
      "grad_norm": 0.5470635890960693,
      "learning_rate": 6.989282179324963e-06,
      "loss": 2.1548,
      "step": 161
    },
    {
      "epoch": 2.541176470588235,
      "grad_norm": 0.5480771064758301,
      "learning_rate": 6.555601563749675e-06,
      "loss": 2.1317,
      "step": 162
    },
    {
      "epoch": 2.556862745098039,
      "grad_norm": 0.6045755743980408,
      "learning_rate": 6.1348652900279025e-06,
      "loss": 2.273,
      "step": 163
    },
    {
      "epoch": 2.572549019607843,
      "grad_norm": 0.6052933931350708,
      "learning_rate": 5.727198717339511e-06,
      "loss": 2.1034,
      "step": 164
    },
    {
      "epoch": 2.588235294117647,
      "grad_norm": 0.6706366539001465,
      "learning_rate": 5.332723310721854e-06,
      "loss": 2.1542,
      "step": 165
    },
    {
      "epoch": 2.603921568627451,
      "grad_norm": 0.7189547419548035,
      "learning_rate": 4.951556604879048e-06,
      "loss": 2.1109,
      "step": 166
    },
    {
      "epoch": 2.619607843137255,
      "grad_norm": 0.7314002513885498,
      "learning_rate": 4.5838121691623e-06,
      "loss": 2.0928,
      "step": 167
    },
    {
      "epoch": 2.635294117647059,
      "grad_norm": 0.8130326867103577,
      "learning_rate": 4.229599573731685e-06,
      "loss": 2.184,
      "step": 168
    },
    {
      "epoch": 2.6509803921568627,
      "grad_norm": 0.8824214339256287,
      "learning_rate": 3.8890243569094874e-06,
      "loss": 2.28,
      "step": 169
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.9210173487663269,
      "learning_rate": 3.5621879937348836e-06,
      "loss": 2.0636,
      "step": 170
    },
    {
      "epoch": 2.682352941176471,
      "grad_norm": 0.9852874875068665,
      "learning_rate": 3.249187865729264e-06,
      "loss": 1.9793,
      "step": 171
    },
    {
      "epoch": 2.6980392156862747,
      "grad_norm": 1.3538250923156738,
      "learning_rate": 2.950117231881183e-06,
      "loss": 2.0158,
      "step": 172
    },
    {
      "epoch": 2.7137254901960786,
      "grad_norm": 1.0066053867340088,
      "learning_rate": 2.6650652008597068e-06,
      "loss": 2.3757,
      "step": 173
    },
    {
      "epoch": 2.7294117647058824,
      "grad_norm": 0.45928213000297546,
      "learning_rate": 2.3941167044642944e-06,
      "loss": 1.8917,
      "step": 174
    },
    {
      "epoch": 2.7450980392156863,
      "grad_norm": 0.5406450629234314,
      "learning_rate": 2.137352472319215e-06,
      "loss": 2.2532,
      "step": 175
    },
    {
      "epoch": 2.76078431372549,
      "grad_norm": 0.5478349328041077,
      "learning_rate": 1.8948490078199764e-06,
      "loss": 2.2074,
      "step": 176
    },
    {
      "epoch": 2.776470588235294,
      "grad_norm": 0.5485714077949524,
      "learning_rate": 1.6666785653390249e-06,
      "loss": 2.1245,
      "step": 177
    },
    {
      "epoch": 2.792156862745098,
      "grad_norm": 0.6040245294570923,
      "learning_rate": 1.4529091286973995e-06,
      "loss": 2.2487,
      "step": 178
    },
    {
      "epoch": 2.8078431372549018,
      "grad_norm": 0.6607996821403503,
      "learning_rate": 1.2536043909088191e-06,
      "loss": 2.2545,
      "step": 179
    },
    {
      "epoch": 2.8235294117647056,
      "grad_norm": 0.6602091789245605,
      "learning_rate": 1.0688237352022345e-06,
      "loss": 2.234,
      "step": 180
    },
    {
      "epoch": 2.83921568627451,
      "grad_norm": 0.687446653842926,
      "learning_rate": 8.986222173284875e-07,
      "loss": 2.1189,
      "step": 181
    },
    {
      "epoch": 2.854901960784314,
      "grad_norm": 0.7298436760902405,
      "learning_rate": 7.4305054915631e-07,
      "loss": 2.1273,
      "step": 182
    },
    {
      "epoch": 2.8705882352941177,
      "grad_norm": 0.7987807989120483,
      "learning_rate": 6.021550835626777e-07,
      "loss": 2.1664,
      "step": 183
    },
    {
      "epoch": 2.8862745098039215,
      "grad_norm": 0.860362708568573,
      "learning_rate": 4.7597780062184073e-07,
      "loss": 2.1681,
      "step": 184
    },
    {
      "epoch": 2.9019607843137254,
      "grad_norm": 0.9093377590179443,
      "learning_rate": 3.6455629509730136e-07,
      "loss": 1.9198,
      "step": 185
    },
    {
      "epoch": 2.9176470588235293,
      "grad_norm": 0.9680060744285583,
      "learning_rate": 2.6792376524036877e-07,
      "loss": 1.827,
      "step": 186
    },
    {
      "epoch": 2.9333333333333336,
      "grad_norm": 1.2015458345413208,
      "learning_rate": 1.8610900289867673e-07,
      "loss": 2.0031,
      "step": 187
    },
    {
      "epoch": 2.9490196078431374,
      "grad_norm": 0.9453166127204895,
      "learning_rate": 1.191363849376237e-07,
      "loss": 2.2456,
      "step": 188
    },
    {
      "epoch": 2.9647058823529413,
      "grad_norm": 0.5282714366912842,
      "learning_rate": 6.702586597719385e-08,
      "loss": 1.8368,
      "step": 189
    },
    {
      "epoch": 2.980392156862745,
      "grad_norm": 0.7178200483322144,
      "learning_rate": 2.9792972446479605e-08,
      "loss": 2.2156,
      "step": 190
    },
    {
      "epoch": 2.996078431372549,
      "grad_norm": 1.3029696941375732,
      "learning_rate": 7.448797957526621e-09,
      "loss": 2.4417,
      "step": 191
    },
    {
      "epoch": 3.011764705882353,
      "grad_norm": 0.6315656304359436,
      "learning_rate": 0.0,
      "loss": 2.2333,
      "step": 192
    }
  ],
  "logging_steps": 1,
  "max_steps": 192,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.62298517110784e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}