{
  "best_metric": 0.07863566279411316,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 3.004366812227074,
  "eval_steps": 50,
  "global_step": 172,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017467248908296942,
      "grad_norm": 1.0100444555282593,
      "learning_rate": 1e-05,
      "loss": 0.7291,
      "step": 1
    },
    {
      "epoch": 0.017467248908296942,
      "eval_loss": 1.4666682481765747,
      "eval_runtime": 2.8998,
      "eval_samples_per_second": 33.451,
      "eval_steps_per_second": 8.621,
      "step": 1
    },
    {
      "epoch": 0.034934497816593885,
      "grad_norm": 1.6248141527175903,
      "learning_rate": 2e-05,
      "loss": 0.9125,
      "step": 2
    },
    {
      "epoch": 0.05240174672489083,
      "grad_norm": 1.6520636081695557,
      "learning_rate": 3e-05,
      "loss": 1.0164,
      "step": 3
    },
    {
      "epoch": 0.06986899563318777,
      "grad_norm": 1.8291550874710083,
      "learning_rate": 4e-05,
      "loss": 1.1878,
      "step": 4
    },
    {
      "epoch": 0.08733624454148471,
      "grad_norm": 1.8957704305648804,
      "learning_rate": 5e-05,
      "loss": 1.0702,
      "step": 5
    },
    {
      "epoch": 0.10480349344978165,
      "grad_norm": 1.6647785902023315,
      "learning_rate": 6e-05,
      "loss": 1.1481,
      "step": 6
    },
    {
      "epoch": 0.1222707423580786,
      "grad_norm": 1.678140640258789,
      "learning_rate": 7e-05,
      "loss": 0.9731,
      "step": 7
    },
    {
      "epoch": 0.13973799126637554,
      "grad_norm": 1.7518115043640137,
      "learning_rate": 8e-05,
      "loss": 1.1692,
      "step": 8
    },
    {
      "epoch": 0.1572052401746725,
      "grad_norm": 1.968929648399353,
      "learning_rate": 9e-05,
      "loss": 1.0349,
      "step": 9
    },
    {
      "epoch": 0.17467248908296942,
      "grad_norm": 1.2481895685195923,
      "learning_rate": 0.0001,
      "loss": 0.8936,
      "step": 10
    },
    {
      "epoch": 0.19213973799126638,
      "grad_norm": 2.483721971511841,
      "learning_rate": 9.999059852242507e-05,
      "loss": 0.9547,
      "step": 11
    },
    {
      "epoch": 0.2096069868995633,
      "grad_norm": 1.7359554767608643,
      "learning_rate": 9.996239762521151e-05,
      "loss": 1.1258,
      "step": 12
    },
    {
      "epoch": 0.22707423580786026,
      "grad_norm": 2.3270020484924316,
      "learning_rate": 9.991540791356342e-05,
      "loss": 1.1838,
      "step": 13
    },
    {
      "epoch": 0.2445414847161572,
      "grad_norm": 2.784715175628662,
      "learning_rate": 9.98496470583896e-05,
      "loss": 1.1575,
      "step": 14
    },
    {
      "epoch": 0.26200873362445415,
      "grad_norm": 0.8565568923950195,
      "learning_rate": 9.976513978965829e-05,
      "loss": 0.6597,
      "step": 15
    },
    {
      "epoch": 0.2794759825327511,
      "grad_norm": 1.0250848531723022,
      "learning_rate": 9.966191788709716e-05,
      "loss": 0.6515,
      "step": 16
    },
    {
      "epoch": 0.29694323144104806,
      "grad_norm": 0.8767344355583191,
      "learning_rate": 9.954002016824227e-05,
      "loss": 0.738,
      "step": 17
    },
    {
      "epoch": 0.314410480349345,
      "grad_norm": 1.0085071325302124,
      "learning_rate": 9.939949247384046e-05,
      "loss": 0.6975,
      "step": 18
    },
    {
      "epoch": 0.3318777292576419,
      "grad_norm": 0.8842006325721741,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.7691,
      "step": 19
    },
    {
      "epoch": 0.34934497816593885,
      "grad_norm": 0.9122969508171082,
      "learning_rate": 9.906276553136923e-05,
      "loss": 0.7258,
      "step": 20
    },
    {
      "epoch": 0.36681222707423583,
      "grad_norm": 1.0844351053237915,
      "learning_rate": 9.88666929125318e-05,
      "loss": 0.7773,
      "step": 21
    },
    {
      "epoch": 0.38427947598253276,
      "grad_norm": 1.1110165119171143,
      "learning_rate": 9.865224352899119e-05,
      "loss": 0.7144,
      "step": 22
    },
    {
      "epoch": 0.4017467248908297,
      "grad_norm": 1.1816184520721436,
      "learning_rate": 9.84194980263903e-05,
      "loss": 0.785,
      "step": 23
    },
    {
      "epoch": 0.4192139737991266,
      "grad_norm": 1.3115167617797852,
      "learning_rate": 9.816854393079403e-05,
      "loss": 0.8229,
      "step": 24
    },
    {
      "epoch": 0.4366812227074236,
      "grad_norm": 1.1766431331634521,
      "learning_rate": 9.789947561577445e-05,
      "loss": 0.7382,
      "step": 25
    },
    {
      "epoch": 0.45414847161572053,
      "grad_norm": 1.5775043964385986,
      "learning_rate": 9.761239426692077e-05,
      "loss": 0.8968,
      "step": 26
    },
    {
      "epoch": 0.47161572052401746,
      "grad_norm": 1.766829490661621,
      "learning_rate": 9.730740784378753e-05,
      "loss": 0.863,
      "step": 27
    },
    {
      "epoch": 0.4890829694323144,
      "grad_norm": 2.7797858715057373,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.899,
      "step": 28
    },
    {
      "epoch": 0.5065502183406113,
      "grad_norm": 0.5734480023384094,
      "learning_rate": 9.664418523660004e-05,
      "loss": 0.5252,
      "step": 29
    },
    {
      "epoch": 0.5240174672489083,
      "grad_norm": 0.7527353763580322,
      "learning_rate": 9.628619846344454e-05,
      "loss": 0.5005,
      "step": 30
    },
    {
      "epoch": 0.5414847161572053,
      "grad_norm": 0.9429885149002075,
      "learning_rate": 9.591080534401371e-05,
      "loss": 0.6192,
      "step": 31
    },
    {
      "epoch": 0.5589519650655022,
      "grad_norm": 0.797751247882843,
      "learning_rate": 9.551814704830734e-05,
      "loss": 0.6492,
      "step": 32
    },
    {
      "epoch": 0.5764192139737991,
      "grad_norm": 0.7790107131004333,
      "learning_rate": 9.51083712390519e-05,
      "loss": 0.5706,
      "step": 33
    },
    {
      "epoch": 0.5938864628820961,
      "grad_norm": 0.8417590856552124,
      "learning_rate": 9.468163201617062e-05,
      "loss": 0.5481,
      "step": 34
    },
    {
      "epoch": 0.611353711790393,
      "grad_norm": 0.9345868229866028,
      "learning_rate": 9.423808985883289e-05,
      "loss": 0.626,
      "step": 35
    },
    {
      "epoch": 0.62882096069869,
      "grad_norm": 1.1942213773727417,
      "learning_rate": 9.377791156510455e-05,
      "loss": 0.6025,
      "step": 36
    },
    {
      "epoch": 0.6462882096069869,
      "grad_norm": 0.9529750943183899,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.5216,
      "step": 37
    },
    {
      "epoch": 0.6637554585152838,
      "grad_norm": 1.2015634775161743,
      "learning_rate": 9.280834497651334e-05,
      "loss": 0.6232,
      "step": 38
    },
    {
      "epoch": 0.6812227074235808,
      "grad_norm": 1.266699194908142,
      "learning_rate": 9.229932129599205e-05,
      "loss": 0.6139,
      "step": 39
    },
    {
      "epoch": 0.6986899563318777,
      "grad_norm": 1.4141474962234497,
      "learning_rate": 9.177439057064683e-05,
      "loss": 0.7265,
      "step": 40
    },
    {
      "epoch": 0.7161572052401747,
      "grad_norm": 1.8044284582138062,
      "learning_rate": 9.123375020545535e-05,
      "loss": 0.6337,
      "step": 41
    },
    {
      "epoch": 0.7336244541484717,
      "grad_norm": 1.9032659530639648,
      "learning_rate": 9.067760351314838e-05,
      "loss": 0.8511,
      "step": 42
    },
    {
      "epoch": 0.7510917030567685,
      "grad_norm": 0.765811026096344,
      "learning_rate": 9.01061596377522e-05,
      "loss": 0.444,
      "step": 43
    },
    {
      "epoch": 0.7685589519650655,
      "grad_norm": 0.8824454545974731,
      "learning_rate": 8.951963347593797e-05,
      "loss": 0.4689,
      "step": 44
    },
    {
      "epoch": 0.7860262008733624,
      "grad_norm": 0.7503276467323303,
      "learning_rate": 8.891824559620801e-05,
      "loss": 0.426,
      "step": 45
    },
    {
      "epoch": 0.8034934497816594,
      "grad_norm": 1.1007099151611328,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.5502,
      "step": 46
    },
    {
      "epoch": 0.8209606986899564,
      "grad_norm": 0.8477142453193665,
      "learning_rate": 8.767179481638303e-05,
      "loss": 0.451,
      "step": 47
    },
    {
      "epoch": 0.8384279475982532,
      "grad_norm": 0.9551414847373962,
      "learning_rate": 8.702720065545024e-05,
      "loss": 0.3994,
      "step": 48
    },
    {
      "epoch": 0.8558951965065502,
      "grad_norm": 1.0447505712509155,
      "learning_rate": 8.636868207865244e-05,
      "loss": 0.4649,
      "step": 49
    },
    {
      "epoch": 0.8733624454148472,
      "grad_norm": 1.1280784606933594,
      "learning_rate": 8.569648672789497e-05,
      "loss": 0.395,
      "step": 50
    },
    {
      "epoch": 0.8733624454148472,
      "eval_loss": 0.4161189794540405,
      "eval_runtime": 2.9162,
      "eval_samples_per_second": 33.263,
      "eval_steps_per_second": 8.573,
      "step": 50
    },
    {
      "epoch": 0.8908296943231441,
      "grad_norm": 1.1243302822113037,
      "learning_rate": 8.501086738835843e-05,
      "loss": 0.3851,
      "step": 51
    },
    {
      "epoch": 0.9082969432314411,
      "grad_norm": 1.348684549331665,
      "learning_rate": 8.43120818934367e-05,
      "loss": 0.4908,
      "step": 52
    },
    {
      "epoch": 0.925764192139738,
      "grad_norm": 1.6293962001800537,
      "learning_rate": 8.360039302777612e-05,
      "loss": 0.4444,
      "step": 53
    },
    {
      "epoch": 0.9432314410480349,
      "grad_norm": 2.103517770767212,
      "learning_rate": 8.28760684284532e-05,
      "loss": 0.574,
      "step": 54
    },
    {
      "epoch": 0.9606986899563319,
      "grad_norm": 1.4056304693222046,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.3752,
      "step": 55
    },
    {
      "epoch": 0.9781659388646288,
      "grad_norm": 1.6228885650634766,
      "learning_rate": 8.139060623360493e-05,
      "loss": 0.3766,
      "step": 56
    },
    {
      "epoch": 0.9956331877729258,
      "grad_norm": 1.6813204288482666,
      "learning_rate": 8.063002725966015e-05,
      "loss": 0.3613,
      "step": 57
    },
    {
      "epoch": 1.0131004366812226,
      "grad_norm": 5.2064619064331055,
      "learning_rate": 7.985792958513931e-05,
      "loss": 0.788,
      "step": 58
    },
    {
      "epoch": 1.0305676855895196,
      "grad_norm": 0.968858003616333,
      "learning_rate": 7.907460356440133e-05,
      "loss": 0.2553,
      "step": 59
    },
    {
      "epoch": 1.0480349344978166,
      "grad_norm": 0.8971614837646484,
      "learning_rate": 7.828034377432693e-05,
      "loss": 0.2998,
      "step": 60
    },
    {
      "epoch": 1.0655021834061136,
      "grad_norm": 1.2629083395004272,
      "learning_rate": 7.74754489035403e-05,
      "loss": 0.3353,
      "step": 61
    },
    {
      "epoch": 1.0829694323144106,
      "grad_norm": 1.0545297861099243,
      "learning_rate": 7.666022164008457e-05,
      "loss": 0.2211,
      "step": 62
    },
    {
      "epoch": 1.1004366812227073,
      "grad_norm": 1.3016352653503418,
      "learning_rate": 7.583496855759316e-05,
      "loss": 0.2633,
      "step": 63
    },
    {
      "epoch": 1.1179039301310043,
      "grad_norm": 1.4473152160644531,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.246,
      "step": 64
    },
    {
      "epoch": 1.1353711790393013,
      "grad_norm": 1.0543831586837769,
      "learning_rate": 7.415562996483192e-05,
      "loss": 0.2277,
      "step": 65
    },
    {
      "epoch": 1.1528384279475983,
      "grad_norm": 1.0669910907745361,
      "learning_rate": 7.330217598512695e-05,
      "loss": 0.2476,
      "step": 66
    },
    {
      "epoch": 1.1703056768558953,
      "grad_norm": 1.5889818668365479,
      "learning_rate": 7.243995901002312e-05,
      "loss": 0.2648,
      "step": 67
    },
    {
      "epoch": 1.1877729257641922,
      "grad_norm": 1.4110018014907837,
      "learning_rate": 7.156930328406268e-05,
      "loss": 0.2575,
      "step": 68
    },
    {
      "epoch": 1.205240174672489,
      "grad_norm": 1.1937973499298096,
      "learning_rate": 7.069053622525696e-05,
      "loss": 0.2309,
      "step": 69
    },
    {
      "epoch": 1.222707423580786,
      "grad_norm": 1.118398666381836,
      "learning_rate": 6.980398830195785e-05,
      "loss": 0.1686,
      "step": 70
    },
    {
      "epoch": 1.240174672489083,
      "grad_norm": 5.387632846832275,
      "learning_rate": 6.890999290858214e-05,
      "loss": 0.3197,
      "step": 71
    },
    {
      "epoch": 1.25764192139738,
      "grad_norm": 1.4671061038970947,
      "learning_rate": 6.800888624023553e-05,
      "loss": 0.3027,
      "step": 72
    },
    {
      "epoch": 1.2751091703056767,
      "grad_norm": 1.4483202695846558,
      "learning_rate": 6.710100716628344e-05,
      "loss": 0.2432,
      "step": 73
    },
    {
      "epoch": 1.2925764192139737,
      "grad_norm": 1.154068946838379,
      "learning_rate": 6.618669710291606e-05,
      "loss": 0.2641,
      "step": 74
    },
    {
      "epoch": 1.3100436681222707,
      "grad_norm": 0.9692803621292114,
      "learning_rate": 6.526629988475567e-05,
      "loss": 0.2105,
      "step": 75
    },
    {
      "epoch": 1.3275109170305677,
      "grad_norm": 1.334222674369812,
      "learning_rate": 6.434016163555452e-05,
      "loss": 0.1716,
      "step": 76
    },
    {
      "epoch": 1.3449781659388647,
      "grad_norm": 1.1230826377868652,
      "learning_rate": 6.340863063803188e-05,
      "loss": 0.1533,
      "step": 77
    },
    {
      "epoch": 1.3624454148471616,
      "grad_norm": 1.4789968729019165,
      "learning_rate": 6.247205720289907e-05,
      "loss": 0.2085,
      "step": 78
    },
    {
      "epoch": 1.3799126637554586,
      "grad_norm": 1.0231616497039795,
      "learning_rate": 6.153079353712201e-05,
      "loss": 0.1325,
      "step": 79
    },
    {
      "epoch": 1.3973799126637554,
      "grad_norm": 1.1890296936035156,
      "learning_rate": 6.058519361147055e-05,
      "loss": 0.1489,
      "step": 80
    },
    {
      "epoch": 1.4148471615720524,
      "grad_norm": 1.5673424005508423,
      "learning_rate": 5.963561302740449e-05,
      "loss": 0.1368,
      "step": 81
    },
    {
      "epoch": 1.4323144104803494,
      "grad_norm": 1.3184294700622559,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.2192,
      "step": 82
    },
    {
      "epoch": 1.4497816593886463,
      "grad_norm": 1.7077476978302002,
      "learning_rate": 5.772593964039203e-05,
      "loss": 0.2395,
      "step": 83
    },
    {
      "epoch": 1.467248908296943,
      "grad_norm": 1.303192138671875,
      "learning_rate": 5.6766564987506566e-05,
      "loss": 0.1591,
      "step": 84
    },
    {
      "epoch": 1.48471615720524,
      "grad_norm": 1.891233205795288,
      "learning_rate": 5.5804645706261514e-05,
      "loss": 0.181,
      "step": 85
    },
    {
      "epoch": 1.502183406113537,
      "grad_norm": 1.065505862236023,
      "learning_rate": 5.484054353515896e-05,
      "loss": 0.2988,
      "step": 86
    },
    {
      "epoch": 1.519650655021834,
      "grad_norm": 0.8333722352981567,
      "learning_rate": 5.387462103359655e-05,
      "loss": 0.1231,
      "step": 87
    },
    {
      "epoch": 1.537117903930131,
      "grad_norm": 0.8929618000984192,
      "learning_rate": 5.290724144552379e-05,
      "loss": 0.1581,
      "step": 88
    },
    {
      "epoch": 1.554585152838428,
      "grad_norm": 0.6446655988693237,
      "learning_rate": 5.193876856284085e-05,
      "loss": 0.103,
      "step": 89
    },
    {
      "epoch": 1.572052401746725,
      "grad_norm": 1.5530974864959717,
      "learning_rate": 5.096956658859122e-05,
      "loss": 0.2085,
      "step": 90
    },
    {
      "epoch": 1.589519650655022,
      "grad_norm": 1.2184090614318848,
      "learning_rate": 5e-05,
      "loss": 0.1611,
      "step": 91
    },
    {
      "epoch": 1.6069868995633187,
      "grad_norm": 1.355815052986145,
      "learning_rate": 4.903043341140879e-05,
      "loss": 0.1425,
      "step": 92
    },
    {
      "epoch": 1.6244541484716157,
      "grad_norm": 2.162626266479492,
      "learning_rate": 4.806123143715916e-05,
      "loss": 0.228,
      "step": 93
    },
    {
      "epoch": 1.6419213973799127,
      "grad_norm": 1.2516330480575562,
      "learning_rate": 4.709275855447621e-05,
      "loss": 0.0985,
      "step": 94
    },
    {
      "epoch": 1.6593886462882095,
      "grad_norm": 1.455117106437683,
      "learning_rate": 4.612537896640346e-05,
      "loss": 0.169,
      "step": 95
    },
    {
      "epoch": 1.6768558951965065,
      "grad_norm": 1.3163059949874878,
      "learning_rate": 4.515945646484105e-05,
      "loss": 0.1206,
      "step": 96
    },
    {
      "epoch": 1.6943231441048034,
      "grad_norm": 1.735390067100525,
      "learning_rate": 4.4195354293738484e-05,
      "loss": 0.1459,
      "step": 97
    },
    {
      "epoch": 1.7117903930131004,
      "grad_norm": 1.2925852537155151,
      "learning_rate": 4.323343501249346e-05,
      "loss": 0.0877,
      "step": 98
    },
    {
      "epoch": 1.7292576419213974,
      "grad_norm": 1.971182942390442,
      "learning_rate": 4.227406035960798e-05,
      "loss": 0.1316,
      "step": 99
    },
    {
      "epoch": 1.7467248908296944,
      "grad_norm": 0.9830079078674316,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.2094,
      "step": 100
    },
    {
      "epoch": 1.7467248908296944,
      "eval_loss": 0.16238145530223846,
      "eval_runtime": 2.9208,
      "eval_samples_per_second": 33.21,
      "eval_steps_per_second": 8.559,
      "step": 100
    },
    {
      "epoch": 1.7641921397379914,
      "grad_norm": 0.9521262049674988,
      "learning_rate": 4.036438697259551e-05,
      "loss": 0.1226,
      "step": 101
    },
    {
      "epoch": 1.7816593886462884,
      "grad_norm": 0.912953794002533,
      "learning_rate": 3.941480638852948e-05,
      "loss": 0.1147,
      "step": 102
    },
    {
      "epoch": 1.7991266375545851,
      "grad_norm": 1.0413082838058472,
      "learning_rate": 3.846920646287799e-05,
      "loss": 0.131,
      "step": 103
    },
    {
      "epoch": 1.8165938864628821,
      "grad_norm": 1.115757703781128,
      "learning_rate": 3.752794279710094e-05,
      "loss": 0.1041,
      "step": 104
    },
    {
      "epoch": 1.8340611353711789,
      "grad_norm": 0.8693847060203552,
      "learning_rate": 3.6591369361968124e-05,
      "loss": 0.065,
      "step": 105
    },
    {
      "epoch": 1.8515283842794759,
      "grad_norm": 0.7901813983917236,
      "learning_rate": 3.5659838364445505e-05,
      "loss": 0.0864,
      "step": 106
    },
    {
      "epoch": 1.8689956331877728,
      "grad_norm": 1.1693010330200195,
      "learning_rate": 3.473370011524435e-05,
      "loss": 0.1329,
      "step": 107
    },
    {
      "epoch": 1.8864628820960698,
      "grad_norm": 0.6798319816589355,
      "learning_rate": 3.381330289708396e-05,
      "loss": 0.0583,
      "step": 108
    },
    {
      "epoch": 1.9039301310043668,
      "grad_norm": 0.8059512972831726,
      "learning_rate": 3.289899283371657e-05,
      "loss": 0.079,
      "step": 109
    },
    {
      "epoch": 1.9213973799126638,
      "grad_norm": 2.1519033908843994,
      "learning_rate": 3.199111375976449e-05,
      "loss": 0.1282,
      "step": 110
    },
    {
      "epoch": 1.9388646288209608,
      "grad_norm": 1.3084039688110352,
      "learning_rate": 3.109000709141788e-05,
      "loss": 0.1078,
      "step": 111
    },
    {
      "epoch": 1.9563318777292578,
      "grad_norm": 1.7590997219085693,
      "learning_rate": 3.019601169804216e-05,
      "loss": 0.088,
      "step": 112
    },
    {
      "epoch": 1.9737991266375547,
      "grad_norm": 2.152207612991333,
      "learning_rate": 2.9309463774743046e-05,
      "loss": 0.1825,
      "step": 113
    },
    {
      "epoch": 1.9912663755458515,
      "grad_norm": 1.6330548524856567,
      "learning_rate": 2.8430696715937337e-05,
      "loss": 0.1104,
      "step": 114
    },
    {
      "epoch": 2.0087336244541483,
      "grad_norm": 3.343381404876709,
      "learning_rate": 2.7560040989976892e-05,
      "loss": 0.2211,
      "step": 115
    },
    {
      "epoch": 2.0262008733624453,
      "grad_norm": 0.38313835859298706,
      "learning_rate": 2.6697824014873075e-05,
      "loss": 0.0646,
      "step": 116
    },
    {
      "epoch": 2.0436681222707422,
      "grad_norm": 0.9553971886634827,
      "learning_rate": 2.5844370035168073e-05,
      "loss": 0.064,
      "step": 117
    },
    {
      "epoch": 2.061135371179039,
      "grad_norm": 0.6309722065925598,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.0499,
      "step": 118
    },
    {
      "epoch": 2.078602620087336,
      "grad_norm": 0.5666458010673523,
      "learning_rate": 2.4165031442406855e-05,
      "loss": 0.0624,
      "step": 119
    },
    {
      "epoch": 2.096069868995633,
      "grad_norm": 0.8651885390281677,
      "learning_rate": 2.333977835991545e-05,
      "loss": 0.0405,
      "step": 120
    },
    {
      "epoch": 2.11353711790393,
      "grad_norm": 0.6641396880149841,
      "learning_rate": 2.25245510964597e-05,
      "loss": 0.0363,
      "step": 121
    },
    {
      "epoch": 2.131004366812227,
      "grad_norm": 0.7574361562728882,
      "learning_rate": 2.171965622567308e-05,
      "loss": 0.0423,
      "step": 122
    },
    {
      "epoch": 2.148471615720524,
      "grad_norm": 1.8589038848876953,
      "learning_rate": 2.0925396435598664e-05,
      "loss": 0.0724,
      "step": 123
    },
    {
      "epoch": 2.165938864628821,
      "grad_norm": 1.456919550895691,
      "learning_rate": 2.0142070414860704e-05,
      "loss": 0.0459,
      "step": 124
    },
    {
      "epoch": 2.183406113537118,
      "grad_norm": 0.9130675792694092,
      "learning_rate": 1.936997274033986e-05,
      "loss": 0.0442,
      "step": 125
    },
    {
      "epoch": 2.2008733624454146,
      "grad_norm": 1.0100352764129639,
      "learning_rate": 1.8609393766395085e-05,
      "loss": 0.0608,
      "step": 126
    },
    {
      "epoch": 2.2183406113537116,
      "grad_norm": 0.5539523959159851,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 0.0303,
      "step": 127
    },
    {
      "epoch": 2.2358078602620086,
      "grad_norm": 1.8237937688827515,
      "learning_rate": 1.7123931571546827e-05,
      "loss": 0.0489,
      "step": 128
    },
    {
      "epoch": 2.2532751091703056,
      "grad_norm": 0.9342569708824158,
      "learning_rate": 1.639960697222388e-05,
      "loss": 0.0792,
      "step": 129
    },
    {
      "epoch": 2.2707423580786026,
      "grad_norm": 0.4814267158508301,
      "learning_rate": 1.5687918106563326e-05,
      "loss": 0.0559,
      "step": 130
    },
    {
      "epoch": 2.2882096069868996,
      "grad_norm": 0.5614648461341858,
      "learning_rate": 1.4989132611641576e-05,
      "loss": 0.0691,
      "step": 131
    },
    {
      "epoch": 2.3056768558951966,
      "grad_norm": 0.6089550852775574,
      "learning_rate": 1.4303513272105057e-05,
      "loss": 0.0603,
      "step": 132
    },
    {
      "epoch": 2.3231441048034935,
      "grad_norm": 0.39180800318717957,
      "learning_rate": 1.3631317921347563e-05,
      "loss": 0.0422,
      "step": 133
    },
    {
      "epoch": 2.3406113537117905,
      "grad_norm": 1.3804267644882202,
      "learning_rate": 1.297279934454978e-05,
      "loss": 0.044,
      "step": 134
    },
    {
      "epoch": 2.3580786026200875,
      "grad_norm": 1.0875520706176758,
      "learning_rate": 1.2328205183616965e-05,
      "loss": 0.0495,
      "step": 135
    },
    {
      "epoch": 2.3755458515283845,
      "grad_norm": 0.661360502243042,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 0.043,
      "step": 136
    },
    {
      "epoch": 2.393013100436681,
      "grad_norm": 0.48929262161254883,
      "learning_rate": 1.1081754403791999e-05,
      "loss": 0.0203,
      "step": 137
    },
    {
      "epoch": 2.410480349344978,
      "grad_norm": 0.6960212588310242,
      "learning_rate": 1.0480366524062042e-05,
      "loss": 0.0266,
      "step": 138
    },
    {
      "epoch": 2.427947598253275,
      "grad_norm": 0.7719947099685669,
      "learning_rate": 9.893840362247809e-06,
      "loss": 0.0367,
      "step": 139
    },
    {
      "epoch": 2.445414847161572,
      "grad_norm": 0.6129599213600159,
      "learning_rate": 9.322396486851626e-06,
      "loss": 0.0358,
      "step": 140
    },
    {
      "epoch": 2.462882096069869,
      "grad_norm": 0.8176167607307434,
      "learning_rate": 8.766249794544662e-06,
      "loss": 0.0227,
      "step": 141
    },
    {
      "epoch": 2.480349344978166,
      "grad_norm": 1.3852804899215698,
      "learning_rate": 8.225609429353187e-06,
      "loss": 0.0755,
      "step": 142
    },
    {
      "epoch": 2.497816593886463,
      "grad_norm": 1.7854435443878174,
      "learning_rate": 7.700678704007947e-06,
      "loss": 0.1555,
      "step": 143
    },
    {
      "epoch": 2.51528384279476,
      "grad_norm": 0.4705410301685333,
      "learning_rate": 7.191655023486682e-06,
      "loss": 0.0344,
      "step": 144
    },
    {
      "epoch": 2.532751091703057,
      "grad_norm": 0.40318259596824646,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.0432,
      "step": 145
    },
    {
      "epoch": 2.5502183406113534,
      "grad_norm": 0.33191582560539246,
      "learning_rate": 6.222088434895462e-06,
      "loss": 0.0197,
      "step": 146
    },
    {
      "epoch": 2.567685589519651,
      "grad_norm": 0.3540843725204468,
      "learning_rate": 5.7619101411671095e-06,
      "loss": 0.0366,
      "step": 147
    },
    {
      "epoch": 2.5851528384279474,
      "grad_norm": 0.30799436569213867,
      "learning_rate": 5.318367983829392e-06,
      "loss": 0.0201,
      "step": 148
    },
    {
      "epoch": 2.6026200873362444,
      "grad_norm": 0.7724692225456238,
      "learning_rate": 4.891628760948114e-06,
      "loss": 0.0352,
      "step": 149
    },
    {
      "epoch": 2.6200873362445414,
      "grad_norm": 0.599810004234314,
      "learning_rate": 4.4818529516926726e-06,
      "loss": 0.0345,
      "step": 150
    },
    {
      "epoch": 2.6200873362445414,
      "eval_loss": 0.07863566279411316,
      "eval_runtime": 2.9153,
      "eval_samples_per_second": 33.272,
      "eval_steps_per_second": 8.575,
      "step": 150
    },
    {
      "epoch": 2.6375545851528384,
      "grad_norm": 0.7440508604049683,
      "learning_rate": 4.089194655986306e-06,
      "loss": 0.0413,
      "step": 151
    },
    {
      "epoch": 2.6550218340611353,
      "grad_norm": 0.4938758313655853,
      "learning_rate": 3.7138015365554833e-06,
      "loss": 0.0321,
      "step": 152
    },
    {
      "epoch": 2.6724890829694323,
      "grad_norm": 0.47660332918167114,
      "learning_rate": 3.3558147633999728e-06,
      "loss": 0.0272,
      "step": 153
    },
    {
      "epoch": 2.6899563318777293,
      "grad_norm": 0.7508668899536133,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 0.0389,
      "step": 154
    },
    {
      "epoch": 2.7074235807860263,
      "grad_norm": 2.5183489322662354,
      "learning_rate": 2.692592156212487e-06,
      "loss": 0.0222,
      "step": 155
    },
    {
      "epoch": 2.7248908296943233,
      "grad_norm": 0.7234876751899719,
      "learning_rate": 2.3876057330792346e-06,
      "loss": 0.0377,
      "step": 156
    },
    {
      "epoch": 2.74235807860262,
      "grad_norm": 0.44617539644241333,
      "learning_rate": 2.100524384225555e-06,
      "loss": 0.0596,
      "step": 157
    },
    {
      "epoch": 2.7598253275109172,
      "grad_norm": 0.5034026503562927,
      "learning_rate": 1.8314560692059835e-06,
      "loss": 0.0542,
      "step": 158
    },
    {
      "epoch": 2.777292576419214,
      "grad_norm": 0.45781925320625305,
      "learning_rate": 1.5805019736097104e-06,
      "loss": 0.0488,
      "step": 159
    },
    {
      "epoch": 2.7947598253275108,
      "grad_norm": 0.39495936036109924,
      "learning_rate": 1.3477564710088098e-06,
      "loss": 0.0281,
      "step": 160
    },
    {
      "epoch": 2.8122270742358078,
      "grad_norm": 0.6215051412582397,
      "learning_rate": 1.1333070874682216e-06,
      "loss": 0.0502,
      "step": 161
    },
    {
      "epoch": 2.8296943231441047,
      "grad_norm": 0.27189934253692627,
      "learning_rate": 9.372344686307655e-07,
      "loss": 0.0157,
      "step": 162
    },
    {
      "epoch": 2.8471615720524017,
      "grad_norm": 0.4099377691745758,
      "learning_rate": 7.596123493895991e-07,
      "loss": 0.0285,
      "step": 163
    },
    {
      "epoch": 2.8646288209606987,
      "grad_norm": 0.4258248209953308,
      "learning_rate": 6.005075261595494e-07,
      "loss": 0.0281,
      "step": 164
    },
    {
      "epoch": 2.8820960698689957,
      "grad_norm": 0.3853057026863098,
      "learning_rate": 4.5997983175773417e-07,
      "loss": 0.0248,
      "step": 165
    },
    {
      "epoch": 2.8995633187772927,
      "grad_norm": 0.503648042678833,
      "learning_rate": 3.380821129028489e-07,
      "loss": 0.0272,
      "step": 166
    },
    {
      "epoch": 2.9170305676855897,
      "grad_norm": 4.854583740234375,
      "learning_rate": 2.3486021034170857e-07,
      "loss": 0.0287,
      "step": 167
    },
    {
      "epoch": 2.934497816593886,
      "grad_norm": 0.6928201913833618,
      "learning_rate": 1.503529416103988e-07,
      "loss": 0.0327,
      "step": 168
    },
    {
      "epoch": 2.9519650655021836,
      "grad_norm": 0.25627920031547546,
      "learning_rate": 8.459208643659122e-08,
      "loss": 0.011,
      "step": 169
    },
    {
      "epoch": 2.96943231441048,
      "grad_norm": 0.8357424736022949,
      "learning_rate": 3.760237478849793e-08,
      "loss": 0.0533,
      "step": 170
    },
    {
      "epoch": 2.986899563318777,
      "grad_norm": 2.073831081390381,
      "learning_rate": 9.401477574932926e-09,
      "loss": 0.0415,
      "step": 171
    },
    {
      "epoch": 3.004366812227074,
      "grad_norm": 0.7584478259086609,
      "learning_rate": 0.0,
      "loss": 0.0659,
      "step": 172
    }
  ],
  "logging_steps": 1,
  "max_steps": 172,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.729365147942912e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}