{
  "best_metric": 0.08335956186056137,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 3.004366812227074,
  "eval_steps": 50,
  "global_step": 172,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017467248908296942,
      "grad_norm": 1.0150203704833984,
      "learning_rate": 1e-05,
      "loss": 0.7291,
      "step": 1
    },
    {
      "epoch": 0.017467248908296942,
      "eval_loss": 1.4666682481765747,
      "eval_runtime": 2.9027,
      "eval_samples_per_second": 33.417,
      "eval_steps_per_second": 8.613,
      "step": 1
    },
    {
      "epoch": 0.034934497816593885,
      "grad_norm": 1.6312181949615479,
      "learning_rate": 2e-05,
      "loss": 0.9125,
      "step": 2
    },
    {
      "epoch": 0.05240174672489083,
      "grad_norm": 1.6606181859970093,
      "learning_rate": 3e-05,
      "loss": 1.0164,
      "step": 3
    },
    {
      "epoch": 0.06986899563318777,
      "grad_norm": 1.8427821397781372,
      "learning_rate": 4e-05,
      "loss": 1.1878,
      "step": 4
    },
    {
      "epoch": 0.08733624454148471,
      "grad_norm": 1.8938854932785034,
      "learning_rate": 5e-05,
      "loss": 1.0719,
      "step": 5
    },
    {
      "epoch": 0.10480349344978165,
      "grad_norm": 1.6704180240631104,
      "learning_rate": 6e-05,
      "loss": 1.1482,
      "step": 6
    },
    {
      "epoch": 0.1222707423580786,
      "grad_norm": 1.6908645629882812,
      "learning_rate": 7e-05,
      "loss": 0.9746,
      "step": 7
    },
    {
      "epoch": 0.13973799126637554,
      "grad_norm": 1.744346261024475,
      "learning_rate": 8e-05,
      "loss": 1.1686,
      "step": 8
    },
    {
      "epoch": 0.1572052401746725,
      "grad_norm": 1.9767279624938965,
      "learning_rate": 9e-05,
      "loss": 1.0338,
      "step": 9
    },
    {
      "epoch": 0.17467248908296942,
      "grad_norm": 1.2403647899627686,
      "learning_rate": 0.0001,
      "loss": 0.895,
      "step": 10
    },
    {
      "epoch": 0.19213973799126638,
      "grad_norm": 2.6359500885009766,
      "learning_rate": 9.999059852242507e-05,
      "loss": 0.9552,
      "step": 11
    },
    {
      "epoch": 0.2096069868995633,
      "grad_norm": 1.7321456670761108,
      "learning_rate": 9.996239762521151e-05,
      "loss": 1.127,
      "step": 12
    },
    {
      "epoch": 0.22707423580786026,
      "grad_norm": 2.320572853088379,
      "learning_rate": 9.991540791356342e-05,
      "loss": 1.1839,
      "step": 13
    },
    {
      "epoch": 0.2445414847161572,
      "grad_norm": 2.7635228633880615,
      "learning_rate": 9.98496470583896e-05,
      "loss": 1.1559,
      "step": 14
    },
    {
      "epoch": 0.26200873362445415,
      "grad_norm": 0.8590458035469055,
      "learning_rate": 9.976513978965829e-05,
      "loss": 0.6596,
      "step": 15
    },
    {
      "epoch": 0.2794759825327511,
      "grad_norm": 1.0409654378890991,
      "learning_rate": 9.966191788709716e-05,
      "loss": 0.6509,
      "step": 16
    },
    {
      "epoch": 0.29694323144104806,
      "grad_norm": 0.8696012496948242,
      "learning_rate": 9.954002016824227e-05,
      "loss": 0.7372,
      "step": 17
    },
    {
      "epoch": 0.314410480349345,
      "grad_norm": 0.9967894554138184,
      "learning_rate": 9.939949247384046e-05,
      "loss": 0.6979,
      "step": 18
    },
    {
      "epoch": 0.3318777292576419,
      "grad_norm": 0.8892637491226196,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.7706,
      "step": 19
    },
    {
      "epoch": 0.34934497816593885,
      "grad_norm": 0.9137187004089355,
      "learning_rate": 9.906276553136923e-05,
      "loss": 0.7256,
      "step": 20
    },
    {
      "epoch": 0.36681222707423583,
      "grad_norm": 1.0746030807495117,
      "learning_rate": 9.88666929125318e-05,
      "loss": 0.7775,
      "step": 21
    },
    {
      "epoch": 0.38427947598253276,
      "grad_norm": 1.112693428993225,
      "learning_rate": 9.865224352899119e-05,
      "loss": 0.7126,
      "step": 22
    },
    {
      "epoch": 0.4017467248908297,
      "grad_norm": 1.1786601543426514,
      "learning_rate": 9.84194980263903e-05,
      "loss": 0.7844,
      "step": 23
    },
    {
      "epoch": 0.4192139737991266,
      "grad_norm": 1.3309204578399658,
      "learning_rate": 9.816854393079403e-05,
      "loss": 0.8217,
      "step": 24
    },
    {
      "epoch": 0.4366812227074236,
      "grad_norm": 1.180550217628479,
      "learning_rate": 9.789947561577445e-05,
      "loss": 0.7397,
      "step": 25
    },
    {
      "epoch": 0.45414847161572053,
      "grad_norm": 1.602713942527771,
      "learning_rate": 9.761239426692077e-05,
      "loss": 0.8979,
      "step": 26
    },
    {
      "epoch": 0.47161572052401746,
      "grad_norm": 1.7447450160980225,
      "learning_rate": 9.730740784378753e-05,
      "loss": 0.8654,
      "step": 27
    },
    {
      "epoch": 0.4890829694323144,
      "grad_norm": 2.8003647327423096,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.8954,
      "step": 28
    },
    {
      "epoch": 0.5065502183406113,
      "grad_norm": 0.5731148719787598,
      "learning_rate": 9.664418523660004e-05,
      "loss": 0.5257,
      "step": 29
    },
    {
      "epoch": 0.5240174672489083,
      "grad_norm": 0.7524777054786682,
      "learning_rate": 9.628619846344454e-05,
      "loss": 0.4996,
      "step": 30
    },
    {
      "epoch": 0.5414847161572053,
      "grad_norm": 0.931867778301239,
      "learning_rate": 9.591080534401371e-05,
      "loss": 0.6164,
      "step": 31
    },
    {
      "epoch": 0.5589519650655022,
      "grad_norm": 0.7929329872131348,
      "learning_rate": 9.551814704830734e-05,
      "loss": 0.6483,
      "step": 32
    },
    {
      "epoch": 0.5764192139737991,
      "grad_norm": 0.7793125510215759,
      "learning_rate": 9.51083712390519e-05,
      "loss": 0.5689,
      "step": 33
    },
    {
      "epoch": 0.5938864628820961,
      "grad_norm": 0.8453607559204102,
      "learning_rate": 9.468163201617062e-05,
      "loss": 0.546,
      "step": 34
    },
    {
      "epoch": 0.611353711790393,
      "grad_norm": 0.9560432434082031,
      "learning_rate": 9.423808985883289e-05,
      "loss": 0.6277,
      "step": 35
    },
    {
      "epoch": 0.62882096069869,
      "grad_norm": 1.192608118057251,
      "learning_rate": 9.377791156510455e-05,
      "loss": 0.6021,
      "step": 36
    },
    {
      "epoch": 0.6462882096069869,
      "grad_norm": 0.9529364109039307,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.5185,
      "step": 37
    },
    {
      "epoch": 0.6637554585152838,
      "grad_norm": 1.2152981758117676,
      "learning_rate": 9.280834497651334e-05,
      "loss": 0.6219,
      "step": 38
    },
    {
      "epoch": 0.6812227074235808,
      "grad_norm": 1.2711488008499146,
      "learning_rate": 9.229932129599205e-05,
      "loss": 0.6143,
      "step": 39
    },
    {
      "epoch": 0.6986899563318777,
      "grad_norm": 1.4244954586029053,
      "learning_rate": 9.177439057064683e-05,
      "loss": 0.7323,
      "step": 40
    },
    {
      "epoch": 0.7161572052401747,
      "grad_norm": 1.7837212085723877,
      "learning_rate": 9.123375020545535e-05,
      "loss": 0.6341,
      "step": 41
    },
    {
      "epoch": 0.7336244541484717,
      "grad_norm": 1.9221779108047485,
      "learning_rate": 9.067760351314838e-05,
      "loss": 0.8552,
      "step": 42
    },
    {
      "epoch": 0.7510917030567685,
      "grad_norm": 0.7614825367927551,
      "learning_rate": 9.01061596377522e-05,
      "loss": 0.4449,
      "step": 43
    },
    {
      "epoch": 0.7685589519650655,
      "grad_norm": 0.8698350191116333,
      "learning_rate": 8.951963347593797e-05,
      "loss": 0.468,
      "step": 44
    },
    {
      "epoch": 0.7860262008733624,
      "grad_norm": 0.7544025182723999,
      "learning_rate": 8.891824559620801e-05,
      "loss": 0.4263,
      "step": 45
    },
    {
      "epoch": 0.8034934497816594,
      "grad_norm": 1.096413493156433,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.5457,
      "step": 46
    },
    {
      "epoch": 0.8209606986899564,
      "grad_norm": 0.8501629829406738,
      "learning_rate": 8.767179481638303e-05,
      "loss": 0.4508,
      "step": 47
    },
    {
      "epoch": 0.8384279475982532,
      "grad_norm": 0.9768447875976562,
      "learning_rate": 8.702720065545024e-05,
      "loss": 0.4,
      "step": 48
    },
    {
      "epoch": 0.8558951965065502,
      "grad_norm": 1.1103242635726929,
      "learning_rate": 8.636868207865244e-05,
      "loss": 0.4616,
      "step": 49
    },
    {
      "epoch": 0.8733624454148472,
      "grad_norm": 1.2032922506332397,
      "learning_rate": 8.569648672789497e-05,
      "loss": 0.3951,
      "step": 50
    },
    {
      "epoch": 0.8733624454148472,
      "eval_loss": 0.4165593981742859,
      "eval_runtime": 2.92,
      "eval_samples_per_second": 33.219,
      "eval_steps_per_second": 8.562,
      "step": 50
    },
    {
      "epoch": 0.8908296943231441,
      "grad_norm": 1.121232271194458,
      "learning_rate": 8.501086738835843e-05,
      "loss": 0.3865,
      "step": 51
    },
    {
      "epoch": 0.9082969432314411,
      "grad_norm": 1.3548880815505981,
      "learning_rate": 8.43120818934367e-05,
      "loss": 0.493,
      "step": 52
    },
    {
      "epoch": 0.925764192139738,
      "grad_norm": 1.6399530172348022,
      "learning_rate": 8.360039302777612e-05,
      "loss": 0.4454,
      "step": 53
    },
    {
      "epoch": 0.9432314410480349,
      "grad_norm": 2.1539204120635986,
      "learning_rate": 8.28760684284532e-05,
      "loss": 0.5806,
      "step": 54
    },
    {
      "epoch": 0.9606986899563319,
      "grad_norm": 1.3799574375152588,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.3791,
      "step": 55
    },
    {
      "epoch": 0.9781659388646288,
      "grad_norm": 1.6386706829071045,
      "learning_rate": 8.139060623360493e-05,
      "loss": 0.3788,
      "step": 56
    },
    {
      "epoch": 0.9956331877729258,
      "grad_norm": 1.8362394571304321,
      "learning_rate": 8.063002725966015e-05,
      "loss": 0.3617,
      "step": 57
    },
    {
      "epoch": 1.0131004366812226,
      "grad_norm": 4.955390453338623,
      "learning_rate": 7.985792958513931e-05,
      "loss": 0.7963,
      "step": 58
    },
    {
      "epoch": 1.0305676855895196,
      "grad_norm": 1.039722204208374,
      "learning_rate": 7.907460356440133e-05,
      "loss": 0.2566,
      "step": 59
    },
    {
      "epoch": 1.0480349344978166,
      "grad_norm": 0.9322420358657837,
      "learning_rate": 7.828034377432693e-05,
      "loss": 0.3009,
      "step": 60
    },
    {
      "epoch": 1.0655021834061136,
      "grad_norm": 1.1808615922927856,
      "learning_rate": 7.74754489035403e-05,
      "loss": 0.3358,
      "step": 61
    },
    {
      "epoch": 1.0829694323144106,
      "grad_norm": 0.9506165981292725,
      "learning_rate": 7.666022164008457e-05,
      "loss": 0.2231,
      "step": 62
    },
    {
      "epoch": 1.1004366812227073,
      "grad_norm": 1.2802693843841553,
      "learning_rate": 7.583496855759316e-05,
      "loss": 0.2601,
      "step": 63
    },
    {
      "epoch": 1.1179039301310043,
      "grad_norm": 1.6419092416763306,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.2622,
      "step": 64
    },
    {
      "epoch": 1.1353711790393013,
      "grad_norm": 0.9867280125617981,
      "learning_rate": 7.415562996483192e-05,
      "loss": 0.2306,
      "step": 65
    },
    {
      "epoch": 1.1528384279475983,
      "grad_norm": 1.1051852703094482,
      "learning_rate": 7.330217598512695e-05,
      "loss": 0.2497,
      "step": 66
    },
    {
      "epoch": 1.1703056768558953,
      "grad_norm": 1.6345847845077515,
      "learning_rate": 7.243995901002312e-05,
      "loss": 0.2628,
      "step": 67
    },
    {
      "epoch": 1.1877729257641922,
      "grad_norm": 1.4212472438812256,
      "learning_rate": 7.156930328406268e-05,
      "loss": 0.2629,
      "step": 68
    },
    {
      "epoch": 1.205240174672489,
      "grad_norm": 1.264844536781311,
      "learning_rate": 7.069053622525696e-05,
      "loss": 0.2389,
      "step": 69
    },
    {
      "epoch": 1.222707423580786,
      "grad_norm": 1.098765254020691,
      "learning_rate": 6.980398830195785e-05,
      "loss": 0.1724,
      "step": 70
    },
    {
      "epoch": 1.240174672489083,
      "grad_norm": 4.439842224121094,
      "learning_rate": 6.890999290858214e-05,
      "loss": 0.3221,
      "step": 71
    },
    {
      "epoch": 1.25764192139738,
      "grad_norm": 1.4293755292892456,
      "learning_rate": 6.800888624023553e-05,
      "loss": 0.2996,
      "step": 72
    },
    {
      "epoch": 1.2751091703056767,
      "grad_norm": 1.592860460281372,
      "learning_rate": 6.710100716628344e-05,
      "loss": 0.2456,
      "step": 73
    },
    {
      "epoch": 1.2925764192139737,
      "grad_norm": 1.2025070190429688,
      "learning_rate": 6.618669710291606e-05,
      "loss": 0.2663,
      "step": 74
    },
    {
      "epoch": 1.3100436681222707,
      "grad_norm": 0.9999786019325256,
      "learning_rate": 6.526629988475567e-05,
      "loss": 0.2102,
      "step": 75
    },
    {
      "epoch": 1.3275109170305677,
      "grad_norm": 1.3882269859313965,
      "learning_rate": 6.434016163555452e-05,
      "loss": 0.1725,
      "step": 76
    },
    {
      "epoch": 1.3449781659388647,
      "grad_norm": 1.0446701049804688,
      "learning_rate": 6.340863063803188e-05,
      "loss": 0.1511,
      "step": 77
    },
    {
      "epoch": 1.3624454148471616,
      "grad_norm": 1.5679444074630737,
      "learning_rate": 6.247205720289907e-05,
      "loss": 0.217,
      "step": 78
    },
    {
      "epoch": 1.3799126637554586,
      "grad_norm": 1.159275770187378,
      "learning_rate": 6.153079353712201e-05,
      "loss": 0.142,
      "step": 79
    },
    {
      "epoch": 1.3973799126637554,
      "grad_norm": 1.1023359298706055,
      "learning_rate": 6.058519361147055e-05,
      "loss": 0.1504,
      "step": 80
    },
    {
      "epoch": 1.4148471615720524,
      "grad_norm": 1.6043438911437988,
      "learning_rate": 5.963561302740449e-05,
      "loss": 0.1497,
      "step": 81
    },
    {
      "epoch": 1.4323144104803494,
      "grad_norm": 1.2015390396118164,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.2159,
      "step": 82
    },
    {
      "epoch": 1.4497816593886463,
      "grad_norm": 1.9150855541229248,
      "learning_rate": 5.772593964039203e-05,
      "loss": 0.2409,
      "step": 83
    },
    {
      "epoch": 1.467248908296943,
      "grad_norm": 1.2557482719421387,
      "learning_rate": 5.6766564987506566e-05,
      "loss": 0.1594,
      "step": 84
    },
    {
      "epoch": 1.48471615720524,
      "grad_norm": 3.442033052444458,
      "learning_rate": 5.5804645706261514e-05,
      "loss": 0.1946,
      "step": 85
    },
    {
      "epoch": 1.502183406113537,
      "grad_norm": 1.3177473545074463,
      "learning_rate": 5.484054353515896e-05,
      "loss": 0.3011,
      "step": 86
    },
    {
      "epoch": 1.519650655021834,
      "grad_norm": 0.811776340007782,
      "learning_rate": 5.387462103359655e-05,
      "loss": 0.1271,
      "step": 87
    },
    {
      "epoch": 1.537117903930131,
      "grad_norm": 1.0938807725906372,
      "learning_rate": 5.290724144552379e-05,
      "loss": 0.1645,
      "step": 88
    },
    {
      "epoch": 1.554585152838428,
      "grad_norm": 0.7474001049995422,
      "learning_rate": 5.193876856284085e-05,
      "loss": 0.1091,
      "step": 89
    },
    {
      "epoch": 1.572052401746725,
      "grad_norm": 1.4519085884094238,
      "learning_rate": 5.096956658859122e-05,
      "loss": 0.2083,
      "step": 90
    },
    {
      "epoch": 1.589519650655022,
      "grad_norm": 1.1171748638153076,
      "learning_rate": 5e-05,
      "loss": 0.1567,
      "step": 91
    },
    {
      "epoch": 1.6069868995633187,
      "grad_norm": 1.226719856262207,
      "learning_rate": 4.903043341140879e-05,
      "loss": 0.1468,
      "step": 92
    },
    {
      "epoch": 1.6244541484716157,
      "grad_norm": 1.6665370464324951,
      "learning_rate": 4.806123143715916e-05,
      "loss": 0.215,
      "step": 93
    },
    {
      "epoch": 1.6419213973799127,
      "grad_norm": 1.3081082105636597,
      "learning_rate": 4.709275855447621e-05,
      "loss": 0.1094,
      "step": 94
    },
    {
      "epoch": 1.6593886462882095,
      "grad_norm": 1.3947356939315796,
      "learning_rate": 4.612537896640346e-05,
      "loss": 0.1714,
      "step": 95
    },
    {
      "epoch": 1.6768558951965065,
      "grad_norm": 1.4104937314987183,
      "learning_rate": 4.515945646484105e-05,
      "loss": 0.1227,
      "step": 96
    },
    {
      "epoch": 1.6943231441048034,
      "grad_norm": 1.774125337600708,
      "learning_rate": 4.4195354293738484e-05,
      "loss": 0.152,
      "step": 97
    },
    {
      "epoch": 1.7117903930131004,
      "grad_norm": 2.2607905864715576,
      "learning_rate": 4.323343501249346e-05,
      "loss": 0.0859,
      "step": 98
    },
    {
      "epoch": 1.7292576419213974,
      "grad_norm": 1.8278192281723022,
      "learning_rate": 4.227406035960798e-05,
      "loss": 0.1364,
      "step": 99
    },
    {
      "epoch": 1.7467248908296944,
      "grad_norm": 1.3324322700500488,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.2195,
      "step": 100
    },
    {
      "epoch": 1.7467248908296944,
      "eval_loss": 0.16824179887771606,
      "eval_runtime": 2.9465,
      "eval_samples_per_second": 32.921,
      "eval_steps_per_second": 8.485,
      "step": 100
    },
    {
      "epoch": 1.7641921397379914,
      "grad_norm": 1.0157638788223267,
      "learning_rate": 4.036438697259551e-05,
      "loss": 0.1246,
      "step": 101
    },
    {
      "epoch": 1.7816593886462884,
      "grad_norm": 1.0742075443267822,
      "learning_rate": 3.941480638852948e-05,
      "loss": 0.1228,
      "step": 102
    },
    {
      "epoch": 1.7991266375545851,
      "grad_norm": 1.666223168373108,
      "learning_rate": 3.846920646287799e-05,
      "loss": 0.135,
      "step": 103
    },
    {
      "epoch": 1.8165938864628821,
      "grad_norm": 0.8187023997306824,
      "learning_rate": 3.752794279710094e-05,
      "loss": 0.1066,
      "step": 104
    },
    {
      "epoch": 1.8340611353711789,
      "grad_norm": 1.2794800996780396,
      "learning_rate": 3.6591369361968124e-05,
      "loss": 0.0715,
      "step": 105
    },
    {
      "epoch": 1.8515283842794759,
      "grad_norm": 0.7151631116867065,
      "learning_rate": 3.5659838364445505e-05,
      "loss": 0.088,
      "step": 106
    },
    {
      "epoch": 1.8689956331877728,
      "grad_norm": 1.6745882034301758,
      "learning_rate": 3.473370011524435e-05,
      "loss": 0.1362,
      "step": 107
    },
    {
      "epoch": 1.8864628820960698,
      "grad_norm": 0.7424354553222656,
      "learning_rate": 3.381330289708396e-05,
      "loss": 0.0615,
      "step": 108
    },
    {
      "epoch": 1.9039301310043668,
      "grad_norm": 1.122255563735962,
      "learning_rate": 3.289899283371657e-05,
      "loss": 0.0812,
      "step": 109
    },
    {
      "epoch": 1.9213973799126638,
      "grad_norm": 2.3225650787353516,
      "learning_rate": 3.199111375976449e-05,
      "loss": 0.1281,
      "step": 110
    },
    {
      "epoch": 1.9388646288209608,
      "grad_norm": 1.1841471195220947,
      "learning_rate": 3.109000709141788e-05,
      "loss": 0.1178,
      "step": 111
    },
    {
      "epoch": 1.9563318777292578,
      "grad_norm": 1.6408817768096924,
      "learning_rate": 3.019601169804216e-05,
      "loss": 0.1156,
      "step": 112
    },
    {
      "epoch": 1.9737991266375547,
      "grad_norm": 1.8442050218582153,
      "learning_rate": 2.9309463774743046e-05,
      "loss": 0.184,
      "step": 113
    },
    {
      "epoch": 1.9912663755458515,
      "grad_norm": 1.5312315225601196,
      "learning_rate": 2.8430696715937337e-05,
      "loss": 0.1173,
      "step": 114
    },
    {
      "epoch": 2.0087336244541483,
      "grad_norm": 2.971773624420166,
      "learning_rate": 2.7560040989976892e-05,
      "loss": 0.2291,
      "step": 115
    },
    {
      "epoch": 2.0262008733624453,
      "grad_norm": 0.3828129768371582,
      "learning_rate": 2.6697824014873075e-05,
      "loss": 0.0673,
      "step": 116
    },
    {
      "epoch": 2.0436681222707422,
      "grad_norm": 0.8585243821144104,
      "learning_rate": 2.5844370035168073e-05,
      "loss": 0.0667,
      "step": 117
    },
    {
      "epoch": 2.061135371179039,
      "grad_norm": 0.7909578084945679,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.0569,
      "step": 118
    },
    {
      "epoch": 2.078602620087336,
      "grad_norm": 0.5592768788337708,
      "learning_rate": 2.4165031442406855e-05,
      "loss": 0.0664,
      "step": 119
    },
    {
      "epoch": 2.096069868995633,
      "grad_norm": 0.9716861844062805,
      "learning_rate": 2.333977835991545e-05,
      "loss": 0.0454,
      "step": 120
    },
    {
      "epoch": 2.11353711790393,
      "grad_norm": 0.6158682107925415,
      "learning_rate": 2.25245510964597e-05,
      "loss": 0.0409,
      "step": 121
    },
    {
      "epoch": 2.131004366812227,
      "grad_norm": 0.5465672612190247,
      "learning_rate": 2.171965622567308e-05,
      "loss": 0.0429,
      "step": 122
    },
    {
      "epoch": 2.148471615720524,
      "grad_norm": 2.4121029376983643,
      "learning_rate": 2.0925396435598664e-05,
      "loss": 0.0533,
      "step": 123
    },
    {
      "epoch": 2.165938864628821,
      "grad_norm": 2.363229990005493,
      "learning_rate": 2.0142070414860704e-05,
      "loss": 0.065,
      "step": 124
    },
    {
      "epoch": 2.183406113537118,
      "grad_norm": 0.7529873847961426,
      "learning_rate": 1.936997274033986e-05,
      "loss": 0.0447,
      "step": 125
    },
    {
      "epoch": 2.2008733624454146,
      "grad_norm": 0.9575099945068359,
      "learning_rate": 1.8609393766395085e-05,
      "loss": 0.0638,
      "step": 126
    },
    {
      "epoch": 2.2183406113537116,
      "grad_norm": 0.6782347559928894,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 0.0385,
      "step": 127
    },
    {
      "epoch": 2.2358078602620086,
      "grad_norm": 1.4730333089828491,
      "learning_rate": 1.7123931571546827e-05,
      "loss": 0.0518,
      "step": 128
    },
    {
      "epoch": 2.2532751091703056,
      "grad_norm": 0.9120597839355469,
      "learning_rate": 1.639960697222388e-05,
      "loss": 0.0866,
      "step": 129
    },
    {
      "epoch": 2.2707423580786026,
      "grad_norm": 0.5575978755950928,
      "learning_rate": 1.5687918106563326e-05,
      "loss": 0.0619,
      "step": 130
    },
    {
      "epoch": 2.2882096069868996,
      "grad_norm": 0.6944248676300049,
      "learning_rate": 1.4989132611641576e-05,
      "loss": 0.0769,
      "step": 131
    },
    {
      "epoch": 2.3056768558951966,
      "grad_norm": 0.5566210746765137,
      "learning_rate": 1.4303513272105057e-05,
      "loss": 0.0656,
      "step": 132
    },
    {
      "epoch": 2.3231441048034935,
      "grad_norm": 0.3767731785774231,
      "learning_rate": 1.3631317921347563e-05,
      "loss": 0.0436,
      "step": 133
    },
    {
      "epoch": 2.3406113537117905,
      "grad_norm": 0.7124356627464294,
      "learning_rate": 1.297279934454978e-05,
      "loss": 0.0454,
      "step": 134
    },
    {
      "epoch": 2.3580786026200875,
      "grad_norm": 1.0094614028930664,
      "learning_rate": 1.2328205183616965e-05,
      "loss": 0.0529,
      "step": 135
    },
    {
      "epoch": 2.3755458515283845,
      "grad_norm": 0.7214720845222473,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 0.0482,
      "step": 136
    },
    {
      "epoch": 2.393013100436681,
      "grad_norm": 0.43886321783065796,
      "learning_rate": 1.1081754403791999e-05,
      "loss": 0.0199,
      "step": 137
    },
    {
      "epoch": 2.410480349344978,
      "grad_norm": 0.5449000000953674,
      "learning_rate": 1.0480366524062042e-05,
      "loss": 0.0244,
      "step": 138
    },
    {
      "epoch": 2.427947598253275,
      "grad_norm": 0.7334902882575989,
      "learning_rate": 9.893840362247809e-06,
      "loss": 0.0368,
      "step": 139
    },
    {
      "epoch": 2.445414847161572,
      "grad_norm": 0.6182017922401428,
      "learning_rate": 9.322396486851626e-06,
      "loss": 0.0406,
      "step": 140
    },
    {
      "epoch": 2.462882096069869,
      "grad_norm": 0.47970086336135864,
      "learning_rate": 8.766249794544662e-06,
      "loss": 0.0241,
      "step": 141
    },
    {
      "epoch": 2.480349344978166,
      "grad_norm": 1.3502711057662964,
      "learning_rate": 8.225609429353187e-06,
      "loss": 0.0638,
      "step": 142
    },
    {
      "epoch": 2.497816593886463,
      "grad_norm": 1.8579151630401611,
      "learning_rate": 7.700678704007947e-06,
      "loss": 0.1778,
      "step": 143
    },
    {
      "epoch": 2.51528384279476,
      "grad_norm": 0.498096227645874,
      "learning_rate": 7.191655023486682e-06,
      "loss": 0.037,
      "step": 144
    },
    {
      "epoch": 2.532751091703057,
      "grad_norm": 0.4153275191783905,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.0469,
      "step": 145
    },
    {
      "epoch": 2.5502183406113534,
      "grad_norm": 0.3408838212490082,
      "learning_rate": 6.222088434895462e-06,
      "loss": 0.0235,
      "step": 146
    },
    {
      "epoch": 2.567685589519651,
      "grad_norm": 0.401105672121048,
      "learning_rate": 5.7619101411671095e-06,
      "loss": 0.0402,
      "step": 147
    },
    {
      "epoch": 2.5851528384279474,
      "grad_norm": 0.4758196175098419,
      "learning_rate": 5.318367983829392e-06,
      "loss": 0.0227,
      "step": 148
    },
    {
      "epoch": 2.6026200873362444,
      "grad_norm": 0.5934529304504395,
      "learning_rate": 4.891628760948114e-06,
      "loss": 0.0374,
      "step": 149
    },
    {
      "epoch": 2.6200873362445414,
      "grad_norm": 0.7455284595489502,
      "learning_rate": 4.4818529516926726e-06,
      "loss": 0.0425,
      "step": 150
    },
    {
      "epoch": 2.6200873362445414,
      "eval_loss": 0.08335956186056137,
      "eval_runtime": 2.9173,
      "eval_samples_per_second": 33.25,
      "eval_steps_per_second": 8.57,
      "step": 150
    },
    {
      "epoch": 2.6375545851528384,
      "grad_norm": 0.8464643359184265,
      "learning_rate": 4.089194655986306e-06,
      "loss": 0.0425,
      "step": 151
    },
    {
      "epoch": 2.6550218340611353,
      "grad_norm": 0.5147814750671387,
      "learning_rate": 3.7138015365554833e-06,
      "loss": 0.0333,
      "step": 152
    },
    {
      "epoch": 2.6724890829694323,
      "grad_norm": 0.486793577671051,
      "learning_rate": 3.3558147633999728e-06,
      "loss": 0.027,
      "step": 153
    },
    {
      "epoch": 2.6899563318777293,
      "grad_norm": 1.436091661453247,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 0.0495,
      "step": 154
    },
    {
      "epoch": 2.7074235807860263,
      "grad_norm": 2.1560921669006348,
      "learning_rate": 2.692592156212487e-06,
      "loss": 0.0305,
      "step": 155
    },
    {
      "epoch": 2.7248908296943233,
      "grad_norm": 1.1343252658843994,
      "learning_rate": 2.3876057330792346e-06,
      "loss": 0.0429,
      "step": 156
    },
    {
      "epoch": 2.74235807860262,
      "grad_norm": 0.5252926349639893,
      "learning_rate": 2.100524384225555e-06,
      "loss": 0.0663,
      "step": 157
    },
    {
      "epoch": 2.7598253275109172,
      "grad_norm": 0.4776017665863037,
      "learning_rate": 1.8314560692059835e-06,
      "loss": 0.0578,
      "step": 158
    },
    {
      "epoch": 2.777292576419214,
      "grad_norm": 0.4830605983734131,
      "learning_rate": 1.5805019736097104e-06,
      "loss": 0.0531,
      "step": 159
    },
    {
      "epoch": 2.7947598253275108,
      "grad_norm": 0.4230193495750427,
      "learning_rate": 1.3477564710088098e-06,
      "loss": 0.0314,
      "step": 160
    },
    {
      "epoch": 2.8122270742358078,
      "grad_norm": 0.6493737101554871,
      "learning_rate": 1.1333070874682216e-06,
      "loss": 0.0523,
      "step": 161
    },
    {
      "epoch": 2.8296943231441047,
      "grad_norm": 0.2730914056301117,
      "learning_rate": 9.372344686307655e-07,
      "loss": 0.0168,
      "step": 162
    },
    {
      "epoch": 2.8471615720524017,
      "grad_norm": 0.42375174164772034,
      "learning_rate": 7.596123493895991e-07,
      "loss": 0.0308,
      "step": 163
    },
    {
      "epoch": 2.8646288209606987,
      "grad_norm": 0.4749857783317566,
      "learning_rate": 6.005075261595494e-07,
      "loss": 0.0295,
      "step": 164
    },
    {
      "epoch": 2.8820960698689957,
      "grad_norm": 0.4455066919326782,
      "learning_rate": 4.5997983175773417e-07,
      "loss": 0.0275,
      "step": 165
    },
    {
      "epoch": 2.8995633187772927,
      "grad_norm": 0.6617838144302368,
      "learning_rate": 3.380821129028489e-07,
      "loss": 0.0339,
      "step": 166
    },
    {
      "epoch": 2.9170305676855897,
      "grad_norm": 0.3624148964881897,
      "learning_rate": 2.3486021034170857e-07,
      "loss": 0.0242,
      "step": 167
    },
    {
      "epoch": 2.934497816593886,
      "grad_norm": 0.6199899911880493,
      "learning_rate": 1.503529416103988e-07,
      "loss": 0.0337,
      "step": 168
    },
    {
      "epoch": 2.9519650655021836,
      "grad_norm": 0.24329711496829987,
      "learning_rate": 8.459208643659122e-08,
      "loss": 0.0109,
      "step": 169
    },
    {
      "epoch": 2.96943231441048,
      "grad_norm": 0.8114237189292908,
      "learning_rate": 3.760237478849793e-08,
      "loss": 0.05,
      "step": 170
    },
    {
      "epoch": 2.986899563318777,
      "grad_norm": 0.4971747100353241,
      "learning_rate": 9.401477574932926e-09,
      "loss": 0.0369,
      "step": 171
    },
    {
      "epoch": 3.004366812227074,
      "grad_norm": 0.4547681510448456,
      "learning_rate": 0.0,
      "loss": 0.0711,
      "step": 172
    }
  ],
  "logging_steps": 1,
  "max_steps": 172,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.729365147942912e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}