{
  "best_metric": 1.1534959077835083,
  "best_model_checkpoint": "miner_id_24/checkpoint-350",
  "epoch": 0.47538200339558573,
  "eval_steps": 50,
  "global_step": 350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013582342954159593,
      "grad_norm": 0.7695918679237366,
      "learning_rate": 1e-05,
      "loss": 1.3733,
      "step": 1
    },
    {
      "epoch": 0.0013582342954159593,
      "eval_loss": 1.4100896120071411,
      "eval_runtime": 74.7293,
      "eval_samples_per_second": 16.593,
      "eval_steps_per_second": 4.148,
      "step": 1
    },
    {
      "epoch": 0.0027164685908319186,
      "grad_norm": 0.7374516129493713,
      "learning_rate": 2e-05,
      "loss": 1.4754,
      "step": 2
    },
    {
      "epoch": 0.0040747028862478775,
      "grad_norm": 0.7155938148498535,
      "learning_rate": 3e-05,
      "loss": 1.3174,
      "step": 3
    },
    {
      "epoch": 0.005432937181663837,
      "grad_norm": 0.6224644184112549,
      "learning_rate": 4e-05,
      "loss": 1.3707,
      "step": 4
    },
    {
      "epoch": 0.006791171477079796,
      "grad_norm": 0.4430236220359802,
      "learning_rate": 5e-05,
      "loss": 1.3729,
      "step": 5
    },
    {
      "epoch": 0.008149405772495755,
      "grad_norm": 0.40971505641937256,
      "learning_rate": 6e-05,
      "loss": 1.3221,
      "step": 6
    },
    {
      "epoch": 0.009507640067911714,
      "grad_norm": 0.3615075945854187,
      "learning_rate": 7e-05,
      "loss": 1.3454,
      "step": 7
    },
    {
      "epoch": 0.010865874363327675,
      "grad_norm": 0.35050615668296814,
      "learning_rate": 8e-05,
      "loss": 1.309,
      "step": 8
    },
    {
      "epoch": 0.012224108658743633,
      "grad_norm": 0.30065447092056274,
      "learning_rate": 9e-05,
      "loss": 1.2385,
      "step": 9
    },
    {
      "epoch": 0.013582342954159592,
      "grad_norm": 0.3258337378501892,
      "learning_rate": 0.0001,
      "loss": 1.2086,
      "step": 10
    },
    {
      "epoch": 0.014940577249575551,
      "grad_norm": 0.3569473326206207,
      "learning_rate": 9.99983777858264e-05,
      "loss": 1.3262,
      "step": 11
    },
    {
      "epoch": 0.01629881154499151,
      "grad_norm": 0.37945055961608887,
      "learning_rate": 9.999351124856874e-05,
      "loss": 1.3609,
      "step": 12
    },
    {
      "epoch": 0.01765704584040747,
      "grad_norm": 0.3099724352359772,
      "learning_rate": 9.998540070400966e-05,
      "loss": 1.2673,
      "step": 13
    },
    {
      "epoch": 0.019015280135823428,
      "grad_norm": 0.31235983967781067,
      "learning_rate": 9.997404667843075e-05,
      "loss": 1.2393,
      "step": 14
    },
    {
      "epoch": 0.02037351443123939,
      "grad_norm": 0.27202108502388,
      "learning_rate": 9.995944990857849e-05,
      "loss": 1.2544,
      "step": 15
    },
    {
      "epoch": 0.02173174872665535,
      "grad_norm": 0.27280139923095703,
      "learning_rate": 9.994161134161634e-05,
      "loss": 1.2061,
      "step": 16
    },
    {
      "epoch": 0.023089983022071308,
      "grad_norm": 0.293769896030426,
      "learning_rate": 9.992053213506334e-05,
      "loss": 1.1964,
      "step": 17
    },
    {
      "epoch": 0.024448217317487267,
      "grad_norm": 0.28996336460113525,
      "learning_rate": 9.989621365671902e-05,
      "loss": 1.2483,
      "step": 18
    },
    {
      "epoch": 0.025806451612903226,
      "grad_norm": 0.271930456161499,
      "learning_rate": 9.986865748457457e-05,
      "loss": 1.2141,
      "step": 19
    },
    {
      "epoch": 0.027164685908319185,
      "grad_norm": 0.2686532735824585,
      "learning_rate": 9.983786540671051e-05,
      "loss": 1.2347,
      "step": 20
    },
    {
      "epoch": 0.028522920203735144,
      "grad_norm": 0.2654222249984741,
      "learning_rate": 9.980383942118066e-05,
      "loss": 1.2133,
      "step": 21
    },
    {
      "epoch": 0.029881154499151102,
      "grad_norm": 0.2612136900424957,
      "learning_rate": 9.976658173588244e-05,
      "loss": 1.2274,
      "step": 22
    },
    {
      "epoch": 0.03123938879456706,
      "grad_norm": 0.24660049378871918,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.2137,
      "step": 23
    },
    {
      "epoch": 0.03259762308998302,
      "grad_norm": 0.2399974763393402,
      "learning_rate": 9.968238114591566e-05,
      "loss": 1.1527,
      "step": 24
    },
    {
      "epoch": 0.03395585738539898,
      "grad_norm": 0.24953867495059967,
      "learning_rate": 9.96354437049027e-05,
      "loss": 1.2337,
      "step": 25
    },
    {
      "epoch": 0.03531409168081494,
      "grad_norm": 0.25382018089294434,
      "learning_rate": 9.95852854910781e-05,
      "loss": 1.2282,
      "step": 26
    },
    {
      "epoch": 0.0366723259762309,
      "grad_norm": 0.26546919345855713,
      "learning_rate": 9.953190975913647e-05,
      "loss": 1.2031,
      "step": 27
    },
    {
      "epoch": 0.038030560271646856,
      "grad_norm": 0.2536904513835907,
      "learning_rate": 9.947531997255256e-05,
      "loss": 1.218,
      "step": 28
    },
    {
      "epoch": 0.03938879456706282,
      "grad_norm": 0.2628403902053833,
      "learning_rate": 9.941551980335652e-05,
      "loss": 1.1947,
      "step": 29
    },
    {
      "epoch": 0.04074702886247878,
      "grad_norm": 0.26902860403060913,
      "learning_rate": 9.935251313189564e-05,
      "loss": 1.2258,
      "step": 30
    },
    {
      "epoch": 0.042105263157894736,
      "grad_norm": 0.264118492603302,
      "learning_rate": 9.928630404658255e-05,
      "loss": 1.2215,
      "step": 31
    },
    {
      "epoch": 0.0434634974533107,
      "grad_norm": 0.2503666579723358,
      "learning_rate": 9.921689684362989e-05,
      "loss": 1.2607,
      "step": 32
    },
    {
      "epoch": 0.044821731748726654,
      "grad_norm": 0.2590126693248749,
      "learning_rate": 9.914429602677162e-05,
      "loss": 1.236,
      "step": 33
    },
    {
      "epoch": 0.046179966044142616,
      "grad_norm": 0.2683681845664978,
      "learning_rate": 9.906850630697068e-05,
      "loss": 1.195,
      "step": 34
    },
    {
      "epoch": 0.04753820033955857,
      "grad_norm": 0.2629988491535187,
      "learning_rate": 9.898953260211338e-05,
      "loss": 1.1939,
      "step": 35
    },
    {
      "epoch": 0.048896434634974534,
      "grad_norm": 0.28124624490737915,
      "learning_rate": 9.890738003669029e-05,
      "loss": 1.277,
      "step": 36
    },
    {
      "epoch": 0.05025466893039049,
      "grad_norm": 0.27577537298202515,
      "learning_rate": 9.882205394146361e-05,
      "loss": 1.1631,
      "step": 37
    },
    {
      "epoch": 0.05161290322580645,
      "grad_norm": 0.26586708426475525,
      "learning_rate": 9.87335598531214e-05,
      "loss": 1.1817,
      "step": 38
    },
    {
      "epoch": 0.052971137521222414,
      "grad_norm": 0.28580763936042786,
      "learning_rate": 9.864190351391822e-05,
      "loss": 1.2785,
      "step": 39
    },
    {
      "epoch": 0.05432937181663837,
      "grad_norm": 0.28752267360687256,
      "learning_rate": 9.85470908713026e-05,
      "loss": 1.1675,
      "step": 40
    },
    {
      "epoch": 0.05568760611205433,
      "grad_norm": 0.2884860634803772,
      "learning_rate": 9.844912807753104e-05,
      "loss": 1.1855,
      "step": 41
    },
    {
      "epoch": 0.05704584040747029,
      "grad_norm": 0.2822176218032837,
      "learning_rate": 9.834802148926882e-05,
      "loss": 1.2276,
      "step": 42
    },
    {
      "epoch": 0.05840407470288625,
      "grad_norm": 0.28766173124313354,
      "learning_rate": 9.824377766717759e-05,
      "loss": 1.1655,
      "step": 43
    },
    {
      "epoch": 0.059762308998302205,
      "grad_norm": 0.30563387274742126,
      "learning_rate": 9.813640337548954e-05,
      "loss": 1.2651,
      "step": 44
    },
    {
      "epoch": 0.06112054329371817,
      "grad_norm": 0.3028828799724579,
      "learning_rate": 9.802590558156862e-05,
      "loss": 1.1419,
      "step": 45
    },
    {
      "epoch": 0.06247877758913412,
      "grad_norm": 0.3264331817626953,
      "learning_rate": 9.791229145545831e-05,
      "loss": 1.1629,
      "step": 46
    },
    {
      "epoch": 0.06383701188455009,
      "grad_norm": 0.3301156163215637,
      "learning_rate": 9.779556836941645e-05,
      "loss": 1.2186,
      "step": 47
    },
    {
      "epoch": 0.06519524617996604,
      "grad_norm": 0.3410678505897522,
      "learning_rate": 9.767574389743682e-05,
      "loss": 1.2162,
      "step": 48
    },
    {
      "epoch": 0.06655348047538201,
      "grad_norm": 0.36292213201522827,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.1575,
      "step": 49
    },
    {
      "epoch": 0.06791171477079797,
      "grad_norm": 0.5214879512786865,
      "learning_rate": 9.742682209735727e-05,
      "loss": 1.1233,
      "step": 50
    },
    {
      "epoch": 0.06791171477079797,
      "eval_loss": 1.2765874862670898,
      "eval_runtime": 75.9116,
      "eval_samples_per_second": 16.335,
      "eval_steps_per_second": 4.084,
      "step": 50
    },
    {
      "epoch": 0.06926994906621392,
      "grad_norm": 0.7125508189201355,
      "learning_rate": 9.729774092143627e-05,
      "loss": 1.3574,
      "step": 51
    },
    {
      "epoch": 0.07062818336162988,
      "grad_norm": 0.5578199028968811,
      "learning_rate": 9.716559066288715e-05,
      "loss": 1.2801,
      "step": 52
    },
    {
      "epoch": 0.07198641765704585,
      "grad_norm": 0.4072265923023224,
      "learning_rate": 9.703037989675087e-05,
      "loss": 1.1512,
      "step": 53
    },
    {
      "epoch": 0.0733446519524618,
      "grad_norm": 0.2695824205875397,
      "learning_rate": 9.689211739666023e-05,
      "loss": 1.2332,
      "step": 54
    },
    {
      "epoch": 0.07470288624787776,
      "grad_norm": 0.2340139001607895,
      "learning_rate": 9.675081213427076e-05,
      "loss": 1.1724,
      "step": 55
    },
    {
      "epoch": 0.07606112054329371,
      "grad_norm": 0.23189423978328705,
      "learning_rate": 9.66064732786784e-05,
      "loss": 1.1846,
      "step": 56
    },
    {
      "epoch": 0.07741935483870968,
      "grad_norm": 0.2402050495147705,
      "learning_rate": 9.645911019582467e-05,
      "loss": 1.207,
      "step": 57
    },
    {
      "epoch": 0.07877758913412564,
      "grad_norm": 0.25576063990592957,
      "learning_rate": 9.630873244788883e-05,
      "loss": 1.2574,
      "step": 58
    },
    {
      "epoch": 0.08013582342954159,
      "grad_norm": 0.25986823439598083,
      "learning_rate": 9.615534979266745e-05,
      "loss": 1.2114,
      "step": 59
    },
    {
      "epoch": 0.08149405772495756,
      "grad_norm": 0.250826358795166,
      "learning_rate": 9.599897218294122e-05,
      "loss": 1.1968,
      "step": 60
    },
    {
      "epoch": 0.08285229202037352,
      "grad_norm": 0.25176748633384705,
      "learning_rate": 9.583960976582913e-05,
      "loss": 1.1929,
      "step": 61
    },
    {
      "epoch": 0.08421052631578947,
      "grad_norm": 0.2325923889875412,
      "learning_rate": 9.567727288213005e-05,
      "loss": 1.2047,
      "step": 62
    },
    {
      "epoch": 0.08556876061120543,
      "grad_norm": 0.22908318042755127,
      "learning_rate": 9.551197206565173e-05,
      "loss": 1.1473,
      "step": 63
    },
    {
      "epoch": 0.0869269949066214,
      "grad_norm": 0.21206900477409363,
      "learning_rate": 9.534371804252728e-05,
      "loss": 1.1855,
      "step": 64
    },
    {
      "epoch": 0.08828522920203735,
      "grad_norm": 0.22555455565452576,
      "learning_rate": 9.517252173051911e-05,
      "loss": 1.1698,
      "step": 65
    },
    {
      "epoch": 0.08964346349745331,
      "grad_norm": 0.23330341279506683,
      "learning_rate": 9.49983942383106e-05,
      "loss": 1.1946,
      "step": 66
    },
    {
      "epoch": 0.09100169779286928,
      "grad_norm": 0.24638806283473969,
      "learning_rate": 9.482134686478519e-05,
      "loss": 1.209,
      "step": 67
    },
    {
      "epoch": 0.09235993208828523,
      "grad_norm": 0.2484908103942871,
      "learning_rate": 9.464139109829321e-05,
      "loss": 1.1865,
      "step": 68
    },
    {
      "epoch": 0.09371816638370119,
      "grad_norm": 0.23545753955841064,
      "learning_rate": 9.445853861590647e-05,
      "loss": 1.2447,
      "step": 69
    },
    {
      "epoch": 0.09507640067911714,
      "grad_norm": 0.22677823901176453,
      "learning_rate": 9.42728012826605e-05,
      "loss": 1.1572,
      "step": 70
    },
    {
      "epoch": 0.09643463497453311,
      "grad_norm": 0.22840863466262817,
      "learning_rate": 9.408419115078471e-05,
      "loss": 1.2223,
      "step": 71
    },
    {
      "epoch": 0.09779286926994907,
      "grad_norm": 0.23291656374931335,
      "learning_rate": 9.389272045892024e-05,
      "loss": 1.2053,
      "step": 72
    },
    {
      "epoch": 0.09915110356536502,
      "grad_norm": 0.24530543386936188,
      "learning_rate": 9.36984016313259e-05,
      "loss": 1.2103,
      "step": 73
    },
    {
      "epoch": 0.10050933786078098,
      "grad_norm": 0.24790343642234802,
      "learning_rate": 9.350124727707197e-05,
      "loss": 1.1125,
      "step": 74
    },
    {
      "epoch": 0.10186757215619695,
      "grad_norm": 0.23026898503303528,
      "learning_rate": 9.330127018922194e-05,
      "loss": 1.2442,
      "step": 75
    },
    {
      "epoch": 0.1032258064516129,
      "grad_norm": 0.24416302144527435,
      "learning_rate": 9.309848334400246e-05,
      "loss": 1.1953,
      "step": 76
    },
    {
      "epoch": 0.10458404074702886,
      "grad_norm": 0.22965535521507263,
      "learning_rate": 9.289289989996133e-05,
      "loss": 1.2232,
      "step": 77
    },
    {
      "epoch": 0.10594227504244483,
      "grad_norm": 0.2312672883272171,
      "learning_rate": 9.268453319711363e-05,
      "loss": 1.1542,
      "step": 78
    },
    {
      "epoch": 0.10730050933786078,
      "grad_norm": 0.2299879491329193,
      "learning_rate": 9.247339675607605e-05,
      "loss": 1.209,
      "step": 79
    },
    {
      "epoch": 0.10865874363327674,
      "grad_norm": 0.23572081327438354,
      "learning_rate": 9.225950427718975e-05,
      "loss": 1.2257,
      "step": 80
    },
    {
      "epoch": 0.1100169779286927,
      "grad_norm": 0.23509450256824493,
      "learning_rate": 9.204286963963111e-05,
      "loss": 1.1656,
      "step": 81
    },
    {
      "epoch": 0.11137521222410866,
      "grad_norm": 0.2482880800962448,
      "learning_rate": 9.182350690051133e-05,
      "loss": 1.1448,
      "step": 82
    },
    {
      "epoch": 0.11273344651952462,
      "grad_norm": 0.24817270040512085,
      "learning_rate": 9.160143029396422e-05,
      "loss": 1.2322,
      "step": 83
    },
    {
      "epoch": 0.11409168081494057,
      "grad_norm": 0.24748308956623077,
      "learning_rate": 9.13766542302225e-05,
      "loss": 1.1227,
      "step": 84
    },
    {
      "epoch": 0.11544991511035653,
      "grad_norm": 0.25305241346359253,
      "learning_rate": 9.114919329468282e-05,
      "loss": 1.1337,
      "step": 85
    },
    {
      "epoch": 0.1168081494057725,
      "grad_norm": 0.24612681567668915,
      "learning_rate": 9.091906224695935e-05,
      "loss": 1.114,
      "step": 86
    },
    {
      "epoch": 0.11816638370118845,
      "grad_norm": 0.28515782952308655,
      "learning_rate": 9.068627601992598e-05,
      "loss": 1.1734,
      "step": 87
    },
    {
      "epoch": 0.11952461799660441,
      "grad_norm": 0.2633991539478302,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.1993,
      "step": 88
    },
    {
      "epoch": 0.12088285229202038,
      "grad_norm": 0.26582807302474976,
      "learning_rate": 9.021279861989885e-05,
      "loss": 1.1467,
      "step": 89
    },
    {
      "epoch": 0.12224108658743633,
      "grad_norm": 0.26425063610076904,
      "learning_rate": 8.997213817017507e-05,
      "loss": 1.2081,
      "step": 90
    },
    {
      "epoch": 0.12359932088285229,
      "grad_norm": 0.26553940773010254,
      "learning_rate": 8.972888398568772e-05,
      "loss": 1.1629,
      "step": 91
    },
    {
      "epoch": 0.12495755517826825,
      "grad_norm": 0.2655147910118103,
      "learning_rate": 8.948305185085225e-05,
      "loss": 1.1801,
      "step": 92
    },
    {
      "epoch": 0.12631578947368421,
      "grad_norm": 0.2920195162296295,
      "learning_rate": 8.92346577173636e-05,
      "loss": 1.1947,
      "step": 93
    },
    {
      "epoch": 0.12767402376910017,
      "grad_norm": 0.2780720591545105,
      "learning_rate": 8.898371770316111e-05,
      "loss": 1.209,
      "step": 94
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 0.27526527643203735,
      "learning_rate": 8.873024809138272e-05,
      "loss": 1.173,
      "step": 95
    },
    {
      "epoch": 0.13039049235993208,
      "grad_norm": 0.29257383942604065,
      "learning_rate": 8.847426532930831e-05,
      "loss": 1.2088,
      "step": 96
    },
    {
      "epoch": 0.13174872665534804,
      "grad_norm": 0.29788464307785034,
      "learning_rate": 8.821578602729242e-05,
      "loss": 1.135,
      "step": 97
    },
    {
      "epoch": 0.13310696095076402,
      "grad_norm": 0.31753095984458923,
      "learning_rate": 8.795482695768658e-05,
      "loss": 1.1596,
      "step": 98
    },
    {
      "epoch": 0.13446519524617997,
      "grad_norm": 0.37438032031059265,
      "learning_rate": 8.769140505375085e-05,
      "loss": 1.1788,
      "step": 99
    },
    {
      "epoch": 0.13582342954159593,
      "grad_norm": 0.4673005938529968,
      "learning_rate": 8.742553740855506e-05,
      "loss": 1.2225,
      "step": 100
    },
    {
      "epoch": 0.13582342954159593,
      "eval_loss": 1.2262212038040161,
      "eval_runtime": 75.8474,
      "eval_samples_per_second": 16.349,
      "eval_steps_per_second": 4.087,
      "step": 100
    },
    {
      "epoch": 0.13718166383701189,
      "grad_norm": 0.5559870600700378,
      "learning_rate": 8.715724127386972e-05,
      "loss": 1.2997,
      "step": 101
    },
    {
      "epoch": 0.13853989813242784,
      "grad_norm": 0.4555906057357788,
      "learning_rate": 8.688653405904652e-05,
      "loss": 1.301,
      "step": 102
    },
    {
      "epoch": 0.1398981324278438,
      "grad_norm": 0.31413426995277405,
      "learning_rate": 8.661343332988869e-05,
      "loss": 1.2188,
      "step": 103
    },
    {
      "epoch": 0.14125636672325975,
      "grad_norm": 0.2615854740142822,
      "learning_rate": 8.633795680751116e-05,
      "loss": 1.1284,
      "step": 104
    },
    {
      "epoch": 0.14261460101867574,
      "grad_norm": 0.2208709567785263,
      "learning_rate": 8.606012236719073e-05,
      "loss": 1.2063,
      "step": 105
    },
    {
      "epoch": 0.1439728353140917,
      "grad_norm": 0.20494621992111206,
      "learning_rate": 8.577994803720606e-05,
      "loss": 1.2157,
      "step": 106
    },
    {
      "epoch": 0.14533106960950765,
      "grad_norm": 0.2158403843641281,
      "learning_rate": 8.549745199766792e-05,
      "loss": 1.1396,
      "step": 107
    },
    {
      "epoch": 0.1466893039049236,
      "grad_norm": 0.22656506299972534,
      "learning_rate": 8.521265257933948e-05,
      "loss": 1.1591,
      "step": 108
    },
    {
      "epoch": 0.14804753820033956,
      "grad_norm": 0.23627708852291107,
      "learning_rate": 8.492556826244687e-05,
      "loss": 1.1998,
      "step": 109
    },
    {
      "epoch": 0.1494057724957555,
      "grad_norm": 0.23139552772045135,
      "learning_rate": 8.463621767547998e-05,
      "loss": 1.2165,
      "step": 110
    },
    {
      "epoch": 0.15076400679117147,
      "grad_norm": 0.237015500664711,
      "learning_rate": 8.434461959398376e-05,
      "loss": 1.1609,
      "step": 111
    },
    {
      "epoch": 0.15212224108658742,
      "grad_norm": 0.2324032187461853,
      "learning_rate": 8.405079293933986e-05,
      "loss": 1.2107,
      "step": 112
    },
    {
      "epoch": 0.1534804753820034,
      "grad_norm": 0.22056780755519867,
      "learning_rate": 8.375475677753881e-05,
      "loss": 1.1398,
      "step": 113
    },
    {
      "epoch": 0.15483870967741936,
      "grad_norm": 0.21998582780361176,
      "learning_rate": 8.345653031794292e-05,
      "loss": 1.1788,
      "step": 114
    },
    {
      "epoch": 0.15619694397283532,
      "grad_norm": 0.220359206199646,
      "learning_rate": 8.315613291203976e-05,
      "loss": 1.2086,
      "step": 115
    },
    {
      "epoch": 0.15755517826825127,
      "grad_norm": 0.2111772894859314,
      "learning_rate": 8.285358405218655e-05,
      "loss": 1.1961,
      "step": 116
    },
    {
      "epoch": 0.15891341256366723,
      "grad_norm": 0.21595050394535065,
      "learning_rate": 8.25489033703452e-05,
      "loss": 1.2173,
      "step": 117
    },
    {
      "epoch": 0.16027164685908318,
      "grad_norm": 0.22538748383522034,
      "learning_rate": 8.224211063680853e-05,
      "loss": 1.1223,
      "step": 118
    },
    {
      "epoch": 0.16162988115449914,
      "grad_norm": 0.22196736931800842,
      "learning_rate": 8.19332257589174e-05,
      "loss": 1.1796,
      "step": 119
    },
    {
      "epoch": 0.16298811544991512,
      "grad_norm": 0.22292838990688324,
      "learning_rate": 8.162226877976887e-05,
      "loss": 1.1198,
      "step": 120
    },
    {
      "epoch": 0.16434634974533108,
      "grad_norm": 0.22580811381340027,
      "learning_rate": 8.130925987691569e-05,
      "loss": 1.1997,
      "step": 121
    },
    {
      "epoch": 0.16570458404074703,
      "grad_norm": 0.22446706891059875,
      "learning_rate": 8.099421936105702e-05,
      "loss": 1.0938,
      "step": 122
    },
    {
      "epoch": 0.167062818336163,
      "grad_norm": 0.24063196778297424,
      "learning_rate": 8.067716767472045e-05,
      "loss": 1.272,
      "step": 123
    },
    {
      "epoch": 0.16842105263157894,
      "grad_norm": 0.2342248260974884,
      "learning_rate": 8.035812539093557e-05,
      "loss": 1.1282,
      "step": 124
    },
    {
      "epoch": 0.1697792869269949,
      "grad_norm": 0.23322924971580505,
      "learning_rate": 8.003711321189895e-05,
      "loss": 1.1774,
      "step": 125
    },
    {
      "epoch": 0.17113752122241085,
      "grad_norm": 0.227143794298172,
      "learning_rate": 7.971415196763088e-05,
      "loss": 1.1614,
      "step": 126
    },
    {
      "epoch": 0.17249575551782684,
      "grad_norm": 0.2375260442495346,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.0878,
      "step": 127
    },
    {
      "epoch": 0.1738539898132428,
      "grad_norm": 0.23172912001609802,
      "learning_rate": 7.906246623448183e-05,
      "loss": 1.114,
      "step": 128
    },
    {
      "epoch": 0.17521222410865875,
      "grad_norm": 0.2283349484205246,
      "learning_rate": 7.873378403255419e-05,
      "loss": 1.1243,
      "step": 129
    },
    {
      "epoch": 0.1765704584040747,
      "grad_norm": 0.22897659242153168,
      "learning_rate": 7.840323733655778e-05,
      "loss": 1.1494,
      "step": 130
    },
    {
      "epoch": 0.17792869269949066,
      "grad_norm": 0.2336939573287964,
      "learning_rate": 7.807084759519405e-05,
      "loss": 1.2519,
      "step": 131
    },
    {
      "epoch": 0.17928692699490661,
      "grad_norm": 0.23826266825199127,
      "learning_rate": 7.773663637675694e-05,
      "loss": 1.1086,
      "step": 132
    },
    {
      "epoch": 0.18064516129032257,
      "grad_norm": 0.2425377517938614,
      "learning_rate": 7.740062536773352e-05,
      "loss": 1.1654,
      "step": 133
    },
    {
      "epoch": 0.18200339558573855,
      "grad_norm": 0.2372315227985382,
      "learning_rate": 7.706283637139658e-05,
      "loss": 1.1962,
      "step": 134
    },
    {
      "epoch": 0.1833616298811545,
      "grad_norm": 0.24342741072177887,
      "learning_rate": 7.672329130639005e-05,
      "loss": 1.1181,
      "step": 135
    },
    {
      "epoch": 0.18471986417657046,
      "grad_norm": 0.2434910535812378,
      "learning_rate": 7.638201220530665e-05,
      "loss": 1.2285,
      "step": 136
    },
    {
      "epoch": 0.18607809847198642,
      "grad_norm": 0.24890004098415375,
      "learning_rate": 7.603902121325813e-05,
      "loss": 1.1714,
      "step": 137
    },
    {
      "epoch": 0.18743633276740237,
      "grad_norm": 0.25163108110427856,
      "learning_rate": 7.569434058643844e-05,
      "loss": 1.1956,
      "step": 138
    },
    {
      "epoch": 0.18879456706281833,
      "grad_norm": 0.24931834638118744,
      "learning_rate": 7.534799269067953e-05,
      "loss": 1.1483,
      "step": 139
    },
    {
      "epoch": 0.19015280135823429,
      "grad_norm": 0.25025516748428345,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.2085,
      "step": 140
    },
    {
      "epoch": 0.19151103565365024,
      "grad_norm": 0.2583712339401245,
      "learning_rate": 7.465038509514688e-05,
      "loss": 1.2139,
      "step": 141
    },
    {
      "epoch": 0.19286926994906622,
      "grad_norm": 0.26586151123046875,
      "learning_rate": 7.42991706621303e-05,
      "loss": 1.2006,
      "step": 142
    },
    {
      "epoch": 0.19422750424448218,
      "grad_norm": 0.2643159031867981,
      "learning_rate": 7.394637949075154e-05,
      "loss": 1.1739,
      "step": 143
    },
    {
      "epoch": 0.19558573853989814,
      "grad_norm": 0.2751873731613159,
      "learning_rate": 7.35920344731241e-05,
      "loss": 1.2109,
      "step": 144
    },
    {
      "epoch": 0.1969439728353141,
      "grad_norm": 0.2727619707584381,
      "learning_rate": 7.323615860218843e-05,
      "loss": 1.1851,
      "step": 145
    },
    {
      "epoch": 0.19830220713073005,
      "grad_norm": 0.2831723690032959,
      "learning_rate": 7.287877497021978e-05,
      "loss": 1.1864,
      "step": 146
    },
    {
      "epoch": 0.199660441426146,
      "grad_norm": 0.28427502512931824,
      "learning_rate": 7.251990676732984e-05,
      "loss": 1.1347,
      "step": 147
    },
    {
      "epoch": 0.20101867572156196,
      "grad_norm": 0.2935657203197479,
      "learning_rate": 7.215957727996207e-05,
      "loss": 1.1435,
      "step": 148
    },
    {
      "epoch": 0.20237691001697794,
      "grad_norm": 0.3436415195465088,
      "learning_rate": 7.179780988938051e-05,
      "loss": 1.182,
      "step": 149
    },
    {
      "epoch": 0.2037351443123939,
      "grad_norm": 0.41627824306488037,
      "learning_rate": 7.143462807015271e-05,
      "loss": 1.2069,
      "step": 150
    },
    {
      "epoch": 0.2037351443123939,
      "eval_loss": 1.1833840608596802,
      "eval_runtime": 75.9037,
      "eval_samples_per_second": 16.336,
      "eval_steps_per_second": 4.084,
      "step": 150
    },
    {
      "epoch": 0.20509337860780985,
      "grad_norm": 0.2699250876903534,
      "learning_rate": 7.107005538862646e-05,
      "loss": 1.2426,
      "step": 151
    },
    {
      "epoch": 0.2064516129032258,
      "grad_norm": 0.29538774490356445,
      "learning_rate": 7.07041155014006e-05,
      "loss": 1.2144,
      "step": 152
    },
    {
      "epoch": 0.20780984719864176,
      "grad_norm": 0.2714727222919464,
      "learning_rate": 7.033683215379002e-05,
      "loss": 1.2891,
      "step": 153
    },
    {
      "epoch": 0.20916808149405772,
      "grad_norm": 0.26055291295051575,
      "learning_rate": 6.996822917828477e-05,
      "loss": 1.1651,
      "step": 154
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.2369328737258911,
      "learning_rate": 6.959833049300377e-05,
      "loss": 1.1572,
      "step": 155
    },
    {
      "epoch": 0.21188455008488966,
      "grad_norm": 0.215769425034523,
      "learning_rate": 6.922716010014255e-05,
      "loss": 1.1521,
      "step": 156
    },
    {
      "epoch": 0.2132427843803056,
      "grad_norm": 0.20645937323570251,
      "learning_rate": 6.885474208441603e-05,
      "loss": 1.1744,
      "step": 157
    },
    {
      "epoch": 0.21460101867572157,
      "grad_norm": 0.21098513901233673,
      "learning_rate": 6.848110061149556e-05,
      "loss": 1.1768,
      "step": 158
    },
    {
      "epoch": 0.21595925297113752,
      "grad_norm": 0.20801562070846558,
      "learning_rate": 6.810625992644085e-05,
      "loss": 1.1577,
      "step": 159
    },
    {
      "epoch": 0.21731748726655348,
      "grad_norm": 0.22393718361854553,
      "learning_rate": 6.773024435212678e-05,
      "loss": 1.2199,
      "step": 160
    },
    {
      "epoch": 0.21867572156196943,
      "grad_norm": 0.20644541084766388,
      "learning_rate": 6.735307828766515e-05,
      "loss": 1.1711,
      "step": 161
    },
    {
      "epoch": 0.2200339558573854,
      "grad_norm": 0.21023766696453094,
      "learning_rate": 6.697478620682137e-05,
      "loss": 1.1736,
      "step": 162
    },
    {
      "epoch": 0.22139219015280137,
      "grad_norm": 0.21561093628406525,
      "learning_rate": 6.659539265642643e-05,
      "loss": 1.1502,
      "step": 163
    },
    {
      "epoch": 0.22275042444821733,
      "grad_norm": 0.2196631133556366,
      "learning_rate": 6.621492225478414e-05,
      "loss": 1.1813,
      "step": 164
    },
    {
      "epoch": 0.22410865874363328,
      "grad_norm": 0.2159351408481598,
      "learning_rate": 6.583339969007363e-05,
      "loss": 1.1997,
      "step": 165
    },
    {
      "epoch": 0.22546689303904924,
      "grad_norm": 0.215266153216362,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.0927,
      "step": 166
    },
    {
      "epoch": 0.2268251273344652,
      "grad_norm": 0.22314059734344482,
      "learning_rate": 6.506729716392481e-05,
      "loss": 1.2044,
      "step": 167
    },
    {
      "epoch": 0.22818336162988115,
      "grad_norm": 0.22275614738464355,
      "learning_rate": 6.468276691378155e-05,
      "loss": 1.2058,
      "step": 168
    },
    {
      "epoch": 0.2295415959252971,
      "grad_norm": 0.22310510277748108,
      "learning_rate": 6.429728391993446e-05,
      "loss": 1.1506,
      "step": 169
    },
    {
      "epoch": 0.23089983022071306,
      "grad_norm": 0.21441923081874847,
      "learning_rate": 6.391087319582264e-05,
      "loss": 1.1346,
      "step": 170
    },
    {
      "epoch": 0.23225806451612904,
      "grad_norm": 0.2179337441921234,
      "learning_rate": 6.35235598150842e-05,
      "loss": 1.1355,
      "step": 171
    },
    {
      "epoch": 0.233616298811545,
      "grad_norm": 0.2206660360097885,
      "learning_rate": 6.313536890992935e-05,
      "loss": 1.1346,
      "step": 172
    },
    {
      "epoch": 0.23497453310696095,
      "grad_norm": 0.21749481558799744,
      "learning_rate": 6.274632566950967e-05,
      "loss": 1.1424,
      "step": 173
    },
    {
      "epoch": 0.2363327674023769,
      "grad_norm": 0.21882691979408264,
      "learning_rate": 6.235645533828349e-05,
      "loss": 1.2118,
      "step": 174
    },
    {
      "epoch": 0.23769100169779286,
      "grad_norm": 0.22239045798778534,
      "learning_rate": 6.19657832143779e-05,
      "loss": 1.1532,
      "step": 175
    },
    {
      "epoch": 0.23904923599320882,
      "grad_norm": 0.22943776845932007,
      "learning_rate": 6.157433464794716e-05,
      "loss": 1.1658,
      "step": 176
    },
    {
      "epoch": 0.24040747028862477,
      "grad_norm": 0.22224801778793335,
      "learning_rate": 6.118213503952779e-05,
      "loss": 1.2036,
      "step": 177
    },
    {
      "epoch": 0.24176570458404076,
      "grad_norm": 0.23055648803710938,
      "learning_rate": 6.078920983839031e-05,
      "loss": 1.1496,
      "step": 178
    },
    {
      "epoch": 0.2431239388794567,
      "grad_norm": 0.22937384247779846,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 1.1748,
      "step": 179
    },
    {
      "epoch": 0.24448217317487267,
      "grad_norm": 0.22974710166454315,
      "learning_rate": 6.0001284688802226e-05,
      "loss": 1.1668,
      "step": 180
    },
    {
      "epoch": 0.24584040747028862,
      "grad_norm": 0.2364954799413681,
      "learning_rate": 5.960633586768543e-05,
      "loss": 1.1893,
      "step": 181
    },
    {
      "epoch": 0.24719864176570458,
      "grad_norm": 0.2343706637620926,
      "learning_rate": 5.921076370520058e-05,
      "loss": 1.1655,
      "step": 182
    },
    {
      "epoch": 0.24855687606112054,
      "grad_norm": 0.23308633267879486,
      "learning_rate": 5.8814593869458455e-05,
      "loss": 1.1332,
      "step": 183
    },
    {
      "epoch": 0.2499151103565365,
      "grad_norm": 0.2331320345401764,
      "learning_rate": 5.841785206735192e-05,
      "loss": 1.088,
      "step": 184
    },
    {
      "epoch": 0.25127334465195245,
      "grad_norm": 0.23977631330490112,
      "learning_rate": 5.8020564042888015e-05,
      "loss": 1.2078,
      "step": 185
    },
    {
      "epoch": 0.25263157894736843,
      "grad_norm": 0.23800607025623322,
      "learning_rate": 5.762275557551727e-05,
      "loss": 1.1134,
      "step": 186
    },
    {
      "epoch": 0.25398981324278436,
      "grad_norm": 0.24236267805099487,
      "learning_rate": 5.7224452478461064e-05,
      "loss": 1.2159,
      "step": 187
    },
    {
      "epoch": 0.25534804753820034,
      "grad_norm": 0.24531729519367218,
      "learning_rate": 5.682568059703659e-05,
      "loss": 1.1084,
      "step": 188
    },
    {
      "epoch": 0.2567062818336163,
      "grad_norm": 0.2564159333705902,
      "learning_rate": 5.642646580697973e-05,
      "loss": 1.0714,
      "step": 189
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 0.2536662817001343,
      "learning_rate": 5.602683401276615e-05,
      "loss": 1.1332,
      "step": 190
    },
    {
      "epoch": 0.25942275042444823,
      "grad_norm": 0.2573738098144531,
      "learning_rate": 5.562681114593028e-05,
      "loss": 1.1732,
      "step": 191
    },
    {
      "epoch": 0.26078098471986416,
      "grad_norm": 0.28553032875061035,
      "learning_rate": 5.522642316338268e-05,
      "loss": 1.1813,
      "step": 192
    },
    {
      "epoch": 0.26213921901528014,
      "grad_norm": 0.2642425000667572,
      "learning_rate": 5.482569604572576e-05,
      "loss": 1.1169,
      "step": 193
    },
    {
      "epoch": 0.2634974533106961,
      "grad_norm": 0.27143600583076477,
      "learning_rate": 5.442465579556793e-05,
      "loss": 1.17,
      "step": 194
    },
    {
      "epoch": 0.26485568760611206,
      "grad_norm": 0.27368876338005066,
      "learning_rate": 5.402332843583631e-05,
      "loss": 1.1743,
      "step": 195
    },
    {
      "epoch": 0.26621392190152804,
      "grad_norm": 0.28669026494026184,
      "learning_rate": 5.3621740008088126e-05,
      "loss": 1.1476,
      "step": 196
    },
    {
      "epoch": 0.26757215619694397,
      "grad_norm": 0.3066455125808716,
      "learning_rate": 5.321991657082097e-05,
      "loss": 1.2093,
      "step": 197
    },
    {
      "epoch": 0.26893039049235995,
      "grad_norm": 0.3169887661933899,
      "learning_rate": 5.281788419778187e-05,
      "loss": 1.1844,
      "step": 198
    },
    {
      "epoch": 0.2702886247877759,
      "grad_norm": 0.34708085656166077,
      "learning_rate": 5.2415668976275355e-05,
      "loss": 1.154,
      "step": 199
    },
    {
      "epoch": 0.27164685908319186,
      "grad_norm": 0.39115723967552185,
      "learning_rate": 5.201329700547076e-05,
      "loss": 1.1696,
      "step": 200
    },
    {
      "epoch": 0.27164685908319186,
      "eval_loss": 1.169433355331421,
      "eval_runtime": 75.7878,
      "eval_samples_per_second": 16.361,
      "eval_steps_per_second": 4.09,
      "step": 200
    },
    {
      "epoch": 0.2730050933786078,
      "grad_norm": 0.22767877578735352,
      "learning_rate": 5.161079439470866e-05,
      "loss": 1.2705,
      "step": 201
    },
    {
      "epoch": 0.27436332767402377,
      "grad_norm": 0.23202009499073029,
      "learning_rate": 5.1208187261806615e-05,
      "loss": 1.1301,
      "step": 202
    },
    {
      "epoch": 0.27572156196943975,
      "grad_norm": 0.2392009049654007,
      "learning_rate": 5.080550173136457e-05,
      "loss": 1.2333,
      "step": 203
    },
    {
      "epoch": 0.2770797962648557,
      "grad_norm": 0.2345076948404312,
      "learning_rate": 5.0402763933069496e-05,
      "loss": 1.1608,
      "step": 204
    },
    {
      "epoch": 0.27843803056027167,
      "grad_norm": 0.2247873842716217,
      "learning_rate": 5e-05,
      "loss": 1.1777,
      "step": 205
    },
    {
      "epoch": 0.2797962648556876,
      "grad_norm": 0.22494539618492126,
      "learning_rate": 4.9597236066930516e-05,
      "loss": 1.2278,
      "step": 206
    },
    {
      "epoch": 0.2811544991511036,
      "grad_norm": 0.20997026562690735,
      "learning_rate": 4.919449826863544e-05,
      "loss": 1.1539,
      "step": 207
    },
    {
      "epoch": 0.2825127334465195,
      "grad_norm": 0.20957647264003754,
      "learning_rate": 4.87918127381934e-05,
      "loss": 1.1532,
      "step": 208
    },
    {
      "epoch": 0.2838709677419355,
      "grad_norm": 0.21663248538970947,
      "learning_rate": 4.8389205605291365e-05,
      "loss": 1.2097,
      "step": 209
    },
    {
      "epoch": 0.28522920203735147,
      "grad_norm": 0.2205830067396164,
      "learning_rate": 4.798670299452926e-05,
      "loss": 1.1235,
      "step": 210
    },
    {
      "epoch": 0.2865874363327674,
      "grad_norm": 0.21743406355381012,
      "learning_rate": 4.758433102372466e-05,
      "loss": 1.0935,
      "step": 211
    },
    {
      "epoch": 0.2879456706281834,
      "grad_norm": 0.21099945902824402,
      "learning_rate": 4.7182115802218126e-05,
      "loss": 1.2179,
      "step": 212
    },
    {
      "epoch": 0.2893039049235993,
      "grad_norm": 0.2079070806503296,
      "learning_rate": 4.678008342917903e-05,
      "loss": 1.1754,
      "step": 213
    },
    {
      "epoch": 0.2906621392190153,
      "grad_norm": 0.2137642800807953,
      "learning_rate": 4.6378259991911886e-05,
      "loss": 1.2339,
      "step": 214
    },
    {
      "epoch": 0.2920203735144312,
      "grad_norm": 0.22139067947864532,
      "learning_rate": 4.597667156416371e-05,
      "loss": 1.0929,
      "step": 215
    },
    {
      "epoch": 0.2933786078098472,
      "grad_norm": 0.21825869381427765,
      "learning_rate": 4.5575344204432084e-05,
      "loss": 1.1509,
      "step": 216
    },
    {
      "epoch": 0.29473684210526313,
      "grad_norm": 0.217268168926239,
      "learning_rate": 4.5174303954274244e-05,
      "loss": 1.1935,
      "step": 217
    },
    {
      "epoch": 0.2960950764006791,
      "grad_norm": 0.21918080747127533,
      "learning_rate": 4.477357683661734e-05,
      "loss": 1.1205,
      "step": 218
    },
    {
      "epoch": 0.2974533106960951,
      "grad_norm": 0.21873408555984497,
      "learning_rate": 4.437318885406973e-05,
      "loss": 1.0912,
      "step": 219
    },
    {
      "epoch": 0.298811544991511,
      "grad_norm": 0.22883340716362,
      "learning_rate": 4.397316598723385e-05,
      "loss": 1.1726,
      "step": 220
    },
    {
      "epoch": 0.300169779286927,
      "grad_norm": 0.22588761150836945,
      "learning_rate": 4.3573534193020274e-05,
      "loss": 1.1836,
      "step": 221
    },
    {
      "epoch": 0.30152801358234294,
      "grad_norm": 0.22106902301311493,
      "learning_rate": 4.317431940296343e-05,
      "loss": 1.1351,
      "step": 222
    },
    {
      "epoch": 0.3028862478777589,
      "grad_norm": 0.22283874452114105,
      "learning_rate": 4.277554752153895e-05,
      "loss": 1.1228,
      "step": 223
    },
    {
      "epoch": 0.30424448217317485,
      "grad_norm": 0.21989330649375916,
      "learning_rate": 4.237724442448273e-05,
      "loss": 1.1015,
      "step": 224
    },
    {
      "epoch": 0.30560271646859083,
      "grad_norm": 0.2293674349784851,
      "learning_rate": 4.197943595711198e-05,
      "loss": 1.1147,
      "step": 225
    },
    {
      "epoch": 0.3069609507640068,
      "grad_norm": 0.2231401950120926,
      "learning_rate": 4.1582147932648074e-05,
      "loss": 1.1674,
      "step": 226
    },
    {
      "epoch": 0.30831918505942274,
      "grad_norm": 0.2178943008184433,
      "learning_rate": 4.118540613054156e-05,
      "loss": 1.1364,
      "step": 227
    },
    {
      "epoch": 0.3096774193548387,
      "grad_norm": 0.22939598560333252,
      "learning_rate": 4.078923629479943e-05,
      "loss": 1.0993,
      "step": 228
    },
    {
      "epoch": 0.31103565365025465,
      "grad_norm": 0.22620512545108795,
      "learning_rate": 4.039366413231458e-05,
      "loss": 1.1105,
      "step": 229
    },
    {
      "epoch": 0.31239388794567063,
      "grad_norm": 0.23713254928588867,
      "learning_rate": 3.9998715311197785e-05,
      "loss": 1.1572,
      "step": 230
    },
    {
      "epoch": 0.31375212224108656,
      "grad_norm": 0.23715730011463165,
      "learning_rate": 3.960441545911204e-05,
      "loss": 1.1551,
      "step": 231
    },
    {
      "epoch": 0.31511035653650254,
      "grad_norm": 0.2342255711555481,
      "learning_rate": 3.92107901616097e-05,
      "loss": 1.1296,
      "step": 232
    },
    {
      "epoch": 0.31646859083191853,
      "grad_norm": 0.2432532161474228,
      "learning_rate": 3.8817864960472236e-05,
      "loss": 1.1381,
      "step": 233
    },
    {
      "epoch": 0.31782682512733446,
      "grad_norm": 0.23796603083610535,
      "learning_rate": 3.842566535205286e-05,
      "loss": 1.1609,
      "step": 234
    },
    {
      "epoch": 0.31918505942275044,
      "grad_norm": 0.23435711860656738,
      "learning_rate": 3.803421678562213e-05,
      "loss": 1.2078,
      "step": 235
    },
    {
      "epoch": 0.32054329371816637,
      "grad_norm": 0.23656964302062988,
      "learning_rate": 3.764354466171652e-05,
      "loss": 1.1346,
      "step": 236
    },
    {
      "epoch": 0.32190152801358235,
      "grad_norm": 0.24362805485725403,
      "learning_rate": 3.725367433049033e-05,
      "loss": 1.1695,
      "step": 237
    },
    {
      "epoch": 0.3232597623089983,
      "grad_norm": 0.2416103184223175,
      "learning_rate": 3.6864631090070655e-05,
      "loss": 1.1603,
      "step": 238
    },
    {
      "epoch": 0.32461799660441426,
      "grad_norm": 0.2516671121120453,
      "learning_rate": 3.6476440184915815e-05,
      "loss": 1.1832,
      "step": 239
    },
    {
      "epoch": 0.32597623089983024,
      "grad_norm": 0.25407183170318604,
      "learning_rate": 3.608912680417737e-05,
      "loss": 1.1619,
      "step": 240
    },
    {
      "epoch": 0.32733446519524617,
      "grad_norm": 0.24797658622264862,
      "learning_rate": 3.570271608006555e-05,
      "loss": 1.1321,
      "step": 241
    },
    {
      "epoch": 0.32869269949066215,
      "grad_norm": 0.26799654960632324,
      "learning_rate": 3.531723308621847e-05,
      "loss": 1.2035,
      "step": 242
    },
    {
      "epoch": 0.3300509337860781,
      "grad_norm": 0.27492526173591614,
      "learning_rate": 3.493270283607522e-05,
      "loss": 1.171,
      "step": 243
    },
    {
      "epoch": 0.33140916808149407,
      "grad_norm": 0.2613578736782074,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.1238,
      "step": 244
    },
    {
      "epoch": 0.33276740237691,
      "grad_norm": 0.28807345032691956,
      "learning_rate": 3.4166600309926387e-05,
      "loss": 1.1153,
      "step": 245
    },
    {
      "epoch": 0.334125636672326,
      "grad_norm": 0.28468626737594604,
      "learning_rate": 3.3785077745215873e-05,
      "loss": 1.1984,
      "step": 246
    },
    {
      "epoch": 0.33548387096774196,
      "grad_norm": 0.28014445304870605,
      "learning_rate": 3.340460734357359e-05,
      "loss": 1.1793,
      "step": 247
    },
    {
      "epoch": 0.3368421052631579,
      "grad_norm": 0.2991635203361511,
      "learning_rate": 3.3025213793178646e-05,
      "loss": 1.1491,
      "step": 248
    },
    {
      "epoch": 0.33820033955857387,
      "grad_norm": 0.34155353903770447,
      "learning_rate": 3.264692171233485e-05,
      "loss": 1.1472,
      "step": 249
    },
    {
      "epoch": 0.3395585738539898,
      "grad_norm": 0.39328497648239136,
      "learning_rate": 3.226975564787322e-05,
      "loss": 1.1768,
      "step": 250
    },
    {
      "epoch": 0.3395585738539898,
      "eval_loss": 1.1618001461029053,
      "eval_runtime": 75.9188,
      "eval_samples_per_second": 16.333,
      "eval_steps_per_second": 4.083,
      "step": 250
    },
    {
      "epoch": 0.3409168081494058,
      "grad_norm": 0.21301254630088806,
      "learning_rate": 3.189374007355917e-05,
      "loss": 1.1747,
      "step": 251
    },
    {
      "epoch": 0.3422750424448217,
      "grad_norm": 0.2225201576948166,
      "learning_rate": 3.151889938850445e-05,
      "loss": 1.1631,
      "step": 252
    },
    {
      "epoch": 0.3436332767402377,
      "grad_norm": 0.2226182520389557,
      "learning_rate": 3.114525791558398e-05,
      "loss": 1.1984,
      "step": 253
    },
    {
      "epoch": 0.3449915110356537,
      "grad_norm": 0.23307554423809052,
      "learning_rate": 3.0772839899857464e-05,
      "loss": 1.1848,
      "step": 254
    },
    {
      "epoch": 0.3463497453310696,
      "grad_norm": 0.22128012776374817,
      "learning_rate": 3.0401669506996256e-05,
      "loss": 1.1897,
      "step": 255
    },
    {
      "epoch": 0.3477079796264856,
      "grad_norm": 0.22312457859516144,
      "learning_rate": 3.003177082171523e-05,
      "loss": 1.1978,
      "step": 256
    },
    {
      "epoch": 0.3490662139219015,
      "grad_norm": 0.21898281574249268,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 1.1822,
      "step": 257
    },
    {
      "epoch": 0.3504244482173175,
      "grad_norm": 0.2056150883436203,
      "learning_rate": 2.9295884498599414e-05,
      "loss": 1.2227,
      "step": 258
    },
    {
      "epoch": 0.3517826825127334,
      "grad_norm": 0.221930593252182,
      "learning_rate": 2.8929944611373554e-05,
      "loss": 1.1336,
      "step": 259
    },
    {
      "epoch": 0.3531409168081494,
      "grad_norm": 0.21197658777236938,
      "learning_rate": 2.8565371929847284e-05,
      "loss": 1.205,
      "step": 260
    },
    {
      "epoch": 0.3544991511035654,
      "grad_norm": 0.20999324321746826,
      "learning_rate": 2.8202190110619493e-05,
      "loss": 1.1551,
      "step": 261
    },
    {
      "epoch": 0.3558573853989813,
      "grad_norm": 0.20450879633426666,
      "learning_rate": 2.784042272003794e-05,
      "loss": 1.1924,
      "step": 262
    },
    {
      "epoch": 0.3572156196943973,
      "grad_norm": 0.20566685497760773,
      "learning_rate": 2.7480093232670158e-05,
      "loss": 1.1541,
      "step": 263
    },
    {
      "epoch": 0.35857385398981323,
      "grad_norm": 0.2097368687391281,
      "learning_rate": 2.712122502978024e-05,
      "loss": 1.1414,
      "step": 264
    },
    {
      "epoch": 0.3599320882852292,
      "grad_norm": 0.21455705165863037,
      "learning_rate": 2.6763841397811573e-05,
      "loss": 1.1273,
      "step": 265
    },
    {
      "epoch": 0.36129032258064514,
      "grad_norm": 0.20965005457401276,
      "learning_rate": 2.64079655268759e-05,
      "loss": 1.2223,
      "step": 266
    },
    {
      "epoch": 0.3626485568760611,
      "grad_norm": 0.21943199634552002,
      "learning_rate": 2.605362050924848e-05,
      "loss": 1.2121,
      "step": 267
    },
    {
      "epoch": 0.3640067911714771,
      "grad_norm": 0.21387051045894623,
      "learning_rate": 2.57008293378697e-05,
      "loss": 1.1135,
      "step": 268
    },
    {
      "epoch": 0.36536502546689303,
      "grad_norm": 0.22179877758026123,
      "learning_rate": 2.534961490485313e-05,
      "loss": 1.1187,
      "step": 269
    },
    {
      "epoch": 0.366723259762309,
      "grad_norm": 0.21317237615585327,
      "learning_rate": 2.500000000000001e-05,
      "loss": 1.1579,
      "step": 270
    },
    {
      "epoch": 0.36808149405772495,
      "grad_norm": 0.2269050031900406,
      "learning_rate": 2.4652007309320498e-05,
      "loss": 1.1958,
      "step": 271
    },
    {
      "epoch": 0.36943972835314093,
      "grad_norm": 0.22643712162971497,
      "learning_rate": 2.430565941356157e-05,
      "loss": 1.1358,
      "step": 272
    },
    {
      "epoch": 0.37079796264855686,
      "grad_norm": 0.22138290107250214,
      "learning_rate": 2.3960978786741877e-05,
      "loss": 1.199,
      "step": 273
    },
    {
      "epoch": 0.37215619694397284,
      "grad_norm": 0.22572052478790283,
      "learning_rate": 2.361798779469336e-05,
      "loss": 1.1746,
      "step": 274
    },
    {
      "epoch": 0.3735144312393888,
      "grad_norm": 0.2312777042388916,
      "learning_rate": 2.3276708693609943e-05,
      "loss": 1.1201,
      "step": 275
    },
    {
      "epoch": 0.37487266553480475,
      "grad_norm": 0.21996109187602997,
      "learning_rate": 2.2937163628603435e-05,
      "loss": 1.0967,
      "step": 276
    },
    {
      "epoch": 0.37623089983022073,
      "grad_norm": 0.2212551087141037,
      "learning_rate": 2.259937463226651e-05,
      "loss": 1.118,
      "step": 277
    },
    {
      "epoch": 0.37758913412563666,
      "grad_norm": 0.22271205484867096,
      "learning_rate": 2.2263363623243054e-05,
      "loss": 1.1563,
      "step": 278
    },
    {
      "epoch": 0.37894736842105264,
      "grad_norm": 0.23066063225269318,
      "learning_rate": 2.192915240480596e-05,
      "loss": 1.1351,
      "step": 279
    },
    {
      "epoch": 0.38030560271646857,
      "grad_norm": 0.24335499107837677,
      "learning_rate": 2.1596762663442218e-05,
      "loss": 1.2207,
      "step": 280
    },
    {
      "epoch": 0.38166383701188455,
      "grad_norm": 0.23115313053131104,
      "learning_rate": 2.1266215967445824e-05,
      "loss": 1.1353,
      "step": 281
    },
    {
      "epoch": 0.3830220713073005,
      "grad_norm": 0.23002904653549194,
      "learning_rate": 2.0937533765518187e-05,
      "loss": 1.1279,
      "step": 282
    },
    {
      "epoch": 0.38438030560271647,
      "grad_norm": 0.2410050332546234,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.1076,
      "step": 283
    },
    {
      "epoch": 0.38573853989813245,
      "grad_norm": 0.23403891921043396,
      "learning_rate": 2.0285848032369137e-05,
      "loss": 1.1247,
      "step": 284
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 0.23966725170612335,
      "learning_rate": 1.996288678810105e-05,
      "loss": 1.1294,
      "step": 285
    },
    {
      "epoch": 0.38845500848896436,
      "grad_norm": 0.2466222494840622,
      "learning_rate": 1.9641874609064443e-05,
      "loss": 1.158,
      "step": 286
    },
    {
      "epoch": 0.3898132427843803,
      "grad_norm": 0.23464591801166534,
      "learning_rate": 1.932283232527956e-05,
      "loss": 1.1227,
      "step": 287
    },
    {
      "epoch": 0.39117147707979627,
      "grad_norm": 0.2456170916557312,
      "learning_rate": 1.9005780638942982e-05,
      "loss": 1.1333,
      "step": 288
    },
    {
      "epoch": 0.3925297113752122,
      "grad_norm": 0.25727927684783936,
      "learning_rate": 1.8690740123084316e-05,
      "loss": 1.148,
      "step": 289
    },
    {
      "epoch": 0.3938879456706282,
      "grad_norm": 0.2539084851741791,
      "learning_rate": 1.837773122023114e-05,
      "loss": 1.2192,
      "step": 290
    },
    {
      "epoch": 0.39524617996604416,
      "grad_norm": 0.2602689862251282,
      "learning_rate": 1.8066774241082612e-05,
      "loss": 1.1582,
      "step": 291
    },
    {
      "epoch": 0.3966044142614601,
      "grad_norm": 0.2653775215148926,
      "learning_rate": 1.7757889363191483e-05,
      "loss": 1.122,
      "step": 292
    },
    {
      "epoch": 0.3979626485568761,
      "grad_norm": 0.26318901777267456,
      "learning_rate": 1.745109662965481e-05,
      "loss": 1.0884,
      "step": 293
    },
    {
      "epoch": 0.399320882852292,
      "grad_norm": 0.28393861651420593,
      "learning_rate": 1.714641594781347e-05,
      "loss": 1.1316,
      "step": 294
    },
    {
      "epoch": 0.400679117147708,
      "grad_norm": 0.2693468928337097,
      "learning_rate": 1.684386708796025e-05,
      "loss": 1.1191,
      "step": 295
    },
    {
      "epoch": 0.4020373514431239,
      "grad_norm": 0.28361696004867554,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 1.1576,
      "step": 296
    },
    {
      "epoch": 0.4033955857385399,
      "grad_norm": 0.30078738927841187,
      "learning_rate": 1.62452432224612e-05,
      "loss": 1.1782,
      "step": 297
    },
    {
      "epoch": 0.4047538200339559,
      "grad_norm": 0.31263673305511475,
      "learning_rate": 1.5949207060660138e-05,
      "loss": 1.1739,
      "step": 298
    },
    {
      "epoch": 0.4061120543293718,
      "grad_norm": 0.33561959862709045,
      "learning_rate": 1.5655380406016235e-05,
      "loss": 1.1671,
      "step": 299
    },
    {
      "epoch": 0.4074702886247878,
      "grad_norm": 0.4124099314212799,
      "learning_rate": 1.536378232452003e-05,
      "loss": 1.1998,
      "step": 300
    },
    {
      "epoch": 0.4074702886247878,
      "eval_loss": 1.1560478210449219,
      "eval_runtime": 75.727,
      "eval_samples_per_second": 16.375,
      "eval_steps_per_second": 4.094,
      "step": 300
    },
    {
      "epoch": 0.4088285229202037,
      "grad_norm": 0.20203496515750885,
      "learning_rate": 1.5074431737553157e-05,
      "loss": 1.1235,
      "step": 301
    },
    {
      "epoch": 0.4101867572156197,
      "grad_norm": 0.2083783745765686,
      "learning_rate": 1.4787347420660541e-05,
      "loss": 1.1932,
      "step": 302
    },
    {
      "epoch": 0.41154499151103563,
      "grad_norm": 0.2198968380689621,
      "learning_rate": 1.4502548002332088e-05,
      "loss": 1.1904,
      "step": 303
    },
    {
      "epoch": 0.4129032258064516,
      "grad_norm": 0.21494299173355103,
      "learning_rate": 1.422005196279395e-05,
      "loss": 1.1966,
      "step": 304
    },
    {
      "epoch": 0.4142614601018676,
      "grad_norm": 0.20090734958648682,
      "learning_rate": 1.3939877632809278e-05,
      "loss": 1.1352,
      "step": 305
    },
    {
      "epoch": 0.4156196943972835,
      "grad_norm": 0.21259278059005737,
      "learning_rate": 1.3662043192488849e-05,
      "loss": 1.1839,
      "step": 306
    },
    {
      "epoch": 0.4169779286926995,
      "grad_norm": 0.2208050638437271,
      "learning_rate": 1.338656667011134e-05,
      "loss": 1.147,
      "step": 307
    },
    {
      "epoch": 0.41833616298811543,
      "grad_norm": 0.2121581882238388,
      "learning_rate": 1.3113465940953495e-05,
      "loss": 1.0805,
      "step": 308
    },
    {
      "epoch": 0.4196943972835314,
      "grad_norm": 0.21660660207271576,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 1.1498,
      "step": 309
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.2145054191350937,
      "learning_rate": 1.257446259144494e-05,
      "loss": 1.1337,
      "step": 310
    },
    {
      "epoch": 0.42241086587436333,
      "grad_norm": 0.20337048172950745,
      "learning_rate": 1.2308594946249163e-05,
      "loss": 1.1421,
      "step": 311
    },
    {
      "epoch": 0.4237691001697793,
      "grad_norm": 0.21610523760318756,
      "learning_rate": 1.204517304231343e-05,
      "loss": 1.1509,
      "step": 312
    },
    {
      "epoch": 0.42512733446519524,
      "grad_norm": 0.21116265654563904,
      "learning_rate": 1.178421397270758e-05,
      "loss": 1.1868,
      "step": 313
    },
    {
      "epoch": 0.4264855687606112,
      "grad_norm": 0.21202139556407928,
      "learning_rate": 1.1525734670691701e-05,
      "loss": 1.1499,
      "step": 314
    },
    {
      "epoch": 0.42784380305602715,
      "grad_norm": 0.21659952402114868,
      "learning_rate": 1.1269751908617277e-05,
      "loss": 1.149,
      "step": 315
    },
    {
      "epoch": 0.42920203735144313,
      "grad_norm": 0.20819424092769623,
      "learning_rate": 1.1016282296838887e-05,
      "loss": 1.131,
      "step": 316
    },
    {
      "epoch": 0.43056027164685906,
      "grad_norm": 0.21869328618049622,
      "learning_rate": 1.0765342282636416e-05,
      "loss": 1.0854,
      "step": 317
    },
    {
      "epoch": 0.43191850594227504,
      "grad_norm": 0.2176653891801834,
      "learning_rate": 1.0516948149147754e-05,
      "loss": 1.1614,
      "step": 318
    },
    {
      "epoch": 0.433276740237691,
      "grad_norm": 0.21377655863761902,
      "learning_rate": 1.0271116014312293e-05,
      "loss": 1.0566,
      "step": 319
    },
    {
      "epoch": 0.43463497453310695,
      "grad_norm": 0.21560655534267426,
      "learning_rate": 1.0027861829824952e-05,
      "loss": 1.0961,
      "step": 320
    },
    {
      "epoch": 0.43599320882852294,
      "grad_norm": 0.21113787591457367,
      "learning_rate": 9.787201380101157e-06,
      "loss": 1.1845,
      "step": 321
    },
    {
      "epoch": 0.43735144312393887,
      "grad_norm": 0.21498249471187592,
      "learning_rate": 9.549150281252633e-06,
      "loss": 1.2006,
      "step": 322
    },
    {
      "epoch": 0.43870967741935485,
      "grad_norm": 0.22041656076908112,
      "learning_rate": 9.313723980074018e-06,
      "loss": 1.1301,
      "step": 323
    },
    {
      "epoch": 0.4400679117147708,
      "grad_norm": 0.22150786221027374,
      "learning_rate": 9.080937753040646e-06,
      "loss": 1.0695,
      "step": 324
    },
    {
      "epoch": 0.44142614601018676,
      "grad_norm": 0.21770867705345154,
      "learning_rate": 8.850806705317183e-06,
      "loss": 1.1373,
      "step": 325
    },
    {
      "epoch": 0.44278438030560274,
      "grad_norm": 0.21694478392601013,
      "learning_rate": 8.623345769777514e-06,
      "loss": 1.1801,
      "step": 326
    },
    {
      "epoch": 0.44414261460101867,
      "grad_norm": 0.2310832142829895,
      "learning_rate": 8.398569706035792e-06,
      "loss": 1.1082,
      "step": 327
    },
    {
      "epoch": 0.44550084889643465,
      "grad_norm": 0.23200993239879608,
      "learning_rate": 8.176493099488663e-06,
      "loss": 1.1414,
      "step": 328
    },
    {
      "epoch": 0.4468590831918506,
      "grad_norm": 0.2326376736164093,
      "learning_rate": 7.957130360368898e-06,
      "loss": 1.1599,
      "step": 329
    },
    {
      "epoch": 0.44821731748726656,
      "grad_norm": 0.2363159954547882,
      "learning_rate": 7.740495722810271e-06,
      "loss": 1.1786,
      "step": 330
    },
    {
      "epoch": 0.4495755517826825,
      "grad_norm": 0.2400091290473938,
      "learning_rate": 7.526603243923957e-06,
      "loss": 1.1777,
      "step": 331
    },
    {
      "epoch": 0.4509337860780985,
      "grad_norm": 0.23490336537361145,
      "learning_rate": 7.315466802886401e-06,
      "loss": 1.1933,
      "step": 332
    },
    {
      "epoch": 0.45229202037351446,
      "grad_norm": 0.24406562745571136,
      "learning_rate": 7.107100100038671e-06,
      "loss": 1.2079,
      "step": 333
    },
    {
      "epoch": 0.4536502546689304,
      "grad_norm": 0.2354699969291687,
      "learning_rate": 6.901516655997536e-06,
      "loss": 1.1893,
      "step": 334
    },
    {
      "epoch": 0.45500848896434637,
      "grad_norm": 0.2448936104774475,
      "learning_rate": 6.698729810778065e-06,
      "loss": 1.1109,
      "step": 335
    },
    {
      "epoch": 0.4563667232597623,
      "grad_norm": 0.24912843108177185,
      "learning_rate": 6.498752722928042e-06,
      "loss": 1.1893,
      "step": 336
    },
    {
      "epoch": 0.4577249575551783,
      "grad_norm": 0.24521379172801971,
      "learning_rate": 6.301598368674105e-06,
      "loss": 1.1255,
      "step": 337
    },
    {
      "epoch": 0.4590831918505942,
      "grad_norm": 0.255632609128952,
      "learning_rate": 6.107279541079769e-06,
      "loss": 1.1292,
      "step": 338
    },
    {
      "epoch": 0.4604414261460102,
      "grad_norm": 0.25223416090011597,
      "learning_rate": 5.915808849215304e-06,
      "loss": 1.1416,
      "step": 339
    },
    {
      "epoch": 0.4617996604414261,
      "grad_norm": 0.2556709051132202,
      "learning_rate": 5.727198717339511e-06,
      "loss": 1.1466,
      "step": 340
    },
    {
      "epoch": 0.4631578947368421,
      "grad_norm": 0.25671127438545227,
      "learning_rate": 5.54146138409355e-06,
      "loss": 1.1387,
      "step": 341
    },
    {
      "epoch": 0.4645161290322581,
      "grad_norm": 0.2614463269710541,
      "learning_rate": 5.358608901706802e-06,
      "loss": 1.1486,
      "step": 342
    },
    {
      "epoch": 0.465874363327674,
      "grad_norm": 0.2740238308906555,
      "learning_rate": 5.178653135214812e-06,
      "loss": 1.133,
      "step": 343
    },
    {
      "epoch": 0.46723259762309,
      "grad_norm": 0.27351686358451843,
      "learning_rate": 5.001605761689398e-06,
      "loss": 1.1442,
      "step": 344
    },
    {
      "epoch": 0.4685908319185059,
      "grad_norm": 0.2915915250778198,
      "learning_rate": 4.827478269480895e-06,
      "loss": 1.1289,
      "step": 345
    },
    {
      "epoch": 0.4699490662139219,
      "grad_norm": 0.28379565477371216,
      "learning_rate": 4.65628195747273e-06,
      "loss": 1.1466,
      "step": 346
    },
    {
      "epoch": 0.47130730050933783,
      "grad_norm": 0.29930734634399414,
      "learning_rate": 4.488027934348271e-06,
      "loss": 1.1425,
      "step": 347
    },
    {
      "epoch": 0.4726655348047538,
      "grad_norm": 0.32029014825820923,
      "learning_rate": 4.322727117869951e-06,
      "loss": 1.1495,
      "step": 348
    },
    {
      "epoch": 0.4740237691001698,
      "grad_norm": 0.34378722310066223,
      "learning_rate": 4.16039023417088e-06,
      "loss": 1.1866,
      "step": 349
    },
    {
      "epoch": 0.47538200339558573,
      "grad_norm": 0.43387606739997864,
      "learning_rate": 4.001027817058789e-06,
      "loss": 1.1076,
      "step": 350
    },
    {
      "epoch": 0.47538200339558573,
      "eval_loss": 1.1534959077835083,
      "eval_runtime": 75.7386,
      "eval_samples_per_second": 16.372,
      "eval_steps_per_second": 4.093,
      "step": 350
    }
  ],
  "logging_steps": 1,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.813516281188188e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}