{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7961476725521669,
  "eval_steps": 150,
  "global_step": 155,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005136436597110754,
      "grad_norm": 104.2032241821289,
      "learning_rate": 2e-05,
      "loss": 43.8361,
      "step": 1
    },
    {
      "epoch": 0.005136436597110754,
      "eval_loss": 2.7993953227996826,
      "eval_runtime": 137.9974,
      "eval_samples_per_second": 4.754,
      "eval_steps_per_second": 1.188,
      "step": 1
    },
    {
      "epoch": 0.010272873194221509,
      "grad_norm": 87.10350799560547,
      "learning_rate": 4e-05,
      "loss": 45.8657,
      "step": 2
    },
    {
      "epoch": 0.015409309791332263,
      "grad_norm": 96.3443374633789,
      "learning_rate": 6e-05,
      "loss": 43.7377,
      "step": 3
    },
    {
      "epoch": 0.020545746388443017,
      "grad_norm": 94.10945892333984,
      "learning_rate": 8e-05,
      "loss": 40.9281,
      "step": 4
    },
    {
      "epoch": 0.025682182985553772,
      "grad_norm": 92.76588439941406,
      "learning_rate": 0.0001,
      "loss": 33.8946,
      "step": 5
    },
    {
      "epoch": 0.030818619582664526,
      "grad_norm": 53.15927505493164,
      "learning_rate": 0.00012,
      "loss": 24.9538,
      "step": 6
    },
    {
      "epoch": 0.035955056179775284,
      "grad_norm": 84.37348937988281,
      "learning_rate": 0.00014,
      "loss": 23.256,
      "step": 7
    },
    {
      "epoch": 0.041091492776886035,
      "grad_norm": 32.134864807128906,
      "learning_rate": 0.00016,
      "loss": 21.5102,
      "step": 8
    },
    {
      "epoch": 0.04622792937399679,
      "grad_norm": 28.912691116333008,
      "learning_rate": 0.00018,
      "loss": 17.9462,
      "step": 9
    },
    {
      "epoch": 0.051364365971107544,
      "grad_norm": 23.97195053100586,
      "learning_rate": 0.0002,
      "loss": 16.3874,
      "step": 10
    },
    {
      "epoch": 0.0565008025682183,
      "grad_norm": 53.18326187133789,
      "learning_rate": 0.00019997652980184843,
      "loss": 18.0917,
      "step": 11
    },
    {
      "epoch": 0.06163723916532905,
      "grad_norm": 31.358110427856445,
      "learning_rate": 0.0001999061302243977,
      "loss": 17.1273,
      "step": 12
    },
    {
      "epoch": 0.06677367576243981,
      "grad_norm": 27.875253677368164,
      "learning_rate": 0.00019978883431348845,
      "loss": 15.7283,
      "step": 13
    },
    {
      "epoch": 0.07191011235955057,
      "grad_norm": 29.975252151489258,
      "learning_rate": 0.00019962469712828614,
      "loss": 16.687,
      "step": 14
    },
    {
      "epoch": 0.07704654895666131,
      "grad_norm": 35.4009895324707,
      "learning_rate": 0.00019941379571543596,
      "loss": 17.2859,
      "step": 15
    },
    {
      "epoch": 0.08218298555377207,
      "grad_norm": 89.57384490966797,
      "learning_rate": 0.00019915622907289694,
      "loss": 17.1492,
      "step": 16
    },
    {
      "epoch": 0.08731942215088283,
      "grad_norm": 35.824459075927734,
      "learning_rate": 0.00019885211810347184,
      "loss": 14.816,
      "step": 17
    },
    {
      "epoch": 0.09245585874799359,
      "grad_norm": 21.76598358154297,
      "learning_rate": 0.00019850160555805486,
      "loss": 14.582,
      "step": 18
    },
    {
      "epoch": 0.09759229534510433,
      "grad_norm": 22.013967514038086,
      "learning_rate": 0.00019810485596862392,
      "loss": 13.9424,
      "step": 19
    },
    {
      "epoch": 0.10272873194221509,
      "grad_norm": 19.30277442932129,
      "learning_rate": 0.00019766205557100868,
      "loss": 15.0135,
      "step": 20
    },
    {
      "epoch": 0.10786516853932585,
      "grad_norm": 20.07377052307129,
      "learning_rate": 0.00019717341221747056,
      "loss": 15.026,
      "step": 21
    },
    {
      "epoch": 0.1130016051364366,
      "grad_norm": 19.19417953491211,
      "learning_rate": 0.00019663915527913625,
      "loss": 16.28,
      "step": 22
    },
    {
      "epoch": 0.11813804173354735,
      "grad_norm": 145.2245330810547,
      "learning_rate": 0.00019605953553832988,
      "loss": 14.1316,
      "step": 23
    },
    {
      "epoch": 0.1232744783306581,
      "grad_norm": 21.75243377685547,
      "learning_rate": 0.00019543482507085482,
      "loss": 15.4915,
      "step": 24
    },
    {
      "epoch": 0.12841091492776885,
      "grad_norm": 19.737768173217773,
      "learning_rate": 0.00019476531711828027,
      "loss": 15.0702,
      "step": 25
    },
    {
      "epoch": 0.13354735152487962,
      "grad_norm": 21.004024505615234,
      "learning_rate": 0.0001940513259502924,
      "loss": 13.8939,
      "step": 26
    },
    {
      "epoch": 0.13868378812199036,
      "grad_norm": 99.0382080078125,
      "learning_rate": 0.0001932931867171751,
      "loss": 14.1338,
      "step": 27
    },
    {
      "epoch": 0.14382022471910114,
      "grad_norm": 21.052730560302734,
      "learning_rate": 0.0001924912552924889,
      "loss": 15.3611,
      "step": 28
    },
    {
      "epoch": 0.14895666131621188,
      "grad_norm": 25.83721160888672,
      "learning_rate": 0.00019164590810602262,
      "loss": 14.6511,
      "step": 29
    },
    {
      "epoch": 0.15409309791332262,
      "grad_norm": 23.324485778808594,
      "learning_rate": 0.00019075754196709572,
      "loss": 13.7324,
      "step": 30
    },
    {
      "epoch": 0.1592295345104334,
      "grad_norm": 17.926301956176758,
      "learning_rate": 0.00018982657387829445,
      "loss": 14.6006,
      "step": 31
    },
    {
      "epoch": 0.16436597110754414,
      "grad_norm": 22.068103790283203,
      "learning_rate": 0.00018885344083972914,
      "loss": 15.3865,
      "step": 32
    },
    {
      "epoch": 0.16950240770465488,
      "grad_norm": 20.398998260498047,
      "learning_rate": 0.00018783859964390464,
      "loss": 14.2294,
      "step": 33
    },
    {
      "epoch": 0.17463884430176566,
      "grad_norm": 17.491390228271484,
      "learning_rate": 0.00018678252666130013,
      "loss": 14.1286,
      "step": 34
    },
    {
      "epoch": 0.1797752808988764,
      "grad_norm": 19.122509002685547,
      "learning_rate": 0.00018568571761675893,
      "loss": 15.3119,
      "step": 35
    },
    {
      "epoch": 0.18491171749598717,
      "grad_norm": 18.384124755859375,
      "learning_rate": 0.0001845486873567932,
      "loss": 15.4398,
      "step": 36
    },
    {
      "epoch": 0.19004815409309792,
      "grad_norm": 41.315956115722656,
      "learning_rate": 0.00018337196960791302,
      "loss": 14.5058,
      "step": 37
    },
    {
      "epoch": 0.19518459069020866,
      "grad_norm": 19.567920684814453,
      "learning_rate": 0.00018215611672609317,
      "loss": 12.7428,
      "step": 38
    },
    {
      "epoch": 0.20032102728731943,
      "grad_norm": 19.43627166748047,
      "learning_rate": 0.00018090169943749476,
      "loss": 15.1849,
      "step": 39
    },
    {
      "epoch": 0.20545746388443017,
      "grad_norm": 49.411319732666016,
      "learning_rate": 0.00017960930657056438,
      "loss": 13.3985,
      "step": 40
    },
    {
      "epoch": 0.21059390048154092,
      "grad_norm": 21.388917922973633,
      "learning_rate": 0.00017827954477963557,
      "loss": 14.1779,
      "step": 41
    },
    {
      "epoch": 0.2157303370786517,
      "grad_norm": 24.790122985839844,
      "learning_rate": 0.0001769130382601629,
      "loss": 15.1373,
      "step": 42
    },
    {
      "epoch": 0.22086677367576243,
      "grad_norm": 16.753856658935547,
      "learning_rate": 0.00017551042845572208,
      "loss": 14.7309,
      "step": 43
    },
    {
      "epoch": 0.2260032102728732,
      "grad_norm": 23.899765014648438,
      "learning_rate": 0.00017407237375691392,
      "loss": 13.6093,
      "step": 44
    },
    {
      "epoch": 0.23113964686998395,
      "grad_norm": 52.38229751586914,
      "learning_rate": 0.0001725995491923131,
      "loss": 14.5637,
      "step": 45
    },
    {
      "epoch": 0.2362760834670947,
      "grad_norm": 44.96168518066406,
      "learning_rate": 0.00017109264611160708,
      "loss": 14.0165,
      "step": 46
    },
    {
      "epoch": 0.24141252006420547,
      "grad_norm": 20.429908752441406,
      "learning_rate": 0.00016955237186107387,
      "loss": 14.7569,
      "step": 47
    },
    {
      "epoch": 0.2465489566613162,
      "grad_norm": 16.509567260742188,
      "learning_rate": 0.0001679794494515508,
      "loss": 13.9921,
      "step": 48
    },
    {
      "epoch": 0.251685393258427,
      "grad_norm": 16.397178649902344,
      "learning_rate": 0.00016637461721905045,
      "loss": 13.2361,
      "step": 49
    },
    {
      "epoch": 0.2568218298555377,
      "grad_norm": 16.84309196472168,
      "learning_rate": 0.00016473862847818277,
      "loss": 15.3742,
      "step": 50
    },
    {
      "epoch": 0.26195826645264847,
      "grad_norm": 17.62554359436035,
      "learning_rate": 0.00016307225116854622,
      "loss": 14.4287,
      "step": 51
    },
    {
      "epoch": 0.26709470304975924,
      "grad_norm": 17.205963134765625,
      "learning_rate": 0.00016137626749425377,
      "loss": 13.9601,
      "step": 52
    },
    {
      "epoch": 0.27223113964686996,
      "grad_norm": 19.4569091796875,
      "learning_rate": 0.00015965147355676343,
      "loss": 16.3653,
      "step": 53
    },
    {
      "epoch": 0.27736757624398073,
      "grad_norm": 17.375333786010742,
      "learning_rate": 0.0001578986789811849,
      "loss": 14.8869,
      "step": 54
    },
    {
      "epoch": 0.2825040128410915,
      "grad_norm": 19.079904556274414,
      "learning_rate": 0.00015611870653623825,
      "loss": 13.38,
      "step": 55
    },
    {
      "epoch": 0.2876404494382023,
      "grad_norm": 14.686327934265137,
      "learning_rate": 0.00015431239174804328,
      "loss": 13.8367,
      "step": 56
    },
    {
      "epoch": 0.292776886035313,
      "grad_norm": 16.750314712524414,
      "learning_rate": 0.00015248058250792008,
      "loss": 13.7871,
      "step": 57
    },
    {
      "epoch": 0.29791332263242376,
      "grad_norm": 20.95732879638672,
      "learning_rate": 0.0001506241386743854,
      "loss": 13.5049,
      "step": 58
    },
    {
      "epoch": 0.30304975922953453,
      "grad_norm": 678.4100341796875,
      "learning_rate": 0.00014874393166953192,
      "loss": 11.7639,
      "step": 59
    },
    {
      "epoch": 0.30818619582664525,
      "grad_norm": 18.715354919433594,
      "learning_rate": 0.00014684084406997903,
      "loss": 11.5865,
      "step": 60
    },
    {
      "epoch": 0.313322632423756,
      "grad_norm": 27.259925842285156,
      "learning_rate": 0.00014491576919258792,
      "loss": 16.0604,
      "step": 61
    },
    {
      "epoch": 0.3184590690208668,
      "grad_norm": 146.95001220703125,
      "learning_rate": 0.0001429696106751352,
      "loss": 15.6741,
      "step": 62
    },
    {
      "epoch": 0.3235955056179775,
      "grad_norm": 16.70216941833496,
      "learning_rate": 0.0001410032820521416,
      "loss": 15.1472,
      "step": 63
    },
    {
      "epoch": 0.3287319422150883,
      "grad_norm": 24.176897048950195,
      "learning_rate": 0.00013901770632605547,
      "loss": 15.0085,
      "step": 64
    },
    {
      "epoch": 0.33386837881219905,
      "grad_norm": 26.137481689453125,
      "learning_rate": 0.00013701381553399145,
      "loss": 14.074,
      "step": 65
    },
    {
      "epoch": 0.33900481540930977,
      "grad_norm": 16.90602684020996,
      "learning_rate": 0.00013499255031022885,
      "loss": 13.6073,
      "step": 66
    },
    {
      "epoch": 0.34414125200642054,
      "grad_norm": 15.70071029663086,
      "learning_rate": 0.00013295485944467405,
      "loss": 13.4469,
      "step": 67
    },
    {
      "epoch": 0.3492776886035313,
      "grad_norm": 16.358449935913086,
      "learning_rate": 0.00013090169943749476,
      "loss": 13.356,
      "step": 68
    },
    {
      "epoch": 0.354414125200642,
      "grad_norm": 39.20793914794922,
      "learning_rate": 0.0001288340340501351,
      "loss": 13.7233,
      "step": 69
    },
    {
      "epoch": 0.3595505617977528,
      "grad_norm": 16.230451583862305,
      "learning_rate": 0.00012675283385292212,
      "loss": 12.6883,
      "step": 70
    },
    {
      "epoch": 0.36468699839486357,
      "grad_norm": 17.057575225830078,
      "learning_rate": 0.00012465907576947622,
      "loss": 13.5811,
      "step": 71
    },
    {
      "epoch": 0.36982343499197434,
      "grad_norm": 18.70912742614746,
      "learning_rate": 0.00012255374261813944,
      "loss": 17.5856,
      "step": 72
    },
    {
      "epoch": 0.37495987158908506,
      "grad_norm": 17.773834228515625,
      "learning_rate": 0.0001204378226506365,
      "loss": 15.1836,
      "step": 73
    },
    {
      "epoch": 0.38009630818619583,
      "grad_norm": 19.240266799926758,
      "learning_rate": 0.00011831230908818563,
      "loss": 14.201,
      "step": 74
    },
    {
      "epoch": 0.3852327447833066,
      "grad_norm": 28.190780639648438,
      "learning_rate": 0.0001161781996552765,
      "loss": 14.8009,
      "step": 75
    },
    {
      "epoch": 0.3903691813804173,
      "grad_norm": 13.847818374633789,
      "learning_rate": 0.00011403649611133444,
      "loss": 11.7056,
      "step": 76
    },
    {
      "epoch": 0.3955056179775281,
      "grad_norm": 19.09795379638672,
      "learning_rate": 0.00011188820378049065,
      "loss": 14.7104,
      "step": 77
    },
    {
      "epoch": 0.40064205457463886,
      "grad_norm": 16.784597396850586,
      "learning_rate": 0.00010973433107967902,
      "loss": 13.9271,
      "step": 78
    },
    {
      "epoch": 0.4057784911717496,
      "grad_norm": 18.32082748413086,
      "learning_rate": 0.00010757588904528106,
      "loss": 14.7108,
      "step": 79
    },
    {
      "epoch": 0.41091492776886035,
      "grad_norm": 19.074981689453125,
      "learning_rate": 0.00010541389085854176,
      "loss": 16.207,
      "step": 80
    },
    {
      "epoch": 0.4160513643659711,
      "grad_norm": 18.795635223388672,
      "learning_rate": 0.00010324935136997806,
      "loss": 13.4865,
      "step": 81
    },
    {
      "epoch": 0.42118780096308184,
      "grad_norm": 18.05921173095703,
      "learning_rate": 0.000101083286623004,
      "loss": 13.5321,
      "step": 82
    },
    {
      "epoch": 0.4263242375601926,
      "grad_norm": 26.4294490814209,
      "learning_rate": 9.891671337699602e-05,
      "loss": 14.5322,
      "step": 83
    },
    {
      "epoch": 0.4314606741573034,
      "grad_norm": 19.195995330810547,
      "learning_rate": 9.675064863002196e-05,
      "loss": 14.3342,
      "step": 84
    },
    {
      "epoch": 0.43659711075441415,
      "grad_norm": 56.876407623291016,
      "learning_rate": 9.458610914145826e-05,
      "loss": 14.4958,
      "step": 85
    },
    {
      "epoch": 0.44173354735152487,
      "grad_norm": 38.66791915893555,
      "learning_rate": 9.242411095471897e-05,
      "loss": 15.1417,
      "step": 86
    },
    {
      "epoch": 0.44686998394863564,
      "grad_norm": 15.607385635375977,
      "learning_rate": 9.026566892032105e-05,
      "loss": 12.8892,
      "step": 87
    },
    {
      "epoch": 0.4520064205457464,
      "grad_norm": 18.23895263671875,
      "learning_rate": 8.811179621950936e-05,
      "loss": 11.6303,
      "step": 88
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 20.267276763916016,
      "learning_rate": 8.596350388866558e-05,
      "loss": 15.0553,
      "step": 89
    },
    {
      "epoch": 0.4622792937399679,
      "grad_norm": 15.288996696472168,
      "learning_rate": 8.382180034472353e-05,
      "loss": 11.5903,
      "step": 90
    },
    {
      "epoch": 0.46741573033707867,
      "grad_norm": 17.56521224975586,
      "learning_rate": 8.168769091181438e-05,
      "loss": 14.486,
      "step": 91
    },
    {
      "epoch": 0.4725521669341894,
      "grad_norm": 18.872352600097656,
      "learning_rate": 7.956217734936353e-05,
      "loss": 11.8002,
      "step": 92
    },
    {
      "epoch": 0.47768860353130016,
      "grad_norm": 16.709739685058594,
      "learning_rate": 7.744625738186059e-05,
      "loss": 13.3616,
      "step": 93
    },
    {
      "epoch": 0.48282504012841093,
      "grad_norm": 16.364673614501953,
      "learning_rate": 7.534092423052381e-05,
      "loss": 14.2758,
      "step": 94
    },
    {
      "epoch": 0.48796147672552165,
      "grad_norm": 15.938226699829102,
      "learning_rate": 7.324716614707793e-05,
      "loss": 11.5942,
      "step": 95
    },
    {
      "epoch": 0.4930979133226324,
      "grad_norm": 17.955299377441406,
      "learning_rate": 7.116596594986494e-05,
      "loss": 14.6283,
      "step": 96
    },
    {
      "epoch": 0.4982343499197432,
      "grad_norm": 19.787822723388672,
      "learning_rate": 6.909830056250527e-05,
      "loss": 14.1204,
      "step": 97
    },
    {
      "epoch": 0.503370786516854,
      "grad_norm": 24.217247009277344,
      "learning_rate": 6.704514055532597e-05,
      "loss": 12.5203,
      "step": 98
    },
    {
      "epoch": 0.5085072231139647,
      "grad_norm": 18.735475540161133,
      "learning_rate": 6.500744968977116e-05,
      "loss": 13.2639,
      "step": 99
    },
    {
      "epoch": 0.5136436597110754,
      "grad_norm": 16.328588485717773,
      "learning_rate": 6.298618446600856e-05,
      "loss": 13.0953,
      "step": 100
    },
    {
      "epoch": 0.5187800963081862,
      "grad_norm": 17.875574111938477,
      "learning_rate": 6.0982293673944544e-05,
      "loss": 12.6593,
      "step": 101
    },
    {
      "epoch": 0.5239165329052969,
      "grad_norm": 16.03655242919922,
      "learning_rate": 5.899671794785839e-05,
      "loss": 13.7208,
      "step": 102
    },
    {
      "epoch": 0.5290529695024077,
      "grad_norm": 15.7937593460083,
      "learning_rate": 5.703038932486484e-05,
      "loss": 12.1358,
      "step": 103
    },
    {
      "epoch": 0.5341894060995185,
      "grad_norm": 15.46942138671875,
      "learning_rate": 5.5084230807412126e-05,
      "loss": 13.246,
      "step": 104
    },
    {
      "epoch": 0.5393258426966292,
      "grad_norm": 21.15059471130371,
      "learning_rate": 5.3159155930021e-05,
      "loss": 17.4358,
      "step": 105
    },
    {
      "epoch": 0.5444622792937399,
      "grad_norm": 16.775680541992188,
      "learning_rate": 5.12560683304681e-05,
      "loss": 15.4665,
      "step": 106
    },
    {
      "epoch": 0.5495987158908507,
      "grad_norm": 15.216927528381348,
      "learning_rate": 4.9375861325614606e-05,
      "loss": 14.233,
      "step": 107
    },
    {
      "epoch": 0.5547351524879615,
      "grad_norm": 16.067556381225586,
      "learning_rate": 4.751941749207995e-05,
      "loss": 13.9475,
      "step": 108
    },
    {
      "epoch": 0.5598715890850723,
      "grad_norm": 15.383379936218262,
      "learning_rate": 4.5687608251956714e-05,
      "loss": 13.1619,
      "step": 109
    },
    {
      "epoch": 0.565008025682183,
      "grad_norm": 13.594917297363281,
      "learning_rate": 4.388129346376178e-05,
      "loss": 10.6763,
      "step": 110
    },
    {
      "epoch": 0.5701444622792937,
      "grad_norm": 16.273210525512695,
      "learning_rate": 4.210132101881516e-05,
      "loss": 13.5974,
      "step": 111
    },
    {
      "epoch": 0.5752808988764045,
      "grad_norm": 14.688011169433594,
      "learning_rate": 4.034852644323661e-05,
      "loss": 12.487,
      "step": 112
    },
    {
      "epoch": 0.5804173354735153,
      "grad_norm": 15.278275489807129,
      "learning_rate": 3.862373250574626e-05,
      "loss": 11.589,
      "step": 113
    },
    {
      "epoch": 0.585553772070626,
      "grad_norm": 16.51677703857422,
      "learning_rate": 3.6927748831453836e-05,
      "loss": 14.3822,
      "step": 114
    },
    {
      "epoch": 0.5906902086677368,
      "grad_norm": 15.376001358032227,
      "learning_rate": 3.5261371521817244e-05,
      "loss": 12.6597,
      "step": 115
    },
    {
      "epoch": 0.5958266452648475,
      "grad_norm": 16.46743392944336,
      "learning_rate": 3.3625382780949574e-05,
      "loss": 12.9537,
      "step": 116
    },
    {
      "epoch": 0.6009630818619582,
      "grad_norm": 16.67145347595215,
      "learning_rate": 3.202055054844921e-05,
      "loss": 15.0622,
      "step": 117
    },
    {
      "epoch": 0.6060995184590691,
      "grad_norm": 17.57502555847168,
      "learning_rate": 3.0447628138926156e-05,
      "loss": 14.1264,
      "step": 118
    },
    {
      "epoch": 0.6112359550561798,
      "grad_norm": 14.330567359924316,
      "learning_rate": 2.890735388839295e-05,
      "loss": 13.1008,
      "step": 119
    },
    {
      "epoch": 0.6163723916532905,
      "grad_norm": 16.226436614990234,
      "learning_rate": 2.7400450807686938e-05,
      "loss": 13.5438,
      "step": 120
    },
    {
      "epoch": 0.6215088282504013,
      "grad_norm": 18.105459213256836,
      "learning_rate": 2.59276262430861e-05,
      "loss": 13.7602,
      "step": 121
    },
    {
      "epoch": 0.626645264847512,
      "grad_norm": 15.712982177734375,
      "learning_rate": 2.4489571544277945e-05,
      "loss": 14.5214,
      "step": 122
    },
    {
      "epoch": 0.6317817014446228,
      "grad_norm": 16.994945526123047,
      "learning_rate": 2.308696173983711e-05,
      "loss": 12.3241,
      "step": 123
    },
    {
      "epoch": 0.6369181380417336,
      "grad_norm": 23.144821166992188,
      "learning_rate": 2.1720455220364444e-05,
      "loss": 12.649,
      "step": 124
    },
    {
      "epoch": 0.6420545746388443,
      "grad_norm": 20.007465362548828,
      "learning_rate": 2.0390693429435627e-05,
      "loss": 12.3556,
      "step": 125
    },
    {
      "epoch": 0.647191011235955,
      "grad_norm": 17.01409149169922,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 14.2751,
      "step": 126
    },
    {
      "epoch": 0.6523274478330658,
      "grad_norm": 20.14617347717285,
      "learning_rate": 1.784388327390687e-05,
      "loss": 11.4152,
      "step": 127
    },
    {
      "epoch": 0.6574638844301766,
      "grad_norm": 15.188875198364258,
      "learning_rate": 1.6628030392087e-05,
      "loss": 11.5006,
      "step": 128
    },
    {
      "epoch": 0.6626003210272873,
      "grad_norm": 12.970837593078613,
      "learning_rate": 1.5451312643206827e-05,
      "loss": 10.4096,
      "step": 129
    },
    {
      "epoch": 0.6677367576243981,
      "grad_norm": 23.07573890686035,
      "learning_rate": 1.4314282383241096e-05,
      "loss": 12.7064,
      "step": 130
    },
    {
      "epoch": 0.6728731942215088,
      "grad_norm": 15.414446830749512,
      "learning_rate": 1.3217473338699859e-05,
      "loss": 12.8737,
      "step": 131
    },
    {
      "epoch": 0.6780096308186195,
      "grad_norm": 14.607954978942871,
      "learning_rate": 1.2161400356095375e-05,
      "loss": 12.8935,
      "step": 132
    },
    {
      "epoch": 0.6831460674157304,
      "grad_norm": 16.611759185791016,
      "learning_rate": 1.1146559160270875e-05,
      "loss": 14.0584,
      "step": 133
    },
    {
      "epoch": 0.6882825040128411,
      "grad_norm": 22.148141860961914,
      "learning_rate": 1.0173426121705576e-05,
      "loss": 12.2587,
      "step": 134
    },
    {
      "epoch": 0.6934189406099518,
      "grad_norm": 18.16852378845215,
      "learning_rate": 9.242458032904311e-06,
      "loss": 14.9363,
      "step": 135
    },
    {
      "epoch": 0.6985553772070626,
      "grad_norm": 22.371219635009766,
      "learning_rate": 8.354091893977401e-06,
      "loss": 14.528,
      "step": 136
    },
    {
      "epoch": 0.7036918138041733,
      "grad_norm": 13.901525497436523,
      "learning_rate": 7.508744707511117e-06,
      "loss": 14.1155,
      "step": 137
    },
    {
      "epoch": 0.708828250401284,
      "grad_norm": 13.545099258422852,
      "learning_rate": 6.70681328282492e-06,
      "loss": 12.0036,
      "step": 138
    },
    {
      "epoch": 0.7139646869983949,
      "grad_norm": 13.885724067687988,
      "learning_rate": 5.948674049707603e-06,
      "loss": 11.6004,
      "step": 139
    },
    {
      "epoch": 0.7191011235955056,
      "grad_norm": 18.979543685913086,
      "learning_rate": 5.2346828817197655e-06,
      "loss": 12.442,
      "step": 140
    },
    {
      "epoch": 0.7242375601926164,
      "grad_norm": 18.40704917907715,
      "learning_rate": 4.565174929145188e-06,
      "loss": 13.0276,
      "step": 141
    },
    {
      "epoch": 0.7293739967897271,
      "grad_norm": 14.125924110412598,
      "learning_rate": 3.940464461670135e-06,
      "loss": 12.777,
      "step": 142
    },
    {
      "epoch": 0.7345104333868379,
      "grad_norm": 14.582460403442383,
      "learning_rate": 3.360844720863765e-06,
      "loss": 12.5393,
      "step": 143
    },
    {
      "epoch": 0.7396468699839487,
      "grad_norm": 101.13643646240234,
      "learning_rate": 2.826587782529444e-06,
      "loss": 10.174,
      "step": 144
    },
    {
      "epoch": 0.7447833065810594,
      "grad_norm": 17.687610626220703,
      "learning_rate": 2.3379444289913342e-06,
      "loss": 14.5668,
      "step": 145
    },
    {
      "epoch": 0.7499197431781701,
      "grad_norm": 13.70815372467041,
      "learning_rate": 1.8951440313760837e-06,
      "loss": 12.749,
      "step": 146
    },
    {
      "epoch": 0.755056179775281,
      "grad_norm": 15.379615783691406,
      "learning_rate": 1.4983944419451613e-06,
      "loss": 12.6604,
      "step": 147
    },
    {
      "epoch": 0.7601926163723917,
      "grad_norm": 16.77601432800293,
      "learning_rate": 1.1478818965281911e-06,
      "loss": 15.1071,
      "step": 148
    },
    {
      "epoch": 0.7653290529695024,
      "grad_norm": 17.265384674072266,
      "learning_rate": 8.437709271030603e-07,
      "loss": 13.4781,
      "step": 149
    },
    {
      "epoch": 0.7704654895666132,
      "grad_norm": 16.5050106048584,
      "learning_rate": 5.862042845640403e-07,
      "loss": 12.4935,
      "step": 150
    },
    {
      "epoch": 0.7704654895666132,
      "eval_loss": 0.8121631145477295,
      "eval_runtime": 138.4812,
      "eval_samples_per_second": 4.737,
      "eval_steps_per_second": 1.184,
      "step": 150
    },
    {
      "epoch": 0.7756019261637239,
      "grad_norm": 15.610320091247559,
      "learning_rate": 3.7530287171387843e-07,
      "loss": 11.1469,
      "step": 151
    },
    {
      "epoch": 0.7807383627608346,
      "grad_norm": 16.400800704956055,
      "learning_rate": 2.1116568651156076e-07,
      "loss": 14.1984,
      "step": 152
    },
    {
      "epoch": 0.7858747993579455,
      "grad_norm": 14.4852933883667,
      "learning_rate": 9.386977560232879e-08,
      "loss": 12.8882,
      "step": 153
    },
    {
      "epoch": 0.7910112359550562,
      "grad_norm": 17.637113571166992,
      "learning_rate": 2.347019815158724e-08,
      "loss": 15.6212,
      "step": 154
    },
    {
      "epoch": 0.7961476725521669,
      "grad_norm": 16.612701416015625,
      "learning_rate": 0.0,
      "loss": 13.2727,
      "step": 155
    }
  ],
  "logging_steps": 1,
  "max_steps": 155,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 300,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 2,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4215448129804698e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}