{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.06824198450828513, |
|
"eval_steps": 348, |
|
"global_step": 348, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00019609765663300324, |
|
"grad_norm": 20.33372688293457, |
|
"learning_rate": 2e-05, |
|
"loss": 3.0843, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00019609765663300324, |
|
"eval_loss": 1.1017773151397705, |
|
"eval_runtime": 79.9135, |
|
"eval_samples_per_second": 26.879, |
|
"eval_steps_per_second": 13.44, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0003921953132660065, |
|
"grad_norm": 19.32895278930664, |
|
"learning_rate": 4e-05, |
|
"loss": 3.2221, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0005882929698990097, |
|
"grad_norm": 18.6882266998291, |
|
"learning_rate": 6e-05, |
|
"loss": 3.8951, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.000784390626532013, |
|
"grad_norm": 43.008060455322266, |
|
"learning_rate": 8e-05, |
|
"loss": 5.167, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0009804882831650162, |
|
"grad_norm": 21.642993927001953, |
|
"learning_rate": 0.0001, |
|
"loss": 3.1304, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0011765859397980193, |
|
"grad_norm": 29.79266929626465, |
|
"learning_rate": 0.00012, |
|
"loss": 4.5153, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0013726835964310226, |
|
"grad_norm": 25.503681182861328, |
|
"learning_rate": 0.00014, |
|
"loss": 3.8083, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.001568781253064026, |
|
"grad_norm": 32.35524368286133, |
|
"learning_rate": 0.00016, |
|
"loss": 4.253, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0017648789096970292, |
|
"grad_norm": 21.053390502929688, |
|
"learning_rate": 0.00018, |
|
"loss": 3.3757, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0019609765663300325, |
|
"grad_norm": 25.7067928314209, |
|
"learning_rate": 0.0002, |
|
"loss": 3.2484, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0021570742229630358, |
|
"grad_norm": 22.57227897644043, |
|
"learning_rate": 0.00019999974049780868, |
|
"loss": 2.8378, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0023531718795960386, |
|
"grad_norm": 19.06597900390625, |
|
"learning_rate": 0.00019999896199258152, |
|
"loss": 3.231, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.002549269536229042, |
|
"grad_norm": 17.590620040893555, |
|
"learning_rate": 0.000199997664488359, |
|
"loss": 2.2391, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.002745367192862045, |
|
"grad_norm": 8.627043724060059, |
|
"learning_rate": 0.00019999584799187522, |
|
"loss": 1.7095, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0029414648494950485, |
|
"grad_norm": 21.60858917236328, |
|
"learning_rate": 0.0001999935125125579, |
|
"loss": 3.9299, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.003137562506128052, |
|
"grad_norm": 8.075380325317383, |
|
"learning_rate": 0.00019999065806252829, |
|
"loss": 1.7939, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.003333660162761055, |
|
"grad_norm": 11.393594741821289, |
|
"learning_rate": 0.00019998728465660105, |
|
"loss": 1.601, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0035297578193940584, |
|
"grad_norm": 8.256339073181152, |
|
"learning_rate": 0.00019998339231228434, |
|
"loss": 3.1556, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0037258554760270617, |
|
"grad_norm": 20.03615951538086, |
|
"learning_rate": 0.0001999789810497796, |
|
"loss": 2.0883, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.003921953132660065, |
|
"grad_norm": 10.166353225708008, |
|
"learning_rate": 0.0001999740508919815, |
|
"loss": 3.5616, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.004118050789293068, |
|
"grad_norm": 15.80553913116455, |
|
"learning_rate": 0.0001999686018644777, |
|
"loss": 3.0344, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0043141484459260715, |
|
"grad_norm": 7.451974391937256, |
|
"learning_rate": 0.00019996263399554897, |
|
"loss": 2.1049, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.004510246102559075, |
|
"grad_norm": 5.434274673461914, |
|
"learning_rate": 0.00019995614731616875, |
|
"loss": 2.3178, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.004706343759192077, |
|
"grad_norm": 10.594315528869629, |
|
"learning_rate": 0.00019994914186000328, |
|
"loss": 1.7096, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0049024414158250805, |
|
"grad_norm": 5.348718166351318, |
|
"learning_rate": 0.0001999416176634111, |
|
"loss": 2.695, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.005098539072458084, |
|
"grad_norm": 17.776073455810547, |
|
"learning_rate": 0.00019993357476544312, |
|
"loss": 1.7411, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.005294636729091087, |
|
"grad_norm": 10.051606178283691, |
|
"learning_rate": 0.0001999250132078424, |
|
"loss": 2.6161, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.00549073438572409, |
|
"grad_norm": 26.03020668029785, |
|
"learning_rate": 0.00019991593303504376, |
|
"loss": 3.3977, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.005686832042357094, |
|
"grad_norm": 10.213540077209473, |
|
"learning_rate": 0.00019990633429417363, |
|
"loss": 1.2442, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.005882929698990097, |
|
"grad_norm": 11.69288444519043, |
|
"learning_rate": 0.00019989621703505, |
|
"loss": 1.4702, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0060790273556231, |
|
"grad_norm": 4.343452453613281, |
|
"learning_rate": 0.00019988558131018186, |
|
"loss": 1.0779, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.006275125012256104, |
|
"grad_norm": 9.106976509094238, |
|
"learning_rate": 0.00019987442717476906, |
|
"loss": 2.5887, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.006471222668889107, |
|
"grad_norm": 17.658370971679688, |
|
"learning_rate": 0.00019986275468670205, |
|
"loss": 2.2258, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.00666732032552211, |
|
"grad_norm": 6.7451090812683105, |
|
"learning_rate": 0.00019985056390656162, |
|
"loss": 1.7206, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0068634179821551134, |
|
"grad_norm": 28.07065200805664, |
|
"learning_rate": 0.00019983785489761837, |
|
"loss": 2.7356, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.007059515638788117, |
|
"grad_norm": 11.387879371643066, |
|
"learning_rate": 0.00019982462772583266, |
|
"loss": 1.973, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.00725561329542112, |
|
"grad_norm": 9.64372444152832, |
|
"learning_rate": 0.00019981088245985408, |
|
"loss": 2.7339, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.007451710952054123, |
|
"grad_norm": 9.302544593811035, |
|
"learning_rate": 0.00019979661917102115, |
|
"loss": 1.7498, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.007647808608687127, |
|
"grad_norm": 15.064400672912598, |
|
"learning_rate": 0.000199781837933361, |
|
"loss": 3.0109, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.00784390626532013, |
|
"grad_norm": 7.281099319458008, |
|
"learning_rate": 0.00019976653882358884, |
|
"loss": 1.3118, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.008040003921953132, |
|
"grad_norm": 6.4474873542785645, |
|
"learning_rate": 0.0001997507219211078, |
|
"loss": 1.408, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.008236101578586136, |
|
"grad_norm": 13.101079940795898, |
|
"learning_rate": 0.00019973438730800822, |
|
"loss": 2.3367, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.008432199235219139, |
|
"grad_norm": 5.951049327850342, |
|
"learning_rate": 0.00019971753506906753, |
|
"loss": 0.9101, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.008628296891852143, |
|
"grad_norm": 11.212276458740234, |
|
"learning_rate": 0.00019970016529174947, |
|
"loss": 2.7058, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.008824394548485145, |
|
"grad_norm": 8.68136978149414, |
|
"learning_rate": 0.0001996822780662041, |
|
"loss": 2.0276, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.00902049220511815, |
|
"grad_norm": 17.70038414001465, |
|
"learning_rate": 0.00019966387348526683, |
|
"loss": 2.7989, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.009216589861751152, |
|
"grad_norm": 10.247598648071289, |
|
"learning_rate": 0.00019964495164445824, |
|
"loss": 1.9618, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.009412687518384154, |
|
"grad_norm": 10.378255844116211, |
|
"learning_rate": 0.0001996255126419835, |
|
"loss": 1.8003, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.009608785175017159, |
|
"grad_norm": 31.620820999145508, |
|
"learning_rate": 0.0001996055565787319, |
|
"loss": 2.8785, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.009804882831650161, |
|
"grad_norm": 9.976147651672363, |
|
"learning_rate": 0.0001995850835582763, |
|
"loss": 2.5605, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.010000980488283165, |
|
"grad_norm": 11.751899719238281, |
|
"learning_rate": 0.00019956409368687258, |
|
"loss": 2.7556, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.010197078144916168, |
|
"grad_norm": 15.828932762145996, |
|
"learning_rate": 0.000199542587073459, |
|
"loss": 2.7773, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.010393175801549172, |
|
"grad_norm": 10.772979736328125, |
|
"learning_rate": 0.00019952056382965597, |
|
"loss": 1.9553, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.010589273458182174, |
|
"grad_norm": 10.821427345275879, |
|
"learning_rate": 0.00019949802406976495, |
|
"loss": 1.8528, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.010785371114815178, |
|
"grad_norm": 7.228662490844727, |
|
"learning_rate": 0.00019947496791076837, |
|
"loss": 1.1844, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01098146877144818, |
|
"grad_norm": 7.164773941040039, |
|
"learning_rate": 0.00019945139547232872, |
|
"loss": 1.0291, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.011177566428081185, |
|
"grad_norm": 13.927733421325684, |
|
"learning_rate": 0.0001994273068767879, |
|
"loss": 1.5417, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.011373664084714187, |
|
"grad_norm": 10.366493225097656, |
|
"learning_rate": 0.00019940270224916688, |
|
"loss": 1.5122, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.011569761741347192, |
|
"grad_norm": 11.2214994430542, |
|
"learning_rate": 0.00019937758171716468, |
|
"loss": 1.6003, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.011765859397980194, |
|
"grad_norm": 14.360090255737305, |
|
"learning_rate": 0.000199351945411158, |
|
"loss": 1.5651, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.011961957054613198, |
|
"grad_norm": 17.97150993347168, |
|
"learning_rate": 0.00019932579346420038, |
|
"loss": 1.6064, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0121580547112462, |
|
"grad_norm": 10.190518379211426, |
|
"learning_rate": 0.00019929912601202151, |
|
"loss": 1.9151, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.012354152367879203, |
|
"grad_norm": 13.573248863220215, |
|
"learning_rate": 0.00019927194319302677, |
|
"loss": 4.0602, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.012550250024512207, |
|
"grad_norm": 16.919841766357422, |
|
"learning_rate": 0.00019924424514829606, |
|
"loss": 2.8292, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.01274634768114521, |
|
"grad_norm": 58.470252990722656, |
|
"learning_rate": 0.00019921603202158354, |
|
"loss": 1.9637, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.012942445337778214, |
|
"grad_norm": 18.334800720214844, |
|
"learning_rate": 0.00019918730395931649, |
|
"loss": 2.5609, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.013138542994411216, |
|
"grad_norm": 12.280759811401367, |
|
"learning_rate": 0.00019915806111059486, |
|
"loss": 1.2495, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.01333464065104422, |
|
"grad_norm": 8.015874862670898, |
|
"learning_rate": 0.0001991283036271903, |
|
"loss": 1.505, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.013530738307677223, |
|
"grad_norm": 7.713284969329834, |
|
"learning_rate": 0.0001990980316635455, |
|
"loss": 2.3898, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.013726835964310227, |
|
"grad_norm": 18.01800537109375, |
|
"learning_rate": 0.00019906724537677316, |
|
"loss": 3.0263, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.01392293362094323, |
|
"grad_norm": 21.270421981811523, |
|
"learning_rate": 0.00019903594492665558, |
|
"loss": 3.2547, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.014119031277576233, |
|
"grad_norm": 21.60205841064453, |
|
"learning_rate": 0.0001990041304756434, |
|
"loss": 2.577, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.014315128934209236, |
|
"grad_norm": 10.01419734954834, |
|
"learning_rate": 0.00019897180218885507, |
|
"loss": 1.9092, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.01451122659084224, |
|
"grad_norm": 14.10943603515625, |
|
"learning_rate": 0.00019893896023407578, |
|
"loss": 2.2377, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.014707324247475242, |
|
"grad_norm": 11.310667037963867, |
|
"learning_rate": 0.0001989056047817567, |
|
"loss": 1.6645, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.014903421904108247, |
|
"grad_norm": 6.586666107177734, |
|
"learning_rate": 0.0001988717360050141, |
|
"loss": 2.2651, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.015099519560741249, |
|
"grad_norm": 4.402716159820557, |
|
"learning_rate": 0.00019883735407962846, |
|
"loss": 1.3483, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.015295617217374253, |
|
"grad_norm": 9.384387016296387, |
|
"learning_rate": 0.00019880245918404342, |
|
"loss": 2.6391, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.015491714874007256, |
|
"grad_norm": 6.753894329071045, |
|
"learning_rate": 0.000198767051499365, |
|
"loss": 2.9391, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.01568781253064026, |
|
"grad_norm": 6.399787902832031, |
|
"learning_rate": 0.00019873113120936074, |
|
"loss": 3.7452, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.01588391018727326, |
|
"grad_norm": 8.880107879638672, |
|
"learning_rate": 0.00019869469850045842, |
|
"loss": 1.2771, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.016080007843906265, |
|
"grad_norm": 12.630661964416504, |
|
"learning_rate": 0.00019865775356174545, |
|
"loss": 2.2072, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.01627610550053927, |
|
"grad_norm": 7.974503993988037, |
|
"learning_rate": 0.00019862029658496762, |
|
"loss": 1.9795, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.016472203157172273, |
|
"grad_norm": 50.43594741821289, |
|
"learning_rate": 0.00019858232776452837, |
|
"loss": 1.5331, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.016668300813805274, |
|
"grad_norm": 7.273484230041504, |
|
"learning_rate": 0.00019854384729748746, |
|
"loss": 2.4005, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.016864398470438278, |
|
"grad_norm": 5.826492786407471, |
|
"learning_rate": 0.00019850485538356027, |
|
"loss": 2.1915, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.017060496127071282, |
|
"grad_norm": 9.881019592285156, |
|
"learning_rate": 0.0001984653522251165, |
|
"loss": 2.3309, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.017256593783704286, |
|
"grad_norm": 9.147713661193848, |
|
"learning_rate": 0.00019842533802717923, |
|
"loss": 1.1404, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.017452691440337287, |
|
"grad_norm": 13.98263931274414, |
|
"learning_rate": 0.00019838481299742398, |
|
"loss": 1.2166, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.01764878909697029, |
|
"grad_norm": 8.206791877746582, |
|
"learning_rate": 0.0001983437773461774, |
|
"loss": 2.6039, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.017844886753603295, |
|
"grad_norm": 10.445443153381348, |
|
"learning_rate": 0.00019830223128641637, |
|
"loss": 2.3554, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0180409844102363, |
|
"grad_norm": 11.756292343139648, |
|
"learning_rate": 0.00019826017503376666, |
|
"loss": 1.7371, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.0182370820668693, |
|
"grad_norm": 7.509032249450684, |
|
"learning_rate": 0.00019821760880650214, |
|
"loss": 1.389, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.018433179723502304, |
|
"grad_norm": 8.619280815124512, |
|
"learning_rate": 0.00019817453282554333, |
|
"loss": 1.6818, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.01862927738013531, |
|
"grad_norm": 9.11640739440918, |
|
"learning_rate": 0.00019813094731445654, |
|
"loss": 1.631, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.01882537503676831, |
|
"grad_norm": 14.109521865844727, |
|
"learning_rate": 0.00019808685249945245, |
|
"loss": 2.0497, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.019021472693401313, |
|
"grad_norm": 10.804281234741211, |
|
"learning_rate": 0.00019804224860938506, |
|
"loss": 2.2364, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.019217570350034317, |
|
"grad_norm": 7.363731384277344, |
|
"learning_rate": 0.0001979971358757505, |
|
"loss": 1.0967, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.01941366800666732, |
|
"grad_norm": 15.269912719726562, |
|
"learning_rate": 0.0001979515145326859, |
|
"loss": 2.8752, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.019609765663300322, |
|
"grad_norm": 5.457535266876221, |
|
"learning_rate": 0.000197905384816968, |
|
"loss": 1.7098, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.019805863319933326, |
|
"grad_norm": 4.689967632293701, |
|
"learning_rate": 0.00019785874696801202, |
|
"loss": 2.2133, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.02000196097656633, |
|
"grad_norm": 10.993409156799316, |
|
"learning_rate": 0.00019781160122787046, |
|
"loss": 2.314, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.020198058633199335, |
|
"grad_norm": 8.199251174926758, |
|
"learning_rate": 0.00019776394784123177, |
|
"loss": 2.5164, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.020394156289832335, |
|
"grad_norm": 15.144885063171387, |
|
"learning_rate": 0.00019771578705541916, |
|
"loss": 2.0058, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.02059025394646534, |
|
"grad_norm": 5.252450466156006, |
|
"learning_rate": 0.00019766711912038915, |
|
"loss": 1.7012, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.020786351603098344, |
|
"grad_norm": 8.265049934387207, |
|
"learning_rate": 0.0001976179442887305, |
|
"loss": 1.8646, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.020982449259731348, |
|
"grad_norm": 8.365408897399902, |
|
"learning_rate": 0.00019756826281566272, |
|
"loss": 1.9615, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.02117854691636435, |
|
"grad_norm": 7.514213562011719, |
|
"learning_rate": 0.00019751807495903484, |
|
"loss": 1.4897, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.021374644572997353, |
|
"grad_norm": 15.234655380249023, |
|
"learning_rate": 0.00019746738097932407, |
|
"loss": 2.0467, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.021570742229630357, |
|
"grad_norm": 6.856448650360107, |
|
"learning_rate": 0.0001974161811396343, |
|
"loss": 1.4492, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.021766839886263357, |
|
"grad_norm": 7.893224716186523, |
|
"learning_rate": 0.00019736447570569503, |
|
"loss": 1.919, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.02196293754289636, |
|
"grad_norm": 8.966511726379395, |
|
"learning_rate": 0.0001973122649458597, |
|
"loss": 2.4484, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.022159035199529366, |
|
"grad_norm": 7.631579875946045, |
|
"learning_rate": 0.00019725954913110442, |
|
"loss": 1.4992, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.02235513285616237, |
|
"grad_norm": 7.418518543243408, |
|
"learning_rate": 0.0001972063285350266, |
|
"loss": 0.8401, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.02255123051279537, |
|
"grad_norm": 7.739930629730225, |
|
"learning_rate": 0.00019715260343384347, |
|
"loss": 2.0713, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.022747328169428375, |
|
"grad_norm": 6.441893100738525, |
|
"learning_rate": 0.00019709837410639063, |
|
"loss": 1.4438, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.02294342582606138, |
|
"grad_norm": 6.008083820343018, |
|
"learning_rate": 0.0001970436408341207, |
|
"loss": 1.3503, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.023139523482694383, |
|
"grad_norm": 7.100820541381836, |
|
"learning_rate": 0.00019698840390110176, |
|
"loss": 1.4726, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.023335621139327384, |
|
"grad_norm": 10.213756561279297, |
|
"learning_rate": 0.0001969326635940159, |
|
"loss": 0.8107, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.023531718795960388, |
|
"grad_norm": 5.251387119293213, |
|
"learning_rate": 0.00019687642020215775, |
|
"loss": 1.5542, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.023727816452593392, |
|
"grad_norm": 6.100740432739258, |
|
"learning_rate": 0.00019681967401743297, |
|
"loss": 1.2512, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.023923914109226396, |
|
"grad_norm": 7.356696128845215, |
|
"learning_rate": 0.00019676242533435678, |
|
"loss": 2.4725, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.024120011765859397, |
|
"grad_norm": 11.542431831359863, |
|
"learning_rate": 0.00019670467445005233, |
|
"loss": 3.0307, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.0243161094224924, |
|
"grad_norm": 12.166086196899414, |
|
"learning_rate": 0.00019664642166424928, |
|
"loss": 1.2784, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.024512207079125405, |
|
"grad_norm": 5.222433090209961, |
|
"learning_rate": 0.00019658766727928206, |
|
"loss": 1.1759, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.024708304735758406, |
|
"grad_norm": 4.77174711227417, |
|
"learning_rate": 0.00019652841160008858, |
|
"loss": 1.1041, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.02490440239239141, |
|
"grad_norm": 4.879274368286133, |
|
"learning_rate": 0.0001964686549342084, |
|
"loss": 2.6326, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.025100500049024414, |
|
"grad_norm": 14.171689987182617, |
|
"learning_rate": 0.00019640839759178116, |
|
"loss": 3.4144, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.02529659770565742, |
|
"grad_norm": 7.598373889923096, |
|
"learning_rate": 0.00019634763988554522, |
|
"loss": 2.0596, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.02549269536229042, |
|
"grad_norm": 6.88770866394043, |
|
"learning_rate": 0.00019628638213083565, |
|
"loss": 1.4691, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.025688793018923423, |
|
"grad_norm": 7.128096580505371, |
|
"learning_rate": 0.00019622462464558295, |
|
"loss": 1.3307, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.025884890675556427, |
|
"grad_norm": 6.430881500244141, |
|
"learning_rate": 0.00019616236775031113, |
|
"loss": 0.9491, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.02608098833218943, |
|
"grad_norm": 9.912070274353027, |
|
"learning_rate": 0.00019609961176813624, |
|
"loss": 2.5006, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.026277085988822432, |
|
"grad_norm": 8.550467491149902, |
|
"learning_rate": 0.0001960363570247645, |
|
"loss": 2.4952, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.026473183645455436, |
|
"grad_norm": 4.201476573944092, |
|
"learning_rate": 0.0001959726038484909, |
|
"loss": 0.9033, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.02666928130208844, |
|
"grad_norm": 5.774847984313965, |
|
"learning_rate": 0.00019590835257019714, |
|
"loss": 2.1291, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.026865378958721445, |
|
"grad_norm": 8.179195404052734, |
|
"learning_rate": 0.00019584360352335023, |
|
"loss": 2.7527, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.027061476615354445, |
|
"grad_norm": 15.658841133117676, |
|
"learning_rate": 0.0001957783570440005, |
|
"loss": 1.8304, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.02725757427198745, |
|
"grad_norm": 5.7399163246154785, |
|
"learning_rate": 0.0001957126134707801, |
|
"loss": 1.7071, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.027453671928620454, |
|
"grad_norm": 5.0817389488220215, |
|
"learning_rate": 0.00019564637314490108, |
|
"loss": 1.8933, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.027649769585253458, |
|
"grad_norm": 5.634946346282959, |
|
"learning_rate": 0.0001955796364101535, |
|
"loss": 1.7343, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.02784586724188646, |
|
"grad_norm": 6.406938552856445, |
|
"learning_rate": 0.00019551240361290407, |
|
"loss": 2.3013, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.028041964898519463, |
|
"grad_norm": 8.239458084106445, |
|
"learning_rate": 0.00019544467510209388, |
|
"loss": 1.2177, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.028238062555152467, |
|
"grad_norm": 11.887965202331543, |
|
"learning_rate": 0.0001953764512292369, |
|
"loss": 2.4312, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.028434160211785468, |
|
"grad_norm": 7.482359409332275, |
|
"learning_rate": 0.00019530773234841803, |
|
"loss": 1.1083, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.028630257868418472, |
|
"grad_norm": 8.86729621887207, |
|
"learning_rate": 0.00019523851881629126, |
|
"loss": 1.6451, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.028826355525051476, |
|
"grad_norm": 7.395509719848633, |
|
"learning_rate": 0.0001951688109920778, |
|
"loss": 1.31, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.02902245318168448, |
|
"grad_norm": 4.955163955688477, |
|
"learning_rate": 0.00019509860923756442, |
|
"loss": 2.5206, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.02921855083831748, |
|
"grad_norm": 5.034746170043945, |
|
"learning_rate": 0.00019502791391710125, |
|
"loss": 0.9336, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.029414648494950485, |
|
"grad_norm": 12.375234603881836, |
|
"learning_rate": 0.00019495672539760007, |
|
"loss": 2.1276, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.02961074615158349, |
|
"grad_norm": 5.832932949066162, |
|
"learning_rate": 0.00019488504404853248, |
|
"loss": 1.3252, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.029806843808216493, |
|
"grad_norm": 5.934417724609375, |
|
"learning_rate": 0.00019481287024192775, |
|
"loss": 1.5907, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.030002941464849494, |
|
"grad_norm": 9.238896369934082, |
|
"learning_rate": 0.00019474020435237117, |
|
"loss": 1.1184, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.030199039121482498, |
|
"grad_norm": 9.787931442260742, |
|
"learning_rate": 0.00019466704675700185, |
|
"loss": 1.4931, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.030395136778115502, |
|
"grad_norm": 7.260796070098877, |
|
"learning_rate": 0.00019459339783551094, |
|
"loss": 0.8924, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.030591234434748506, |
|
"grad_norm": 8.712836265563965, |
|
"learning_rate": 0.00019451925797013954, |
|
"loss": 1.586, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.030787332091381507, |
|
"grad_norm": 11.15104866027832, |
|
"learning_rate": 0.00019444462754567682, |
|
"loss": 1.5007, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.03098342974801451, |
|
"grad_norm": 7.158255100250244, |
|
"learning_rate": 0.00019436950694945798, |
|
"loss": 2.4118, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.031179527404647515, |
|
"grad_norm": 11.58385944366455, |
|
"learning_rate": 0.00019429389657136213, |
|
"loss": 2.1638, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.03137562506128052, |
|
"grad_norm": 7.469117641448975, |
|
"learning_rate": 0.00019421779680381054, |
|
"loss": 3.0682, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.031571722717913524, |
|
"grad_norm": 10.78966999053955, |
|
"learning_rate": 0.00019414120804176426, |
|
"loss": 1.1822, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.03176782037454652, |
|
"grad_norm": 9.68694019317627, |
|
"learning_rate": 0.00019406413068272238, |
|
"loss": 2.5351, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.031963918031179525, |
|
"grad_norm": 11.67428970336914, |
|
"learning_rate": 0.00019398656512671972, |
|
"loss": 1.9244, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.03216001568781253, |
|
"grad_norm": 12.72513198852539, |
|
"learning_rate": 0.00019390851177632497, |
|
"loss": 3.2138, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.03235611334444553, |
|
"grad_norm": 8.345921516418457, |
|
"learning_rate": 0.00019382997103663838, |
|
"loss": 2.6435, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03255221100107854, |
|
"grad_norm": 7.740304470062256, |
|
"learning_rate": 0.0001937509433152899, |
|
"loss": 0.8189, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.03274830865771154, |
|
"grad_norm": 9.329862594604492, |
|
"learning_rate": 0.0001936714290224368, |
|
"loss": 1.4106, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.032944406314344546, |
|
"grad_norm": 7.179844379425049, |
|
"learning_rate": 0.00019359142857076176, |
|
"loss": 1.8125, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.03314050397097755, |
|
"grad_norm": 7.835447311401367, |
|
"learning_rate": 0.00019351094237547066, |
|
"loss": 1.6617, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.03333660162761055, |
|
"grad_norm": 6.018518924713135, |
|
"learning_rate": 0.0001934299708542904, |
|
"loss": 2.4333, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.03353269928424355, |
|
"grad_norm": 8.176468849182129, |
|
"learning_rate": 0.00019334851442746664, |
|
"loss": 2.5915, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.033728796940876556, |
|
"grad_norm": 8.241739273071289, |
|
"learning_rate": 0.00019326657351776186, |
|
"loss": 1.666, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.03392489459750956, |
|
"grad_norm": 8.064835548400879, |
|
"learning_rate": 0.000193184148550453, |
|
"loss": 1.477, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.034120992254142564, |
|
"grad_norm": 5.790217399597168, |
|
"learning_rate": 0.00019310123995332917, |
|
"loss": 0.7703, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.03431708991077557, |
|
"grad_norm": 9.38430118560791, |
|
"learning_rate": 0.00019301784815668974, |
|
"loss": 1.5785, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.03451318756740857, |
|
"grad_norm": 8.252826690673828, |
|
"learning_rate": 0.00019293397359334167, |
|
"loss": 2.1462, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.03470928522404157, |
|
"grad_norm": 12.65652847290039, |
|
"learning_rate": 0.00019284961669859766, |
|
"loss": 1.3009, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.034905382880674574, |
|
"grad_norm": 6.8490753173828125, |
|
"learning_rate": 0.00019276477791027374, |
|
"loss": 2.4905, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.03510148053730758, |
|
"grad_norm": 4.2581048011779785, |
|
"learning_rate": 0.0001926794576686869, |
|
"loss": 0.9042, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.03529757819394058, |
|
"grad_norm": 6.415445327758789, |
|
"learning_rate": 0.0001925936564166529, |
|
"loss": 2.238, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.035493675850573586, |
|
"grad_norm": 13.620756149291992, |
|
"learning_rate": 0.00019250737459948405, |
|
"loss": 1.5966, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.03568977350720659, |
|
"grad_norm": 10.609662055969238, |
|
"learning_rate": 0.00019242061266498675, |
|
"loss": 1.081, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.035885871163839594, |
|
"grad_norm": 8.404073715209961, |
|
"learning_rate": 0.00019233337106345925, |
|
"loss": 1.849, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.0360819688204726, |
|
"grad_norm": 5.560455322265625, |
|
"learning_rate": 0.00019224565024768926, |
|
"loss": 1.4533, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.036278066477105596, |
|
"grad_norm": 7.896220684051514, |
|
"learning_rate": 0.00019215745067295169, |
|
"loss": 2.482, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.0364741641337386, |
|
"grad_norm": 9.554024696350098, |
|
"learning_rate": 0.00019206877279700612, |
|
"loss": 1.9367, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.036670261790371604, |
|
"grad_norm": 3.333113193511963, |
|
"learning_rate": 0.00019197961708009473, |
|
"loss": 1.1477, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.03686635944700461, |
|
"grad_norm": 9.468240737915039, |
|
"learning_rate": 0.00019188998398493953, |
|
"loss": 1.0849, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.03706245710363761, |
|
"grad_norm": 10.807921409606934, |
|
"learning_rate": 0.00019179987397674022, |
|
"loss": 2.0192, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.03725855476027062, |
|
"grad_norm": 7.14724588394165, |
|
"learning_rate": 0.0001917092875231717, |
|
"loss": 2.1502, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.03745465241690362, |
|
"grad_norm": 12.262707710266113, |
|
"learning_rate": 0.00019161822509438162, |
|
"loss": 2.423, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.03765075007353662, |
|
"grad_norm": 35.0489387512207, |
|
"learning_rate": 0.000191526687162988, |
|
"loss": 2.5959, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.03784684773016962, |
|
"grad_norm": 6.615735054016113, |
|
"learning_rate": 0.0001914346742040767, |
|
"loss": 1.7733, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.038042945386802626, |
|
"grad_norm": 4.537426471710205, |
|
"learning_rate": 0.00019134218669519896, |
|
"loss": 1.0028, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.03823904304343563, |
|
"grad_norm": 5.247801303863525, |
|
"learning_rate": 0.00019124922511636912, |
|
"loss": 0.8412, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.038435140700068635, |
|
"grad_norm": 6.2183918952941895, |
|
"learning_rate": 0.00019115578995006173, |
|
"loss": 1.7212, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.03863123835670164, |
|
"grad_norm": 9.330825805664062, |
|
"learning_rate": 0.00019106188168120948, |
|
"loss": 1.5341, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.03882733601333464, |
|
"grad_norm": 9.86260986328125, |
|
"learning_rate": 0.00019096750079720037, |
|
"loss": 2.765, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.03902343366996765, |
|
"grad_norm": 10.341052055358887, |
|
"learning_rate": 0.00019087264778787534, |
|
"loss": 1.9024, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.039219531326600644, |
|
"grad_norm": 9.549159049987793, |
|
"learning_rate": 0.00019077732314552566, |
|
"loss": 1.2644, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.03941562898323365, |
|
"grad_norm": 5.25094747543335, |
|
"learning_rate": 0.00019068152736489036, |
|
"loss": 1.334, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.03961172663986665, |
|
"grad_norm": 7.197662830352783, |
|
"learning_rate": 0.00019058526094315378, |
|
"loss": 1.9093, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.03980782429649966, |
|
"grad_norm": 8.476766586303711, |
|
"learning_rate": 0.0001904885243799429, |
|
"loss": 1.477, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.04000392195313266, |
|
"grad_norm": 8.232537269592285, |
|
"learning_rate": 0.00019039131817732462, |
|
"loss": 1.4013, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.040200019609765665, |
|
"grad_norm": 15.687997817993164, |
|
"learning_rate": 0.0001902936428398035, |
|
"loss": 1.6772, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.04039611726639867, |
|
"grad_norm": 7.573246479034424, |
|
"learning_rate": 0.00019019549887431877, |
|
"loss": 1.5007, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.040592214923031666, |
|
"grad_norm": 11.531679153442383, |
|
"learning_rate": 0.0001900968867902419, |
|
"loss": 2.6798, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.04078831257966467, |
|
"grad_norm": 6.225399494171143, |
|
"learning_rate": 0.00018999780709937398, |
|
"loss": 1.3078, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.040984410236297675, |
|
"grad_norm": 10.358306884765625, |
|
"learning_rate": 0.0001898982603159429, |
|
"loss": 1.7353, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.04118050789293068, |
|
"grad_norm": 8.146821975708008, |
|
"learning_rate": 0.00018979824695660087, |
|
"loss": 1.415, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04137660554956368, |
|
"grad_norm": 4.390834808349609, |
|
"learning_rate": 0.00018969776754042156, |
|
"loss": 1.7612, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.04157270320619669, |
|
"grad_norm": 7.958174228668213, |
|
"learning_rate": 0.0001895968225888976, |
|
"loss": 2.6614, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.04176880086282969, |
|
"grad_norm": 9.981225967407227, |
|
"learning_rate": 0.00018949541262593762, |
|
"loss": 2.0158, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.041964898519462696, |
|
"grad_norm": 4.456605911254883, |
|
"learning_rate": 0.00018939353817786387, |
|
"loss": 1.0621, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.04216099617609569, |
|
"grad_norm": 7.546274662017822, |
|
"learning_rate": 0.00018929119977340917, |
|
"loss": 1.7333, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.0423570938327287, |
|
"grad_norm": 11.629569053649902, |
|
"learning_rate": 0.0001891883979437143, |
|
"loss": 1.4268, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.0425531914893617, |
|
"grad_norm": 17.710948944091797, |
|
"learning_rate": 0.00018908513322232528, |
|
"loss": 2.8701, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.042749289145994705, |
|
"grad_norm": 6.267049789428711, |
|
"learning_rate": 0.00018898140614519054, |
|
"loss": 1.6313, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.04294538680262771, |
|
"grad_norm": 4.971591949462891, |
|
"learning_rate": 0.00018887721725065814, |
|
"loss": 2.0962, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.043141484459260714, |
|
"grad_norm": 5.603585243225098, |
|
"learning_rate": 0.00018877256707947306, |
|
"loss": 0.6683, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.04333758211589372, |
|
"grad_norm": 6.029137134552002, |
|
"learning_rate": 0.00018866745617477423, |
|
"loss": 1.5375, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.043533679772526715, |
|
"grad_norm": 7.4105143547058105, |
|
"learning_rate": 0.00018856188508209183, |
|
"loss": 1.9524, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.04372977742915972, |
|
"grad_norm": 8.321500778198242, |
|
"learning_rate": 0.00018845585434934452, |
|
"loss": 2.1109, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.04392587508579272, |
|
"grad_norm": 9.238992691040039, |
|
"learning_rate": 0.00018834936452683638, |
|
"loss": 1.4247, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.04412197274242573, |
|
"grad_norm": 5.125700950622559, |
|
"learning_rate": 0.00018824241616725434, |
|
"loss": 1.1266, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.04431807039905873, |
|
"grad_norm": 7.538069725036621, |
|
"learning_rate": 0.000188135009825665, |
|
"loss": 2.1554, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.044514168055691736, |
|
"grad_norm": 8.309137344360352, |
|
"learning_rate": 0.00018802714605951199, |
|
"loss": 1.1435, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.04471026571232474, |
|
"grad_norm": 22.02942657470703, |
|
"learning_rate": 0.00018791882542861302, |
|
"loss": 1.8154, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.044906363368957744, |
|
"grad_norm": 7.017299652099609, |
|
"learning_rate": 0.0001878100484951569, |
|
"loss": 1.4998, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.04510246102559074, |
|
"grad_norm": 18.39406394958496, |
|
"learning_rate": 0.00018770081582370068, |
|
"loss": 2.1662, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.045298558682223745, |
|
"grad_norm": 9.11802864074707, |
|
"learning_rate": 0.0001875911279811667, |
|
"loss": 0.7446, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.04549465633885675, |
|
"grad_norm": 7.193735122680664, |
|
"learning_rate": 0.00018748098553683968, |
|
"loss": 1.9472, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.045690753995489754, |
|
"grad_norm": 23.407245635986328, |
|
"learning_rate": 0.0001873703890623637, |
|
"loss": 2.1782, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.04588685165212276, |
|
"grad_norm": 6.547053813934326, |
|
"learning_rate": 0.00018725933913173938, |
|
"loss": 1.9687, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.04608294930875576, |
|
"grad_norm": 10.576699256896973, |
|
"learning_rate": 0.00018714783632132068, |
|
"loss": 1.8832, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.046279046965388766, |
|
"grad_norm": 5.852027416229248, |
|
"learning_rate": 0.00018703588120981207, |
|
"loss": 1.8932, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.04647514462202176, |
|
"grad_norm": 7.023755073547363, |
|
"learning_rate": 0.00018692347437826548, |
|
"loss": 3.7953, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.04667124227865477, |
|
"grad_norm": 13.61612606048584, |
|
"learning_rate": 0.00018681061641007737, |
|
"loss": 1.9077, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.04686733993528777, |
|
"grad_norm": 5.3344526290893555, |
|
"learning_rate": 0.0001866973078909854, |
|
"loss": 1.4342, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.047063437591920776, |
|
"grad_norm": 38.80408477783203, |
|
"learning_rate": 0.00018658354940906586, |
|
"loss": 2.3665, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.04725953524855378, |
|
"grad_norm": 9.670344352722168, |
|
"learning_rate": 0.00018646934155473022, |
|
"loss": 0.9006, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.047455632905186784, |
|
"grad_norm": 5.1102495193481445, |
|
"learning_rate": 0.00018635468492072228, |
|
"loss": 1.2289, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.04765173056181979, |
|
"grad_norm": 9.1209077835083, |
|
"learning_rate": 0.00018623958010211493, |
|
"loss": 1.6009, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.04784782821845279, |
|
"grad_norm": 16.793027877807617, |
|
"learning_rate": 0.0001861240276963073, |
|
"loss": 0.94, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.04804392587508579, |
|
"grad_norm": 6.90054988861084, |
|
"learning_rate": 0.00018600802830302134, |
|
"loss": 1.559, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.048240023531718794, |
|
"grad_norm": 13.111268043518066, |
|
"learning_rate": 0.0001858915825242991, |
|
"loss": 2.1186, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.0484361211883518, |
|
"grad_norm": 6.356579780578613, |
|
"learning_rate": 0.00018577469096449925, |
|
"loss": 1.6653, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.0486322188449848, |
|
"grad_norm": 9.505541801452637, |
|
"learning_rate": 0.00018565735423029404, |
|
"loss": 0.9774, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.048828316501617806, |
|
"grad_norm": 8.927581787109375, |
|
"learning_rate": 0.00018553957293066632, |
|
"loss": 2.6455, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.04902441415825081, |
|
"grad_norm": 7.568793773651123, |
|
"learning_rate": 0.00018542134767690616, |
|
"loss": 1.1464, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.049220511814883815, |
|
"grad_norm": 7.632232189178467, |
|
"learning_rate": 0.00018530267908260784, |
|
"loss": 1.2671, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.04941660947151681, |
|
"grad_norm": 4.4279561042785645, |
|
"learning_rate": 0.00018518356776366657, |
|
"loss": 2.0384, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.049612707128149816, |
|
"grad_norm": 10.818602561950684, |
|
"learning_rate": 0.00018506401433827528, |
|
"loss": 1.0559, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.04980880478478282, |
|
"grad_norm": 5.57148551940918, |
|
"learning_rate": 0.00018494401942692153, |
|
"loss": 0.9603, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.050004902441415824, |
|
"grad_norm": 11.1985502243042, |
|
"learning_rate": 0.00018482358365238413, |
|
"loss": 2.4928, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05020100009804883, |
|
"grad_norm": 4.890799522399902, |
|
"learning_rate": 0.00018470270763973004, |
|
"loss": 1.4034, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.05039709775468183, |
|
"grad_norm": 6.2078680992126465, |
|
"learning_rate": 0.00018458139201631108, |
|
"loss": 1.782, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.05059319541131484, |
|
"grad_norm": 24.89278221130371, |
|
"learning_rate": 0.00018445963741176065, |
|
"loss": 3.7879, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.05078929306794784, |
|
"grad_norm": 5.363570213317871, |
|
"learning_rate": 0.00018433744445799045, |
|
"loss": 1.4292, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.05098539072458084, |
|
"grad_norm": 7.669764041900635, |
|
"learning_rate": 0.0001842148137891873, |
|
"loss": 2.0483, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.05118148838121384, |
|
"grad_norm": 5.229150295257568, |
|
"learning_rate": 0.00018409174604180976, |
|
"loss": 3.2863, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.05137758603784685, |
|
"grad_norm": 5.850373268127441, |
|
"learning_rate": 0.0001839682418545848, |
|
"loss": 1.8197, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.05157368369447985, |
|
"grad_norm": 7.138283729553223, |
|
"learning_rate": 0.00018384430186850454, |
|
"loss": 2.7101, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.051769781351112855, |
|
"grad_norm": 10.918169975280762, |
|
"learning_rate": 0.000183719926726823, |
|
"loss": 1.8243, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.05196587900774586, |
|
"grad_norm": 9.205517768859863, |
|
"learning_rate": 0.00018359511707505258, |
|
"loss": 1.4992, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.05216197666437886, |
|
"grad_norm": 8.567139625549316, |
|
"learning_rate": 0.00018346987356096086, |
|
"loss": 1.051, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.05235807432101187, |
|
"grad_norm": 10.313075065612793, |
|
"learning_rate": 0.00018334419683456717, |
|
"loss": 2.6062, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.052554171977644865, |
|
"grad_norm": 7.515801906585693, |
|
"learning_rate": 0.0001832180875481392, |
|
"loss": 1.266, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.05275026963427787, |
|
"grad_norm": 5.345809459686279, |
|
"learning_rate": 0.00018309154635618965, |
|
"loss": 1.2526, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.05294636729091087, |
|
"grad_norm": 13.568882942199707, |
|
"learning_rate": 0.00018296457391547296, |
|
"loss": 2.5183, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.05314246494754388, |
|
"grad_norm": 10.022235870361328, |
|
"learning_rate": 0.00018283717088498155, |
|
"loss": 2.2774, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.05333856260417688, |
|
"grad_norm": 6.537176132202148, |
|
"learning_rate": 0.0001827093379259428, |
|
"loss": 1.4989, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.053534660260809885, |
|
"grad_norm": 17.213987350463867, |
|
"learning_rate": 0.00018258107570181533, |
|
"loss": 2.4885, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.05373075791744289, |
|
"grad_norm": 6.48647403717041, |
|
"learning_rate": 0.00018245238487828573, |
|
"loss": 1.2309, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.05392685557407589, |
|
"grad_norm": 5.479822158813477, |
|
"learning_rate": 0.000182323266123265, |
|
"loss": 1.8959, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.05412295323070889, |
|
"grad_norm": 7.716124534606934, |
|
"learning_rate": 0.00018219372010688515, |
|
"loss": 1.8321, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.054319050887341895, |
|
"grad_norm": 9.968965530395508, |
|
"learning_rate": 0.00018206374750149567, |
|
"loss": 4.1652, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.0545151485439749, |
|
"grad_norm": 6.009235382080078, |
|
"learning_rate": 0.00018193334898166007, |
|
"loss": 0.8178, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.0547112462006079, |
|
"grad_norm": 8.031886100769043, |
|
"learning_rate": 0.00018180252522415242, |
|
"loss": 1.783, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.05490734385724091, |
|
"grad_norm": 5.5589680671691895, |
|
"learning_rate": 0.00018167127690795368, |
|
"loss": 1.3049, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.05510344151387391, |
|
"grad_norm": 5.04995059967041, |
|
"learning_rate": 0.0001815396047142485, |
|
"loss": 0.8962, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.055299539170506916, |
|
"grad_norm": 5.3526692390441895, |
|
"learning_rate": 0.0001814075093264212, |
|
"loss": 1.201, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.05549563682713991, |
|
"grad_norm": 11.980429649353027, |
|
"learning_rate": 0.00018127499143005268, |
|
"loss": 0.6955, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.05569173448377292, |
|
"grad_norm": 38.28229904174805, |
|
"learning_rate": 0.00018114205171291663, |
|
"loss": 1.7335, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.05588783214040592, |
|
"grad_norm": 6.15138053894043, |
|
"learning_rate": 0.000181008690864976, |
|
"loss": 1.2766, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.056083929797038926, |
|
"grad_norm": 7.846836566925049, |
|
"learning_rate": 0.00018087490957837944, |
|
"loss": 1.155, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.05628002745367193, |
|
"grad_norm": 7.675628185272217, |
|
"learning_rate": 0.00018074070854745772, |
|
"loss": 1.6129, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.056476125110304934, |
|
"grad_norm": 12.245649337768555, |
|
"learning_rate": 0.00018060608846872005, |
|
"loss": 1.7585, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.05667222276693794, |
|
"grad_norm": 10.520101547241211, |
|
"learning_rate": 0.00018047105004085053, |
|
"loss": 1.9265, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.056868320423570935, |
|
"grad_norm": 7.400151252746582, |
|
"learning_rate": 0.00018033559396470454, |
|
"loss": 1.4189, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.05706441808020394, |
|
"grad_norm": 12.058060646057129, |
|
"learning_rate": 0.00018019972094330503, |
|
"loss": 2.3312, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.057260515736836944, |
|
"grad_norm": 5.313794136047363, |
|
"learning_rate": 0.00018006343168183893, |
|
"loss": 2.0051, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.05745661339346995, |
|
"grad_norm": 11.182997703552246, |
|
"learning_rate": 0.0001799267268876535, |
|
"loss": 1.4779, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.05765271105010295, |
|
"grad_norm": 16.24866485595703, |
|
"learning_rate": 0.0001797896072702526, |
|
"loss": 2.4689, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.057848808706735956, |
|
"grad_norm": 7.471411228179932, |
|
"learning_rate": 0.00017965207354129307, |
|
"loss": 3.0599, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.05804490636336896, |
|
"grad_norm": 7.715878486633301, |
|
"learning_rate": 0.00017951412641458098, |
|
"loss": 0.8256, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.058241004020001964, |
|
"grad_norm": 22.084482192993164, |
|
"learning_rate": 0.000179375766606068, |
|
"loss": 2.457, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.05843710167663496, |
|
"grad_norm": 8.041847229003906, |
|
"learning_rate": 0.00017923699483384753, |
|
"loss": 1.5642, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.058633199333267966, |
|
"grad_norm": 12.814888000488281, |
|
"learning_rate": 0.00017909781181815117, |
|
"loss": 1.5129, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.05882929698990097, |
|
"grad_norm": 9.216371536254883, |
|
"learning_rate": 0.0001789582182813449, |
|
"loss": 2.0632, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.059025394646533974, |
|
"grad_norm": 12.80371379852295, |
|
"learning_rate": 0.00017881821494792528, |
|
"loss": 2.8705, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.05922149230316698, |
|
"grad_norm": 7.234943389892578, |
|
"learning_rate": 0.00017867780254451576, |
|
"loss": 2.6664, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.05941758995979998, |
|
"grad_norm": 11.168726921081543, |
|
"learning_rate": 0.00017853698179986282, |
|
"loss": 1.347, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.059613687616432987, |
|
"grad_norm": 19.369266510009766, |
|
"learning_rate": 0.00017839575344483238, |
|
"loss": 2.68, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.059809785273065984, |
|
"grad_norm": 7.1730570793151855, |
|
"learning_rate": 0.0001782541182124057, |
|
"loss": 2.3908, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.06000588292969899, |
|
"grad_norm": 7.243929862976074, |
|
"learning_rate": 0.0001781120768376759, |
|
"loss": 1.0056, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.06020198058633199, |
|
"grad_norm": 7.748988628387451, |
|
"learning_rate": 0.00017796963005784394, |
|
"loss": 2.1776, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.060398078242964996, |
|
"grad_norm": 13.446945190429688, |
|
"learning_rate": 0.0001778267786122148, |
|
"loss": 2.3275, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.060594175899598, |
|
"grad_norm": 10.720627784729004, |
|
"learning_rate": 0.0001776835232421938, |
|
"loss": 1.046, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.060790273556231005, |
|
"grad_norm": 11.274985313415527, |
|
"learning_rate": 0.00017753986469128257, |
|
"loss": 2.4269, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.06098637121286401, |
|
"grad_norm": 8.671335220336914, |
|
"learning_rate": 0.00017739580370507532, |
|
"loss": 2.1488, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.06118246886949701, |
|
"grad_norm": 8.375978469848633, |
|
"learning_rate": 0.0001772513410312548, |
|
"loss": 1.8458, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.06137856652613001, |
|
"grad_norm": 11.178112983703613, |
|
"learning_rate": 0.00017710647741958868, |
|
"loss": 2.7169, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.061574664182763014, |
|
"grad_norm": 8.29799747467041, |
|
"learning_rate": 0.00017696121362192544, |
|
"loss": 1.455, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.06177076183939602, |
|
"grad_norm": 6.712766647338867, |
|
"learning_rate": 0.00017681555039219054, |
|
"loss": 1.2604, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.06196685949602902, |
|
"grad_norm": 7.891608238220215, |
|
"learning_rate": 0.00017666948848638257, |
|
"loss": 2.1795, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.06216295715266203, |
|
"grad_norm": 5.039219379425049, |
|
"learning_rate": 0.00017652302866256916, |
|
"loss": 0.9069, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.06235905480929503, |
|
"grad_norm": 9.421103477478027, |
|
"learning_rate": 0.00017637617168088325, |
|
"loss": 2.4256, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.06255515246592804, |
|
"grad_norm": 4.435902118682861, |
|
"learning_rate": 0.000176228918303519, |
|
"loss": 1.9269, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.06275125012256104, |
|
"grad_norm": 10.938987731933594, |
|
"learning_rate": 0.00017608126929472795, |
|
"loss": 1.4649, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.06294734777919404, |
|
"grad_norm": 6.332970142364502, |
|
"learning_rate": 0.00017593322542081485, |
|
"loss": 2.0089, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.06314344543582705, |
|
"grad_norm": 6.731532573699951, |
|
"learning_rate": 0.00017578478745013392, |
|
"loss": 2.4046, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.06333954309246005, |
|
"grad_norm": 8.772012710571289, |
|
"learning_rate": 0.00017563595615308474, |
|
"loss": 1.4935, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.06353564074909304, |
|
"grad_norm": 5.693745136260986, |
|
"learning_rate": 0.00017548673230210823, |
|
"loss": 1.848, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.06373173840572605, |
|
"grad_norm": 15.056157112121582, |
|
"learning_rate": 0.0001753371166716828, |
|
"loss": 1.4598, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.06392783606235905, |
|
"grad_norm": 9.370506286621094, |
|
"learning_rate": 0.00017518711003832002, |
|
"loss": 1.4809, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.06412393371899205, |
|
"grad_norm": 19.398839950561523, |
|
"learning_rate": 0.000175036713180561, |
|
"loss": 1.0093, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.06432003137562506, |
|
"grad_norm": 4.393742084503174, |
|
"learning_rate": 0.00017488592687897193, |
|
"loss": 0.817, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.06451612903225806, |
|
"grad_norm": 6.7713799476623535, |
|
"learning_rate": 0.00017473475191614037, |
|
"loss": 2.1701, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.06471222668889107, |
|
"grad_norm": 5.920267581939697, |
|
"learning_rate": 0.00017458318907667098, |
|
"loss": 3.3491, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.06490832434552407, |
|
"grad_norm": 15.095996856689453, |
|
"learning_rate": 0.0001744312391471816, |
|
"loss": 1.7637, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.06510442200215708, |
|
"grad_norm": 9.470211029052734, |
|
"learning_rate": 0.00017427890291629893, |
|
"loss": 2.7744, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.06530051965879008, |
|
"grad_norm": 9.082067489624023, |
|
"learning_rate": 0.00017412618117465477, |
|
"loss": 3.1791, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.06549661731542308, |
|
"grad_norm": 5.174635410308838, |
|
"learning_rate": 0.0001739730747148816, |
|
"loss": 1.2189, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.06569271497205609, |
|
"grad_norm": 5.053405284881592, |
|
"learning_rate": 0.00017381958433160865, |
|
"loss": 1.7119, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.06588881262868909, |
|
"grad_norm": 5.771046161651611, |
|
"learning_rate": 0.0001736657108214578, |
|
"loss": 1.4188, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.0660849102853221, |
|
"grad_norm": 8.400517463684082, |
|
"learning_rate": 0.00017351145498303925, |
|
"loss": 2.3167, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.0662810079419551, |
|
"grad_norm": 4.6646728515625, |
|
"learning_rate": 0.0001733568176169476, |
|
"loss": 1.2102, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.06647710559858809, |
|
"grad_norm": 8.288646697998047, |
|
"learning_rate": 0.0001732017995257575, |
|
"loss": 2.4803, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.0666732032552211, |
|
"grad_norm": 10.970074653625488, |
|
"learning_rate": 0.00017304640151401967, |
|
"loss": 2.5839, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.0668693009118541, |
|
"grad_norm": 6.0125732421875, |
|
"learning_rate": 0.00017289062438825665, |
|
"loss": 1.5807, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0670653985684871, |
|
"grad_norm": 5.844028472900391, |
|
"learning_rate": 0.0001727344689569585, |
|
"loss": 3.34, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.06726149622512011, |
|
"grad_norm": 7.1026387214660645, |
|
"learning_rate": 0.00017257793603057871, |
|
"loss": 1.4347, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.06745759388175311, |
|
"grad_norm": 9.198262214660645, |
|
"learning_rate": 0.00017242102642153016, |
|
"loss": 1.834, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.06765369153838612, |
|
"grad_norm": 5.76854133605957, |
|
"learning_rate": 0.00017226374094418044, |
|
"loss": 0.9294, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.06784978919501912, |
|
"grad_norm": 10.319186210632324, |
|
"learning_rate": 0.0001721060804148482, |
|
"loss": 2.0088, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.06804588685165212, |
|
"grad_norm": 22.298240661621094, |
|
"learning_rate": 0.00017194804565179842, |
|
"loss": 2.6901, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.06824198450828513, |
|
"grad_norm": 11.38401985168457, |
|
"learning_rate": 0.00017178963747523847, |
|
"loss": 2.6342, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.06824198450828513, |
|
"eval_loss": 0.4400941729545593, |
|
"eval_runtime": 78.7276, |
|
"eval_samples_per_second": 27.284, |
|
"eval_steps_per_second": 13.642, |
|
"step": 348 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1389, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 348, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.492671941640192e+16, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |