{
  "best_metric": 2.718132734298706,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.6019563581640331,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0030097817908201654,
      "grad_norm": 1.0383433103561401,
      "learning_rate": 1e-05,
      "loss": 2.837,
      "step": 1
    },
    {
      "epoch": 0.0030097817908201654,
      "eval_loss": 3.62335467338562,
      "eval_runtime": 42.8751,
      "eval_samples_per_second": 13.061,
      "eval_steps_per_second": 3.265,
      "step": 1
    },
    {
      "epoch": 0.006019563581640331,
      "grad_norm": 1.223947525024414,
      "learning_rate": 2e-05,
      "loss": 2.924,
      "step": 2
    },
    {
      "epoch": 0.009029345372460496,
      "grad_norm": 1.2525657415390015,
      "learning_rate": 3e-05,
      "loss": 2.8023,
      "step": 3
    },
    {
      "epoch": 0.012039127163280662,
      "grad_norm": 1.2499439716339111,
      "learning_rate": 4e-05,
      "loss": 2.8144,
      "step": 4
    },
    {
      "epoch": 0.015048908954100828,
      "grad_norm": 0.8609808683395386,
      "learning_rate": 5e-05,
      "loss": 2.7237,
      "step": 5
    },
    {
      "epoch": 0.01805869074492099,
      "grad_norm": 1.0881868600845337,
      "learning_rate": 6e-05,
      "loss": 2.9225,
      "step": 6
    },
    {
      "epoch": 0.021068472535741158,
      "grad_norm": 1.000299334526062,
      "learning_rate": 7e-05,
      "loss": 2.8644,
      "step": 7
    },
    {
      "epoch": 0.024078254326561323,
      "grad_norm": 0.9951403141021729,
      "learning_rate": 8e-05,
      "loss": 2.7948,
      "step": 8
    },
    {
      "epoch": 0.02708803611738149,
      "grad_norm": 1.1025782823562622,
      "learning_rate": 9e-05,
      "loss": 2.853,
      "step": 9
    },
    {
      "epoch": 0.030097817908201655,
      "grad_norm": 1.0384840965270996,
      "learning_rate": 0.0001,
      "loss": 2.8641,
      "step": 10
    },
    {
      "epoch": 0.03310759969902182,
      "grad_norm": 1.3857022523880005,
      "learning_rate": 9.999316524962345e-05,
      "loss": 3.0502,
      "step": 11
    },
    {
      "epoch": 0.03611738148984198,
      "grad_norm": 1.0399494171142578,
      "learning_rate": 9.997266286704631e-05,
      "loss": 2.8511,
      "step": 12
    },
    {
      "epoch": 0.03912716328066215,
      "grad_norm": 1.0963470935821533,
      "learning_rate": 9.993849845741524e-05,
      "loss": 2.9582,
      "step": 13
    },
    {
      "epoch": 0.042136945071482315,
      "grad_norm": 1.1283156871795654,
      "learning_rate": 9.989068136093873e-05,
      "loss": 2.7851,
      "step": 14
    },
    {
      "epoch": 0.045146726862302484,
      "grad_norm": 1.0826599597930908,
      "learning_rate": 9.98292246503335e-05,
      "loss": 2.768,
      "step": 15
    },
    {
      "epoch": 0.04815650865312265,
      "grad_norm": 0.9662471413612366,
      "learning_rate": 9.975414512725057e-05,
      "loss": 2.7946,
      "step": 16
    },
    {
      "epoch": 0.051166290443942816,
      "grad_norm": 0.9118244051933289,
      "learning_rate": 9.966546331768191e-05,
      "loss": 2.7911,
      "step": 17
    },
    {
      "epoch": 0.05417607223476298,
      "grad_norm": 1.045340895652771,
      "learning_rate": 9.956320346634876e-05,
      "loss": 2.8433,
      "step": 18
    },
    {
      "epoch": 0.05718585402558315,
      "grad_norm": 1.2076919078826904,
      "learning_rate": 9.944739353007344e-05,
      "loss": 2.9204,
      "step": 19
    },
    {
      "epoch": 0.06019563581640331,
      "grad_norm": 0.9516964554786682,
      "learning_rate": 9.931806517013612e-05,
      "loss": 2.8679,
      "step": 20
    },
    {
      "epoch": 0.06320541760722348,
      "grad_norm": 0.9742953181266785,
      "learning_rate": 9.917525374361912e-05,
      "loss": 2.6819,
      "step": 21
    },
    {
      "epoch": 0.06621519939804364,
      "grad_norm": 1.2952927350997925,
      "learning_rate": 9.901899829374047e-05,
      "loss": 2.9203,
      "step": 22
    },
    {
      "epoch": 0.0692249811888638,
      "grad_norm": 1.0710830688476562,
      "learning_rate": 9.884934153917997e-05,
      "loss": 2.918,
      "step": 23
    },
    {
      "epoch": 0.07223476297968397,
      "grad_norm": 1.1441292762756348,
      "learning_rate": 9.86663298624003e-05,
      "loss": 3.0047,
      "step": 24
    },
    {
      "epoch": 0.07524454477050414,
      "grad_norm": 0.9753203988075256,
      "learning_rate": 9.847001329696653e-05,
      "loss": 2.6872,
      "step": 25
    },
    {
      "epoch": 0.0782543265613243,
      "grad_norm": 1.068655014038086,
      "learning_rate": 9.826044551386744e-05,
      "loss": 2.8628,
      "step": 26
    },
    {
      "epoch": 0.08126410835214447,
      "grad_norm": 1.1840782165527344,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.8454,
      "step": 27
    },
    {
      "epoch": 0.08427389014296463,
      "grad_norm": 1.118066430091858,
      "learning_rate": 9.780178907671789e-05,
      "loss": 2.646,
      "step": 28
    },
    {
      "epoch": 0.0872836719337848,
      "grad_norm": 1.0265779495239258,
      "learning_rate": 9.755282581475769e-05,
      "loss": 2.5202,
      "step": 29
    },
    {
      "epoch": 0.09029345372460497,
      "grad_norm": 1.3051583766937256,
      "learning_rate": 9.729086208503174e-05,
      "loss": 2.8238,
      "step": 30
    },
    {
      "epoch": 0.09330323551542513,
      "grad_norm": 1.3814913034439087,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.7794,
      "step": 31
    },
    {
      "epoch": 0.0963130173062453,
      "grad_norm": 1.313609004020691,
      "learning_rate": 9.672822322997305e-05,
      "loss": 2.8425,
      "step": 32
    },
    {
      "epoch": 0.09932279909706546,
      "grad_norm": 1.2244532108306885,
      "learning_rate": 9.642770192448536e-05,
      "loss": 2.8177,
      "step": 33
    },
    {
      "epoch": 0.10233258088788563,
      "grad_norm": 1.4679851531982422,
      "learning_rate": 9.611448774886924e-05,
      "loss": 2.8303,
      "step": 34
    },
    {
      "epoch": 0.1053423626787058,
      "grad_norm": 1.2653416395187378,
      "learning_rate": 9.578866633275288e-05,
      "loss": 2.8379,
      "step": 35
    },
    {
      "epoch": 0.10835214446952596,
      "grad_norm": 1.4733144044876099,
      "learning_rate": 9.545032675245813e-05,
      "loss": 2.7795,
      "step": 36
    },
    {
      "epoch": 0.11136192626034612,
      "grad_norm": 1.387394905090332,
      "learning_rate": 9.509956150664796e-05,
      "loss": 3.0197,
      "step": 37
    },
    {
      "epoch": 0.1143717080511663,
      "grad_norm": 1.3538862466812134,
      "learning_rate": 9.473646649103818e-05,
      "loss": 2.9811,
      "step": 38
    },
    {
      "epoch": 0.11738148984198646,
      "grad_norm": 1.4648830890655518,
      "learning_rate": 9.43611409721806e-05,
      "loss": 3.0393,
      "step": 39
    },
    {
      "epoch": 0.12039127163280662,
      "grad_norm": 1.4998297691345215,
      "learning_rate": 9.397368756032445e-05,
      "loss": 2.983,
      "step": 40
    },
    {
      "epoch": 0.12340105342362678,
      "grad_norm": 1.3303641080856323,
      "learning_rate": 9.357421218136386e-05,
      "loss": 2.8374,
      "step": 41
    },
    {
      "epoch": 0.12641083521444696,
      "grad_norm": 1.434880018234253,
      "learning_rate": 9.316282404787871e-05,
      "loss": 3.001,
      "step": 42
    },
    {
      "epoch": 0.1294206170052671,
      "grad_norm": 1.4882705211639404,
      "learning_rate": 9.273963562927695e-05,
      "loss": 2.9935,
      "step": 43
    },
    {
      "epoch": 0.13243039879608728,
      "grad_norm": 1.495313048362732,
      "learning_rate": 9.230476262104677e-05,
      "loss": 3.062,
      "step": 44
    },
    {
      "epoch": 0.13544018058690746,
      "grad_norm": 1.6354619264602661,
      "learning_rate": 9.185832391312644e-05,
      "loss": 2.9735,
      "step": 45
    },
    {
      "epoch": 0.1384499623777276,
      "grad_norm": 1.6975343227386475,
      "learning_rate": 9.140044155740101e-05,
      "loss": 2.8529,
      "step": 46
    },
    {
      "epoch": 0.14145974416854779,
      "grad_norm": 1.8099063634872437,
      "learning_rate": 9.093124073433463e-05,
      "loss": 2.931,
      "step": 47
    },
    {
      "epoch": 0.14446952595936793,
      "grad_norm": 1.795233964920044,
      "learning_rate": 9.045084971874738e-05,
      "loss": 3.1318,
      "step": 48
    },
    {
      "epoch": 0.1474793077501881,
      "grad_norm": 2.3223793506622314,
      "learning_rate": 8.995939984474624e-05,
      "loss": 3.4612,
      "step": 49
    },
    {
      "epoch": 0.1504890895410083,
      "grad_norm": 2.675429582595825,
      "learning_rate": 8.945702546981969e-05,
      "loss": 3.2712,
      "step": 50
    },
    {
      "epoch": 0.1504890895410083,
      "eval_loss": 2.8597769737243652,
      "eval_runtime": 43.5315,
      "eval_samples_per_second": 12.864,
      "eval_steps_per_second": 3.216,
      "step": 50
    },
    {
      "epoch": 0.15349887133182843,
      "grad_norm": 0.9883130788803101,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.1225,
      "step": 51
    },
    {
      "epoch": 0.1565086531226486,
      "grad_norm": 1.146127462387085,
      "learning_rate": 8.842005554284296e-05,
      "loss": 2.7079,
      "step": 52
    },
    {
      "epoch": 0.1595184349134688,
      "grad_norm": 0.7247859239578247,
      "learning_rate": 8.788574348801675e-05,
      "loss": 2.4953,
      "step": 53
    },
    {
      "epoch": 0.16252821670428894,
      "grad_norm": 0.6606523394584656,
      "learning_rate": 8.73410738492077e-05,
      "loss": 2.6936,
      "step": 54
    },
    {
      "epoch": 0.1655379984951091,
      "grad_norm": 0.661456286907196,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.6561,
      "step": 55
    },
    {
      "epoch": 0.16854778028592926,
      "grad_norm": 0.6059918999671936,
      "learning_rate": 8.622126023955446e-05,
      "loss": 2.529,
      "step": 56
    },
    {
      "epoch": 0.17155756207674944,
      "grad_norm": 0.7299861311912537,
      "learning_rate": 8.564642241456986e-05,
      "loss": 2.8652,
      "step": 57
    },
    {
      "epoch": 0.1745673438675696,
      "grad_norm": 0.6570218205451965,
      "learning_rate": 8.506183921362443e-05,
      "loss": 2.6003,
      "step": 58
    },
    {
      "epoch": 0.17757712565838976,
      "grad_norm": 0.7697155475616455,
      "learning_rate": 8.44676704559283e-05,
      "loss": 2.9703,
      "step": 59
    },
    {
      "epoch": 0.18058690744920994,
      "grad_norm": 0.8205830454826355,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.719,
      "step": 60
    },
    {
      "epoch": 0.1835966892400301,
      "grad_norm": 0.8287568092346191,
      "learning_rate": 8.32512286056924e-05,
      "loss": 2.7445,
      "step": 61
    },
    {
      "epoch": 0.18660647103085026,
      "grad_norm": 0.7720220685005188,
      "learning_rate": 8.262928807620843e-05,
      "loss": 2.6071,
      "step": 62
    },
    {
      "epoch": 0.18961625282167044,
      "grad_norm": 0.8769961595535278,
      "learning_rate": 8.199842702516583e-05,
      "loss": 2.9422,
      "step": 63
    },
    {
      "epoch": 0.1926260346124906,
      "grad_norm": 0.7806358337402344,
      "learning_rate": 8.135881792367686e-05,
      "loss": 2.7808,
      "step": 64
    },
    {
      "epoch": 0.19563581640331076,
      "grad_norm": 0.7959004044532776,
      "learning_rate": 8.07106356344834e-05,
      "loss": 2.6023,
      "step": 65
    },
    {
      "epoch": 0.1986455981941309,
      "grad_norm": 0.9200003147125244,
      "learning_rate": 8.005405736415126e-05,
      "loss": 2.8079,
      "step": 66
    },
    {
      "epoch": 0.2016553799849511,
      "grad_norm": 0.9827006459236145,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.9431,
      "step": 67
    },
    {
      "epoch": 0.20466516177577126,
      "grad_norm": 0.9591490626335144,
      "learning_rate": 7.871643313414718e-05,
      "loss": 2.7521,
      "step": 68
    },
    {
      "epoch": 0.2076749435665914,
      "grad_norm": 0.9347128868103027,
      "learning_rate": 7.803575286758364e-05,
      "loss": 2.6955,
      "step": 69
    },
    {
      "epoch": 0.2106847253574116,
      "grad_norm": 0.8477599620819092,
      "learning_rate": 7.734740790612136e-05,
      "loss": 2.8735,
      "step": 70
    },
    {
      "epoch": 0.21369450714823177,
      "grad_norm": 0.8589199781417847,
      "learning_rate": 7.66515864363997e-05,
      "loss": 2.7021,
      "step": 71
    },
    {
      "epoch": 0.21670428893905191,
      "grad_norm": 0.8839547634124756,
      "learning_rate": 7.594847868906076e-05,
      "loss": 2.725,
      "step": 72
    },
    {
      "epoch": 0.2197140707298721,
      "grad_norm": 1.0816020965576172,
      "learning_rate": 7.52382768867422e-05,
      "loss": 2.9448,
      "step": 73
    },
    {
      "epoch": 0.22272385252069224,
      "grad_norm": 0.9550137519836426,
      "learning_rate": 7.452117519152542e-05,
      "loss": 2.673,
      "step": 74
    },
    {
      "epoch": 0.22573363431151242,
      "grad_norm": 1.0094174146652222,
      "learning_rate": 7.379736965185368e-05,
      "loss": 2.6478,
      "step": 75
    },
    {
      "epoch": 0.2287434161023326,
      "grad_norm": 0.9577383995056152,
      "learning_rate": 7.30670581489344e-05,
      "loss": 2.949,
      "step": 76
    },
    {
      "epoch": 0.23175319789315274,
      "grad_norm": 1.0763578414916992,
      "learning_rate": 7.233044034264034e-05,
      "loss": 2.752,
      "step": 77
    },
    {
      "epoch": 0.23476297968397292,
      "grad_norm": 1.019482970237732,
      "learning_rate": 7.158771761692464e-05,
      "loss": 3.0192,
      "step": 78
    },
    {
      "epoch": 0.23777276147479307,
      "grad_norm": 1.2120007276535034,
      "learning_rate": 7.083909302476453e-05,
      "loss": 2.958,
      "step": 79
    },
    {
      "epoch": 0.24078254326561324,
      "grad_norm": 1.0528912544250488,
      "learning_rate": 7.008477123264848e-05,
      "loss": 2.6808,
      "step": 80
    },
    {
      "epoch": 0.24379232505643342,
      "grad_norm": 1.070916771888733,
      "learning_rate": 6.932495846462261e-05,
      "loss": 2.8994,
      "step": 81
    },
    {
      "epoch": 0.24680210684725357,
      "grad_norm": 0.9918401837348938,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.6755,
      "step": 82
    },
    {
      "epoch": 0.24981188863807374,
      "grad_norm": 1.4494421482086182,
      "learning_rate": 6.778969234612584e-05,
      "loss": 2.9853,
      "step": 83
    },
    {
      "epoch": 0.2528216704288939,
      "grad_norm": 1.0939158201217651,
      "learning_rate": 6.701465872208216e-05,
      "loss": 2.7875,
      "step": 84
    },
    {
      "epoch": 0.2558314522197141,
      "grad_norm": 1.1803234815597534,
      "learning_rate": 6.623497346023418e-05,
      "loss": 2.87,
      "step": 85
    },
    {
      "epoch": 0.2588412340105342,
      "grad_norm": 1.224190354347229,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.7292,
      "step": 86
    },
    {
      "epoch": 0.2618510158013544,
      "grad_norm": 1.1455833911895752,
      "learning_rate": 6.466250186922325e-05,
      "loss": 2.7313,
      "step": 87
    },
    {
      "epoch": 0.26486079759217457,
      "grad_norm": 1.1967684030532837,
      "learning_rate": 6.387014543809223e-05,
      "loss": 2.8791,
      "step": 88
    },
    {
      "epoch": 0.26787057938299474,
      "grad_norm": 1.4523427486419678,
      "learning_rate": 6.307399704769099e-05,
      "loss": 2.8266,
      "step": 89
    },
    {
      "epoch": 0.2708803611738149,
      "grad_norm": 1.6074488162994385,
      "learning_rate": 6.227427435703997e-05,
      "loss": 2.7747,
      "step": 90
    },
    {
      "epoch": 0.27389014296463504,
      "grad_norm": 1.4523072242736816,
      "learning_rate": 6.147119600233758e-05,
      "loss": 2.9463,
      "step": 91
    },
    {
      "epoch": 0.2768999247554552,
      "grad_norm": 1.4732896089553833,
      "learning_rate": 6.066498153718735e-05,
      "loss": 2.864,
      "step": 92
    },
    {
      "epoch": 0.2799097065462754,
      "grad_norm": 1.4440211057662964,
      "learning_rate": 5.985585137257401e-05,
      "loss": 2.5142,
      "step": 93
    },
    {
      "epoch": 0.28291948833709557,
      "grad_norm": 1.569728970527649,
      "learning_rate": 5.90440267166055e-05,
      "loss": 3.1005,
      "step": 94
    },
    {
      "epoch": 0.28592927012791575,
      "grad_norm": 1.6300673484802246,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 2.9026,
      "step": 95
    },
    {
      "epoch": 0.28893905191873587,
      "grad_norm": 1.6838175058364868,
      "learning_rate": 5.74131823855921e-05,
      "loss": 2.9086,
      "step": 96
    },
    {
      "epoch": 0.29194883370955604,
      "grad_norm": 1.7202688455581665,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 2.7964,
      "step": 97
    },
    {
      "epoch": 0.2949586155003762,
      "grad_norm": 1.8333368301391602,
      "learning_rate": 5.577423184847932e-05,
      "loss": 2.7565,
      "step": 98
    },
    {
      "epoch": 0.2979683972911964,
      "grad_norm": 1.8898142576217651,
      "learning_rate": 5.495227651252315e-05,
      "loss": 2.7308,
      "step": 99
    },
    {
      "epoch": 0.3009781790820166,
      "grad_norm": 3.0437440872192383,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 3.1335,
      "step": 100
    },
    {
      "epoch": 0.3009781790820166,
      "eval_loss": 2.7928898334503174,
      "eval_runtime": 43.5272,
      "eval_samples_per_second": 12.866,
      "eval_steps_per_second": 3.216,
      "step": 100
    },
    {
      "epoch": 0.3039879608728367,
      "grad_norm": 0.9572544693946838,
      "learning_rate": 5.330452921628497e-05,
      "loss": 2.59,
      "step": 101
    },
    {
      "epoch": 0.30699774266365687,
      "grad_norm": 0.8997783064842224,
      "learning_rate": 5.247918773366112e-05,
      "loss": 2.5526,
      "step": 102
    },
    {
      "epoch": 0.31000752445447705,
      "grad_norm": 0.906589925289154,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.5865,
      "step": 103
    },
    {
      "epoch": 0.3130173062452972,
      "grad_norm": 0.6696784496307373,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 2.5878,
      "step": 104
    },
    {
      "epoch": 0.3160270880361174,
      "grad_norm": 0.6746740341186523,
      "learning_rate": 5e-05,
      "loss": 2.6586,
      "step": 105
    },
    {
      "epoch": 0.3190368698269376,
      "grad_norm": 0.6169286966323853,
      "learning_rate": 4.917330276168208e-05,
      "loss": 2.4803,
      "step": 106
    },
    {
      "epoch": 0.3220466516177577,
      "grad_norm": 0.6375830173492432,
      "learning_rate": 4.834683153413459e-05,
      "loss": 2.7054,
      "step": 107
    },
    {
      "epoch": 0.32505643340857787,
      "grad_norm": 0.6389443278312683,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 2.5025,
      "step": 108
    },
    {
      "epoch": 0.32806621519939805,
      "grad_norm": 0.6818410754203796,
      "learning_rate": 4.669547078371504e-05,
      "loss": 2.4627,
      "step": 109
    },
    {
      "epoch": 0.3310759969902182,
      "grad_norm": 0.7902951836585999,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 2.6796,
      "step": 110
    },
    {
      "epoch": 0.3340857787810384,
      "grad_norm": 0.8217026591300964,
      "learning_rate": 4.504772348747687e-05,
      "loss": 2.4694,
      "step": 111
    },
    {
      "epoch": 0.3370955605718585,
      "grad_norm": 0.8203257918357849,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 2.7957,
      "step": 112
    },
    {
      "epoch": 0.3401053423626787,
      "grad_norm": 0.7445793747901917,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 2.6715,
      "step": 113
    },
    {
      "epoch": 0.3431151241534989,
      "grad_norm": 0.9404602646827698,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 2.8712,
      "step": 114
    },
    {
      "epoch": 0.34612490594431905,
      "grad_norm": 0.8628761768341064,
      "learning_rate": 4.17702704859633e-05,
      "loss": 2.6455,
      "step": 115
    },
    {
      "epoch": 0.3491346877351392,
      "grad_norm": 0.8209057450294495,
      "learning_rate": 4.095597328339452e-05,
      "loss": 2.7571,
      "step": 116
    },
    {
      "epoch": 0.35214446952595935,
      "grad_norm": 0.7805319428443909,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 2.5658,
      "step": 117
    },
    {
      "epoch": 0.3551542513167795,
      "grad_norm": 0.8453297019004822,
      "learning_rate": 3.933501846281267e-05,
      "loss": 2.8783,
      "step": 118
    },
    {
      "epoch": 0.3581640331075997,
      "grad_norm": 0.814822256565094,
      "learning_rate": 3.852880399766243e-05,
      "loss": 2.7575,
      "step": 119
    },
    {
      "epoch": 0.3611738148984199,
      "grad_norm": 1.0434857606887817,
      "learning_rate": 3.772572564296005e-05,
      "loss": 2.7282,
      "step": 120
    },
    {
      "epoch": 0.36418359668924005,
      "grad_norm": 0.8157389163970947,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 2.5727,
      "step": 121
    },
    {
      "epoch": 0.3671933784800602,
      "grad_norm": 0.870448887348175,
      "learning_rate": 3.612985456190778e-05,
      "loss": 2.643,
      "step": 122
    },
    {
      "epoch": 0.37020316027088035,
      "grad_norm": 0.8797793984413147,
      "learning_rate": 3.533749813077677e-05,
      "loss": 2.706,
      "step": 123
    },
    {
      "epoch": 0.3732129420617005,
      "grad_norm": 0.9377344846725464,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 2.6047,
      "step": 124
    },
    {
      "epoch": 0.3762227238525207,
      "grad_norm": 0.9191809296607971,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 2.7021,
      "step": 125
    },
    {
      "epoch": 0.3792325056433409,
      "grad_norm": 0.9445900917053223,
      "learning_rate": 3.298534127791785e-05,
      "loss": 2.8099,
      "step": 126
    },
    {
      "epoch": 0.382242287434161,
      "grad_norm": 0.9417356848716736,
      "learning_rate": 3.221030765387417e-05,
      "loss": 2.8348,
      "step": 127
    },
    {
      "epoch": 0.3852520692249812,
      "grad_norm": 1.145680546760559,
      "learning_rate": 3.144013755408895e-05,
      "loss": 2.755,
      "step": 128
    },
    {
      "epoch": 0.38826185101580135,
      "grad_norm": 0.967311441898346,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 2.7202,
      "step": 129
    },
    {
      "epoch": 0.3912716328066215,
      "grad_norm": 0.9585645198822021,
      "learning_rate": 2.991522876735154e-05,
      "loss": 2.8266,
      "step": 130
    },
    {
      "epoch": 0.3942814145974417,
      "grad_norm": 1.0192813873291016,
      "learning_rate": 2.916090697523549e-05,
      "loss": 2.7238,
      "step": 131
    },
    {
      "epoch": 0.3972911963882618,
      "grad_norm": 1.1950159072875977,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 3.0407,
      "step": 132
    },
    {
      "epoch": 0.400300978179082,
      "grad_norm": 1.1695165634155273,
      "learning_rate": 2.766955965735968e-05,
      "loss": 2.8674,
      "step": 133
    },
    {
      "epoch": 0.4033107599699022,
      "grad_norm": 1.1528035402297974,
      "learning_rate": 2.693294185106562e-05,
      "loss": 2.9491,
      "step": 134
    },
    {
      "epoch": 0.40632054176072235,
      "grad_norm": 1.1426727771759033,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 2.7404,
      "step": 135
    },
    {
      "epoch": 0.40933032355154253,
      "grad_norm": 1.325439453125,
      "learning_rate": 2.547882480847461e-05,
      "loss": 2.7394,
      "step": 136
    },
    {
      "epoch": 0.4123401053423627,
      "grad_norm": 1.2166088819503784,
      "learning_rate": 2.476172311325783e-05,
      "loss": 2.6504,
      "step": 137
    },
    {
      "epoch": 0.4153498871331828,
      "grad_norm": 1.3485374450683594,
      "learning_rate": 2.405152131093926e-05,
      "loss": 2.7567,
      "step": 138
    },
    {
      "epoch": 0.418359668924003,
      "grad_norm": 1.1951407194137573,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 2.9476,
      "step": 139
    },
    {
      "epoch": 0.4213694507148232,
      "grad_norm": 1.1799910068511963,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 2.4991,
      "step": 140
    },
    {
      "epoch": 0.42437923250564336,
      "grad_norm": 1.217898964881897,
      "learning_rate": 2.196424713241637e-05,
      "loss": 2.7609,
      "step": 141
    },
    {
      "epoch": 0.42738901429646353,
      "grad_norm": 1.485028862953186,
      "learning_rate": 2.128356686585282e-05,
      "loss": 2.8643,
      "step": 142
    },
    {
      "epoch": 0.43039879608728365,
      "grad_norm": 1.5121876001358032,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.8997,
      "step": 143
    },
    {
      "epoch": 0.43340857787810383,
      "grad_norm": 1.3564367294311523,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 2.6157,
      "step": 144
    },
    {
      "epoch": 0.436418359668924,
      "grad_norm": 1.3653913736343384,
      "learning_rate": 1.928936436551661e-05,
      "loss": 2.883,
      "step": 145
    },
    {
      "epoch": 0.4394281414597442,
      "grad_norm": 1.441187858581543,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 2.8226,
      "step": 146
    },
    {
      "epoch": 0.44243792325056436,
      "grad_norm": 1.439168930053711,
      "learning_rate": 1.800157297483417e-05,
      "loss": 2.8308,
      "step": 147
    },
    {
      "epoch": 0.4454477050413845,
      "grad_norm": 1.5764445066452026,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 2.7922,
      "step": 148
    },
    {
      "epoch": 0.44845748683220465,
      "grad_norm": 2.1895360946655273,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 2.9549,
      "step": 149
    },
    {
      "epoch": 0.45146726862302483,
      "grad_norm": 2.6794886589050293,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 2.9266,
      "step": 150
    },
    {
      "epoch": 0.45146726862302483,
      "eval_loss": 2.7207462787628174,
      "eval_runtime": 43.5205,
      "eval_samples_per_second": 12.867,
      "eval_steps_per_second": 3.217,
      "step": 150
    },
    {
      "epoch": 0.454477050413845,
      "grad_norm": 0.5827016830444336,
      "learning_rate": 1.553232954407171e-05,
      "loss": 2.6062,
      "step": 151
    },
    {
      "epoch": 0.4574868322046652,
      "grad_norm": 0.6507757902145386,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 2.5759,
      "step": 152
    },
    {
      "epoch": 0.4604966139954853,
      "grad_norm": 0.6897706389427185,
      "learning_rate": 1.435357758543015e-05,
      "loss": 2.5331,
      "step": 153
    },
    {
      "epoch": 0.4635063957863055,
      "grad_norm": 0.7376187443733215,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 2.6529,
      "step": 154
    },
    {
      "epoch": 0.46651617757712566,
      "grad_norm": 4.104633331298828,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 2.7607,
      "step": 155
    },
    {
      "epoch": 0.46952595936794583,
      "grad_norm": 0.7048830389976501,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 2.7224,
      "step": 156
    },
    {
      "epoch": 0.472535741158766,
      "grad_norm": 0.6888774037361145,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 2.6512,
      "step": 157
    },
    {
      "epoch": 0.47554552294958613,
      "grad_norm": 0.7250568270683289,
      "learning_rate": 1.157994445715706e-05,
      "loss": 2.767,
      "step": 158
    },
    {
      "epoch": 0.4785553047404063,
      "grad_norm": 0.6772659420967102,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 2.6853,
      "step": 159
    },
    {
      "epoch": 0.4815650865312265,
      "grad_norm": 0.6967433094978333,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 2.6472,
      "step": 160
    },
    {
      "epoch": 0.48457486832204666,
      "grad_norm": 0.7274630069732666,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 2.6822,
      "step": 161
    },
    {
      "epoch": 0.48758465011286684,
      "grad_norm": 0.8335841298103333,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.6068,
      "step": 162
    },
    {
      "epoch": 0.49059443190368696,
      "grad_norm": 0.7825259566307068,
      "learning_rate": 9.068759265665384e-06,
      "loss": 2.7789,
      "step": 163
    },
    {
      "epoch": 0.49360421369450713,
      "grad_norm": 0.7136161923408508,
      "learning_rate": 8.599558442598998e-06,
      "loss": 2.4088,
      "step": 164
    },
    {
      "epoch": 0.4966139954853273,
      "grad_norm": 0.7300835847854614,
      "learning_rate": 8.141676086873572e-06,
      "loss": 2.6801,
      "step": 165
    },
    {
      "epoch": 0.4996237772761475,
      "grad_norm": 0.7578651309013367,
      "learning_rate": 7.695237378953223e-06,
      "loss": 2.6223,
      "step": 166
    },
    {
      "epoch": 0.5026335590669676,
      "grad_norm": 0.9058271646499634,
      "learning_rate": 7.260364370723044e-06,
      "loss": 2.6618,
      "step": 167
    },
    {
      "epoch": 0.5056433408577878,
      "grad_norm": 0.7805584669113159,
      "learning_rate": 6.837175952121306e-06,
      "loss": 2.7429,
      "step": 168
    },
    {
      "epoch": 0.508653122648608,
      "grad_norm": 0.8842304944992065,
      "learning_rate": 6.425787818636131e-06,
      "loss": 2.5051,
      "step": 169
    },
    {
      "epoch": 0.5116629044394282,
      "grad_norm": 0.8665125966072083,
      "learning_rate": 6.026312439675552e-06,
      "loss": 2.7933,
      "step": 170
    },
    {
      "epoch": 0.5146726862302483,
      "grad_norm": 0.9622229337692261,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 2.7892,
      "step": 171
    },
    {
      "epoch": 0.5176824680210684,
      "grad_norm": 0.9020377993583679,
      "learning_rate": 5.263533508961827e-06,
      "loss": 2.6627,
      "step": 172
    },
    {
      "epoch": 0.5206922498118887,
      "grad_norm": 0.7896916270256042,
      "learning_rate": 4.900438493352055e-06,
      "loss": 2.4676,
      "step": 173
    },
    {
      "epoch": 0.5237020316027088,
      "grad_norm": 0.8899149894714355,
      "learning_rate": 4.549673247541875e-06,
      "loss": 2.5136,
      "step": 174
    },
    {
      "epoch": 0.526711813393529,
      "grad_norm": 0.9398487210273743,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 2.6724,
      "step": 175
    },
    {
      "epoch": 0.5297215951843491,
      "grad_norm": 0.8152235746383667,
      "learning_rate": 3.885512251130763e-06,
      "loss": 2.4499,
      "step": 176
    },
    {
      "epoch": 0.5327313769751693,
      "grad_norm": 1.0111510753631592,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 2.7443,
      "step": 177
    },
    {
      "epoch": 0.5357411587659895,
      "grad_norm": 0.97441166639328,
      "learning_rate": 3.271776770026963e-06,
      "loss": 2.5807,
      "step": 178
    },
    {
      "epoch": 0.5387509405568096,
      "grad_norm": 0.9852139949798584,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 2.787,
      "step": 179
    },
    {
      "epoch": 0.5417607223476298,
      "grad_norm": 1.0015703439712524,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 2.5823,
      "step": 180
    },
    {
      "epoch": 0.54477050413845,
      "grad_norm": 1.0196603536605835,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.596,
      "step": 181
    },
    {
      "epoch": 0.5477802859292701,
      "grad_norm": 1.1556131839752197,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 2.8026,
      "step": 182
    },
    {
      "epoch": 0.5507900677200903,
      "grad_norm": 1.1140217781066895,
      "learning_rate": 1.962316193157593e-06,
      "loss": 2.7128,
      "step": 183
    },
    {
      "epoch": 0.5537998495109104,
      "grad_norm": 1.183419942855835,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 2.814,
      "step": 184
    },
    {
      "epoch": 0.5568096313017307,
      "grad_norm": 1.10748291015625,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 2.676,
      "step": 185
    },
    {
      "epoch": 0.5598194130925508,
      "grad_norm": 1.1944704055786133,
      "learning_rate": 1.333670137599713e-06,
      "loss": 2.5415,
      "step": 186
    },
    {
      "epoch": 0.5628291948833709,
      "grad_norm": 1.0653362274169922,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 2.6195,
      "step": 187
    },
    {
      "epoch": 0.5658389766741911,
      "grad_norm": 1.2596079111099243,
      "learning_rate": 9.810017062595322e-07,
      "loss": 3.0578,
      "step": 188
    },
    {
      "epoch": 0.5688487584650113,
      "grad_norm": 1.1765263080596924,
      "learning_rate": 8.247462563808817e-07,
      "loss": 2.8044,
      "step": 189
    },
    {
      "epoch": 0.5718585402558315,
      "grad_norm": 1.3503178358078003,
      "learning_rate": 6.819348298638839e-07,
      "loss": 2.9753,
      "step": 190
    },
    {
      "epoch": 0.5748683220466516,
      "grad_norm": 1.3773014545440674,
      "learning_rate": 5.526064699265753e-07,
      "loss": 2.8056,
      "step": 191
    },
    {
      "epoch": 0.5778781038374717,
      "grad_norm": 1.4599443674087524,
      "learning_rate": 4.367965336512403e-07,
      "loss": 2.94,
      "step": 192
    },
    {
      "epoch": 0.580887885628292,
      "grad_norm": 1.3997405767440796,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 2.6367,
      "step": 193
    },
    {
      "epoch": 0.5838976674191121,
      "grad_norm": 1.42356538772583,
      "learning_rate": 2.458548727494292e-07,
      "loss": 2.9428,
      "step": 194
    },
    {
      "epoch": 0.5869074492099323,
      "grad_norm": 1.5371516942977905,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 2.755,
      "step": 195
    },
    {
      "epoch": 0.5899172310007524,
      "grad_norm": 1.7925596237182617,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 3.057,
      "step": 196
    },
    {
      "epoch": 0.5929270127915726,
      "grad_norm": 2.0468907356262207,
      "learning_rate": 6.150154258476315e-08,
      "loss": 2.9634,
      "step": 197
    },
    {
      "epoch": 0.5959367945823928,
      "grad_norm": 2.079108953475952,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 3.0522,
      "step": 198
    },
    {
      "epoch": 0.5989465763732129,
      "grad_norm": 2.6386282444000244,
      "learning_rate": 6.834750376549792e-09,
      "loss": 3.3413,
      "step": 199
    },
    {
      "epoch": 0.6019563581640331,
      "grad_norm": 3.1302988529205322,
      "learning_rate": 0.0,
      "loss": 2.98,
      "step": 200
    },
    {
      "epoch": 0.6019563581640331,
      "eval_loss": 2.718132734298706,
      "eval_runtime": 43.5214,
      "eval_samples_per_second": 12.867,
      "eval_steps_per_second": 3.217,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.047197429066629e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}