{ |
|
"best_metric": 0.1947915256023407, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.032298437563082885, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00016149218781541443, |
|
"grad_norm": 66.04085540771484, |
|
"learning_rate": 1e-05, |
|
"loss": 6.8126, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00016149218781541443, |
|
"eval_loss": 2.066232681274414, |
|
"eval_runtime": 764.8511, |
|
"eval_samples_per_second": 13.635, |
|
"eval_steps_per_second": 3.41, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00032298437563082886, |
|
"grad_norm": 58.66199493408203, |
|
"learning_rate": 2e-05, |
|
"loss": 5.9843, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0004844765634462433, |
|
"grad_norm": 49.33218002319336, |
|
"learning_rate": 3e-05, |
|
"loss": 5.3096, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0006459687512616577, |
|
"grad_norm": 53.177345275878906, |
|
"learning_rate": 4e-05, |
|
"loss": 4.4682, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0008074609390770722, |
|
"grad_norm": 63.67516326904297, |
|
"learning_rate": 5e-05, |
|
"loss": 4.1167, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0009689531268924866, |
|
"grad_norm": 27.747201919555664, |
|
"learning_rate": 6e-05, |
|
"loss": 3.2396, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.001130445314707901, |
|
"grad_norm": 52.03746795654297, |
|
"learning_rate": 7e-05, |
|
"loss": 2.5648, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0012919375025233154, |
|
"grad_norm": 19.467458724975586, |
|
"learning_rate": 8e-05, |
|
"loss": 2.2277, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0014534296903387298, |
|
"grad_norm": 18.454164505004883, |
|
"learning_rate": 9e-05, |
|
"loss": 2.119, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0016149218781541443, |
|
"grad_norm": 35.559288024902344, |
|
"learning_rate": 0.0001, |
|
"loss": 1.6841, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0017764140659695586, |
|
"grad_norm": 94.37145233154297, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 2.2293, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0019379062537849732, |
|
"grad_norm": 16.54827880859375, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 1.5729, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0020993984416003875, |
|
"grad_norm": 15.243042945861816, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 1.4742, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.002260890629415802, |
|
"grad_norm": 19.226024627685547, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 1.168, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0024223828172312166, |
|
"grad_norm": 13.779980659484863, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 1.4476, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.002583875005046631, |
|
"grad_norm": 14.464970588684082, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 1.6151, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.002745367192862045, |
|
"grad_norm": 8.296271324157715, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 0.8688, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0029068593806774595, |
|
"grad_norm": 11.498894691467285, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 1.0009, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0030683515684928743, |
|
"grad_norm": 16.902612686157227, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 1.1112, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0032298437563082886, |
|
"grad_norm": 7.251413822174072, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 0.9666, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.003391335944123703, |
|
"grad_norm": 27.787395477294922, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 1.344, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0035528281319391172, |
|
"grad_norm": 11.747908592224121, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 0.9903, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.003714320319754532, |
|
"grad_norm": 10.102913856506348, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 1.2567, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0038758125075699463, |
|
"grad_norm": 11.134997367858887, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 0.9622, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.004037304695385361, |
|
"grad_norm": 9.873003959655762, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 1.3336, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.004198796883200775, |
|
"grad_norm": 7.550684928894043, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 1.0543, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.004360289071016189, |
|
"grad_norm": 6.65726375579834, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 0.6565, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.004521781258831604, |
|
"grad_norm": 9.558565139770508, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 1.2839, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.004683273446647019, |
|
"grad_norm": 12.047572135925293, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 1.0033, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.004844765634462433, |
|
"grad_norm": 11.137338638305664, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 0.9529, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0050062578222778474, |
|
"grad_norm": 12.697957992553711, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 1.3462, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.005167750010093262, |
|
"grad_norm": 8.739521980285645, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 1.0512, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.005329242197908676, |
|
"grad_norm": 19.00428009033203, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 1.3543, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.00549073438572409, |
|
"grad_norm": 13.473799705505371, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 1.4515, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.005652226573539505, |
|
"grad_norm": 9.601152420043945, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 1.1191, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.005813718761354919, |
|
"grad_norm": 10.229377746582031, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 0.9022, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.005975210949170334, |
|
"grad_norm": 8.150893211364746, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 1.1132, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0061367031369857486, |
|
"grad_norm": 7.512841701507568, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 1.1505, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.006298195324801163, |
|
"grad_norm": 5.389281272888184, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 0.6357, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.006459687512616577, |
|
"grad_norm": 10.969232559204102, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 1.2097, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0066211797004319915, |
|
"grad_norm": 7.593698501586914, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 1.1604, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.006782671888247406, |
|
"grad_norm": 6.020129680633545, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 0.7548, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.00694416407606282, |
|
"grad_norm": 8.169340133666992, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 1.0628, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0071056562638782345, |
|
"grad_norm": 7.304530143737793, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.0682, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.00726714845169365, |
|
"grad_norm": 7.301529884338379, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 1.132, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.007428640639509064, |
|
"grad_norm": 8.042936325073242, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 0.8787, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.007590132827324478, |
|
"grad_norm": 8.325237274169922, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 0.9612, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.007751625015139893, |
|
"grad_norm": 8.121849060058594, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.2475, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.007913117202955308, |
|
"grad_norm": 7.383357048034668, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 1.2338, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.008074609390770721, |
|
"grad_norm": 8.456700325012207, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 1.2756, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.008074609390770721, |
|
"eval_loss": 0.2579116225242615, |
|
"eval_runtime": 769.3851, |
|
"eval_samples_per_second": 13.555, |
|
"eval_steps_per_second": 3.39, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.008236101578586136, |
|
"grad_norm": 12.224557876586914, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 1.1957, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.00839759376640155, |
|
"grad_norm": 8.004744529724121, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 0.9128, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.008559085954216965, |
|
"grad_norm": 5.253309726715088, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 0.9792, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.008720578142032379, |
|
"grad_norm": 3.9836456775665283, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 0.8306, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.008882070329847794, |
|
"grad_norm": 9.755916595458984, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 1.2643, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.009043562517663207, |
|
"grad_norm": 3.1687428951263428, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 0.4911, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.009205054705478622, |
|
"grad_norm": 4.697276592254639, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 0.8635, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.009366546893294038, |
|
"grad_norm": 5.923514366149902, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 0.7305, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.009528039081109451, |
|
"grad_norm": 4.387604236602783, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 0.8195, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.009689531268924866, |
|
"grad_norm": 7.801131248474121, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 1.091, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.00985102345674028, |
|
"grad_norm": 4.7785964012146, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 0.7809, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.010012515644555695, |
|
"grad_norm": 6.387250900268555, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 0.8659, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.010174007832371108, |
|
"grad_norm": 7.59951114654541, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 0.6494, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.010335500020186524, |
|
"grad_norm": 8.04648208618164, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 0.8663, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.010496992208001939, |
|
"grad_norm": 8.109950065612793, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 1.0831, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.010658484395817352, |
|
"grad_norm": 6.599982261657715, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 1.1722, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.010819976583632767, |
|
"grad_norm": 5.933468818664551, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 1.1879, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.01098146877144818, |
|
"grad_norm": 6.831328868865967, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 0.7366, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.011142960959263596, |
|
"grad_norm": 3.481276273727417, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 0.6217, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.01130445314707901, |
|
"grad_norm": 5.323179721832275, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 0.9114, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.011465945334894425, |
|
"grad_norm": 5.245665550231934, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 0.9737, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.011627437522709838, |
|
"grad_norm": 5.951338768005371, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 0.9656, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.011788929710525253, |
|
"grad_norm": 10.116493225097656, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 1.49, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.011950421898340668, |
|
"grad_norm": 4.708444118499756, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 0.8565, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.012111914086156082, |
|
"grad_norm": 5.650876045227051, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 0.728, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.012273406273971497, |
|
"grad_norm": 4.68143367767334, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 0.9765, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.01243489846178691, |
|
"grad_norm": 5.334131240844727, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 1.3298, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.012596390649602326, |
|
"grad_norm": 6.564842700958252, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 1.0345, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.01275788283741774, |
|
"grad_norm": 4.808290958404541, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 0.8163, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.012919375025233154, |
|
"grad_norm": 3.600611925125122, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 0.8536, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.01308086721304857, |
|
"grad_norm": 5.961524486541748, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 0.8228, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.013242359400863983, |
|
"grad_norm": 6.8201093673706055, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 1.046, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.013403851588679398, |
|
"grad_norm": 5.5537028312683105, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 0.5573, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.013565343776494812, |
|
"grad_norm": 4.81182336807251, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 0.8525, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.013726835964310227, |
|
"grad_norm": 6.7080841064453125, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 1.3358, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.01388832815212564, |
|
"grad_norm": 4.635632038116455, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 1.0484, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.014049820339941056, |
|
"grad_norm": 5.049192428588867, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 0.8898, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.014211312527756469, |
|
"grad_norm": 6.034664154052734, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 1.0545, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.014372804715571884, |
|
"grad_norm": 5.578686714172363, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 0.8689, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.0145342969033873, |
|
"grad_norm": 5.399660110473633, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 0.9705, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.014695789091202713, |
|
"grad_norm": 3.986812114715576, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 0.959, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.014857281279018128, |
|
"grad_norm": 3.3498876094818115, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 0.6344, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.015018773466833541, |
|
"grad_norm": 4.480000972747803, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 0.675, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.015180265654648957, |
|
"grad_norm": 5.378062725067139, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 0.9703, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.01534175784246437, |
|
"grad_norm": 6.625367164611816, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 1.2045, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.015503250030279785, |
|
"grad_norm": 7.684246063232422, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 1.2175, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.0156647422180952, |
|
"grad_norm": 8.043671607971191, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 1.1995, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.015826234405910616, |
|
"grad_norm": 6.534055709838867, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 0.786, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.015987726593726027, |
|
"grad_norm": 6.658209323883057, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 0.9529, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.016149218781541443, |
|
"grad_norm": 6.959057331085205, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 1.1648, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.016149218781541443, |
|
"eval_loss": 0.24134224653244019, |
|
"eval_runtime": 769.3709, |
|
"eval_samples_per_second": 13.555, |
|
"eval_steps_per_second": 3.39, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.016310710969356858, |
|
"grad_norm": 4.9987311363220215, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 0.6489, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.016472203157172273, |
|
"grad_norm": 4.759182929992676, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 0.6947, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.016633695344987685, |
|
"grad_norm": 3.8671958446502686, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 0.6864, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0167951875328031, |
|
"grad_norm": 4.098058700561523, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 0.7082, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.016956679720618515, |
|
"grad_norm": 6.332775592803955, |
|
"learning_rate": 5e-05, |
|
"loss": 0.9335, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.01711817190843393, |
|
"grad_norm": 5.647943019866943, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 1.0795, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.017279664096249345, |
|
"grad_norm": 6.842463970184326, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 0.8744, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.017441156284064757, |
|
"grad_norm": 3.7888684272766113, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 0.9048, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.017602648471880172, |
|
"grad_norm": 4.186934471130371, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 0.9536, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.017764140659695588, |
|
"grad_norm": 3.675734758377075, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 0.7804, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.017925632847511003, |
|
"grad_norm": 10.108420372009277, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 1.2615, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.018087125035326414, |
|
"grad_norm": 4.677420139312744, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 0.7271, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.01824861722314183, |
|
"grad_norm": 6.296616077423096, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 1.4184, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.018410109410957245, |
|
"grad_norm": 3.8865997791290283, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 0.6349, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.01857160159877266, |
|
"grad_norm": 4.712935447692871, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 0.8963, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.018733093786588075, |
|
"grad_norm": 4.991330146789551, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 0.8648, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.018894585974403487, |
|
"grad_norm": 4.175543308258057, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 0.7846, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.019056078162218902, |
|
"grad_norm": 3.322171926498413, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 0.7698, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.019217570350034317, |
|
"grad_norm": 3.053602457046509, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 0.6644, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.019379062537849732, |
|
"grad_norm": 4.954108715057373, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 1.1093, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.019540554725665148, |
|
"grad_norm": 3.911067485809326, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 0.6895, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.01970204691348056, |
|
"grad_norm": 5.648387432098389, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 1.2158, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.019863539101295975, |
|
"grad_norm": 4.296334266662598, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 0.7483, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.02002503128911139, |
|
"grad_norm": 4.768459320068359, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.9063, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.020186523476926805, |
|
"grad_norm": 4.852877616882324, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 0.6464, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.020348015664742217, |
|
"grad_norm": 3.614675521850586, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 0.714, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.020509507852557632, |
|
"grad_norm": 3.900026798248291, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 0.7799, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.020671000040373047, |
|
"grad_norm": 4.189906120300293, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 0.7379, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.020832492228188462, |
|
"grad_norm": 3.848201274871826, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 0.6732, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.020993984416003877, |
|
"grad_norm": 5.243552207946777, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 0.8563, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.02115547660381929, |
|
"grad_norm": 5.969999313354492, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 1.2854, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.021316968791634704, |
|
"grad_norm": 5.380910873413086, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 1.0254, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.02147846097945012, |
|
"grad_norm": 4.808969974517822, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 1.0162, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.021639953167265535, |
|
"grad_norm": 4.541471481323242, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 1.1123, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.021801445355080946, |
|
"grad_norm": 3.330897331237793, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 0.6584, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.02196293754289636, |
|
"grad_norm": 3.167905807495117, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 0.618, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.022124429730711777, |
|
"grad_norm": 6.538463115692139, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 1.2219, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.022285921918527192, |
|
"grad_norm": 3.817915678024292, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 0.7884, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.022447414106342607, |
|
"grad_norm": 3.567233085632324, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 0.6219, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.02260890629415802, |
|
"grad_norm": 7.849123001098633, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 0.7381, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.022770398481973434, |
|
"grad_norm": 4.571730136871338, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 0.7234, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.02293189066978885, |
|
"grad_norm": 4.714327335357666, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 0.56, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.023093382857604264, |
|
"grad_norm": 3.729198455810547, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.7249, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.023254875045419676, |
|
"grad_norm": 3.5794527530670166, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 0.6373, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.02341636723323509, |
|
"grad_norm": 7.21630859375, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 0.9469, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.023577859421050507, |
|
"grad_norm": 5.132265090942383, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 0.8685, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.023739351608865922, |
|
"grad_norm": 6.083048343658447, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 0.8698, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.023900843796681337, |
|
"grad_norm": 4.920403003692627, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 0.7564, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.02406233598449675, |
|
"grad_norm": 10.307737350463867, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 1.1976, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.024223828172312164, |
|
"grad_norm": 9.064608573913574, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 1.8906, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.024223828172312164, |
|
"eval_loss": 0.2087429314851761, |
|
"eval_runtime": 769.5887, |
|
"eval_samples_per_second": 13.551, |
|
"eval_steps_per_second": 3.389, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.02438532036012758, |
|
"grad_norm": 3.442979097366333, |
|
"learning_rate": 1.553232954407171e-05, |
|
"loss": 0.5201, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.024546812547942994, |
|
"grad_norm": 5.38673210144043, |
|
"learning_rate": 1.4938160786375572e-05, |
|
"loss": 0.9532, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.024708304735758406, |
|
"grad_norm": 4.110217571258545, |
|
"learning_rate": 1.435357758543015e-05, |
|
"loss": 0.6819, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.02486979692357382, |
|
"grad_norm": 5.627509593963623, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 1.3099, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.025031289111389236, |
|
"grad_norm": 4.468465328216553, |
|
"learning_rate": 1.3213804466343421e-05, |
|
"loss": 0.6302, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.02519278129920465, |
|
"grad_norm": 5.513147830963135, |
|
"learning_rate": 1.2658926150792322e-05, |
|
"loss": 0.7853, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.025354273487020067, |
|
"grad_norm": 5.395639896392822, |
|
"learning_rate": 1.2114256511983274e-05, |
|
"loss": 0.9788, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.02551576567483548, |
|
"grad_norm": 4.572993278503418, |
|
"learning_rate": 1.157994445715706e-05, |
|
"loss": 1.0696, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.025677257862650894, |
|
"grad_norm": 3.8697474002838135, |
|
"learning_rate": 1.1056136061894384e-05, |
|
"loss": 0.6712, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.02583875005046631, |
|
"grad_norm": 3.169379711151123, |
|
"learning_rate": 1.0542974530180327e-05, |
|
"loss": 0.6972, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.026000242238281724, |
|
"grad_norm": 2.9400551319122314, |
|
"learning_rate": 1.0040600155253765e-05, |
|
"loss": 0.5203, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.02616173442609714, |
|
"grad_norm": 4.197710037231445, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.5885, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.02632322661391255, |
|
"grad_norm": 4.708157539367676, |
|
"learning_rate": 9.068759265665384e-06, |
|
"loss": 1.189, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.026484718801727966, |
|
"grad_norm": 3.5737617015838623, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 0.7108, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.02664621098954338, |
|
"grad_norm": 4.992034435272217, |
|
"learning_rate": 8.141676086873572e-06, |
|
"loss": 1.0945, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.026807703177358796, |
|
"grad_norm": 5.485217094421387, |
|
"learning_rate": 7.695237378953223e-06, |
|
"loss": 1.0337, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.026969195365174208, |
|
"grad_norm": 5.019131183624268, |
|
"learning_rate": 7.260364370723044e-06, |
|
"loss": 1.0977, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.027130687552989623, |
|
"grad_norm": 3.8975744247436523, |
|
"learning_rate": 6.837175952121306e-06, |
|
"loss": 0.8933, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.02729217974080504, |
|
"grad_norm": 2.8902029991149902, |
|
"learning_rate": 6.425787818636131e-06, |
|
"loss": 0.4471, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.027453671928620454, |
|
"grad_norm": 3.9361510276794434, |
|
"learning_rate": 6.026312439675552e-06, |
|
"loss": 0.7946, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.02761516411643587, |
|
"grad_norm": 4.5258049964904785, |
|
"learning_rate": 5.6388590278194096e-06, |
|
"loss": 1.0502, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.02777665630425128, |
|
"grad_norm": 5.8037943840026855, |
|
"learning_rate": 5.263533508961827e-06, |
|
"loss": 1.2896, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.027938148492066696, |
|
"grad_norm": 3.9754068851470947, |
|
"learning_rate": 4.900438493352055e-06, |
|
"loss": 0.771, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.02809964067988211, |
|
"grad_norm": 4.887168884277344, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 0.8007, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.028261132867697526, |
|
"grad_norm": 4.25801420211792, |
|
"learning_rate": 4.2113336672471245e-06, |
|
"loss": 0.8883, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.028422625055512938, |
|
"grad_norm": 3.7918171882629395, |
|
"learning_rate": 3.885512251130763e-06, |
|
"loss": 0.7367, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.028584117243328353, |
|
"grad_norm": 5.017009735107422, |
|
"learning_rate": 3.5722980755146517e-06, |
|
"loss": 1.0301, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.02874560943114377, |
|
"grad_norm": 3.8177950382232666, |
|
"learning_rate": 3.271776770026963e-06, |
|
"loss": 0.858, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.028907101618959184, |
|
"grad_norm": 3.8096659183502197, |
|
"learning_rate": 2.9840304941919415e-06, |
|
"loss": 0.6779, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.0290685938067746, |
|
"grad_norm": 3.4850761890411377, |
|
"learning_rate": 2.7091379149682685e-06, |
|
"loss": 0.7676, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.02923008599459001, |
|
"grad_norm": 5.1498494148254395, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 1.237, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.029391578182405426, |
|
"grad_norm": 3.281991720199585, |
|
"learning_rate": 2.1982109232821178e-06, |
|
"loss": 0.7704, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.02955307037022084, |
|
"grad_norm": 3.717332601547241, |
|
"learning_rate": 1.962316193157593e-06, |
|
"loss": 0.7846, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.029714562558036256, |
|
"grad_norm": 3.9455020427703857, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 0.7002, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.029876054745851668, |
|
"grad_norm": 3.668330430984497, |
|
"learning_rate": 1.5299867030334814e-06, |
|
"loss": 0.4983, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.030037546933667083, |
|
"grad_norm": 3.9461002349853516, |
|
"learning_rate": 1.333670137599713e-06, |
|
"loss": 0.9362, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.030199039121482498, |
|
"grad_norm": 4.843723773956299, |
|
"learning_rate": 1.1506584608200367e-06, |
|
"loss": 0.9576, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.030360531309297913, |
|
"grad_norm": 3.4709835052490234, |
|
"learning_rate": 9.810017062595322e-07, |
|
"loss": 0.5871, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.03052202349711333, |
|
"grad_norm": 4.323427200317383, |
|
"learning_rate": 8.247462563808817e-07, |
|
"loss": 0.9341, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.03068351568492874, |
|
"grad_norm": 7.534937858581543, |
|
"learning_rate": 6.819348298638839e-07, |
|
"loss": 1.0517, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.030845007872744155, |
|
"grad_norm": 9.260086059570312, |
|
"learning_rate": 5.526064699265753e-07, |
|
"loss": 1.5213, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.03100650006055957, |
|
"grad_norm": 4.949000358581543, |
|
"learning_rate": 4.367965336512403e-07, |
|
"loss": 1.0168, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.031167992248374986, |
|
"grad_norm": 5.339089870452881, |
|
"learning_rate": 3.3453668231809286e-07, |
|
"loss": 0.9305, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.0313294844361904, |
|
"grad_norm": 4.739263534545898, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 0.6322, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.031490976624005816, |
|
"grad_norm": 5.8488335609436035, |
|
"learning_rate": 1.7077534966650766e-07, |
|
"loss": 1.1369, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.03165246881182123, |
|
"grad_norm": 3.8751072883605957, |
|
"learning_rate": 1.0931863906127327e-07, |
|
"loss": 0.8728, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.03181396099963664, |
|
"grad_norm": 5.9032745361328125, |
|
"learning_rate": 6.150154258476315e-08, |
|
"loss": 1.215, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.031975453187452055, |
|
"grad_norm": 3.801668643951416, |
|
"learning_rate": 2.7337132953697554e-08, |
|
"loss": 0.6435, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.03213694537526747, |
|
"grad_norm": 3.6923794746398926, |
|
"learning_rate": 6.834750376549792e-09, |
|
"loss": 0.6713, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.032298437563082885, |
|
"grad_norm": 8.89438533782959, |
|
"learning_rate": 0.0, |
|
"loss": 1.257, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.032298437563082885, |
|
"eval_loss": 0.1947915256023407, |
|
"eval_runtime": 769.4243, |
|
"eval_samples_per_second": 13.554, |
|
"eval_steps_per_second": 3.39, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.861999239200768e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |