{
  "best_metric": 2.022968292236328,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.008986138880776403,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.493069440388201e-05,
      "grad_norm": 0.9749875664710999,
      "learning_rate": 1.001e-05,
      "loss": 1.9179,
      "step": 1
    },
    {
      "epoch": 4.493069440388201e-05,
      "eval_loss": 2.865260362625122,
      "eval_runtime": 1318.9517,
      "eval_samples_per_second": 7.106,
      "eval_steps_per_second": 1.776,
      "step": 1
    },
    {
      "epoch": 8.986138880776402e-05,
      "grad_norm": 1.453717827796936,
      "learning_rate": 2.002e-05,
      "loss": 2.5071,
      "step": 2
    },
    {
      "epoch": 0.00013479208321164605,
      "grad_norm": 1.8178796768188477,
      "learning_rate": 3.0029999999999995e-05,
      "loss": 3.4818,
      "step": 3
    },
    {
      "epoch": 0.00017972277761552804,
      "grad_norm": 1.5889410972595215,
      "learning_rate": 4.004e-05,
      "loss": 2.6923,
      "step": 4
    },
    {
      "epoch": 0.00022465347201941005,
      "grad_norm": 1.5432227849960327,
      "learning_rate": 5.005e-05,
      "loss": 2.3713,
      "step": 5
    },
    {
      "epoch": 0.0002695841664232921,
      "grad_norm": 1.9612302780151367,
      "learning_rate": 6.005999999999999e-05,
      "loss": 2.5215,
      "step": 6
    },
    {
      "epoch": 0.0003145148608271741,
      "grad_norm": 2.0271384716033936,
      "learning_rate": 7.006999999999998e-05,
      "loss": 2.5916,
      "step": 7
    },
    {
      "epoch": 0.0003594455552310561,
      "grad_norm": 1.8546284437179565,
      "learning_rate": 8.008e-05,
      "loss": 2.799,
      "step": 8
    },
    {
      "epoch": 0.0004043762496349381,
      "grad_norm": 1.9049793481826782,
      "learning_rate": 9.009e-05,
      "loss": 2.7529,
      "step": 9
    },
    {
      "epoch": 0.0004493069440388201,
      "grad_norm": 2.092958688735962,
      "learning_rate": 0.0001001,
      "loss": 2.4417,
      "step": 10
    },
    {
      "epoch": 0.0004942376384427022,
      "grad_norm": 2.2183825969696045,
      "learning_rate": 9.957315789473684e-05,
      "loss": 2.9033,
      "step": 11
    },
    {
      "epoch": 0.0005391683328465842,
      "grad_norm": 2.383800983428955,
      "learning_rate": 9.904631578947367e-05,
      "loss": 2.8646,
      "step": 12
    },
    {
      "epoch": 0.0005840990272504661,
      "grad_norm": 2.405416250228882,
      "learning_rate": 9.851947368421052e-05,
      "loss": 2.1852,
      "step": 13
    },
    {
      "epoch": 0.0006290297216543482,
      "grad_norm": 2.645289182662964,
      "learning_rate": 9.799263157894736e-05,
      "loss": 2.5829,
      "step": 14
    },
    {
      "epoch": 0.0006739604160582302,
      "grad_norm": 2.2245421409606934,
      "learning_rate": 9.746578947368421e-05,
      "loss": 2.2014,
      "step": 15
    },
    {
      "epoch": 0.0007188911104621122,
      "grad_norm": 2.6380414962768555,
      "learning_rate": 9.693894736842104e-05,
      "loss": 2.8572,
      "step": 16
    },
    {
      "epoch": 0.0007638218048659942,
      "grad_norm": 2.561542510986328,
      "learning_rate": 9.641210526315789e-05,
      "loss": 2.503,
      "step": 17
    },
    {
      "epoch": 0.0008087524992698762,
      "grad_norm": 2.8893611431121826,
      "learning_rate": 9.588526315789473e-05,
      "loss": 2.7106,
      "step": 18
    },
    {
      "epoch": 0.0008536831936737582,
      "grad_norm": 2.734529495239258,
      "learning_rate": 9.535842105263157e-05,
      "loss": 2.6329,
      "step": 19
    },
    {
      "epoch": 0.0008986138880776402,
      "grad_norm": 2.347940683364868,
      "learning_rate": 9.483157894736841e-05,
      "loss": 2.2582,
      "step": 20
    },
    {
      "epoch": 0.0009435445824815223,
      "grad_norm": 2.2515718936920166,
      "learning_rate": 9.430473684210526e-05,
      "loss": 2.3672,
      "step": 21
    },
    {
      "epoch": 0.0009884752768854043,
      "grad_norm": 2.001131534576416,
      "learning_rate": 9.37778947368421e-05,
      "loss": 2.0849,
      "step": 22
    },
    {
      "epoch": 0.0010334059712892862,
      "grad_norm": 2.1524012088775635,
      "learning_rate": 9.325105263157894e-05,
      "loss": 1.9876,
      "step": 23
    },
    {
      "epoch": 0.0010783366656931684,
      "grad_norm": 2.1994237899780273,
      "learning_rate": 9.272421052631578e-05,
      "loss": 2.1119,
      "step": 24
    },
    {
      "epoch": 0.0011232673600970503,
      "grad_norm": 2.1171867847442627,
      "learning_rate": 9.219736842105263e-05,
      "loss": 2.1807,
      "step": 25
    },
    {
      "epoch": 0.0011681980545009323,
      "grad_norm": 2.242772102355957,
      "learning_rate": 9.167052631578946e-05,
      "loss": 2.6407,
      "step": 26
    },
    {
      "epoch": 0.0012131287489048144,
      "grad_norm": 2.039992570877075,
      "learning_rate": 9.114368421052632e-05,
      "loss": 2.214,
      "step": 27
    },
    {
      "epoch": 0.0012580594433086963,
      "grad_norm": 1.9102648496627808,
      "learning_rate": 9.061684210526315e-05,
      "loss": 1.8997,
      "step": 28
    },
    {
      "epoch": 0.0013029901377125783,
      "grad_norm": 1.8196017742156982,
      "learning_rate": 9.009e-05,
      "loss": 2.0056,
      "step": 29
    },
    {
      "epoch": 0.0013479208321164604,
      "grad_norm": 2.0965042114257812,
      "learning_rate": 8.956315789473683e-05,
      "loss": 2.0932,
      "step": 30
    },
    {
      "epoch": 0.0013928515265203424,
      "grad_norm": 1.8538213968276978,
      "learning_rate": 8.903631578947368e-05,
      "loss": 2.0703,
      "step": 31
    },
    {
      "epoch": 0.0014377822209242243,
      "grad_norm": 1.6811614036560059,
      "learning_rate": 8.850947368421052e-05,
      "loss": 2.0772,
      "step": 32
    },
    {
      "epoch": 0.0014827129153281065,
      "grad_norm": 2.006547212600708,
      "learning_rate": 8.798263157894736e-05,
      "loss": 2.0949,
      "step": 33
    },
    {
      "epoch": 0.0015276436097319884,
      "grad_norm": 2.2675957679748535,
      "learning_rate": 8.745578947368422e-05,
      "loss": 2.1614,
      "step": 34
    },
    {
      "epoch": 0.0015725743041358703,
      "grad_norm": 2.1679954528808594,
      "learning_rate": 8.692894736842105e-05,
      "loss": 2.4816,
      "step": 35
    },
    {
      "epoch": 0.0016175049985397525,
      "grad_norm": 2.089261531829834,
      "learning_rate": 8.64021052631579e-05,
      "loss": 2.1532,
      "step": 36
    },
    {
      "epoch": 0.0016624356929436344,
      "grad_norm": 2.0510172843933105,
      "learning_rate": 8.587526315789473e-05,
      "loss": 2.2617,
      "step": 37
    },
    {
      "epoch": 0.0017073663873475164,
      "grad_norm": 2.3127708435058594,
      "learning_rate": 8.534842105263157e-05,
      "loss": 2.6826,
      "step": 38
    },
    {
      "epoch": 0.0017522970817513985,
      "grad_norm": 2.004573345184326,
      "learning_rate": 8.482157894736842e-05,
      "loss": 2.2024,
      "step": 39
    },
    {
      "epoch": 0.0017972277761552804,
      "grad_norm": 1.9959832429885864,
      "learning_rate": 8.429473684210525e-05,
      "loss": 2.3131,
      "step": 40
    },
    {
      "epoch": 0.0018421584705591626,
      "grad_norm": 2.010679244995117,
      "learning_rate": 8.376789473684211e-05,
      "loss": 2.6558,
      "step": 41
    },
    {
      "epoch": 0.0018870891649630445,
      "grad_norm": 1.8978559970855713,
      "learning_rate": 8.324105263157894e-05,
      "loss": 1.7133,
      "step": 42
    },
    {
      "epoch": 0.0019320198593669265,
      "grad_norm": 1.9281487464904785,
      "learning_rate": 8.271421052631579e-05,
      "loss": 1.6634,
      "step": 43
    },
    {
      "epoch": 0.0019769505537708086,
      "grad_norm": 2.3254244327545166,
      "learning_rate": 8.218736842105262e-05,
      "loss": 2.209,
      "step": 44
    },
    {
      "epoch": 0.0020218812481746905,
      "grad_norm": 2.354820728302002,
      "learning_rate": 8.166052631578947e-05,
      "loss": 2.0182,
      "step": 45
    },
    {
      "epoch": 0.0020668119425785725,
      "grad_norm": 2.071244239807129,
      "learning_rate": 8.113368421052631e-05,
      "loss": 1.9602,
      "step": 46
    },
    {
      "epoch": 0.0021117426369824544,
      "grad_norm": 1.8261961936950684,
      "learning_rate": 8.060684210526315e-05,
      "loss": 1.7813,
      "step": 47
    },
    {
      "epoch": 0.002156673331386337,
      "grad_norm": 2.7406086921691895,
      "learning_rate": 8.008e-05,
      "loss": 2.3802,
      "step": 48
    },
    {
      "epoch": 0.0022016040257902187,
      "grad_norm": 2.408278703689575,
      "learning_rate": 7.955315789473684e-05,
      "loss": 2.3663,
      "step": 49
    },
    {
      "epoch": 0.0022465347201941007,
      "grad_norm": 2.055316925048828,
      "learning_rate": 7.902631578947368e-05,
      "loss": 2.3711,
      "step": 50
    },
    {
      "epoch": 0.0022465347201941007,
      "eval_loss": 2.1835074424743652,
      "eval_runtime": 1324.1769,
      "eval_samples_per_second": 7.078,
      "eval_steps_per_second": 1.769,
      "step": 50
    },
    {
      "epoch": 0.0022914654145979826,
      "grad_norm": 1.5457913875579834,
      "learning_rate": 7.849947368421052e-05,
      "loss": 2.3197,
      "step": 51
    },
    {
      "epoch": 0.0023363961090018645,
      "grad_norm": 1.7222747802734375,
      "learning_rate": 7.797263157894736e-05,
      "loss": 2.3836,
      "step": 52
    },
    {
      "epoch": 0.0023813268034057465,
      "grad_norm": 1.8542462587356567,
      "learning_rate": 7.744578947368421e-05,
      "loss": 2.2357,
      "step": 53
    },
    {
      "epoch": 0.002426257497809629,
      "grad_norm": 1.869113802909851,
      "learning_rate": 7.691894736842104e-05,
      "loss": 2.6175,
      "step": 54
    },
    {
      "epoch": 0.0024711881922135108,
      "grad_norm": 1.865387201309204,
      "learning_rate": 7.63921052631579e-05,
      "loss": 2.1006,
      "step": 55
    },
    {
      "epoch": 0.0025161188866173927,
      "grad_norm": 1.756447196006775,
      "learning_rate": 7.586526315789473e-05,
      "loss": 2.1646,
      "step": 56
    },
    {
      "epoch": 0.0025610495810212746,
      "grad_norm": 1.8337773084640503,
      "learning_rate": 7.533842105263158e-05,
      "loss": 2.1912,
      "step": 57
    },
    {
      "epoch": 0.0026059802754251566,
      "grad_norm": 2.07173228263855,
      "learning_rate": 7.481157894736841e-05,
      "loss": 2.5571,
      "step": 58
    },
    {
      "epoch": 0.0026509109698290385,
      "grad_norm": 1.9832768440246582,
      "learning_rate": 7.428473684210526e-05,
      "loss": 2.3895,
      "step": 59
    },
    {
      "epoch": 0.002695841664232921,
      "grad_norm": 1.8779370784759521,
      "learning_rate": 7.375789473684209e-05,
      "loss": 1.9904,
      "step": 60
    },
    {
      "epoch": 0.002740772358636803,
      "grad_norm": 2.2189080715179443,
      "learning_rate": 7.323105263157895e-05,
      "loss": 2.479,
      "step": 61
    },
    {
      "epoch": 0.0027857030530406847,
      "grad_norm": 2.1359362602233887,
      "learning_rate": 7.270421052631578e-05,
      "loss": 2.239,
      "step": 62
    },
    {
      "epoch": 0.0028306337474445667,
      "grad_norm": 2.0250144004821777,
      "learning_rate": 7.217736842105263e-05,
      "loss": 2.3436,
      "step": 63
    },
    {
      "epoch": 0.0028755644418484486,
      "grad_norm": 2.2144904136657715,
      "learning_rate": 7.165052631578947e-05,
      "loss": 2.0083,
      "step": 64
    },
    {
      "epoch": 0.002920495136252331,
      "grad_norm": 2.109282970428467,
      "learning_rate": 7.11236842105263e-05,
      "loss": 2.2343,
      "step": 65
    },
    {
      "epoch": 0.002965425830656213,
      "grad_norm": 1.87382173538208,
      "learning_rate": 7.059684210526315e-05,
      "loss": 2.4058,
      "step": 66
    },
    {
      "epoch": 0.003010356525060095,
      "grad_norm": 2.178577184677124,
      "learning_rate": 7.006999999999998e-05,
      "loss": 1.5616,
      "step": 67
    },
    {
      "epoch": 0.003055287219463977,
      "grad_norm": 2.0847856998443604,
      "learning_rate": 6.954315789473684e-05,
      "loss": 2.1887,
      "step": 68
    },
    {
      "epoch": 0.0031002179138678587,
      "grad_norm": 2.1739280223846436,
      "learning_rate": 6.901631578947368e-05,
      "loss": 2.2799,
      "step": 69
    },
    {
      "epoch": 0.0031451486082717407,
      "grad_norm": 1.968297004699707,
      "learning_rate": 6.848947368421052e-05,
      "loss": 2.2577,
      "step": 70
    },
    {
      "epoch": 0.003190079302675623,
      "grad_norm": 2.0107929706573486,
      "learning_rate": 6.796263157894737e-05,
      "loss": 2.202,
      "step": 71
    },
    {
      "epoch": 0.003235009997079505,
      "grad_norm": 1.9575681686401367,
      "learning_rate": 6.74357894736842e-05,
      "loss": 2.0874,
      "step": 72
    },
    {
      "epoch": 0.003279940691483387,
      "grad_norm": 1.8866405487060547,
      "learning_rate": 6.690894736842105e-05,
      "loss": 1.9911,
      "step": 73
    },
    {
      "epoch": 0.003324871385887269,
      "grad_norm": 1.875723123550415,
      "learning_rate": 6.638210526315788e-05,
      "loss": 1.9437,
      "step": 74
    },
    {
      "epoch": 0.0033698020802911508,
      "grad_norm": 1.9436726570129395,
      "learning_rate": 6.585526315789474e-05,
      "loss": 2.4976,
      "step": 75
    },
    {
      "epoch": 0.0034147327746950327,
      "grad_norm": 1.8745689392089844,
      "learning_rate": 6.532842105263157e-05,
      "loss": 1.9435,
      "step": 76
    },
    {
      "epoch": 0.003459663469098915,
      "grad_norm": 2.6744322776794434,
      "learning_rate": 6.480157894736842e-05,
      "loss": 1.9938,
      "step": 77
    },
    {
      "epoch": 0.003504594163502797,
      "grad_norm": 2.346561908721924,
      "learning_rate": 6.427473684210526e-05,
      "loss": 2.2839,
      "step": 78
    },
    {
      "epoch": 0.003549524857906679,
      "grad_norm": 1.8465696573257446,
      "learning_rate": 6.37478947368421e-05,
      "loss": 2.0044,
      "step": 79
    },
    {
      "epoch": 0.003594455552310561,
      "grad_norm": 2.5786619186401367,
      "learning_rate": 6.322105263157894e-05,
      "loss": 1.8605,
      "step": 80
    },
    {
      "epoch": 0.003639386246714443,
      "grad_norm": 2.4463517665863037,
      "learning_rate": 6.269421052631577e-05,
      "loss": 2.2545,
      "step": 81
    },
    {
      "epoch": 0.003684316941118325,
      "grad_norm": 2.071136474609375,
      "learning_rate": 6.216736842105263e-05,
      "loss": 2.1263,
      "step": 82
    },
    {
      "epoch": 0.003729247635522207,
      "grad_norm": 1.967814326286316,
      "learning_rate": 6.164052631578947e-05,
      "loss": 1.865,
      "step": 83
    },
    {
      "epoch": 0.003774178329926089,
      "grad_norm": 2.2158045768737793,
      "learning_rate": 6.111368421052631e-05,
      "loss": 2.2745,
      "step": 84
    },
    {
      "epoch": 0.003819109024329971,
      "grad_norm": 2.4370172023773193,
      "learning_rate": 6.058684210526315e-05,
      "loss": 2.4499,
      "step": 85
    },
    {
      "epoch": 0.003864039718733853,
      "grad_norm": 2.521904945373535,
      "learning_rate": 6.005999999999999e-05,
      "loss": 1.9882,
      "step": 86
    },
    {
      "epoch": 0.003908970413137735,
      "grad_norm": 2.680880546569824,
      "learning_rate": 5.953315789473684e-05,
      "loss": 2.1553,
      "step": 87
    },
    {
      "epoch": 0.003953901107541617,
      "grad_norm": 2.1294827461242676,
      "learning_rate": 5.9006315789473676e-05,
      "loss": 2.0562,
      "step": 88
    },
    {
      "epoch": 0.003998831801945499,
      "grad_norm": 2.2988648414611816,
      "learning_rate": 5.847947368421053e-05,
      "loss": 2.3066,
      "step": 89
    },
    {
      "epoch": 0.004043762496349381,
      "grad_norm": 2.1352288722991943,
      "learning_rate": 5.795263157894737e-05,
      "loss": 2.004,
      "step": 90
    },
    {
      "epoch": 0.004088693190753263,
      "grad_norm": 2.0327036380767822,
      "learning_rate": 5.742578947368421e-05,
      "loss": 2.0546,
      "step": 91
    },
    {
      "epoch": 0.004133623885157145,
      "grad_norm": 2.385735511779785,
      "learning_rate": 5.6898947368421046e-05,
      "loss": 1.8421,
      "step": 92
    },
    {
      "epoch": 0.004178554579561027,
      "grad_norm": 2.20995831489563,
      "learning_rate": 5.6372105263157886e-05,
      "loss": 2.0368,
      "step": 93
    },
    {
      "epoch": 0.004223485273964909,
      "grad_norm": 2.3523311614990234,
      "learning_rate": 5.584526315789473e-05,
      "loss": 2.2827,
      "step": 94
    },
    {
      "epoch": 0.004268415968368791,
      "grad_norm": 2.2304766178131104,
      "learning_rate": 5.531842105263158e-05,
      "loss": 1.9297,
      "step": 95
    },
    {
      "epoch": 0.004313346662772674,
      "grad_norm": 2.3118209838867188,
      "learning_rate": 5.4791578947368424e-05,
      "loss": 2.0775,
      "step": 96
    },
    {
      "epoch": 0.0043582773571765555,
      "grad_norm": 2.2064402103424072,
      "learning_rate": 5.426473684210526e-05,
      "loss": 2.0467,
      "step": 97
    },
    {
      "epoch": 0.0044032080515804374,
      "grad_norm": 2.449071168899536,
      "learning_rate": 5.37378947368421e-05,
      "loss": 2.2515,
      "step": 98
    },
    {
      "epoch": 0.004448138745984319,
      "grad_norm": 2.0371315479278564,
      "learning_rate": 5.321105263157894e-05,
      "loss": 1.7399,
      "step": 99
    },
    {
      "epoch": 0.004493069440388201,
      "grad_norm": 2.121981143951416,
      "learning_rate": 5.268421052631578e-05,
      "loss": 2.5136,
      "step": 100
    },
    {
      "epoch": 0.004493069440388201,
      "eval_loss": 2.089129686355591,
      "eval_runtime": 1321.9647,
      "eval_samples_per_second": 7.089,
      "eval_steps_per_second": 1.772,
      "step": 100
    },
    {
      "epoch": 0.004538000134792083,
      "grad_norm": 1.5324777364730835,
      "learning_rate": 5.2157368421052626e-05,
      "loss": 1.8549,
      "step": 101
    },
    {
      "epoch": 0.004582930829195965,
      "grad_norm": 1.585769534111023,
      "learning_rate": 5.163052631578947e-05,
      "loss": 2.0778,
      "step": 102
    },
    {
      "epoch": 0.004627861523599847,
      "grad_norm": 1.5087424516677856,
      "learning_rate": 5.110368421052632e-05,
      "loss": 1.7318,
      "step": 103
    },
    {
      "epoch": 0.004672792218003729,
      "grad_norm": 1.8027831315994263,
      "learning_rate": 5.057684210526316e-05,
      "loss": 2.5847,
      "step": 104
    },
    {
      "epoch": 0.004717722912407611,
      "grad_norm": 1.7983766794204712,
      "learning_rate": 5.005e-05,
      "loss": 2.3703,
      "step": 105
    },
    {
      "epoch": 0.004762653606811493,
      "grad_norm": 2.110668659210205,
      "learning_rate": 4.9523157894736836e-05,
      "loss": 2.284,
      "step": 106
    },
    {
      "epoch": 0.004807584301215375,
      "grad_norm": 1.8841075897216797,
      "learning_rate": 4.899631578947368e-05,
      "loss": 2.0461,
      "step": 107
    },
    {
      "epoch": 0.004852514995619258,
      "grad_norm": 1.998826265335083,
      "learning_rate": 4.846947368421052e-05,
      "loss": 2.2872,
      "step": 108
    },
    {
      "epoch": 0.00489744569002314,
      "grad_norm": 2.051478862762451,
      "learning_rate": 4.794263157894737e-05,
      "loss": 2.3491,
      "step": 109
    },
    {
      "epoch": 0.0049423763844270215,
      "grad_norm": 2.1531167030334473,
      "learning_rate": 4.7415789473684206e-05,
      "loss": 2.0238,
      "step": 110
    },
    {
      "epoch": 0.0049873070788309035,
      "grad_norm": 2.3136911392211914,
      "learning_rate": 4.688894736842105e-05,
      "loss": 2.0826,
      "step": 111
    },
    {
      "epoch": 0.005032237773234785,
      "grad_norm": 2.1972248554229736,
      "learning_rate": 4.636210526315789e-05,
      "loss": 2.394,
      "step": 112
    },
    {
      "epoch": 0.005077168467638667,
      "grad_norm": 2.2175815105438232,
      "learning_rate": 4.583526315789473e-05,
      "loss": 1.8484,
      "step": 113
    },
    {
      "epoch": 0.005122099162042549,
      "grad_norm": 2.4054577350616455,
      "learning_rate": 4.530842105263158e-05,
      "loss": 2.3123,
      "step": 114
    },
    {
      "epoch": 0.005167029856446431,
      "grad_norm": 2.4042704105377197,
      "learning_rate": 4.4781578947368416e-05,
      "loss": 1.8743,
      "step": 115
    },
    {
      "epoch": 0.005211960550850313,
      "grad_norm": 2.772090435028076,
      "learning_rate": 4.425473684210526e-05,
      "loss": 2.1385,
      "step": 116
    },
    {
      "epoch": 0.005256891245254195,
      "grad_norm": 2.160134792327881,
      "learning_rate": 4.372789473684211e-05,
      "loss": 1.9255,
      "step": 117
    },
    {
      "epoch": 0.005301821939658077,
      "grad_norm": 2.093456745147705,
      "learning_rate": 4.320105263157895e-05,
      "loss": 2.3033,
      "step": 118
    },
    {
      "epoch": 0.00534675263406196,
      "grad_norm": 1.9034905433654785,
      "learning_rate": 4.2674210526315786e-05,
      "loss": 2.052,
      "step": 119
    },
    {
      "epoch": 0.005391683328465842,
      "grad_norm": 2.014765739440918,
      "learning_rate": 4.2147368421052626e-05,
      "loss": 1.991,
      "step": 120
    },
    {
      "epoch": 0.005436614022869724,
      "grad_norm": 2.068211555480957,
      "learning_rate": 4.162052631578947e-05,
      "loss": 2.2685,
      "step": 121
    },
    {
      "epoch": 0.005481544717273606,
      "grad_norm": 2.1787354946136475,
      "learning_rate": 4.109368421052631e-05,
      "loss": 2.2642,
      "step": 122
    },
    {
      "epoch": 0.0055264754116774875,
      "grad_norm": 1.9796912670135498,
      "learning_rate": 4.056684210526316e-05,
      "loss": 1.7448,
      "step": 123
    },
    {
      "epoch": 0.0055714061060813695,
      "grad_norm": 2.124720573425293,
      "learning_rate": 4.004e-05,
      "loss": 1.854,
      "step": 124
    },
    {
      "epoch": 0.005616336800485251,
      "grad_norm": 2.0373129844665527,
      "learning_rate": 3.951315789473684e-05,
      "loss": 1.8925,
      "step": 125
    },
    {
      "epoch": 0.005661267494889133,
      "grad_norm": 1.8155055046081543,
      "learning_rate": 3.898631578947368e-05,
      "loss": 1.8812,
      "step": 126
    },
    {
      "epoch": 0.005706198189293015,
      "grad_norm": 1.8750555515289307,
      "learning_rate": 3.845947368421052e-05,
      "loss": 2.3017,
      "step": 127
    },
    {
      "epoch": 0.005751128883696897,
      "grad_norm": 1.9103080034255981,
      "learning_rate": 3.7932631578947367e-05,
      "loss": 1.6344,
      "step": 128
    },
    {
      "epoch": 0.005796059578100779,
      "grad_norm": 2.2333719730377197,
      "learning_rate": 3.7405789473684206e-05,
      "loss": 2.137,
      "step": 129
    },
    {
      "epoch": 0.005840990272504662,
      "grad_norm": 1.7498968839645386,
      "learning_rate": 3.6878947368421045e-05,
      "loss": 1.7955,
      "step": 130
    },
    {
      "epoch": 0.005885920966908544,
      "grad_norm": 2.310913562774658,
      "learning_rate": 3.635210526315789e-05,
      "loss": 2.0951,
      "step": 131
    },
    {
      "epoch": 0.005930851661312426,
      "grad_norm": 1.861606478691101,
      "learning_rate": 3.582526315789474e-05,
      "loss": 1.9386,
      "step": 132
    },
    {
      "epoch": 0.005975782355716308,
      "grad_norm": 2.2353315353393555,
      "learning_rate": 3.5298421052631576e-05,
      "loss": 2.0958,
      "step": 133
    },
    {
      "epoch": 0.00602071305012019,
      "grad_norm": 2.313389778137207,
      "learning_rate": 3.477157894736842e-05,
      "loss": 1.9005,
      "step": 134
    },
    {
      "epoch": 0.006065643744524072,
      "grad_norm": 1.824509859085083,
      "learning_rate": 3.424473684210526e-05,
      "loss": 2.0138,
      "step": 135
    },
    {
      "epoch": 0.006110574438927954,
      "grad_norm": 1.9519466161727905,
      "learning_rate": 3.37178947368421e-05,
      "loss": 1.8819,
      "step": 136
    },
    {
      "epoch": 0.0061555051333318355,
      "grad_norm": 1.961268424987793,
      "learning_rate": 3.319105263157894e-05,
      "loss": 1.7123,
      "step": 137
    },
    {
      "epoch": 0.0062004358277357174,
      "grad_norm": 2.225808620452881,
      "learning_rate": 3.2664210526315786e-05,
      "loss": 2.0749,
      "step": 138
    },
    {
      "epoch": 0.006245366522139599,
      "grad_norm": 2.0264053344726562,
      "learning_rate": 3.213736842105263e-05,
      "loss": 1.9238,
      "step": 139
    },
    {
      "epoch": 0.006290297216543481,
      "grad_norm": 1.8889691829681396,
      "learning_rate": 3.161052631578947e-05,
      "loss": 1.7536,
      "step": 140
    },
    {
      "epoch": 0.006335227910947364,
      "grad_norm": 2.0300066471099854,
      "learning_rate": 3.108368421052632e-05,
      "loss": 1.744,
      "step": 141
    },
    {
      "epoch": 0.006380158605351246,
      "grad_norm": 1.9760944843292236,
      "learning_rate": 3.0556842105263156e-05,
      "loss": 2.233,
      "step": 142
    },
    {
      "epoch": 0.006425089299755128,
      "grad_norm": 2.7234182357788086,
      "learning_rate": 3.0029999999999995e-05,
      "loss": 2.2925,
      "step": 143
    },
    {
      "epoch": 0.00647001999415901,
      "grad_norm": 2.251617670059204,
      "learning_rate": 2.9503157894736838e-05,
      "loss": 1.4938,
      "step": 144
    },
    {
      "epoch": 0.006514950688562892,
      "grad_norm": 2.431299924850464,
      "learning_rate": 2.8976315789473684e-05,
      "loss": 1.942,
      "step": 145
    },
    {
      "epoch": 0.006559881382966774,
      "grad_norm": 2.0769991874694824,
      "learning_rate": 2.8449473684210523e-05,
      "loss": 1.6405,
      "step": 146
    },
    {
      "epoch": 0.006604812077370656,
      "grad_norm": 2.384251594543457,
      "learning_rate": 2.7922631578947366e-05,
      "loss": 2.2737,
      "step": 147
    },
    {
      "epoch": 0.006649742771774538,
      "grad_norm": 2.013692617416382,
      "learning_rate": 2.7395789473684212e-05,
      "loss": 1.97,
      "step": 148
    },
    {
      "epoch": 0.00669467346617842,
      "grad_norm": 2.7628982067108154,
      "learning_rate": 2.686894736842105e-05,
      "loss": 2.3824,
      "step": 149
    },
    {
      "epoch": 0.0067396041605823015,
      "grad_norm": 2.5335357189178467,
      "learning_rate": 2.634210526315789e-05,
      "loss": 2.0715,
      "step": 150
    },
    {
      "epoch": 0.0067396041605823015,
      "eval_loss": 2.0427417755126953,
      "eval_runtime": 1323.5526,
      "eval_samples_per_second": 7.081,
      "eval_steps_per_second": 1.77,
      "step": 150
    },
    {
      "epoch": 0.0067845348549861835,
      "grad_norm": 1.2939319610595703,
      "learning_rate": 2.5815263157894736e-05,
      "loss": 1.6482,
      "step": 151
    },
    {
      "epoch": 0.006829465549390065,
      "grad_norm": 1.5967990159988403,
      "learning_rate": 2.528842105263158e-05,
      "loss": 2.0326,
      "step": 152
    },
    {
      "epoch": 0.006874396243793948,
      "grad_norm": 1.8107272386550903,
      "learning_rate": 2.4761578947368418e-05,
      "loss": 2.2003,
      "step": 153
    },
    {
      "epoch": 0.00691932693819783,
      "grad_norm": 1.8108916282653809,
      "learning_rate": 2.423473684210526e-05,
      "loss": 1.9096,
      "step": 154
    },
    {
      "epoch": 0.006964257632601712,
      "grad_norm": 1.7029316425323486,
      "learning_rate": 2.3707894736842103e-05,
      "loss": 2.1272,
      "step": 155
    },
    {
      "epoch": 0.007009188327005594,
      "grad_norm": 2.031301259994507,
      "learning_rate": 2.3181052631578946e-05,
      "loss": 2.2461,
      "step": 156
    },
    {
      "epoch": 0.007054119021409476,
      "grad_norm": 1.6262093782424927,
      "learning_rate": 2.265421052631579e-05,
      "loss": 1.7345,
      "step": 157
    },
    {
      "epoch": 0.007099049715813358,
      "grad_norm": 2.0770440101623535,
      "learning_rate": 2.212736842105263e-05,
      "loss": 2.322,
      "step": 158
    },
    {
      "epoch": 0.00714398041021724,
      "grad_norm": 1.9273806810379028,
      "learning_rate": 2.1600526315789474e-05,
      "loss": 1.8112,
      "step": 159
    },
    {
      "epoch": 0.007188911104621122,
      "grad_norm": 2.2276177406311035,
      "learning_rate": 2.1073684210526313e-05,
      "loss": 2.0591,
      "step": 160
    },
    {
      "epoch": 0.007233841799025004,
      "grad_norm": 2.3354694843292236,
      "learning_rate": 2.0546842105263155e-05,
      "loss": 2.1254,
      "step": 161
    },
    {
      "epoch": 0.007278772493428886,
      "grad_norm": 2.806878089904785,
      "learning_rate": 2.002e-05,
      "loss": 2.4075,
      "step": 162
    },
    {
      "epoch": 0.0073237031878327676,
      "grad_norm": 2.16422176361084,
      "learning_rate": 1.949315789473684e-05,
      "loss": 2.1182,
      "step": 163
    },
    {
      "epoch": 0.00736863388223665,
      "grad_norm": 2.8581697940826416,
      "learning_rate": 1.8966315789473683e-05,
      "loss": 2.3394,
      "step": 164
    },
    {
      "epoch": 0.007413564576640532,
      "grad_norm": 2.2438290119171143,
      "learning_rate": 1.8439473684210522e-05,
      "loss": 1.9132,
      "step": 165
    },
    {
      "epoch": 0.007458495271044414,
      "grad_norm": 2.672802448272705,
      "learning_rate": 1.791263157894737e-05,
      "loss": 2.6549,
      "step": 166
    },
    {
      "epoch": 0.007503425965448296,
      "grad_norm": 2.280754804611206,
      "learning_rate": 1.738578947368421e-05,
      "loss": 2.3295,
      "step": 167
    },
    {
      "epoch": 0.007548356659852178,
      "grad_norm": 1.8638712167739868,
      "learning_rate": 1.685894736842105e-05,
      "loss": 1.9558,
      "step": 168
    },
    {
      "epoch": 0.00759328735425606,
      "grad_norm": 1.9110428094863892,
      "learning_rate": 1.6332105263157893e-05,
      "loss": 2.0123,
      "step": 169
    },
    {
      "epoch": 0.007638218048659942,
      "grad_norm": 2.131638765335083,
      "learning_rate": 1.5805263157894735e-05,
      "loss": 1.4546,
      "step": 170
    },
    {
      "epoch": 0.007683148743063824,
      "grad_norm": 2.094499349594116,
      "learning_rate": 1.5278421052631578e-05,
      "loss": 1.9308,
      "step": 171
    },
    {
      "epoch": 0.007728079437467706,
      "grad_norm": 1.8845446109771729,
      "learning_rate": 1.4751578947368419e-05,
      "loss": 1.6387,
      "step": 172
    },
    {
      "epoch": 0.007773010131871588,
      "grad_norm": 1.6916247606277466,
      "learning_rate": 1.4224736842105262e-05,
      "loss": 1.7056,
      "step": 173
    },
    {
      "epoch": 0.00781794082627547,
      "grad_norm": 1.8374581336975098,
      "learning_rate": 1.3697894736842106e-05,
      "loss": 2.1303,
      "step": 174
    },
    {
      "epoch": 0.007862871520679353,
      "grad_norm": 2.183013916015625,
      "learning_rate": 1.3171052631578945e-05,
      "loss": 1.9973,
      "step": 175
    },
    {
      "epoch": 0.007907802215083234,
      "grad_norm": 2.2911391258239746,
      "learning_rate": 1.264421052631579e-05,
      "loss": 2.202,
      "step": 176
    },
    {
      "epoch": 0.007952732909487116,
      "grad_norm": 2.632253885269165,
      "learning_rate": 1.211736842105263e-05,
      "loss": 2.3454,
      "step": 177
    },
    {
      "epoch": 0.007997663603890998,
      "grad_norm": 2.447774887084961,
      "learning_rate": 1.1590526315789473e-05,
      "loss": 1.9924,
      "step": 178
    },
    {
      "epoch": 0.00804259429829488,
      "grad_norm": 2.108670473098755,
      "learning_rate": 1.1063684210526316e-05,
      "loss": 2.2341,
      "step": 179
    },
    {
      "epoch": 0.008087524992698762,
      "grad_norm": 2.132281541824341,
      "learning_rate": 1.0536842105263156e-05,
      "loss": 1.9931,
      "step": 180
    },
    {
      "epoch": 0.008132455687102644,
      "grad_norm": 1.7290847301483154,
      "learning_rate": 1.001e-05,
      "loss": 1.7781,
      "step": 181
    },
    {
      "epoch": 0.008177386381506526,
      "grad_norm": 2.1073453426361084,
      "learning_rate": 9.483157894736842e-06,
      "loss": 1.6803,
      "step": 182
    },
    {
      "epoch": 0.008222317075910408,
      "grad_norm": 2.2671337127685547,
      "learning_rate": 8.956315789473684e-06,
      "loss": 2.4534,
      "step": 183
    },
    {
      "epoch": 0.00826724777031429,
      "grad_norm": 2.219877243041992,
      "learning_rate": 8.429473684210525e-06,
      "loss": 1.7814,
      "step": 184
    },
    {
      "epoch": 0.008312178464718172,
      "grad_norm": 1.8467822074890137,
      "learning_rate": 7.902631578947368e-06,
      "loss": 1.6241,
      "step": 185
    },
    {
      "epoch": 0.008357109159122054,
      "grad_norm": 2.22536563873291,
      "learning_rate": 7.3757894736842095e-06,
      "loss": 1.6642,
      "step": 186
    },
    {
      "epoch": 0.008402039853525936,
      "grad_norm": 2.0499343872070312,
      "learning_rate": 6.848947368421053e-06,
      "loss": 1.8032,
      "step": 187
    },
    {
      "epoch": 0.008446970547929818,
      "grad_norm": 1.8307327032089233,
      "learning_rate": 6.322105263157895e-06,
      "loss": 1.6625,
      "step": 188
    },
    {
      "epoch": 0.0084919012423337,
      "grad_norm": 2.4157087802886963,
      "learning_rate": 5.7952631578947365e-06,
      "loss": 1.8355,
      "step": 189
    },
    {
      "epoch": 0.008536831936737582,
      "grad_norm": 2.4110615253448486,
      "learning_rate": 5.268421052631578e-06,
      "loss": 1.6817,
      "step": 190
    },
    {
      "epoch": 0.008581762631141463,
      "grad_norm": 2.389373540878296,
      "learning_rate": 4.741578947368421e-06,
      "loss": 1.7818,
      "step": 191
    },
    {
      "epoch": 0.008626693325545347,
      "grad_norm": 1.9679090976715088,
      "learning_rate": 4.2147368421052626e-06,
      "loss": 1.9776,
      "step": 192
    },
    {
      "epoch": 0.008671624019949229,
      "grad_norm": 3.9061481952667236,
      "learning_rate": 3.6878947368421047e-06,
      "loss": 1.8022,
      "step": 193
    },
    {
      "epoch": 0.008716554714353111,
      "grad_norm": 2.2966439723968506,
      "learning_rate": 3.1610526315789474e-06,
      "loss": 2.017,
      "step": 194
    },
    {
      "epoch": 0.008761485408756993,
      "grad_norm": 2.285691738128662,
      "learning_rate": 2.634210526315789e-06,
      "loss": 2.2703,
      "step": 195
    },
    {
      "epoch": 0.008806416103160875,
      "grad_norm": 2.352679967880249,
      "learning_rate": 2.1073684210526313e-06,
      "loss": 1.7985,
      "step": 196
    },
    {
      "epoch": 0.008851346797564757,
      "grad_norm": 2.3461921215057373,
      "learning_rate": 1.5805263157894737e-06,
      "loss": 1.7361,
      "step": 197
    },
    {
      "epoch": 0.008896277491968639,
      "grad_norm": 2.690016508102417,
      "learning_rate": 1.0536842105263156e-06,
      "loss": 2.1687,
      "step": 198
    },
    {
      "epoch": 0.00894120818637252,
      "grad_norm": 2.5040085315704346,
      "learning_rate": 5.268421052631578e-07,
      "loss": 2.4819,
      "step": 199
    },
    {
      "epoch": 0.008986138880776403,
      "grad_norm": 2.3786873817443848,
      "learning_rate": 0.0,
      "loss": 2.7339,
      "step": 200
    },
    {
      "epoch": 0.008986138880776403,
      "eval_loss": 2.022968292236328,
      "eval_runtime": 1321.9224,
      "eval_samples_per_second": 7.09,
      "eval_steps_per_second": 1.772,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.562201408929792e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}