|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 59.881188118811885,
  "eval_steps": 1000,
  "global_step": 11340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.53,
      "learning_rate": 4.956349206349207e-05,
      "loss": 0.0798,
      "step": 100
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.912257495590829e-05,
      "loss": 0.0816,
      "step": 200
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.868165784832452e-05,
      "loss": 0.0761,
      "step": 300
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.824514991181658e-05,
      "loss": 0.0723,
      "step": 400
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.7804232804232806e-05,
      "loss": 0.0601,
      "step": 500
    },
    {
      "epoch": 3.17,
      "learning_rate": 4.736331569664903e-05,
      "loss": 0.0593,
      "step": 600
    },
    {
      "epoch": 3.7,
      "learning_rate": 4.692239858906526e-05,
      "loss": 0.0692,
      "step": 700
    },
    {
      "epoch": 4.22,
      "learning_rate": 4.648148148148148e-05,
      "loss": 0.0587,
      "step": 800
    },
    {
      "epoch": 4.75,
      "learning_rate": 4.604056437389771e-05,
      "loss": 0.0598,
      "step": 900
    },
    {
      "epoch": 5.28,
      "learning_rate": 4.559964726631393e-05,
      "loss": 0.0559,
      "step": 1000
    },
    {
      "epoch": 5.28,
      "eval_accuracy": 0.9191418886184692,
      "eval_loss": 0.3096904754638672,
      "eval_runtime": 51.3243,
      "eval_samples_per_second": 47.229,
      "eval_steps_per_second": 5.904,
      "step": 1000
    },
    {
      "epoch": 5.81,
      "learning_rate": 4.515873015873016e-05,
      "loss": 0.0552,
      "step": 1100
    },
    {
      "epoch": 6.34,
      "learning_rate": 4.471781305114639e-05,
      "loss": 0.0519,
      "step": 1200
    },
    {
      "epoch": 6.86,
      "learning_rate": 4.428130511463845e-05,
      "loss": 0.0536,
      "step": 1300
    },
    {
      "epoch": 7.39,
      "learning_rate": 4.3840388007054675e-05,
      "loss": 0.0438,
      "step": 1400
    },
    {
      "epoch": 7.92,
      "learning_rate": 4.33994708994709e-05,
      "loss": 0.0436,
      "step": 1500
    },
    {
      "epoch": 8.45,
      "learning_rate": 4.295855379188713e-05,
      "loss": 0.0447,
      "step": 1600
    },
    {
      "epoch": 8.98,
      "learning_rate": 4.2517636684303355e-05,
      "loss": 0.0535,
      "step": 1700
    },
    {
      "epoch": 9.5,
      "learning_rate": 4.207671957671958e-05,
      "loss": 0.0467,
      "step": 1800
    },
    {
      "epoch": 10.03,
      "learning_rate": 4.16358024691358e-05,
      "loss": 0.0557,
      "step": 1900
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.1194885361552036e-05,
      "loss": 0.047,
      "step": 2000
    },
    {
      "epoch": 10.56,
      "eval_accuracy": 0.9191418886184692,
      "eval_loss": 0.34823155403137207,
      "eval_runtime": 51.4649,
      "eval_samples_per_second": 47.1,
      "eval_steps_per_second": 5.888,
      "step": 2000
    },
    {
      "epoch": 11.09,
      "learning_rate": 4.0753968253968256e-05,
      "loss": 0.0397,
      "step": 2100
    },
    {
      "epoch": 11.62,
      "learning_rate": 4.031305114638448e-05,
      "loss": 0.0475,
      "step": 2200
    },
    {
      "epoch": 12.15,
      "learning_rate": 3.987213403880071e-05,
      "loss": 0.0478,
      "step": 2300
    },
    {
      "epoch": 12.67,
      "learning_rate": 3.9431216931216936e-05,
      "loss": 0.0421,
      "step": 2400
    },
    {
      "epoch": 13.2,
      "learning_rate": 3.8994708994709e-05,
      "loss": 0.0408,
      "step": 2500
    },
    {
      "epoch": 13.73,
      "learning_rate": 3.8553791887125224e-05,
      "loss": 0.0368,
      "step": 2600
    },
    {
      "epoch": 14.26,
      "learning_rate": 3.8112874779541445e-05,
      "loss": 0.0379,
      "step": 2700
    },
    {
      "epoch": 14.79,
      "learning_rate": 3.767195767195768e-05,
      "loss": 0.052,
      "step": 2800
    },
    {
      "epoch": 15.31,
      "learning_rate": 3.72310405643739e-05,
      "loss": 0.0424,
      "step": 2900
    },
    {
      "epoch": 15.84,
      "learning_rate": 3.6790123456790125e-05,
      "loss": 0.0402,
      "step": 3000
    },
    {
      "epoch": 15.84,
      "eval_accuracy": 0.9080032706260681,
      "eval_loss": 0.3889801502227783,
      "eval_runtime": 50.7451,
      "eval_samples_per_second": 47.768,
      "eval_steps_per_second": 5.971,
      "step": 3000
    },
    {
      "epoch": 16.37,
      "learning_rate": 3.634920634920635e-05,
      "loss": 0.0417,
      "step": 3100
    },
    {
      "epoch": 16.9,
      "learning_rate": 3.590828924162258e-05,
      "loss": 0.0419,
      "step": 3200
    },
    {
      "epoch": 17.43,
      "learning_rate": 3.54673721340388e-05,
      "loss": 0.0439,
      "step": 3300
    },
    {
      "epoch": 17.95,
      "learning_rate": 3.502645502645503e-05,
      "loss": 0.0446,
      "step": 3400
    },
    {
      "epoch": 18.48,
      "learning_rate": 3.458553791887125e-05,
      "loss": 0.0339,
      "step": 3500
    },
    {
      "epoch": 19.01,
      "learning_rate": 3.414462081128748e-05,
      "loss": 0.0301,
      "step": 3600
    },
    {
      "epoch": 19.54,
      "learning_rate": 3.3703703703703706e-05,
      "loss": 0.0277,
      "step": 3700
    },
    {
      "epoch": 20.07,
      "learning_rate": 3.326278659611993e-05,
      "loss": 0.0333,
      "step": 3800
    },
    {
      "epoch": 20.59,
      "learning_rate": 3.282186948853615e-05,
      "loss": 0.0298,
      "step": 3900
    },
    {
      "epoch": 21.12,
      "learning_rate": 3.2380952380952386e-05,
      "loss": 0.0328,
      "step": 4000
    },
    {
      "epoch": 21.12,
      "eval_accuracy": 0.9150164723396301,
      "eval_loss": 0.37457939982414246,
      "eval_runtime": 50.3165,
      "eval_samples_per_second": 48.175,
      "eval_steps_per_second": 6.022,
      "step": 4000
    },
    {
      "epoch": 21.65,
      "learning_rate": 3.1940035273368606e-05,
      "loss": 0.0298,
      "step": 4100
    },
    {
      "epoch": 22.18,
      "learning_rate": 3.149911816578483e-05,
      "loss": 0.0314,
      "step": 4200
    },
    {
      "epoch": 22.71,
      "learning_rate": 3.105820105820106e-05,
      "loss": 0.0253,
      "step": 4300
    },
    {
      "epoch": 23.23,
      "learning_rate": 3.061728395061729e-05,
      "loss": 0.0339,
      "step": 4400
    },
    {
      "epoch": 23.76,
      "learning_rate": 3.017636684303351e-05,
      "loss": 0.0266,
      "step": 4500
    },
    {
      "epoch": 24.29,
      "learning_rate": 2.973544973544974e-05,
      "loss": 0.0361,
      "step": 4600
    },
    {
      "epoch": 24.82,
      "learning_rate": 2.929453262786596e-05,
      "loss": 0.0305,
      "step": 4700
    },
    {
      "epoch": 25.35,
      "learning_rate": 2.885361552028219e-05,
      "loss": 0.0294,
      "step": 4800
    },
    {
      "epoch": 25.87,
      "learning_rate": 2.8412698412698414e-05,
      "loss": 0.0339,
      "step": 4900
    },
    {
      "epoch": 26.4,
      "learning_rate": 2.797178130511464e-05,
      "loss": 0.0189,
      "step": 5000
    },
    {
      "epoch": 26.4,
      "eval_accuracy": 0.9113036394119263,
      "eval_loss": 0.42735978960990906,
      "eval_runtime": 49.4145,
      "eval_samples_per_second": 49.054,
      "eval_steps_per_second": 6.132,
      "step": 5000
    },
    {
      "epoch": 26.93,
      "learning_rate": 2.7530864197530864e-05,
      "loss": 0.0285,
      "step": 5100
    },
    {
      "epoch": 27.46,
      "learning_rate": 2.7089947089947094e-05,
      "loss": 0.0307,
      "step": 5200
    },
    {
      "epoch": 27.99,
      "learning_rate": 2.6649029982363318e-05,
      "loss": 0.0269,
      "step": 5300
    },
    {
      "epoch": 28.51,
      "learning_rate": 2.6208112874779544e-05,
      "loss": 0.0292,
      "step": 5400
    },
    {
      "epoch": 29.04,
      "learning_rate": 2.5767195767195768e-05,
      "loss": 0.032,
      "step": 5500
    },
    {
      "epoch": 29.57,
      "learning_rate": 2.5326278659611995e-05,
      "loss": 0.0297,
      "step": 5600
    },
    {
      "epoch": 30.1,
      "learning_rate": 2.4885361552028218e-05,
      "loss": 0.027,
      "step": 5700
    },
    {
      "epoch": 30.63,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 0.0258,
      "step": 5800
    },
    {
      "epoch": 31.16,
      "learning_rate": 2.4003527336860672e-05,
      "loss": 0.0237,
      "step": 5900
    },
    {
      "epoch": 31.68,
      "learning_rate": 2.3562610229276895e-05,
      "loss": 0.0187,
      "step": 6000
    },
    {
      "epoch": 31.68,
      "eval_accuracy": 0.9100660085678101,
      "eval_loss": 0.4131234884262085,
      "eval_runtime": 49.5018,
      "eval_samples_per_second": 48.968,
      "eval_steps_per_second": 6.121,
      "step": 6000
    },
    {
      "epoch": 32.21,
      "learning_rate": 2.3121693121693122e-05,
      "loss": 0.0255,
      "step": 6100
    },
    {
      "epoch": 32.74,
      "learning_rate": 2.268077601410935e-05,
      "loss": 0.0244,
      "step": 6200
    },
    {
      "epoch": 33.27,
      "learning_rate": 2.2239858906525572e-05,
      "loss": 0.0273,
      "step": 6300
    },
    {
      "epoch": 33.8,
      "learning_rate": 2.17989417989418e-05,
      "loss": 0.0214,
      "step": 6400
    },
    {
      "epoch": 34.32,
      "learning_rate": 2.1358024691358026e-05,
      "loss": 0.0265,
      "step": 6500
    },
    {
      "epoch": 34.85,
      "learning_rate": 2.091710758377425e-05,
      "loss": 0.0276,
      "step": 6600
    },
    {
      "epoch": 35.38,
      "learning_rate": 2.0476190476190476e-05,
      "loss": 0.0153,
      "step": 6700
    },
    {
      "epoch": 35.91,
      "learning_rate": 2.0035273368606703e-05,
      "loss": 0.0246,
      "step": 6800
    },
    {
      "epoch": 36.44,
      "learning_rate": 1.959435626102293e-05,
      "loss": 0.0266,
      "step": 6900
    },
    {
      "epoch": 36.96,
      "learning_rate": 1.9153439153439153e-05,
      "loss": 0.0203,
      "step": 7000
    },
    {
      "epoch": 36.96,
      "eval_accuracy": 0.9236798882484436,
      "eval_loss": 0.3643423020839691,
      "eval_runtime": 49.3507,
      "eval_samples_per_second": 49.118,
      "eval_steps_per_second": 6.14,
      "step": 7000
    },
    {
      "epoch": 37.49,
      "learning_rate": 1.871252204585538e-05,
      "loss": 0.0225,
      "step": 7100
    },
    {
      "epoch": 38.02,
      "learning_rate": 1.8271604938271607e-05,
      "loss": 0.0296,
      "step": 7200
    },
    {
      "epoch": 38.55,
      "learning_rate": 1.783068783068783e-05,
      "loss": 0.0181,
      "step": 7300
    },
    {
      "epoch": 39.08,
      "learning_rate": 1.7389770723104057e-05,
      "loss": 0.0184,
      "step": 7400
    },
    {
      "epoch": 39.6,
      "learning_rate": 1.6948853615520284e-05,
      "loss": 0.0191,
      "step": 7500
    },
    {
      "epoch": 40.13,
      "learning_rate": 1.6507936507936507e-05,
      "loss": 0.0224,
      "step": 7600
    },
    {
      "epoch": 40.66,
      "learning_rate": 1.6067019400352734e-05,
      "loss": 0.0161,
      "step": 7700
    },
    {
      "epoch": 41.19,
      "learning_rate": 1.562610229276896e-05,
      "loss": 0.0211,
      "step": 7800
    },
    {
      "epoch": 41.72,
      "learning_rate": 1.5185185185185186e-05,
      "loss": 0.0165,
      "step": 7900
    },
    {
      "epoch": 42.24,
      "learning_rate": 1.4744268077601411e-05,
      "loss": 0.0147,
      "step": 8000
    },
    {
      "epoch": 42.24,
      "eval_accuracy": 0.9294554591178894,
      "eval_loss": 0.3574332892894745,
      "eval_runtime": 49.7962,
      "eval_samples_per_second": 48.678,
      "eval_steps_per_second": 6.085,
      "step": 8000
    },
    {
      "epoch": 42.77,
      "learning_rate": 1.4303350970017638e-05,
      "loss": 0.0235,
      "step": 8100
    },
    {
      "epoch": 43.3,
      "learning_rate": 1.3862433862433863e-05,
      "loss": 0.0207,
      "step": 8200
    },
    {
      "epoch": 43.83,
      "learning_rate": 1.3421516754850088e-05,
      "loss": 0.0139,
      "step": 8300
    },
    {
      "epoch": 44.36,
      "learning_rate": 1.2980599647266315e-05,
      "loss": 0.0168,
      "step": 8400
    },
    {
      "epoch": 44.88,
      "learning_rate": 1.253968253968254e-05,
      "loss": 0.0146,
      "step": 8500
    },
    {
      "epoch": 45.41,
      "learning_rate": 1.2098765432098767e-05,
      "loss": 0.0149,
      "step": 8600
    },
    {
      "epoch": 45.94,
      "learning_rate": 1.1657848324514992e-05,
      "loss": 0.0155,
      "step": 8700
    },
    {
      "epoch": 46.47,
      "learning_rate": 1.1221340388007055e-05,
      "loss": 0.0196,
      "step": 8800
    },
    {
      "epoch": 47.0,
      "learning_rate": 1.0780423280423282e-05,
      "loss": 0.0229,
      "step": 8900
    },
    {
      "epoch": 47.52,
      "learning_rate": 1.0339506172839507e-05,
      "loss": 0.0148,
      "step": 9000
    },
    {
      "epoch": 47.52,
      "eval_accuracy": 0.9220296740531921,
      "eval_loss": 0.36532989144325256,
      "eval_runtime": 50.0277,
      "eval_samples_per_second": 48.453,
      "eval_steps_per_second": 6.057,
      "step": 9000
    },
    {
      "epoch": 48.05,
      "learning_rate": 9.898589065255732e-06,
      "loss": 0.0133,
      "step": 9100
    },
    {
      "epoch": 48.58,
      "learning_rate": 9.457671957671959e-06,
      "loss": 0.0131,
      "step": 9200
    },
    {
      "epoch": 49.11,
      "learning_rate": 9.016754850088184e-06,
      "loss": 0.0121,
      "step": 9300
    },
    {
      "epoch": 49.64,
      "learning_rate": 8.575837742504409e-06,
      "loss": 0.0168,
      "step": 9400
    },
    {
      "epoch": 50.17,
      "learning_rate": 8.134920634920636e-06,
      "loss": 0.0148,
      "step": 9500
    },
    {
      "epoch": 50.69,
      "learning_rate": 7.694003527336861e-06,
      "loss": 0.0129,
      "step": 9600
    },
    {
      "epoch": 51.22,
      "learning_rate": 7.253086419753087e-06,
      "loss": 0.012,
      "step": 9700
    },
    {
      "epoch": 51.75,
      "learning_rate": 6.812169312169313e-06,
      "loss": 0.0112,
      "step": 9800
    },
    {
      "epoch": 52.28,
      "learning_rate": 6.371252204585539e-06,
      "loss": 0.0193,
      "step": 9900
    },
    {
      "epoch": 52.81,
      "learning_rate": 5.930335097001764e-06,
      "loss": 0.0137,
      "step": 10000
    },
    {
      "epoch": 52.81,
      "eval_accuracy": 0.9352310299873352,
      "eval_loss": 0.3257134258747101,
      "eval_runtime": 50.886,
      "eval_samples_per_second": 47.636,
      "eval_steps_per_second": 5.954,
      "step": 10000
    },
    {
      "epoch": 53.33,
      "learning_rate": 5.489417989417989e-06,
      "loss": 0.0171,
      "step": 10100
    },
    {
      "epoch": 53.86,
      "learning_rate": 5.048500881834215e-06,
      "loss": 0.0169,
      "step": 10200
    },
    {
      "epoch": 54.39,
      "learning_rate": 4.611992945326279e-06,
      "loss": 0.0136,
      "step": 10300
    },
    {
      "epoch": 54.92,
      "learning_rate": 4.171075837742505e-06,
      "loss": 0.0152,
      "step": 10400
    },
    {
      "epoch": 55.45,
      "learning_rate": 3.7301587301587305e-06,
      "loss": 0.015,
      "step": 10500
    },
    {
      "epoch": 55.97,
      "learning_rate": 3.289241622574956e-06,
      "loss": 0.0136,
      "step": 10600
    },
    {
      "epoch": 56.5,
      "learning_rate": 2.848324514991182e-06,
      "loss": 0.0156,
      "step": 10700
    },
    {
      "epoch": 57.03,
      "learning_rate": 2.4074074074074075e-06,
      "loss": 0.0137,
      "step": 10800
    },
    {
      "epoch": 57.56,
      "learning_rate": 1.9664902998236335e-06,
      "loss": 0.0147,
      "step": 10900
    },
    {
      "epoch": 58.09,
      "learning_rate": 1.525573192239859e-06,
      "loss": 0.0174,
      "step": 11000
    },
    {
      "epoch": 58.09,
      "eval_accuracy": 0.933993399143219,
      "eval_loss": 0.30968689918518066,
      "eval_runtime": 51.0931,
      "eval_samples_per_second": 47.443,
      "eval_steps_per_second": 5.93,
      "step": 11000
    },
    {
      "epoch": 58.61,
      "learning_rate": 1.0846560846560847e-06,
      "loss": 0.0163,
      "step": 11100
    },
    {
      "epoch": 59.14,
      "learning_rate": 6.437389770723105e-07,
      "loss": 0.0096,
      "step": 11200
    },
    {
      "epoch": 59.67,
      "learning_rate": 2.0282186948853617e-07,
      "loss": 0.0121,
      "step": 11300
    },
    {
      "epoch": 59.88,
      "step": 11340,
      "total_flos": 2.20514636224512e+19,
      "train_loss": 0.030625741817122836,
      "train_runtime": 25244.9182,
      "train_samples_per_second": 28.806,
      "train_steps_per_second": 0.449
    },
    {
      "epoch": 59.88,
      "eval_accuracy": 0.9323432445526123,
      "eval_loss": 0.3117374777793884,
      "eval_runtime": 50.8983,
      "eval_samples_per_second": 47.624,
      "eval_steps_per_second": 5.953,
      "step": 11340
    }
  ],
  "logging_steps": 100,
  "max_steps": 11340,
  "num_train_epochs": 60,
  "save_steps": 2000,
  "total_flos": 2.20514636224512e+19,
  "trial_name": null,
  "trial_params": null
}