{
  "best_metric": 0.5459895730018616,
  "best_model_checkpoint": "nrshoudi/hubert-large-ls960-ft-V2-5/checkpoint-1230",
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 1640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 5e-05,
      "loss": 19.707,
      "step": 82
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.5254127979278564,
      "eval_per": 1.0,
      "eval_runtime": 116.6109,
      "eval_samples_per_second": 6.972,
      "eval_steps_per_second": 3.49,
      "eval_wer": 1.0,
      "step": 82
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0001,
      "loss": 3.4906,
      "step": 164
    },
    {
      "epoch": 2.0,
      "eval_loss": 3.2483434677124023,
      "eval_per": 1.0,
      "eval_runtime": 115.6174,
      "eval_samples_per_second": 7.032,
      "eval_steps_per_second": 3.52,
      "eval_wer": 1.0,
      "step": 164
    },
    {
      "epoch": 3.0,
      "learning_rate": 9.444444444444444e-05,
      "loss": 3.233,
      "step": 246
    },
    {
      "epoch": 3.0,
      "eval_loss": 3.1367974281311035,
      "eval_per": 1.0,
      "eval_runtime": 116.034,
      "eval_samples_per_second": 7.007,
      "eval_steps_per_second": 3.508,
      "eval_wer": 1.0,
      "step": 246
    },
    {
      "epoch": 4.0,
      "learning_rate": 8.888888888888889e-05,
      "loss": 3.0468,
      "step": 328
    },
    {
      "epoch": 4.0,
      "eval_loss": 2.960031509399414,
      "eval_per": 1.0,
      "eval_runtime": 116.4247,
      "eval_samples_per_second": 6.983,
      "eval_steps_per_second": 3.496,
      "eval_wer": 1.0,
      "step": 328
    },
    {
      "epoch": 5.0,
      "learning_rate": 8.333333333333334e-05,
      "loss": 2.6751,
      "step": 410
    },
    {
      "epoch": 5.0,
      "eval_loss": 2.334815263748169,
      "eval_per": 1.0,
      "eval_runtime": 116.7509,
      "eval_samples_per_second": 6.964,
      "eval_steps_per_second": 3.486,
      "eval_wer": 1.0,
      "step": 410
    },
    {
      "epoch": 6.0,
      "learning_rate": 7.777777777777778e-05,
      "loss": 2.0881,
      "step": 492
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.7351281642913818,
      "eval_per": 0.8726326203712428,
      "eval_runtime": 117.1554,
      "eval_samples_per_second": 6.94,
      "eval_steps_per_second": 3.474,
      "eval_wer": 0.8568428911390042,
      "step": 492
    },
    {
      "epoch": 7.0,
      "learning_rate": 7.222222222222222e-05,
      "loss": 1.4875,
      "step": 574
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.22636878490448,
      "eval_per": 0.613422218034486,
      "eval_runtime": 116.8569,
      "eval_samples_per_second": 6.957,
      "eval_steps_per_second": 3.483,
      "eval_wer": 0.6058513368484905,
      "step": 574
    },
    {
      "epoch": 8.0,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.0922,
      "step": 656
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.9666064381599426,
      "eval_per": 0.3971779892584566,
      "eval_runtime": 117.3764,
      "eval_samples_per_second": 6.926,
      "eval_steps_per_second": 3.467,
      "eval_wer": 0.4067938966917083,
      "step": 656
    },
    {
      "epoch": 9.0,
      "learning_rate": 6.111111111111112e-05,
      "loss": 0.8148,
      "step": 738
    },
    {
      "epoch": 9.0,
      "eval_loss": 0.7746477127075195,
      "eval_per": 0.31381324790351456,
      "eval_runtime": 117.0529,
      "eval_samples_per_second": 6.946,
      "eval_steps_per_second": 3.477,
      "eval_wer": 0.32485651626149037,
      "step": 738
    },
    {
      "epoch": 10.0,
      "learning_rate": 5.555555555555556e-05,
      "loss": 0.6332,
      "step": 820
    },
    {
      "epoch": 10.0,
      "eval_loss": 0.6754913330078125,
      "eval_per": 0.23132007914821445,
      "eval_runtime": 117.3105,
      "eval_samples_per_second": 6.93,
      "eval_steps_per_second": 3.469,
      "eval_wer": 0.24767859642573842,
      "step": 820
    },
    {
      "epoch": 11.0,
      "learning_rate": 5e-05,
      "loss": 0.4797,
      "step": 902
    },
    {
      "epoch": 11.0,
      "eval_loss": 0.6261923313140869,
      "eval_per": 0.14098275699613683,
      "eval_runtime": 116.5342,
      "eval_samples_per_second": 6.976,
      "eval_steps_per_second": 3.493,
      "eval_wer": 0.16121506229294014,
      "step": 902
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.3807,
      "step": 984
    },
    {
      "epoch": 12.0,
      "eval_loss": 0.5765030384063721,
      "eval_per": 0.11716762461132574,
      "eval_runtime": 116.5005,
      "eval_samples_per_second": 6.979,
      "eval_steps_per_second": 3.494,
      "eval_wer": 0.13844430964490692,
      "step": 984
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.3195,
      "step": 1066
    },
    {
      "epoch": 13.0,
      "eval_loss": 0.5666270852088928,
      "eval_per": 0.09917082822952983,
      "eval_runtime": 116.7786,
      "eval_samples_per_second": 6.962,
      "eval_steps_per_second": 3.485,
      "eval_wer": 0.11912649899678036,
      "step": 1066
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2526,
      "step": 1148
    },
    {
      "epoch": 14.0,
      "eval_loss": 0.5758910775184631,
      "eval_per": 0.09698011872232168,
      "eval_runtime": 116.6256,
      "eval_samples_per_second": 6.971,
      "eval_steps_per_second": 3.49,
      "eval_wer": 0.1165134618076618,
      "step": 1148
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.2417,
      "step": 1230
    },
    {
      "epoch": 15.0,
      "eval_loss": 0.5459895730018616,
      "eval_per": 0.09462451710166776,
      "eval_runtime": 116.6754,
      "eval_samples_per_second": 6.968,
      "eval_steps_per_second": 3.488,
      "eval_wer": 0.113807101861789,
      "step": 1230
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2072,
      "step": 1312
    },
    {
      "epoch": 16.0,
      "eval_loss": 0.5550991892814636,
      "eval_per": 0.09116178271930651,
      "eval_runtime": 116.9202,
      "eval_samples_per_second": 6.953,
      "eval_steps_per_second": 3.481,
      "eval_wer": 0.10946759367271709,
      "step": 1312
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.1881,
      "step": 1394
    },
    {
      "epoch": 17.0,
      "eval_loss": 0.5744515657424927,
      "eval_per": 0.09172712710826345,
      "eval_runtime": 116.943,
      "eval_samples_per_second": 6.952,
      "eval_steps_per_second": 3.48,
      "eval_wer": 0.11016751434837385,
      "step": 1394
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.1888,
      "step": 1476
    },
    {
      "epoch": 18.0,
      "eval_loss": 0.5731486678123474,
      "eval_per": 0.09069066239517573,
      "eval_runtime": 118.5572,
      "eval_samples_per_second": 6.857,
      "eval_steps_per_second": 3.433,
      "eval_wer": 0.10942093229433998,
      "step": 1476
    },
    {
      "epoch": 19.0,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.202,
      "step": 1558
    },
    {
      "epoch": 19.0,
      "eval_loss": 0.5773624777793884,
      "eval_per": 0.08927730142278338,
      "eval_runtime": 116.8858,
      "eval_samples_per_second": 6.956,
      "eval_steps_per_second": 3.482,
      "eval_wer": 0.10806775232140357,
      "step": 1558
    },
    {
      "epoch": 20.0,
      "learning_rate": 0.0,
      "loss": 0.1813,
      "step": 1640
    },
    {
      "epoch": 20.0,
      "eval_loss": 0.5759608745574951,
      "eval_per": 0.08915952134175069,
      "eval_runtime": 117.5758,
      "eval_samples_per_second": 6.915,
      "eval_steps_per_second": 3.462,
      "eval_wer": 0.10848770472679763,
      "step": 1640
    },
    {
      "epoch": 20.0,
      "step": 1640,
      "total_flos": 5.333828756305947e+17,
      "train_loss": 2.0454907219584397,
      "train_runtime": 3434.6913,
      "train_samples_per_second": 0.949,
      "train_steps_per_second": 0.477
    }
  ],
  "logging_steps": 500,
  "max_steps": 1640,
  "num_train_epochs": 20,
  "save_steps": 500,
  "total_flos": 5.333828756305947e+17,
  "trial_name": null,
  "trial_params": null
}