smollm2-whisper-turbo-uvx-14000 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9999305603777515,
"eval_steps": 1000,
"global_step": 14400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 6.943962224845496e-05,
"grad_norm": 9.8125,
"learning_rate": 2e-06,
"loss": 0.7188,
"step": 1
},
{
"epoch": 0.006943962224845497,
"grad_norm": 0.1376953125,
"learning_rate": 0.0002,
"loss": 0.3907,
"step": 100
},
{
"epoch": 0.013887924449690994,
"grad_norm": 0.0849609375,
"learning_rate": 0.0004,
"loss": 0.2341,
"step": 200
},
{
"epoch": 0.02083188667453649,
"grad_norm": 0.146484375,
"learning_rate": 0.0006,
"loss": 0.2181,
"step": 300
},
{
"epoch": 0.027775848899381988,
"grad_norm": 0.10546875,
"learning_rate": 0.0008,
"loss": 0.197,
"step": 400
},
{
"epoch": 0.034719811124227486,
"grad_norm": 0.11376953125,
"learning_rate": 0.001,
"loss": 0.1695,
"step": 500
},
{
"epoch": 0.04166377334907298,
"grad_norm": 0.0849609375,
"learning_rate": 0.0012,
"loss": 0.14,
"step": 600
},
{
"epoch": 0.04860773557391848,
"grad_norm": 0.078125,
"learning_rate": 0.0014,
"loss": 0.1231,
"step": 700
},
{
"epoch": 0.055551697798763976,
"grad_norm": 0.0712890625,
"learning_rate": 0.0016,
"loss": 0.1141,
"step": 800
},
{
"epoch": 0.06249566002360947,
"grad_norm": 0.06298828125,
"learning_rate": 0.0018000000000000002,
"loss": 0.1077,
"step": 900
},
{
"epoch": 0.06943962224845497,
"grad_norm": 0.0615234375,
"learning_rate": 0.002,
"loss": 0.1045,
"step": 1000
},
{
"epoch": 0.06943962224845497,
"eval_covost2-en-de_loss": 1.4858413934707642,
"eval_covost2-en-de_runtime": 32.1589,
"eval_covost2-en-de_samples_per_second": 1.99,
"eval_covost2-en-de_steps_per_second": 0.062,
"step": 1000
},
{
"epoch": 0.06943962224845497,
"eval_covost2-zh-en_loss": 2.7152516841888428,
"eval_covost2-zh-en_runtime": 31.3842,
"eval_covost2-zh-en_samples_per_second": 2.039,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 1000
},
{
"epoch": 0.06943962224845497,
"eval_peoplespeech-clean-transcription_loss": 2.0398874282836914,
"eval_peoplespeech-clean-transcription_runtime": 32.088,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.995,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 1000
},
{
"epoch": 0.07638358447330046,
"grad_norm": 0.053955078125,
"learning_rate": 0.001999725185109816,
"loss": 0.101,
"step": 1100
},
{
"epoch": 0.08332754669814596,
"grad_norm": 0.0517578125,
"learning_rate": 0.0019989008914857113,
"loss": 0.0956,
"step": 1200
},
{
"epoch": 0.09027150892299146,
"grad_norm": 0.04443359375,
"learning_rate": 0.00199752757218401,
"loss": 0.0936,
"step": 1300
},
{
"epoch": 0.09721547114783696,
"grad_norm": 0.0390625,
"learning_rate": 0.001995605982021898,
"loss": 0.0917,
"step": 1400
},
{
"epoch": 0.10415943337268245,
"grad_norm": 0.0517578125,
"learning_rate": 0.0019931371771625545,
"loss": 0.0894,
"step": 1500
},
{
"epoch": 0.11110339559752795,
"grad_norm": 0.0419921875,
"learning_rate": 0.001990122514534651,
"loss": 0.0868,
"step": 1600
},
{
"epoch": 0.11804735782237345,
"grad_norm": 0.039306640625,
"learning_rate": 0.0019865636510865464,
"loss": 0.0861,
"step": 1700
},
{
"epoch": 0.12499132004721894,
"grad_norm": 0.047119140625,
"learning_rate": 0.001982462542875576,
"loss": 0.0854,
"step": 1800
},
{
"epoch": 0.13193528227206444,
"grad_norm": 0.0390625,
"learning_rate": 0.001977821443992945,
"loss": 0.0837,
"step": 1900
},
{
"epoch": 0.13887924449690994,
"grad_norm": 0.04052734375,
"learning_rate": 0.001972642905324813,
"loss": 0.0818,
"step": 2000
},
{
"epoch": 0.13887924449690994,
"eval_covost2-en-de_loss": 1.4137890338897705,
"eval_covost2-en-de_runtime": 32.5714,
"eval_covost2-en-de_samples_per_second": 1.965,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 2000
},
{
"epoch": 0.13887924449690994,
"eval_covost2-zh-en_loss": 2.667837381362915,
"eval_covost2-zh-en_runtime": 31.1685,
"eval_covost2-zh-en_samples_per_second": 2.053,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 2000
},
{
"epoch": 0.13887924449690994,
"eval_peoplespeech-clean-transcription_loss": 1.835880160331726,
"eval_peoplespeech-clean-transcription_runtime": 32.0265,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.998,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 2000
},
{
"epoch": 0.14582320672175544,
"grad_norm": 0.039794921875,
"learning_rate": 0.0019669297731502505,
"loss": 0.0813,
"step": 2100
},
{
"epoch": 0.15276716894660092,
"grad_norm": 0.03515625,
"learning_rate": 0.00196068518757684,
"loss": 0.0811,
"step": 2200
},
{
"epoch": 0.15971113117144642,
"grad_norm": 0.04443359375,
"learning_rate": 0.001953912580814779,
"loss": 0.0793,
"step": 2300
},
{
"epoch": 0.16665509339629192,
"grad_norm": 0.037841796875,
"learning_rate": 0.0019466156752904343,
"loss": 0.0788,
"step": 2400
},
{
"epoch": 0.17359905562113742,
"grad_norm": 0.04052734375,
"learning_rate": 0.0019387984816003866,
"loss": 0.0783,
"step": 2500
},
{
"epoch": 0.18054301784598292,
"grad_norm": 0.03466796875,
"learning_rate": 0.0019304652963070869,
"loss": 0.0772,
"step": 2600
},
{
"epoch": 0.18748698007082842,
"grad_norm": 0.036376953125,
"learning_rate": 0.0019216206995773372,
"loss": 0.0771,
"step": 2700
},
{
"epoch": 0.19443094229567393,
"grad_norm": 0.0400390625,
"learning_rate": 0.0019122695526648968,
"loss": 0.0766,
"step": 2800
},
{
"epoch": 0.2013749045205194,
"grad_norm": 0.0361328125,
"learning_rate": 0.0019024169952385887,
"loss": 0.0753,
"step": 2900
},
{
"epoch": 0.2083188667453649,
"grad_norm": 0.03125,
"learning_rate": 0.0018920684425573864,
"loss": 0.075,
"step": 3000
},
{
"epoch": 0.2083188667453649,
"eval_covost2-en-de_loss": 1.3828500509262085,
"eval_covost2-en-de_runtime": 32.2972,
"eval_covost2-en-de_samples_per_second": 1.982,
"eval_covost2-en-de_steps_per_second": 0.062,
"step": 3000
},
{
"epoch": 0.2083188667453649,
"eval_covost2-zh-en_loss": 2.646721839904785,
"eval_covost2-zh-en_runtime": 31.0128,
"eval_covost2-zh-en_samples_per_second": 2.064,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 3000
},
{
"epoch": 0.2083188667453649,
"eval_peoplespeech-clean-transcription_loss": 1.773127555847168,
"eval_peoplespeech-clean-transcription_runtime": 32.5192,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.968,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 3000
},
{
"epoch": 0.2152628289702104,
"grad_norm": 0.03369140625,
"learning_rate": 0.0018812295824940284,
"loss": 0.0743,
"step": 3100
},
{
"epoch": 0.2222067911950559,
"grad_norm": 0.034912109375,
"learning_rate": 0.0018699063724087904,
"loss": 0.074,
"step": 3200
},
{
"epoch": 0.2291507534199014,
"grad_norm": 0.033203125,
"learning_rate": 0.0018581050358751443,
"loss": 0.0742,
"step": 3300
},
{
"epoch": 0.2360947156447469,
"grad_norm": 0.03857421875,
"learning_rate": 0.0018458320592590974,
"loss": 0.0742,
"step": 3400
},
{
"epoch": 0.24303867786959238,
"grad_norm": 0.033935546875,
"learning_rate": 0.0018330941881540914,
"loss": 0.0728,
"step": 3500
},
{
"epoch": 0.24998264009443788,
"grad_norm": 0.031005859375,
"learning_rate": 0.0018198984236734246,
"loss": 0.0728,
"step": 3600
},
{
"epoch": 0.2569266023192834,
"grad_norm": 0.030029296875,
"learning_rate": 0.0018062520186022297,
"loss": 0.0714,
"step": 3700
},
{
"epoch": 0.2638705645441289,
"grad_norm": 0.02734375,
"learning_rate": 0.0017921624734111292,
"loss": 0.071,
"step": 3800
},
{
"epoch": 0.2708145267689744,
"grad_norm": 0.03271484375,
"learning_rate": 0.001777637532133752,
"loss": 0.0705,
"step": 3900
},
{
"epoch": 0.2777584889938199,
"grad_norm": 0.035400390625,
"learning_rate": 0.0017626851781103819,
"loss": 0.0714,
"step": 4000
},
{
"epoch": 0.2777584889938199,
"eval_covost2-en-de_loss": 1.3778624534606934,
"eval_covost2-en-de_runtime": 32.6181,
"eval_covost2-en-de_samples_per_second": 1.962,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 4000
},
{
"epoch": 0.2777584889938199,
"eval_covost2-zh-en_loss": 2.6438870429992676,
"eval_covost2-zh-en_runtime": 31.4603,
"eval_covost2-zh-en_samples_per_second": 2.034,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 4000
},
{
"epoch": 0.2777584889938199,
"eval_peoplespeech-clean-transcription_loss": 1.7361584901809692,
"eval_peoplespeech-clean-transcription_runtime": 32.442,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.973,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 4000
},
{
"epoch": 0.2847024512186654,
"grad_norm": 0.0281982421875,
"learning_rate": 0.001747313629600077,
"loss": 0.0713,
"step": 4100
},
{
"epoch": 0.2916464134435109,
"grad_norm": 0.028076171875,
"learning_rate": 0.001731531335263669,
"loss": 0.0699,
"step": 4200
},
{
"epoch": 0.2985903756683564,
"grad_norm": 0.0277099609375,
"learning_rate": 0.0017153469695201276,
"loss": 0.0702,
"step": 4300
},
{
"epoch": 0.30553433789320184,
"grad_norm": 0.031982421875,
"learning_rate": 0.0016987694277788418,
"loss": 0.0692,
"step": 4400
},
{
"epoch": 0.31247830011804734,
"grad_norm": 0.02880859375,
"learning_rate": 0.001681807821550438,
"loss": 0.0686,
"step": 4500
},
{
"epoch": 0.31942226234289284,
"grad_norm": 0.0289306640625,
"learning_rate": 0.0016644714734388218,
"loss": 0.0698,
"step": 4600
},
{
"epoch": 0.32636622456773834,
"grad_norm": 0.029541015625,
"learning_rate": 0.0016467699120171987,
"loss": 0.0683,
"step": 4700
},
{
"epoch": 0.33331018679258384,
"grad_norm": 0.034423828125,
"learning_rate": 0.001628712866590885,
"loss": 0.0687,
"step": 4800
},
{
"epoch": 0.34025414901742934,
"grad_norm": 0.0289306640625,
"learning_rate": 0.0016103102618497923,
"loss": 0.0684,
"step": 4900
},
{
"epoch": 0.34719811124227484,
"grad_norm": 0.0263671875,
"learning_rate": 0.0015915722124135226,
"loss": 0.0681,
"step": 5000
},
{
"epoch": 0.34719811124227484,
"eval_covost2-en-de_loss": 1.3711879253387451,
"eval_covost2-en-de_runtime": 32.6293,
"eval_covost2-en-de_samples_per_second": 1.961,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 5000
},
{
"epoch": 0.34719811124227484,
"eval_covost2-zh-en_loss": 2.6346511840820312,
"eval_covost2-zh-en_runtime": 32.1513,
"eval_covost2-zh-en_samples_per_second": 1.991,
"eval_covost2-zh-en_steps_per_second": 0.062,
"step": 5000
},
{
"epoch": 0.34719811124227484,
"eval_peoplespeech-clean-transcription_loss": 1.7350472211837769,
"eval_peoplespeech-clean-transcription_runtime": 32.5813,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.964,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
"step": 5000
},
{
"epoch": 0.35414207346712034,
"grad_norm": 0.028076171875,
"learning_rate": 0.001572509017272072,
"loss": 0.0693,
"step": 5100
},
{
"epoch": 0.36108603569196585,
"grad_norm": 0.030517578125,
"learning_rate": 0.0015531311541251993,
"loss": 0.0683,
"step": 5200
},
{
"epoch": 0.36802999791681135,
"grad_norm": 0.031494140625,
"learning_rate": 0.0015334492736235703,
"loss": 0.0677,
"step": 5300
},
{
"epoch": 0.37497396014165685,
"grad_norm": 0.0284423828125,
"learning_rate": 0.0015134741935148419,
"loss": 0.0669,
"step": 5400
},
{
"epoch": 0.38191792236650235,
"grad_norm": 0.0302734375,
"learning_rate": 0.0014932168926979072,
"loss": 0.0669,
"step": 5500
},
{
"epoch": 0.38886188459134785,
"grad_norm": 0.0260009765625,
"learning_rate": 0.0014726885051885652,
"loss": 0.0666,
"step": 5600
},
{
"epoch": 0.3958058468161933,
"grad_norm": 0.033447265625,
"learning_rate": 0.0014519003139999338,
"loss": 0.0659,
"step": 5700
},
{
"epoch": 0.4027498090410388,
"grad_norm": 0.0283203125,
"learning_rate": 0.0014308637449409706,
"loss": 0.0653,
"step": 5800
},
{
"epoch": 0.4096937712658843,
"grad_norm": 0.0283203125,
"learning_rate": 0.0014095903603365066,
"loss": 0.0662,
"step": 5900
},
{
"epoch": 0.4166377334907298,
"grad_norm": 0.0267333984375,
"learning_rate": 0.0013880918526722496,
"loss": 0.0665,
"step": 6000
},
{
"epoch": 0.4166377334907298,
"eval_covost2-en-de_loss": 1.3651559352874756,
"eval_covost2-en-de_runtime": 32.5621,
"eval_covost2-en-de_samples_per_second": 1.965,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 6000
},
{
"epoch": 0.4166377334907298,
"eval_covost2-zh-en_loss": 2.6372551918029785,
"eval_covost2-zh-en_runtime": 31.028,
"eval_covost2-zh-en_samples_per_second": 2.063,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 6000
},
{
"epoch": 0.4166377334907298,
"eval_peoplespeech-clean-transcription_loss": 1.7209596633911133,
"eval_peoplespeech-clean-transcription_runtime": 32.6773,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.959,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
"step": 6000
},
{
"epoch": 0.4235816957155753,
"grad_norm": 0.028564453125,
"learning_rate": 0.0013663800381682463,
"loss": 0.0658,
"step": 6100
},
{
"epoch": 0.4305256579404208,
"grad_norm": 0.0299072265625,
"learning_rate": 0.0013444668502843329,
"loss": 0.0657,
"step": 6200
},
{
"epoch": 0.4374696201652663,
"grad_norm": 0.0296630859375,
"learning_rate": 0.0013223643331611537,
"loss": 0.0655,
"step": 6300
},
{
"epoch": 0.4444135823901118,
"grad_norm": 0.0286865234375,
"learning_rate": 0.001300084635000341,
"loss": 0.0654,
"step": 6400
},
{
"epoch": 0.4513575446149573,
"grad_norm": 0.028564453125,
"learning_rate": 0.0012776400013875004,
"loss": 0.0655,
"step": 6500
},
{
"epoch": 0.4583015068398028,
"grad_norm": 0.030029296875,
"learning_rate": 0.0012550427685616766,
"loss": 0.0648,
"step": 6600
},
{
"epoch": 0.4652454690646483,
"grad_norm": 0.037109375,
"learning_rate": 0.0012323053566349834,
"loss": 0.0654,
"step": 6700
},
{
"epoch": 0.4721894312894938,
"grad_norm": 0.029296875,
"learning_rate": 0.0012094402627661448,
"loss": 0.0643,
"step": 6800
},
{
"epoch": 0.47913339351433926,
"grad_norm": 0.030517578125,
"learning_rate": 0.0011864600542916813,
"loss": 0.0646,
"step": 6900
},
{
"epoch": 0.48607735573918476,
"grad_norm": 0.037353515625,
"learning_rate": 0.0011633773618185302,
"loss": 0.0642,
"step": 7000
},
{
"epoch": 0.48607735573918476,
"eval_covost2-en-de_loss": 1.3594859838485718,
"eval_covost2-en-de_runtime": 32.6807,
"eval_covost2-en-de_samples_per_second": 1.958,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 7000
},
{
"epoch": 0.48607735573918476,
"eval_covost2-zh-en_loss": 2.626713514328003,
"eval_covost2-zh-en_runtime": 31.0228,
"eval_covost2-zh-en_samples_per_second": 2.063,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 7000
},
{
"epoch": 0.48607735573918476,
"eval_peoplespeech-clean-transcription_loss": 1.693739652633667,
"eval_peoplespeech-clean-transcription_runtime": 31.9776,
"eval_peoplespeech-clean-transcription_samples_per_second": 2.001,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.063,
"step": 7000
},
{
"epoch": 0.49302131796403026,
"grad_norm": 0.02978515625,
"learning_rate": 0.0011402048722818862,
"loss": 0.0656,
"step": 7100
},
{
"epoch": 0.49996528018887576,
"grad_norm": 0.0281982421875,
"learning_rate": 0.0011169553219720827,
"loss": 0.064,
"step": 7200
},
{
"epoch": 0.5069092424137213,
"grad_norm": 0.027099609375,
"learning_rate": 0.001093641489534351,
"loss": 0.0648,
"step": 7300
},
{
"epoch": 0.5138532046385668,
"grad_norm": 0.02783203125,
"learning_rate": 0.001070276188945293,
"loss": 0.0642,
"step": 7400
},
{
"epoch": 0.5207971668634123,
"grad_norm": 0.0281982421875,
"learning_rate": 0.00104687226246994,
"loss": 0.0635,
"step": 7500
},
{
"epoch": 0.5277411290882578,
"grad_norm": 0.030517578125,
"learning_rate": 0.0010234425736032607,
"loss": 0.0644,
"step": 7600
},
{
"epoch": 0.5346850913131033,
"grad_norm": 0.02978515625,
"learning_rate": 0.001,
"loss": 0.0645,
"step": 7700
},
{
"epoch": 0.5416290535379488,
"grad_norm": 0.0250244140625,
"learning_rate": 0.0009765574263967396,
"loss": 0.0635,
"step": 7800
},
{
"epoch": 0.5485730157627943,
"grad_norm": 0.033203125,
"learning_rate": 0.0009531277375300599,
"loss": 0.0635,
"step": 7900
},
{
"epoch": 0.5555169779876398,
"grad_norm": 0.03173828125,
"learning_rate": 0.0009297238110547074,
"loss": 0.0624,
"step": 8000
},
{
"epoch": 0.5555169779876398,
"eval_covost2-en-de_loss": 1.3578505516052246,
"eval_covost2-en-de_runtime": 32.6942,
"eval_covost2-en-de_samples_per_second": 1.958,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 8000
},
{
"epoch": 0.5555169779876398,
"eval_covost2-zh-en_loss": 2.6352272033691406,
"eval_covost2-zh-en_runtime": 32.2689,
"eval_covost2-zh-en_samples_per_second": 1.983,
"eval_covost2-zh-en_steps_per_second": 0.062,
"step": 8000
},
{
"epoch": 0.5555169779876398,
"eval_peoplespeech-clean-transcription_loss": 1.6875255107879639,
"eval_peoplespeech-clean-transcription_runtime": 34.4533,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.858,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.058,
"step": 8000
},
{
"epoch": 0.5624609402124853,
"grad_norm": 0.0274658203125,
"learning_rate": 0.0009063585104656494,
"loss": 0.064,
"step": 8100
},
{
"epoch": 0.5694049024373308,
"grad_norm": 0.033935546875,
"learning_rate": 0.0008830446780279176,
"loss": 0.0626,
"step": 8200
},
{
"epoch": 0.5763488646621763,
"grad_norm": 0.03955078125,
"learning_rate": 0.0008597951277181142,
"loss": 0.0638,
"step": 8300
},
{
"epoch": 0.5832928268870218,
"grad_norm": 0.02734375,
"learning_rate": 0.0008366226381814697,
"loss": 0.0632,
"step": 8400
},
{
"epoch": 0.5902367891118673,
"grad_norm": 0.0269775390625,
"learning_rate": 0.000813539945708319,
"loss": 0.0624,
"step": 8500
},
{
"epoch": 0.5971807513367128,
"grad_norm": 0.028564453125,
"learning_rate": 0.0007905597372338558,
"loss": 0.063,
"step": 8600
},
{
"epoch": 0.6041247135615583,
"grad_norm": 0.029541015625,
"learning_rate": 0.0007676946433650169,
"loss": 0.0626,
"step": 8700
},
{
"epoch": 0.6110686757864037,
"grad_norm": 0.0272216796875,
"learning_rate": 0.0007449572314383236,
"loss": 0.0634,
"step": 8800
},
{
"epoch": 0.6180126380112492,
"grad_norm": 0.034423828125,
"learning_rate": 0.0007223599986124993,
"loss": 0.0625,
"step": 8900
},
{
"epoch": 0.6249566002360947,
"grad_norm": 0.03173828125,
"learning_rate": 0.0006999153649996595,
"loss": 0.0618,
"step": 9000
},
{
"epoch": 0.6249566002360947,
"eval_covost2-en-de_loss": 1.3503832817077637,
"eval_covost2-en-de_runtime": 32.9543,
"eval_covost2-en-de_samples_per_second": 1.942,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 9000
},
{
"epoch": 0.6249566002360947,
"eval_covost2-zh-en_loss": 2.6205379962921143,
"eval_covost2-zh-en_runtime": 31.3271,
"eval_covost2-zh-en_samples_per_second": 2.043,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 9000
},
{
"epoch": 0.6249566002360947,
"eval_peoplespeech-clean-transcription_loss": 1.685594916343689,
"eval_peoplespeech-clean-transcription_runtime": 32.7924,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.952,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
"step": 9000
},
{
"epoch": 0.6319005624609402,
"grad_norm": 0.034912109375,
"learning_rate": 0.0006776356668388464,
"loss": 0.0614,
"step": 9100
},
{
"epoch": 0.6388445246857857,
"grad_norm": 0.0277099609375,
"learning_rate": 0.0006555331497156671,
"loss": 0.0616,
"step": 9200
},
{
"epoch": 0.6457884869106312,
"grad_norm": 0.031005859375,
"learning_rate": 0.0006336199618317538,
"loss": 0.0614,
"step": 9300
},
{
"epoch": 0.6527324491354767,
"grad_norm": 0.0322265625,
"learning_rate": 0.0006119081473277501,
"loss": 0.0616,
"step": 9400
},
{
"epoch": 0.6596764113603222,
"grad_norm": 0.029296875,
"learning_rate": 0.0005904096396634935,
"loss": 0.0609,
"step": 9500
},
{
"epoch": 0.6666203735851677,
"grad_norm": 0.0284423828125,
"learning_rate": 0.0005691362550590297,
"loss": 0.0609,
"step": 9600
},
{
"epoch": 0.6735643358100132,
"grad_norm": 0.0296630859375,
"learning_rate": 0.0005480996860000663,
"loss": 0.0611,
"step": 9700
},
{
"epoch": 0.6805082980348587,
"grad_norm": 0.031494140625,
"learning_rate": 0.0005273114948114346,
"loss": 0.0612,
"step": 9800
},
{
"epoch": 0.6874522602597042,
"grad_norm": 0.028564453125,
"learning_rate": 0.0005067831073020928,
"loss": 0.0615,
"step": 9900
},
{
"epoch": 0.6943962224845497,
"grad_norm": 0.02783203125,
"learning_rate": 0.00048652580648515787,
"loss": 0.0621,
"step": 10000
},
{
"epoch": 0.6943962224845497,
"eval_covost2-en-de_loss": 1.3505414724349976,
"eval_covost2-en-de_runtime": 32.9697,
"eval_covost2-en-de_samples_per_second": 1.941,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 10000
},
{
"epoch": 0.6943962224845497,
"eval_covost2-zh-en_loss": 2.613917827606201,
"eval_covost2-zh-en_runtime": 31.7013,
"eval_covost2-zh-en_samples_per_second": 2.019,
"eval_covost2-zh-en_steps_per_second": 0.063,
"step": 10000
},
{
"epoch": 0.6943962224845497,
"eval_peoplespeech-clean-transcription_loss": 1.6754916906356812,
"eval_peoplespeech-clean-transcription_runtime": 33.2239,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.926,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.06,
"step": 10000
},
{
"epoch": 0.7013401847093952,
"grad_norm": 0.0299072265625,
"learning_rate": 0.0004665507263764299,
"loss": 0.0614,
"step": 10100
},
{
"epoch": 0.7082841469342407,
"grad_norm": 0.0272216796875,
"learning_rate": 0.0004468688458748006,
"loss": 0.0616,
"step": 10200
},
{
"epoch": 0.7152281091590862,
"grad_norm": 0.03662109375,
"learning_rate": 0.0004274909827279283,
"loss": 0.0617,
"step": 10300
},
{
"epoch": 0.7221720713839317,
"grad_norm": 0.028564453125,
"learning_rate": 0.0004084277875864776,
"loss": 0.0612,
"step": 10400
},
{
"epoch": 0.7291160336087772,
"grad_norm": 0.035400390625,
"learning_rate": 0.00038968973815020803,
"loss": 0.0615,
"step": 10500
},
{
"epoch": 0.7360599958336227,
"grad_norm": 0.0322265625,
"learning_rate": 0.00037128713340911534,
"loss": 0.0607,
"step": 10600
},
{
"epoch": 0.7430039580584682,
"grad_norm": 0.050537109375,
"learning_rate": 0.00035323008798280133,
"loss": 0.0606,
"step": 10700
},
{
"epoch": 0.7499479202833137,
"grad_norm": 0.033203125,
"learning_rate": 0.00033552852656117837,
"loss": 0.0606,
"step": 10800
},
{
"epoch": 0.7568918825081592,
"grad_norm": 0.0308837890625,
"learning_rate": 0.00031819217844956217,
"loss": 0.0599,
"step": 10900
},
{
"epoch": 0.7638358447330047,
"grad_norm": 0.0283203125,
"learning_rate": 0.00030123057222115836,
"loss": 0.061,
"step": 11000
},
{
"epoch": 0.7638358447330047,
"eval_covost2-en-de_loss": 1.3495559692382812,
"eval_covost2-en-de_runtime": 32.3219,
"eval_covost2-en-de_samples_per_second": 1.98,
"eval_covost2-en-de_steps_per_second": 0.062,
"step": 11000
},
{
"epoch": 0.7638358447330047,
"eval_covost2-zh-en_loss": 2.6154580116271973,
"eval_covost2-zh-en_runtime": 31.7065,
"eval_covost2-zh-en_samples_per_second": 2.019,
"eval_covost2-zh-en_steps_per_second": 0.063,
"step": 11000
},
{
"epoch": 0.7638358447330047,
"eval_peoplespeech-clean-transcription_loss": 1.673677682876587,
"eval_peoplespeech-clean-transcription_runtime": 32.5731,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.965,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
"step": 11000
},
{
"epoch": 0.7707798069578502,
"grad_norm": 0.029541015625,
"learning_rate": 0.0002846530304798727,
"loss": 0.0602,
"step": 11100
},
{
"epoch": 0.7777237691826957,
"grad_norm": 0.0294189453125,
"learning_rate": 0.00026846866473633125,
"loss": 0.0608,
"step": 11200
},
{
"epoch": 0.7846677314075411,
"grad_norm": 0.0264892578125,
"learning_rate": 0.00025268637039992293,
"loss": 0.0611,
"step": 11300
},
{
"epoch": 0.7916116936323866,
"grad_norm": 0.024658203125,
"learning_rate": 0.00023731482188961818,
"loss": 0.0602,
"step": 11400
},
{
"epoch": 0.7985556558572321,
"grad_norm": 0.0294189453125,
"learning_rate": 0.00022236246786624792,
"loss": 0.0608,
"step": 11500
},
{
"epoch": 0.8054996180820776,
"grad_norm": 0.0322265625,
"learning_rate": 0.00020783752658887068,
"loss": 0.0602,
"step": 11600
},
{
"epoch": 0.8124435803069231,
"grad_norm": 0.035888671875,
"learning_rate": 0.0001937479813977703,
"loss": 0.0607,
"step": 11700
},
{
"epoch": 0.8193875425317686,
"grad_norm": 0.0262451171875,
"learning_rate": 0.00018010157632657541,
"loss": 0.0611,
"step": 11800
},
{
"epoch": 0.8263315047566141,
"grad_norm": 0.0296630859375,
"learning_rate": 0.00016690581184590858,
"loss": 0.0617,
"step": 11900
},
{
"epoch": 0.8332754669814596,
"grad_norm": 0.03564453125,
"learning_rate": 0.00015416794074090258,
"loss": 0.0615,
"step": 12000
},
{
"epoch": 0.8332754669814596,
"eval_covost2-en-de_loss": 1.3493196964263916,
"eval_covost2-en-de_runtime": 32.732,
"eval_covost2-en-de_samples_per_second": 1.955,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 12000
},
{
"epoch": 0.8332754669814596,
"eval_covost2-zh-en_loss": 2.615288734436035,
"eval_covost2-zh-en_runtime": 31.3635,
"eval_covost2-zh-en_samples_per_second": 2.041,
"eval_covost2-zh-en_steps_per_second": 0.064,
"step": 12000
},
{
"epoch": 0.8332754669814596,
"eval_peoplespeech-clean-transcription_loss": 1.6742818355560303,
"eval_peoplespeech-clean-transcription_runtime": 32.2391,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.985,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 12000
},
{
"epoch": 0.8402194292063051,
"grad_norm": 0.033203125,
"learning_rate": 0.00014189496412485593,
"loss": 0.061,
"step": 12100
},
{
"epoch": 0.8471633914311506,
"grad_norm": 0.0322265625,
"learning_rate": 0.00013009362759120978,
"loss": 0.0601,
"step": 12200
},
{
"epoch": 0.8541073536559961,
"grad_norm": 0.028076171875,
"learning_rate": 0.00011877041750597173,
"loss": 0.0613,
"step": 12300
},
{
"epoch": 0.8610513158808416,
"grad_norm": 0.0257568359375,
"learning_rate": 0.00010793155744261352,
"loss": 0.0614,
"step": 12400
},
{
"epoch": 0.8679952781056871,
"grad_norm": 0.02783203125,
"learning_rate": 9.758300476141169e-05,
"loss": 0.0606,
"step": 12500
},
{
"epoch": 0.8749392403305326,
"grad_norm": 0.0302734375,
"learning_rate": 8.773044733510338e-05,
"loss": 0.0604,
"step": 12600
},
{
"epoch": 0.8818832025553781,
"grad_norm": 0.02880859375,
"learning_rate": 7.837930042266262e-05,
"loss": 0.0601,
"step": 12700
},
{
"epoch": 0.8888271647802236,
"grad_norm": 0.02880859375,
"learning_rate": 6.953470369291348e-05,
"loss": 0.0607,
"step": 12800
},
{
"epoch": 0.8957711270050691,
"grad_norm": 0.027587890625,
"learning_rate": 6.120151839961363e-05,
"loss": 0.0606,
"step": 12900
},
{
"epoch": 0.9027150892299146,
"grad_norm": 0.036865234375,
"learning_rate": 5.338432470956589e-05,
"loss": 0.0608,
"step": 13000
},
{
"epoch": 0.9027150892299146,
"eval_covost2-en-de_loss": 1.3490619659423828,
"eval_covost2-en-de_runtime": 33.0003,
"eval_covost2-en-de_samples_per_second": 1.939,
"eval_covost2-en-de_steps_per_second": 0.061,
"step": 13000
},
{
"epoch": 0.9027150892299146,
"eval_covost2-zh-en_loss": 2.615211248397827,
"eval_covost2-zh-en_runtime": 31.7808,
"eval_covost2-zh-en_samples_per_second": 2.014,
"eval_covost2-zh-en_steps_per_second": 0.063,
"step": 13000
},
{
"epoch": 0.9027150892299146,
"eval_peoplespeech-clean-transcription_loss": 1.6742777824401855,
"eval_peoplespeech-clean-transcription_runtime": 32.3145,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.981,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 13000
},
{
"epoch": 0.9096590514547601,
"grad_norm": 0.0302734375,
"learning_rate": 4.6087419185220966e-05,
"loss": 0.06,
"step": 13100
},
{
"epoch": 0.9166030136796056,
"grad_norm": 0.028076171875,
"learning_rate": 3.931481242315993e-05,
"loss": 0.0601,
"step": 13200
},
{
"epoch": 0.9235469759044511,
"grad_norm": 0.03759765625,
"learning_rate": 3.307022684974936e-05,
"loss": 0.0602,
"step": 13300
},
{
"epoch": 0.9304909381292966,
"grad_norm": 0.02978515625,
"learning_rate": 2.7357094675186987e-05,
"loss": 0.0605,
"step": 13400
},
{
"epoch": 0.9374349003541421,
"grad_norm": 0.031005859375,
"learning_rate": 2.2178556007054874e-05,
"loss": 0.061,
"step": 13500
},
{
"epoch": 0.9443788625789876,
"grad_norm": 0.0289306640625,
"learning_rate": 1.7537457124423894e-05,
"loss": 0.0606,
"step": 13600
},
{
"epoch": 0.9513228248038331,
"grad_norm": 0.028076171875,
"learning_rate": 1.3436348913453578e-05,
"loss": 0.061,
"step": 13700
},
{
"epoch": 0.9582667870286785,
"grad_norm": 0.0289306640625,
"learning_rate": 9.877485465349056e-06,
"loss": 0.0606,
"step": 13800
},
{
"epoch": 0.965210749253524,
"grad_norm": 0.0291748046875,
"learning_rate": 6.862822837445881e-06,
"loss": 0.0613,
"step": 13900
},
{
"epoch": 0.9721547114783695,
"grad_norm": 0.0302734375,
"learning_rate": 4.394017978101905e-06,
"loss": 0.0615,
"step": 14000
},
{
"epoch": 0.9721547114783695,
"eval_covost2-en-de_loss": 1.3485227823257446,
"eval_covost2-en-de_runtime": 32.3946,
"eval_covost2-en-de_samples_per_second": 1.976,
"eval_covost2-en-de_steps_per_second": 0.062,
"step": 14000
},
{
"epoch": 0.9721547114783695,
"eval_covost2-zh-en_loss": 2.614840269088745,
"eval_covost2-zh-en_runtime": 31.6743,
"eval_covost2-zh-en_samples_per_second": 2.021,
"eval_covost2-zh-en_steps_per_second": 0.063,
"step": 14000
},
{
"epoch": 0.9721547114783695,
"eval_peoplespeech-clean-transcription_loss": 1.673971176147461,
"eval_peoplespeech-clean-transcription_runtime": 32.2005,
"eval_peoplespeech-clean-transcription_samples_per_second": 1.988,
"eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
"step": 14000
},
{
"epoch": 0.979098673703215,
"grad_norm": 0.027587890625,
"learning_rate": 2.472427815989886e-06,
"loss": 0.0607,
"step": 14100
},
{
"epoch": 0.9860426359280605,
"grad_norm": 0.031982421875,
"learning_rate": 1.099108514288627e-06,
"loss": 0.0608,
"step": 14200
},
{
"epoch": 0.992986598152906,
"grad_norm": 0.03466796875,
"learning_rate": 2.748148901841052e-07,
"loss": 0.0606,
"step": 14300
},
{
"epoch": 0.9999305603777515,
"grad_norm": 0.0277099609375,
"learning_rate": 0.0,
"loss": 0.0608,
"step": 14400
}
],
"logging_steps": 100,
"max_steps": 14400,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 3600,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.7890136187397734e+18,
"train_batch_size": 96,
"trial_name": null,
"trial_params": null
}
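
A minimal sketch of how the state above can be inspected offline. This is not part of the original artifact: the local file path and the use of the standard-library json module are assumptions for illustration. It splits log_history into the training entries (logged every logging_steps = 100 steps) and the evaluation entries (logged every eval_steps = 1000 steps) using the keys that appear in the JSON.

import json

# Load the trainer state dumped by the Hugging Face Trainer.
# The local path is an assumption for this sketch.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" field; eval entries carry "eval_*" fields.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if any(k.startswith("eval_") for k in e)]

# (step, loss) pairs for the training-loss curve.
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
print(f"{len(steps)} training log points, final loss {losses[-1]:.4f} at step {steps[-1]}")

# Per-dataset eval loss at each evaluation step.
for e in eval_logs:
    for key, value in e.items():
        if key.endswith("_loss"):
            print(f"step {e['step']:>6}  {key}: {value:.4f}")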