{
  "best_metric": 7.059117317199707,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 0.35778175313059035,
  "eval_steps": 50,
  "global_step": 50,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007155635062611807,
      "grad_norm": 63.07804870605469,
      "learning_rate": 5.000000000000001e-07,
      "loss": 55.8303,
      "step": 1
    },
    {
      "epoch": 0.007155635062611807,
      "eval_loss": 8.064038276672363,
      "eval_runtime": 21.6914,
      "eval_samples_per_second": 10.88,
      "eval_steps_per_second": 2.72,
      "step": 1
    },
    {
      "epoch": 0.014311270125223614,
      "grad_norm": 47.44363784790039,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 57.0338,
      "step": 2
    },
    {
      "epoch": 0.02146690518783542,
      "grad_norm": 54.36549758911133,
      "learning_rate": 1.5e-06,
      "loss": 57.0304,
      "step": 3
    },
    {
      "epoch": 0.028622540250447227,
      "grad_norm": 51.64623260498047,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 52.5478,
      "step": 4
    },
    {
      "epoch": 0.03577817531305903,
      "grad_norm": 59.063106536865234,
      "learning_rate": 2.5e-06,
      "loss": 57.2634,
      "step": 5
    },
    {
      "epoch": 0.04293381037567084,
      "grad_norm": 50.631046295166016,
      "learning_rate": 3e-06,
      "loss": 55.0657,
      "step": 6
    },
    {
      "epoch": 0.05008944543828265,
      "grad_norm": 65.40850830078125,
      "learning_rate": 3.5e-06,
      "loss": 58.9552,
      "step": 7
    },
    {
      "epoch": 0.057245080500894455,
      "grad_norm": 52.03582000732422,
      "learning_rate": 4.000000000000001e-06,
      "loss": 60.8162,
      "step": 8
    },
    {
      "epoch": 0.06440071556350627,
      "grad_norm": 71.69737243652344,
      "learning_rate": 4.5e-06,
      "loss": 63.0307,
      "step": 9
    },
    {
      "epoch": 0.07155635062611806,
      "grad_norm": 66.26720428466797,
      "learning_rate": 5e-06,
      "loss": 62.048,
      "step": 10
    },
    {
      "epoch": 0.07871198568872988,
      "grad_norm": 47.20499038696289,
      "learning_rate": 4.99847706754774e-06,
      "loss": 59.9577,
      "step": 11
    },
    {
      "epoch": 0.08586762075134168,
      "grad_norm": 50.71089553833008,
      "learning_rate": 4.993910125649561e-06,
      "loss": 64.3391,
      "step": 12
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 56.87615203857422,
      "learning_rate": 4.986304738420684e-06,
      "loss": 62.9932,
      "step": 13
    },
    {
      "epoch": 0.1001788908765653,
      "grad_norm": 57.271766662597656,
      "learning_rate": 4.975670171853926e-06,
      "loss": 64.1028,
      "step": 14
    },
    {
      "epoch": 0.1073345259391771,
      "grad_norm": 56.15481948852539,
      "learning_rate": 4.962019382530521e-06,
      "loss": 67.4748,
      "step": 15
    },
    {
      "epoch": 0.11449016100178891,
      "grad_norm": 44.163082122802734,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 53.8473,
      "step": 16
    },
    {
      "epoch": 0.12164579606440072,
      "grad_norm": 54.03547668457031,
      "learning_rate": 4.925739315689991e-06,
      "loss": 57.9262,
      "step": 17
    },
    {
      "epoch": 0.12880143112701253,
      "grad_norm": 68.97274780273438,
      "learning_rate": 4.903154239845798e-06,
      "loss": 61.3954,
      "step": 18
    },
    {
      "epoch": 0.13595706618962433,
      "grad_norm": 53.48569869995117,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 64.557,
      "step": 19
    },
    {
      "epoch": 0.14311270125223613,
      "grad_norm": 64.57427978515625,
      "learning_rate": 4.849231551964771e-06,
      "loss": 67.3172,
      "step": 20
    },
    {
      "epoch": 0.15026833631484796,
      "grad_norm": 58.888912200927734,
      "learning_rate": 4.817959636416969e-06,
      "loss": 59.5499,
      "step": 21
    },
    {
      "epoch": 0.15742397137745975,
      "grad_norm": 53.7679443359375,
      "learning_rate": 4.783863644106502e-06,
      "loss": 61.4948,
      "step": 22
    },
    {
      "epoch": 0.16457960644007155,
      "grad_norm": 60.130409240722656,
      "learning_rate": 4.746985115747918e-06,
      "loss": 64.6237,
      "step": 23
    },
    {
      "epoch": 0.17173524150268335,
      "grad_norm": 48.91883850097656,
      "learning_rate": 4.707368982147318e-06,
      "loss": 69.1527,
      "step": 24
    },
    {
      "epoch": 0.17889087656529518,
      "grad_norm": 46.37879943847656,
      "learning_rate": 4.665063509461098e-06,
      "loss": 62.3713,
      "step": 25
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 50.134422302246094,
      "learning_rate": 4.620120240391065e-06,
      "loss": 64.926,
      "step": 26
    },
    {
      "epoch": 0.19320214669051877,
      "grad_norm": 46.87827682495117,
      "learning_rate": 4.572593931387604e-06,
      "loss": 63.1828,
      "step": 27
    },
    {
      "epoch": 0.2003577817531306,
      "grad_norm": 54.75893783569336,
      "learning_rate": 4.522542485937369e-06,
      "loss": 60.6643,
      "step": 28
    },
    {
      "epoch": 0.2075134168157424,
      "grad_norm": 52.367431640625,
      "learning_rate": 4.470026884016805e-06,
      "loss": 68.0454,
      "step": 29
    },
    {
      "epoch": 0.2146690518783542,
      "grad_norm": 50.618350982666016,
      "learning_rate": 4.415111107797445e-06,
      "loss": 71.5049,
      "step": 30
    },
    {
      "epoch": 0.22182468694096602,
      "grad_norm": 47.27493667602539,
      "learning_rate": 4.357862063693486e-06,
      "loss": 61.6825,
      "step": 31
    },
    {
      "epoch": 0.22898032200357782,
      "grad_norm": 51.643550872802734,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 66.9774,
      "step": 32
    },
    {
      "epoch": 0.23613595706618962,
      "grad_norm": 65.83971405029297,
      "learning_rate": 4.236645926147493e-06,
      "loss": 62.7222,
      "step": 33
    },
    {
      "epoch": 0.24329159212880144,
      "grad_norm": 49.59482955932617,
      "learning_rate": 4.172826515897146e-06,
      "loss": 61.6222,
      "step": 34
    },
    {
      "epoch": 0.2504472271914132,
      "grad_norm": 53.057708740234375,
      "learning_rate": 4.106969024216348e-06,
      "loss": 55.4643,
      "step": 35
    },
    {
      "epoch": 0.25760286225402507,
      "grad_norm": 90.77571105957031,
      "learning_rate": 4.039153688314146e-06,
      "loss": 55.8806,
      "step": 36
    },
    {
      "epoch": 0.26475849731663686,
      "grad_norm": 71.54025268554688,
      "learning_rate": 3.969463130731183e-06,
      "loss": 53.0835,
      "step": 37
    },
    {
      "epoch": 0.27191413237924866,
      "grad_norm": 72.8661117553711,
      "learning_rate": 3.897982258676867e-06,
      "loss": 49.935,
      "step": 38
    },
    {
      "epoch": 0.27906976744186046,
      "grad_norm": 61.325645446777344,
      "learning_rate": 3.824798160583012e-06,
      "loss": 52.0931,
      "step": 39
    },
    {
      "epoch": 0.28622540250447226,
      "grad_norm": 95.67042541503906,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 55.6016,
      "step": 40
    },
    {
      "epoch": 0.29338103756708406,
      "grad_norm": 61.53566360473633,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 54.4276,
      "step": 41
    },
    {
      "epoch": 0.3005366726296959,
      "grad_norm": 95.13054656982422,
      "learning_rate": 3.595927866972694e-06,
      "loss": 51.2729,
      "step": 42
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 73.37828826904297,
      "learning_rate": 3.516841607689501e-06,
      "loss": 55.4188,
      "step": 43
    },
    {
      "epoch": 0.3148479427549195,
      "grad_norm": 73.3287124633789,
      "learning_rate": 3.436516483539781e-06,
      "loss": 53.3134,
      "step": 44
    },
    {
      "epoch": 0.3220035778175313,
      "grad_norm": 69.86181640625,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 51.2279,
      "step": 45
    },
    {
      "epoch": 0.3291592128801431,
      "grad_norm": 88.11177062988281,
      "learning_rate": 3.272542485937369e-06,
      "loss": 52.3788,
      "step": 46
    },
    {
      "epoch": 0.3363148479427549,
      "grad_norm": 77.78619384765625,
      "learning_rate": 3.189093389542498e-06,
      "loss": 55.5405,
      "step": 47
    },
    {
      "epoch": 0.3434704830053667,
      "grad_norm": 54.31289291381836,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 53.7706,
      "step": 48
    },
    {
      "epoch": 0.35062611806797855,
      "grad_norm": 92.552734375,
      "learning_rate": 3.019779227044398e-06,
      "loss": 48.2176,
      "step": 49
    },
    {
      "epoch": 0.35778175313059035,
      "grad_norm": 55.199222564697266,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 55.8397,
      "step": 50
    },
    {
      "epoch": 0.35778175313059035,
      "eval_loss": 7.059117317199707,
      "eval_runtime": 22.0275,
      "eval_samples_per_second": 10.714,
      "eval_steps_per_second": 2.678,
      "step": 50
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.53464644354048e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}