{
  "best_metric": 1.9785971641540527,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 0.013889130019618397,
  "eval_steps": 50,
  "global_step": 50,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00027778260039236795,
      "grad_norm": 11.772335052490234,
      "learning_rate": 5.000000000000001e-07,
      "loss": 33.1371,
      "step": 1
    },
    {
      "epoch": 0.00027778260039236795,
      "eval_loss": 2.143366813659668,
      "eval_runtime": 2204.6716,
      "eval_samples_per_second": 11.0,
      "eval_steps_per_second": 1.375,
      "step": 1
    },
    {
      "epoch": 0.0005555652007847359,
      "grad_norm": 10.740192413330078,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 31.1104,
      "step": 2
    },
    {
      "epoch": 0.0008333478011771037,
      "grad_norm": 8.600647926330566,
      "learning_rate": 1.5e-06,
      "loss": 29.9754,
      "step": 3
    },
    {
      "epoch": 0.0011111304015694718,
      "grad_norm": 10.105558395385742,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 29.1562,
      "step": 4
    },
    {
      "epoch": 0.0013889130019618396,
      "grad_norm": 12.394813537597656,
      "learning_rate": 2.5e-06,
      "loss": 31.1149,
      "step": 5
    },
    {
      "epoch": 0.0016666956023542075,
      "grad_norm": 12.586294174194336,
      "learning_rate": 3e-06,
      "loss": 31.1174,
      "step": 6
    },
    {
      "epoch": 0.0019444782027465755,
      "grad_norm": 10.115208625793457,
      "learning_rate": 3.5e-06,
      "loss": 30.822,
      "step": 7
    },
    {
      "epoch": 0.0022222608031389436,
      "grad_norm": 9.80542278289795,
      "learning_rate": 4.000000000000001e-06,
      "loss": 31.9496,
      "step": 8
    },
    {
      "epoch": 0.002500043403531311,
      "grad_norm": 15.492188453674316,
      "learning_rate": 4.5e-06,
      "loss": 30.6914,
      "step": 9
    },
    {
      "epoch": 0.0027778260039236793,
      "grad_norm": 14.333991050720215,
      "learning_rate": 5e-06,
      "loss": 31.0897,
      "step": 10
    },
    {
      "epoch": 0.0030556086043160473,
      "grad_norm": 13.288778305053711,
      "learning_rate": 4.99847706754774e-06,
      "loss": 32.038,
      "step": 11
    },
    {
      "epoch": 0.003333391204708415,
      "grad_norm": 10.738077163696289,
      "learning_rate": 4.993910125649561e-06,
      "loss": 31.5108,
      "step": 12
    },
    {
      "epoch": 0.003611173805100783,
      "grad_norm": 11.2579345703125,
      "learning_rate": 4.986304738420684e-06,
      "loss": 32.6261,
      "step": 13
    },
    {
      "epoch": 0.003888956405493151,
      "grad_norm": 10.858186721801758,
      "learning_rate": 4.975670171853926e-06,
      "loss": 32.1063,
      "step": 14
    },
    {
      "epoch": 0.004166739005885519,
      "grad_norm": 12.74929428100586,
      "learning_rate": 4.962019382530521e-06,
      "loss": 32.9972,
      "step": 15
    },
    {
      "epoch": 0.004444521606277887,
      "grad_norm": 12.187442779541016,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 33.676,
      "step": 16
    },
    {
      "epoch": 0.004722304206670255,
      "grad_norm": 11.097588539123535,
      "learning_rate": 4.925739315689991e-06,
      "loss": 31.9569,
      "step": 17
    },
    {
      "epoch": 0.005000086807062622,
      "grad_norm": 12.763700485229492,
      "learning_rate": 4.903154239845798e-06,
      "loss": 34.0068,
      "step": 18
    },
    {
      "epoch": 0.005277869407454991,
      "grad_norm": 12.541844367980957,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 33.3713,
      "step": 19
    },
    {
      "epoch": 0.0055556520078473585,
      "grad_norm": 13.987171173095703,
      "learning_rate": 4.849231551964771e-06,
      "loss": 34.36,
      "step": 20
    },
    {
      "epoch": 0.005833434608239726,
      "grad_norm": 13.892500877380371,
      "learning_rate": 4.817959636416969e-06,
      "loss": 34.3124,
      "step": 21
    },
    {
      "epoch": 0.006111217208632095,
      "grad_norm": 14.628053665161133,
      "learning_rate": 4.783863644106502e-06,
      "loss": 33.545,
      "step": 22
    },
    {
      "epoch": 0.006388999809024462,
      "grad_norm": 14.313295364379883,
      "learning_rate": 4.746985115747918e-06,
      "loss": 33.1382,
      "step": 23
    },
    {
      "epoch": 0.00666678240941683,
      "grad_norm": 15.4104642868042,
      "learning_rate": 4.707368982147318e-06,
      "loss": 33.9549,
      "step": 24
    },
    {
      "epoch": 0.006944565009809198,
      "grad_norm": 16.55370330810547,
      "learning_rate": 4.665063509461098e-06,
      "loss": 32.9898,
      "step": 25
    },
    {
      "epoch": 0.007222347610201566,
      "grad_norm": 14.391225814819336,
      "learning_rate": 4.620120240391065e-06,
      "loss": 32.4941,
      "step": 26
    },
    {
      "epoch": 0.007500130210593934,
      "grad_norm": 16.7861385345459,
      "learning_rate": 4.572593931387604e-06,
      "loss": 35.6379,
      "step": 27
    },
    {
      "epoch": 0.007777912810986302,
      "grad_norm": 14.925787925720215,
      "learning_rate": 4.522542485937369e-06,
      "loss": 32.6469,
      "step": 28
    },
    {
      "epoch": 0.00805569541137867,
      "grad_norm": 16.225515365600586,
      "learning_rate": 4.470026884016805e-06,
      "loss": 33.9583,
      "step": 29
    },
    {
      "epoch": 0.008333478011771037,
      "grad_norm": 16.99432373046875,
      "learning_rate": 4.415111107797445e-06,
      "loss": 34.1699,
      "step": 30
    },
    {
      "epoch": 0.008611260612163406,
      "grad_norm": 17.615966796875,
      "learning_rate": 4.357862063693486e-06,
      "loss": 32.8139,
      "step": 31
    },
    {
      "epoch": 0.008889043212555774,
      "grad_norm": 22.280908584594727,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 35.1964,
      "step": 32
    },
    {
      "epoch": 0.009166825812948141,
      "grad_norm": 18.062667846679688,
      "learning_rate": 4.236645926147493e-06,
      "loss": 33.659,
      "step": 33
    },
    {
      "epoch": 0.00944460841334051,
      "grad_norm": 18.6680908203125,
      "learning_rate": 4.172826515897146e-06,
      "loss": 35.3859,
      "step": 34
    },
    {
      "epoch": 0.009722391013732878,
      "grad_norm": 17.215133666992188,
      "learning_rate": 4.106969024216348e-06,
      "loss": 33.7911,
      "step": 35
    },
    {
      "epoch": 0.010000173614125245,
      "grad_norm": 20.330347061157227,
      "learning_rate": 4.039153688314146e-06,
      "loss": 35.2101,
      "step": 36
    },
    {
      "epoch": 0.010277956214517613,
      "grad_norm": 19.99478530883789,
      "learning_rate": 3.969463130731183e-06,
      "loss": 33.3331,
      "step": 37
    },
    {
      "epoch": 0.010555738814909982,
      "grad_norm": 19.694744110107422,
      "learning_rate": 3.897982258676867e-06,
      "loss": 33.9149,
      "step": 38
    },
    {
      "epoch": 0.010833521415302349,
      "grad_norm": 19.878915786743164,
      "learning_rate": 3.824798160583012e-06,
      "loss": 33.9199,
      "step": 39
    },
    {
      "epoch": 0.011111304015694717,
      "grad_norm": 21.502235412597656,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 33.4184,
      "step": 40
    },
    {
      "epoch": 0.011389086616087086,
      "grad_norm": 20.41208839416504,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 35.8002,
      "step": 41
    },
    {
      "epoch": 0.011666869216479452,
      "grad_norm": 19.16145133972168,
      "learning_rate": 3.595927866972694e-06,
      "loss": 33.5787,
      "step": 42
    },
    {
      "epoch": 0.01194465181687182,
      "grad_norm": 19.13408851623535,
      "learning_rate": 3.516841607689501e-06,
      "loss": 33.0847,
      "step": 43
    },
    {
      "epoch": 0.01222243441726419,
      "grad_norm": 19.427921295166016,
      "learning_rate": 3.436516483539781e-06,
      "loss": 34.2262,
      "step": 44
    },
    {
      "epoch": 0.012500217017656556,
      "grad_norm": 21.819917678833008,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 34.3069,
      "step": 45
    },
    {
      "epoch": 0.012777999618048924,
      "grad_norm": 22.254148483276367,
      "learning_rate": 3.272542485937369e-06,
      "loss": 35.325,
      "step": 46
    },
    {
      "epoch": 0.013055782218441293,
      "grad_norm": 26.261808395385742,
      "learning_rate": 3.189093389542498e-06,
      "loss": 36.7306,
      "step": 47
    },
    {
      "epoch": 0.01333356481883366,
      "grad_norm": 25.339427947998047,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 36.3985,
      "step": 48
    },
    {
      "epoch": 0.013611347419226028,
      "grad_norm": 37.24362564086914,
      "learning_rate": 3.019779227044398e-06,
      "loss": 36.1939,
      "step": 49
    },
    {
      "epoch": 0.013889130019618397,
      "grad_norm": 30.434406280517578,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 37.8932,
      "step": 50
    },
    {
      "epoch": 0.013889130019618397,
      "eval_loss": 1.9785971641540527,
      "eval_runtime": 2210.751,
      "eval_samples_per_second": 10.97,
      "eval_steps_per_second": 1.371,
      "step": 50
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6171259006379622e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}