{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.918839153959666,
  "eval_steps": 500,
  "global_step": 40000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 4.9385145105755044e-05,
      "loss": 8.6111,
      "step": 500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8770290211510086e-05,
      "loss": 8.9414,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.815543531726513e-05,
      "loss": 9.0231,
      "step": 1500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.754058042302017e-05,
      "loss": 8.756,
      "step": 2000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.692572552877521e-05,
      "loss": 8.6571,
      "step": 2500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.631087063453025e-05,
      "loss": 8.5083,
      "step": 3000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.56960157402853e-05,
      "loss": 8.4503,
      "step": 3500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.5081160846040335e-05,
      "loss": 8.3489,
      "step": 4000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.4466305951795377e-05,
      "loss": 8.2406,
      "step": 4500
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.385145105755042e-05,
      "loss": 8.2487,
      "step": 5000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.323659616330546e-05,
      "loss": 8.1667,
      "step": 5500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.262174126906051e-05,
      "loss": 8.0874,
      "step": 6000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.200688637481554e-05,
      "loss": 8.0537,
      "step": 6500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.1392031480570584e-05,
      "loss": 8.0119,
      "step": 7000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.0777176586325626e-05,
      "loss": 7.94,
      "step": 7500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.0162321692080674e-05,
      "loss": 7.9435,
      "step": 8000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.9547466797835716e-05,
      "loss": 7.8516,
      "step": 8500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.893261190359076e-05,
      "loss": 7.7969,
      "step": 9000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.831775700934579e-05,
      "loss": 7.7974,
      "step": 9500
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.7702902115100834e-05,
      "loss": 7.7728,
      "step": 10000
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.708804722085588e-05,
      "loss": 7.6974,
      "step": 10500
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.6473192326610924e-05,
      "loss": 7.6804,
      "step": 11000
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.5858337432365965e-05,
      "loss": 7.676,
      "step": 11500
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.5243482538121e-05,
      "loss": 7.6323,
      "step": 12000
    },
    {
      "epoch": 1.54,
      "learning_rate": 3.462862764387605e-05,
      "loss": 7.6116,
      "step": 12500
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.401377274963109e-05,
      "loss": 7.574,
      "step": 13000
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.339891785538613e-05,
      "loss": 7.5571,
      "step": 13500
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.278406296114117e-05,
      "loss": 7.5299,
      "step": 14000
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.2169208066896215e-05,
      "loss": 7.4822,
      "step": 14500
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.1554353172651256e-05,
      "loss": 7.4693,
      "step": 15000
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.09394982784063e-05,
      "loss": 7.4753,
      "step": 15500
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.032464338416134e-05,
      "loss": 7.4058,
      "step": 16000
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.9709788489916378e-05,
      "loss": 7.4351,
      "step": 16500
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.9094933595671426e-05,
      "loss": 7.3385,
      "step": 17000
    },
    {
      "epoch": 2.15,
      "learning_rate": 2.8480078701426464e-05,
      "loss": 7.3167,
      "step": 17500
    },
    {
      "epoch": 2.21,
      "learning_rate": 2.7865223807181506e-05,
      "loss": 7.3194,
      "step": 18000
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.7250368912936547e-05,
      "loss": 7.3105,
      "step": 18500
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.663551401869159e-05,
      "loss": 7.2525,
      "step": 19000
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.6020659124446634e-05,
      "loss": 7.2848,
      "step": 19500
    },
    {
      "epoch": 2.46,
      "learning_rate": 2.5405804230201675e-05,
      "loss": 7.2648,
      "step": 20000
    },
    {
      "epoch": 2.52,
      "learning_rate": 2.4790949335956714e-05,
      "loss": 7.2688,
      "step": 20500
    },
    {
      "epoch": 2.58,
      "learning_rate": 2.417609444171176e-05,
      "loss": 7.2292,
      "step": 21000
    },
    {
      "epoch": 2.64,
      "learning_rate": 2.35612395474668e-05,
      "loss": 7.229,
      "step": 21500
    },
    {
      "epoch": 2.71,
      "learning_rate": 2.294638465322184e-05,
      "loss": 7.2068,
      "step": 22000
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.2331529758976883e-05,
      "loss": 7.1212,
      "step": 22500
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.171667486473192e-05,
      "loss": 7.1578,
      "step": 23000
    },
    {
      "epoch": 2.89,
      "learning_rate": 2.1101819970486966e-05,
      "loss": 7.0875,
      "step": 23500
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.0486965076242008e-05,
      "loss": 7.1206,
      "step": 24000
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.987211018199705e-05,
      "loss": 7.1629,
      "step": 24500
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.925725528775209e-05,
      "loss": 7.0788,
      "step": 25000
    },
    {
      "epoch": 3.14,
      "learning_rate": 1.8642400393507133e-05,
      "loss": 7.0787,
      "step": 25500
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.8027545499262174e-05,
      "loss": 7.0581,
      "step": 26000
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.741269060501722e-05,
      "loss": 6.9689,
      "step": 26500
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.6797835710772257e-05,
      "loss": 7.0185,
      "step": 27000
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.6182980816527302e-05,
      "loss": 6.9885,
      "step": 27500
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.5568125922282344e-05,
      "loss": 6.9848,
      "step": 28000
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.4953271028037382e-05,
      "loss": 6.9358,
      "step": 28500
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.4338416133792425e-05,
      "loss": 6.9342,
      "step": 29000
    },
    {
      "epoch": 3.63,
      "learning_rate": 1.3723561239547467e-05,
      "loss": 6.94,
      "step": 29500
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.310870634530251e-05,
      "loss": 6.9609,
      "step": 30000
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.249385145105755e-05,
      "loss": 6.9132,
      "step": 30500
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.1878996556812592e-05,
      "loss": 6.8702,
      "step": 31000
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.1264141662567635e-05,
      "loss": 6.9124,
      "step": 31500
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.0649286768322676e-05,
      "loss": 6.8865,
      "step": 32000
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.0034431874077718e-05,
      "loss": 6.8776,
      "step": 32500
    },
    {
      "epoch": 4.06,
      "learning_rate": 9.41957697983276e-06,
      "loss": 6.8486,
      "step": 33000
    },
    {
      "epoch": 4.12,
      "learning_rate": 8.804722085587803e-06,
      "loss": 6.8326,
      "step": 33500
    },
    {
      "epoch": 4.18,
      "learning_rate": 8.189867191342844e-06,
      "loss": 6.8254,
      "step": 34000
    },
    {
      "epoch": 4.24,
      "learning_rate": 7.575012297097884e-06,
      "loss": 6.82,
      "step": 34500
    },
    {
      "epoch": 4.3,
      "learning_rate": 6.960157402852927e-06,
      "loss": 6.782,
      "step": 35000
    },
    {
      "epoch": 4.37,
      "learning_rate": 6.345302508607968e-06,
      "loss": 6.7768,
      "step": 35500
    },
    {
      "epoch": 4.43,
      "learning_rate": 5.730447614363011e-06,
      "loss": 6.841,
      "step": 36000
    },
    {
      "epoch": 4.49,
      "learning_rate": 5.115592720118052e-06,
      "loss": 6.8454,
      "step": 36500
    },
    {
      "epoch": 4.55,
      "learning_rate": 4.500737825873094e-06,
      "loss": 6.8015,
      "step": 37000
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.885882931628136e-06,
      "loss": 6.8061,
      "step": 37500
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.2710280373831774e-06,
      "loss": 6.8078,
      "step": 38000
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.65617314313822e-06,
      "loss": 6.7983,
      "step": 38500
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.0413182488932614e-06,
      "loss": 6.792,
      "step": 39000
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.426463354648303e-06,
      "loss": 6.751,
      "step": 39500
    },
    {
      "epoch": 4.92,
      "learning_rate": 8.116084604033448e-07,
      "loss": 6.7829,
      "step": 40000
    }
  ],
  "logging_steps": 500,
  "max_steps": 40660,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 10000,
  "total_flos": 1.0574867779746048e+16,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}