{
  "best_metric": 0.6264265498194235,
  "best_model_checkpoint": "case-analysis-roberta-base/checkpoint-2016",
  "epoch": 9.0,
  "eval_steps": 500,
  "global_step": 2016,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7527839643652561,
      "eval_f1_macro": 0.5389063030773956,
      "eval_f1_micro": 0.7527839643652561,
      "eval_f1_weighted": 0.7300520074128541,
      "eval_loss": 0.8771109580993652,
      "eval_macro_fpr": 0.13144039552419665,
      "eval_macro_sensitivity": 0.5401895401895401,
      "eval_macro_specificity": 0.8833499134100214,
      "eval_precision": 0.7119932520850578,
      "eval_precision_macro": 0.5403637213518124,
      "eval_recall": 0.7527839643652561,
      "eval_recall_macro": 0.5401895401895401,
      "eval_runtime": 6.9264,
      "eval_samples_per_second": 64.825,
      "eval_steps_per_second": 8.229,
      "eval_weighted_fpr": 0.09866666666666667,
      "eval_weighted_sensitivity": 0.7527839643652561,
      "eval_weighted_specificity": 0.7806156892748298,
      "step": 224
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7728285077951003,
      "eval_f1_macro": 0.5712387529890991,
      "eval_f1_micro": 0.7728285077951002,
      "eval_f1_weighted": 0.7555182045354769,
      "eval_loss": 0.7935857176780701,
      "eval_macro_fpr": 0.10804320901264429,
      "eval_macro_sensitivity": 0.59374376286141,
      "eval_macro_specificity": 0.9046495531497725,
      "eval_precision": 0.7420199260104934,
      "eval_precision_macro": 0.5529178338001868,
      "eval_recall": 0.7728285077951003,
      "eval_recall_macro": 0.59374376286141,
      "eval_runtime": 33.5269,
      "eval_samples_per_second": 13.392,
      "eval_steps_per_second": 1.7,
      "eval_weighted_fpr": 0.08923884514435695,
      "eval_weighted_sensitivity": 0.7728285077951003,
      "eval_weighted_specificity": 0.8457697048039892,
      "step": 448
    },
    {
      "epoch": 2.232142857142857,
      "grad_norm": 10.557605743408203,
      "learning_rate": 4.631696428571429e-05,
      "loss": 0.8855,
      "step": 500
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7305122494432071,
      "eval_f1_macro": 0.5313357935186093,
      "eval_f1_micro": 0.730512249443207,
      "eval_f1_weighted": 0.7208063173961943,
      "eval_loss": 0.8126917481422424,
      "eval_macro_fpr": 0.1288106780460812,
      "eval_macro_sensitivity": 0.5599513761278467,
      "eval_macro_specificity": 0.8878517309481255,
      "eval_precision": 0.7320572933103704,
      "eval_precision_macro": 0.5335863697705803,
      "eval_recall": 0.7305122494432071,
      "eval_recall_macro": 0.5599513761278467,
      "eval_runtime": 35.5026,
      "eval_samples_per_second": 12.647,
      "eval_steps_per_second": 1.606,
      "eval_weighted_fpr": 0.10950226244343891,
      "eval_weighted_sensitivity": 0.7305122494432071,
      "eval_weighted_specificity": 0.8208946743492954,
      "step": 672
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.779510022271715,
      "eval_f1_macro": 0.5605264699901076,
      "eval_f1_micro": 0.779510022271715,
      "eval_f1_weighted": 0.756140232373208,
      "eval_loss": 1.0185617208480835,
      "eval_macro_fpr": 0.11840141313918778,
      "eval_macro_sensitivity": 0.565439197792139,
      "eval_macro_specificity": 0.8949862362922303,
      "eval_precision": 0.7503024658142448,
      "eval_precision_macro": 0.5721560130010834,
      "eval_recall": 0.779510022271715,
      "eval_recall_macro": 0.565439197792139,
      "eval_runtime": 42.1679,
      "eval_samples_per_second": 10.648,
      "eval_steps_per_second": 1.352,
      "eval_weighted_fpr": 0.08616187989556136,
      "eval_weighted_sensitivity": 0.779510022271715,
      "eval_weighted_specificity": 0.800434922897206,
      "step": 896
    },
    {
      "epoch": 4.464285714285714,
      "grad_norm": 4.521566867828369,
      "learning_rate": 4.260416666666667e-05,
      "loss": 0.5551,
      "step": 1000
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8084632516703786,
      "eval_f1_macro": 0.5892045454545454,
      "eval_f1_micro": 0.8084632516703786,
      "eval_f1_weighted": 0.7866926503340758,
      "eval_loss": 0.7591320276260376,
      "eval_macro_fpr": 0.09875084526504571,
      "eval_macro_sensitivity": 0.5962627065568242,
      "eval_macro_specificity": 0.91148202322346,
      "eval_precision": 0.7673786757699211,
      "eval_precision_macro": 0.583308107932168,
      "eval_recall": 0.8084632516703786,
      "eval_recall_macro": 0.5962627065568242,
      "eval_runtime": 42.7116,
      "eval_samples_per_second": 10.512,
      "eval_steps_per_second": 1.335,
      "eval_weighted_fpr": 0.07319148936170213,
      "eval_weighted_sensitivity": 0.8084632516703786,
      "eval_weighted_specificity": 0.8374648412234613,
      "step": 1120
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.8173719376391982,
      "eval_f1_macro": 0.603048355342882,
      "eval_f1_micro": 0.8173719376391981,
      "eval_f1_weighted": 0.7967392787416411,
      "eval_loss": 0.9521613717079163,
      "eval_macro_fpr": 0.09667046583838863,
      "eval_macro_sensitivity": 0.5987868120221062,
      "eval_macro_specificity": 0.9117780208671373,
      "eval_precision": 0.7816269250029019,
      "eval_precision_macro": 0.6116760996647348,
      "eval_recall": 0.8173719376391982,
      "eval_recall_macro": 0.5987868120221062,
      "eval_runtime": 40.3104,
      "eval_samples_per_second": 11.139,
      "eval_steps_per_second": 1.414,
      "eval_weighted_fpr": 0.06931530008453085,
      "eval_weighted_sensitivity": 0.8173719376391982,
      "eval_weighted_specificity": 0.8297401458293509,
      "step": 1344
    },
    {
      "epoch": 6.696428571428571,
      "grad_norm": 0.38720017671585083,
      "learning_rate": 3.888392857142857e-05,
      "loss": 0.386,
      "step": 1500
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.7706013363028953,
      "eval_f1_macro": 0.5781817655906514,
      "eval_f1_micro": 0.7706013363028953,
      "eval_f1_weighted": 0.7656335044104077,
      "eval_loss": 1.0569394826889038,
      "eval_macro_fpr": 0.10894725594931005,
      "eval_macro_sensitivity": 0.5857963578551814,
      "eval_macro_specificity": 0.9056966184410946,
      "eval_precision": 0.7610212023939332,
      "eval_precision_macro": 0.5709751150784568,
      "eval_recall": 0.7706013363028953,
      "eval_recall_macro": 0.5857963578551814,
      "eval_runtime": 36.0908,
      "eval_samples_per_second": 12.441,
      "eval_steps_per_second": 1.579,
      "eval_weighted_fpr": 0.09027169149868536,
      "eval_weighted_sensitivity": 0.7706013363028953,
      "eval_weighted_specificity": 0.852185137461483,
      "step": 1568
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.7572383073496659,
      "eval_f1_macro": 0.6161842736352976,
      "eval_f1_micro": 0.7572383073496659,
      "eval_f1_weighted": 0.7715266654127986,
      "eval_loss": 1.1956746578216553,
      "eval_macro_fpr": 0.10518103765035827,
      "eval_macro_sensitivity": 0.6263515234103469,
      "eval_macro_specificity": 0.9119340321339534,
      "eval_precision": 0.7917843915207833,
      "eval_precision_macro": 0.6175221125829533,
      "eval_recall": 0.7572383073496659,
      "eval_recall_macro": 0.6263515234103469,
      "eval_runtime": 39.6918,
      "eval_samples_per_second": 11.312,
      "eval_steps_per_second": 1.436,
      "eval_weighted_fpr": 0.09654561558901682,
      "eval_weighted_sensitivity": 0.7572383073496659,
      "eval_weighted_specificity": 0.8904978211861475,
      "step": 1792
    },
    {
      "epoch": 8.928571428571429,
      "grad_norm": 0.17783766984939575,
      "learning_rate": 3.516369047619048e-05,
      "loss": 0.2709,
      "step": 2000
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.7728285077951003,
      "eval_f1_macro": 0.6264265498194235,
      "eval_f1_micro": 0.7728285077951002,
      "eval_f1_weighted": 0.7773023130906183,
      "eval_loss": 1.2091724872589111,
      "eval_macro_fpr": 0.10207367511358464,
      "eval_macro_sensitivity": 0.6301385419032478,
      "eval_macro_specificity": 0.9119745284052109,
      "eval_precision": 0.7896658353037499,
      "eval_precision_macro": 0.6331069825924028,
      "eval_recall": 0.7728285077951003,
      "eval_recall_macro": 0.6301385419032478,
      "eval_runtime": 39.549,
      "eval_samples_per_second": 11.353,
      "eval_steps_per_second": 1.441,
      "eval_weighted_fpr": 0.08923884514435695,
      "eval_weighted_sensitivity": 0.7728285077951003,
      "eval_weighted_specificity": 0.8750696058257429,
      "step": 2016
    }
  ],
  "logging_steps": 500,
  "max_steps": 6720,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 500,
  "total_flos": 4243531301388288.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}