{
  "best_metric": 0.08230920881032944,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.052171644711099516,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00026085822355549756,
      "grad_norm": 2.889893054962158,
      "learning_rate": 1.004e-05,
      "loss": 0.4277,
      "step": 1
    },
    {
      "epoch": 0.00026085822355549756,
      "eval_loss": 0.12571461498737335,
      "eval_runtime": 355.2992,
      "eval_samples_per_second": 4.543,
      "eval_steps_per_second": 1.137,
      "step": 1
    },
    {
      "epoch": 0.0005217164471109951,
      "grad_norm": 2.151228666305542,
      "learning_rate": 2.008e-05,
      "loss": 0.4676,
      "step": 2
    },
    {
      "epoch": 0.0007825746706664928,
      "grad_norm": 1.6503167152404785,
      "learning_rate": 3.012e-05,
      "loss": 0.4041,
      "step": 3
    },
    {
      "epoch": 0.0010434328942219902,
      "grad_norm": 1.666961908340454,
      "learning_rate": 4.016e-05,
      "loss": 0.4058,
      "step": 4
    },
    {
      "epoch": 0.001304291117777488,
      "grad_norm": 3.217031478881836,
      "learning_rate": 5.02e-05,
      "loss": 0.3677,
      "step": 5
    },
    {
      "epoch": 0.0015651493413329856,
      "grad_norm": 1.9754326343536377,
      "learning_rate": 6.024e-05,
      "loss": 0.3753,
      "step": 6
    },
    {
      "epoch": 0.0018260075648884831,
      "grad_norm": 1.1739203929901123,
      "learning_rate": 7.028e-05,
      "loss": 0.4414,
      "step": 7
    },
    {
      "epoch": 0.0020868657884439805,
      "grad_norm": 0.9883484840393066,
      "learning_rate": 8.032e-05,
      "loss": 0.3682,
      "step": 8
    },
    {
      "epoch": 0.0023477240119994785,
      "grad_norm": 1.0638494491577148,
      "learning_rate": 9.036000000000001e-05,
      "loss": 0.4203,
      "step": 9
    },
    {
      "epoch": 0.002608582235554976,
      "grad_norm": 1.1801265478134155,
      "learning_rate": 0.0001004,
      "loss": 0.4001,
      "step": 10
    },
    {
      "epoch": 0.0028694404591104736,
      "grad_norm": 1.0033977031707764,
      "learning_rate": 9.987157894736842e-05,
      "loss": 0.3808,
      "step": 11
    },
    {
      "epoch": 0.003130298682665971,
      "grad_norm": 1.0454702377319336,
      "learning_rate": 9.934315789473684e-05,
      "loss": 0.3992,
      "step": 12
    },
    {
      "epoch": 0.0033911569062214687,
      "grad_norm": 0.9567103981971741,
      "learning_rate": 9.881473684210525e-05,
      "loss": 0.4369,
      "step": 13
    },
    {
      "epoch": 0.0036520151297769663,
      "grad_norm": 0.9078320860862732,
      "learning_rate": 9.828631578947369e-05,
      "loss": 0.3368,
      "step": 14
    },
    {
      "epoch": 0.003912873353332464,
      "grad_norm": 1.067278265953064,
      "learning_rate": 9.77578947368421e-05,
      "loss": 0.362,
      "step": 15
    },
    {
      "epoch": 0.004173731576887961,
      "grad_norm": 1.1500056982040405,
      "learning_rate": 9.722947368421052e-05,
      "loss": 0.3707,
      "step": 16
    },
    {
      "epoch": 0.004434589800443459,
      "grad_norm": 0.9279374480247498,
      "learning_rate": 9.670105263157895e-05,
      "loss": 0.3435,
      "step": 17
    },
    {
      "epoch": 0.004695448023998957,
      "grad_norm": 0.8641620874404907,
      "learning_rate": 9.617263157894737e-05,
      "loss": 0.3694,
      "step": 18
    },
    {
      "epoch": 0.004956306247554454,
      "grad_norm": 0.9377633929252625,
      "learning_rate": 9.564421052631579e-05,
      "loss": 0.3506,
      "step": 19
    },
    {
      "epoch": 0.005217164471109952,
      "grad_norm": 0.8363476991653442,
      "learning_rate": 9.511578947368421e-05,
      "loss": 0.3899,
      "step": 20
    },
    {
      "epoch": 0.005478022694665449,
      "grad_norm": 0.913992702960968,
      "learning_rate": 9.458736842105264e-05,
      "loss": 0.3662,
      "step": 21
    },
    {
      "epoch": 0.005738880918220947,
      "grad_norm": 1.1835615634918213,
      "learning_rate": 9.405894736842106e-05,
      "loss": 0.4134,
      "step": 22
    },
    {
      "epoch": 0.005999739141776444,
      "grad_norm": 0.8820966482162476,
      "learning_rate": 9.353052631578947e-05,
      "loss": 0.3832,
      "step": 23
    },
    {
      "epoch": 0.006260597365331942,
      "grad_norm": 1.1732457876205444,
      "learning_rate": 9.300210526315789e-05,
      "loss": 0.3671,
      "step": 24
    },
    {
      "epoch": 0.0065214555888874395,
      "grad_norm": 1.1374287605285645,
      "learning_rate": 9.247368421052631e-05,
      "loss": 0.1064,
      "step": 25
    },
    {
      "epoch": 0.0067823138124429375,
      "grad_norm": 0.8893991112709045,
      "learning_rate": 9.194526315789473e-05,
      "loss": 0.0371,
      "step": 26
    },
    {
      "epoch": 0.007043172035998435,
      "grad_norm": 6.425418376922607,
      "learning_rate": 9.141684210526316e-05,
      "loss": 0.0351,
      "step": 27
    },
    {
      "epoch": 0.007304030259553933,
      "grad_norm": 2.917386770248413,
      "learning_rate": 9.088842105263158e-05,
      "loss": 0.0259,
      "step": 28
    },
    {
      "epoch": 0.00756488848310943,
      "grad_norm": 2.889286756515503,
      "learning_rate": 9.036000000000001e-05,
      "loss": 0.0523,
      "step": 29
    },
    {
      "epoch": 0.007825746706664928,
      "grad_norm": 1.3778408765792847,
      "learning_rate": 8.983157894736843e-05,
      "loss": 0.0113,
      "step": 30
    },
    {
      "epoch": 0.008086604930220426,
      "grad_norm": 1.199445128440857,
      "learning_rate": 8.930315789473684e-05,
      "loss": 0.0075,
      "step": 31
    },
    {
      "epoch": 0.008347463153775922,
      "grad_norm": 1.0043731927871704,
      "learning_rate": 8.877473684210526e-05,
      "loss": 0.0058,
      "step": 32
    },
    {
      "epoch": 0.00860832137733142,
      "grad_norm": 0.047168854624032974,
      "learning_rate": 8.824631578947368e-05,
      "loss": 0.0005,
      "step": 33
    },
    {
      "epoch": 0.008869179600886918,
      "grad_norm": 0.5005334615707397,
      "learning_rate": 8.771789473684211e-05,
      "loss": 0.0045,
      "step": 34
    },
    {
      "epoch": 0.009130037824442416,
      "grad_norm": 8.737451553344727,
      "learning_rate": 8.718947368421053e-05,
      "loss": 0.0441,
      "step": 35
    },
    {
      "epoch": 0.009390896047997914,
      "grad_norm": 1.570122241973877,
      "learning_rate": 8.666105263157895e-05,
      "loss": 0.0158,
      "step": 36
    },
    {
      "epoch": 0.00965175427155341,
      "grad_norm": 2.2240395545959473,
      "learning_rate": 8.613263157894737e-05,
      "loss": 0.0246,
      "step": 37
    },
    {
      "epoch": 0.009912612495108908,
      "grad_norm": 1.170030117034912,
      "learning_rate": 8.560421052631578e-05,
      "loss": 0.0105,
      "step": 38
    },
    {
      "epoch": 0.010173470718664406,
      "grad_norm": 1.453275442123413,
      "learning_rate": 8.50757894736842e-05,
      "loss": 0.013,
      "step": 39
    },
    {
      "epoch": 0.010434328942219904,
      "grad_norm": 1.6023317575454712,
      "learning_rate": 8.454736842105263e-05,
      "loss": 0.0142,
      "step": 40
    },
    {
      "epoch": 0.0106951871657754,
      "grad_norm": 2.5300304889678955,
      "learning_rate": 8.401894736842106e-05,
      "loss": 0.0245,
      "step": 41
    },
    {
      "epoch": 0.010956045389330898,
      "grad_norm": 1.3093210458755493,
      "learning_rate": 8.349052631578948e-05,
      "loss": 0.0071,
      "step": 42
    },
    {
      "epoch": 0.011216903612886396,
      "grad_norm": 3.7948031425476074,
      "learning_rate": 8.29621052631579e-05,
      "loss": 0.0053,
      "step": 43
    },
    {
      "epoch": 0.011477761836441894,
      "grad_norm": 0.5037760138511658,
      "learning_rate": 8.243368421052632e-05,
      "loss": 0.0034,
      "step": 44
    },
    {
      "epoch": 0.01173862005999739,
      "grad_norm": 0.3311668634414673,
      "learning_rate": 8.190526315789474e-05,
      "loss": 0.0029,
      "step": 45
    },
    {
      "epoch": 0.011999478283552889,
      "grad_norm": 0.4485372006893158,
      "learning_rate": 8.137684210526315e-05,
      "loss": 0.0017,
      "step": 46
    },
    {
      "epoch": 0.012260336507108387,
      "grad_norm": 1.304323434829712,
      "learning_rate": 8.084842105263157e-05,
      "loss": 0.0034,
      "step": 47
    },
    {
      "epoch": 0.012521194730663885,
      "grad_norm": 0.24832318723201752,
      "learning_rate": 8.032e-05,
      "loss": 0.0019,
      "step": 48
    },
    {
      "epoch": 0.012782052954219381,
      "grad_norm": 1.1616764068603516,
      "learning_rate": 7.979157894736842e-05,
      "loss": 0.0031,
      "step": 49
    },
    {
      "epoch": 0.013042911177774879,
      "grad_norm": 4.977385997772217,
      "learning_rate": 7.926315789473684e-05,
      "loss": 0.012,
      "step": 50
    },
    {
      "epoch": 0.013042911177774879,
      "eval_loss": 0.26625388860702515,
      "eval_runtime": 357.7191,
      "eval_samples_per_second": 4.512,
      "eval_steps_per_second": 1.129,
      "step": 50
    },
    {
      "epoch": 0.013303769401330377,
      "grad_norm": 21.080076217651367,
      "learning_rate": 7.873473684210526e-05,
      "loss": 0.898,
      "step": 51
    },
    {
      "epoch": 0.013564627624885875,
      "grad_norm": 7.187935829162598,
      "learning_rate": 7.820631578947369e-05,
      "loss": 0.6007,
      "step": 52
    },
    {
      "epoch": 0.013825485848441373,
      "grad_norm": 2.1182901859283447,
      "learning_rate": 7.76778947368421e-05,
      "loss": 0.4358,
      "step": 53
    },
    {
      "epoch": 0.01408634407199687,
      "grad_norm": 1.093733310699463,
      "learning_rate": 7.714947368421052e-05,
      "loss": 0.347,
      "step": 54
    },
    {
      "epoch": 0.014347202295552367,
      "grad_norm": 0.9055514931678772,
      "learning_rate": 7.662105263157896e-05,
      "loss": 0.3856,
      "step": 55
    },
    {
      "epoch": 0.014608060519107865,
      "grad_norm": 0.8669521808624268,
      "learning_rate": 7.609263157894737e-05,
      "loss": 0.3701,
      "step": 56
    },
    {
      "epoch": 0.014868918742663363,
      "grad_norm": 0.9830688238143921,
      "learning_rate": 7.556421052631579e-05,
      "loss": 0.4053,
      "step": 57
    },
    {
      "epoch": 0.01512977696621886,
      "grad_norm": 0.7812885046005249,
      "learning_rate": 7.503578947368421e-05,
      "loss": 0.354,
      "step": 58
    },
    {
      "epoch": 0.015390635189774357,
      "grad_norm": 0.7740699648857117,
      "learning_rate": 7.450736842105263e-05,
      "loss": 0.3165,
      "step": 59
    },
    {
      "epoch": 0.015651493413329855,
      "grad_norm": 0.7680047750473022,
      "learning_rate": 7.397894736842105e-05,
      "loss": 0.3313,
      "step": 60
    },
    {
      "epoch": 0.01591235163688535,
      "grad_norm": 0.8641350269317627,
      "learning_rate": 7.345052631578948e-05,
      "loss": 0.3395,
      "step": 61
    },
    {
      "epoch": 0.01617320986044085,
      "grad_norm": 1.396049976348877,
      "learning_rate": 7.29221052631579e-05,
      "loss": 0.3642,
      "step": 62
    },
    {
      "epoch": 0.016434068083996348,
      "grad_norm": 0.8436359763145447,
      "learning_rate": 7.239368421052631e-05,
      "loss": 0.3378,
      "step": 63
    },
    {
      "epoch": 0.016694926307551844,
      "grad_norm": 0.8464940786361694,
      "learning_rate": 7.186526315789474e-05,
      "loss": 0.3668,
      "step": 64
    },
    {
      "epoch": 0.016955784531107344,
      "grad_norm": 0.725881040096283,
      "learning_rate": 7.133684210526316e-05,
      "loss": 0.3661,
      "step": 65
    },
    {
      "epoch": 0.01721664275466284,
      "grad_norm": 0.6846409440040588,
      "learning_rate": 7.080842105263158e-05,
      "loss": 0.3638,
      "step": 66
    },
    {
      "epoch": 0.01747750097821834,
      "grad_norm": 0.7216416001319885,
      "learning_rate": 7.028e-05,
      "loss": 0.3635,
      "step": 67
    },
    {
      "epoch": 0.017738359201773836,
      "grad_norm": 0.6579917073249817,
      "learning_rate": 6.975157894736843e-05,
      "loss": 0.356,
      "step": 68
    },
    {
      "epoch": 0.017999217425329332,
      "grad_norm": 0.8300607204437256,
      "learning_rate": 6.922315789473685e-05,
      "loss": 0.362,
      "step": 69
    },
    {
      "epoch": 0.018260075648884832,
      "grad_norm": 0.8601360321044922,
      "learning_rate": 6.869473684210527e-05,
      "loss": 0.3178,
      "step": 70
    },
    {
      "epoch": 0.018520933872440328,
      "grad_norm": 1.229761004447937,
      "learning_rate": 6.816631578947368e-05,
      "loss": 0.3038,
      "step": 71
    },
    {
      "epoch": 0.018781792095995828,
      "grad_norm": 0.8368088603019714,
      "learning_rate": 6.76378947368421e-05,
      "loss": 0.3394,
      "step": 72
    },
    {
      "epoch": 0.019042650319551324,
      "grad_norm": 0.76348876953125,
      "learning_rate": 6.710947368421052e-05,
      "loss": 0.3273,
      "step": 73
    },
    {
      "epoch": 0.01930350854310682,
      "grad_norm": 0.8756521344184875,
      "learning_rate": 6.658105263157894e-05,
      "loss": 0.3483,
      "step": 74
    },
    {
      "epoch": 0.01956436676666232,
      "grad_norm": 1.1323158740997314,
      "learning_rate": 6.605263157894737e-05,
      "loss": 0.1971,
      "step": 75
    },
    {
      "epoch": 0.019825224990217816,
      "grad_norm": 0.7796782851219177,
      "learning_rate": 6.55242105263158e-05,
      "loss": 0.0966,
      "step": 76
    },
    {
      "epoch": 0.020086083213773313,
      "grad_norm": 0.6780573725700378,
      "learning_rate": 6.499578947368422e-05,
      "loss": 0.0203,
      "step": 77
    },
    {
      "epoch": 0.020346941437328812,
      "grad_norm": 23.24646759033203,
      "learning_rate": 6.446736842105264e-05,
      "loss": 0.2551,
      "step": 78
    },
    {
      "epoch": 0.02060779966088431,
      "grad_norm": 8.683259010314941,
      "learning_rate": 6.393894736842105e-05,
      "loss": 0.0885,
      "step": 79
    },
    {
      "epoch": 0.02086865788443981,
      "grad_norm": 0.6611659526824951,
      "learning_rate": 6.341052631578947e-05,
      "loss": 0.0034,
      "step": 80
    },
    {
      "epoch": 0.021129516107995305,
      "grad_norm": 0.1200755164027214,
      "learning_rate": 6.288210526315789e-05,
      "loss": 0.0004,
      "step": 81
    },
    {
      "epoch": 0.0213903743315508,
      "grad_norm": 1.164768934249878,
      "learning_rate": 6.235368421052632e-05,
      "loss": 0.0091,
      "step": 82
    },
    {
      "epoch": 0.0216512325551063,
      "grad_norm": 0.17707791924476624,
      "learning_rate": 6.182526315789474e-05,
      "loss": 0.0013,
      "step": 83
    },
    {
      "epoch": 0.021912090778661797,
      "grad_norm": 0.21379932761192322,
      "learning_rate": 6.129684210526316e-05,
      "loss": 0.0013,
      "step": 84
    },
    {
      "epoch": 0.022172949002217297,
      "grad_norm": 1.1995000839233398,
      "learning_rate": 6.076842105263158e-05,
      "loss": 0.0081,
      "step": 85
    },
    {
      "epoch": 0.022433807225772793,
      "grad_norm": 0.3035678565502167,
      "learning_rate": 6.024e-05,
      "loss": 0.0008,
      "step": 86
    },
    {
      "epoch": 0.02269466544932829,
      "grad_norm": 0.014499887824058533,
      "learning_rate": 5.971157894736842e-05,
      "loss": 0.0002,
      "step": 87
    },
    {
      "epoch": 0.02295552367288379,
      "grad_norm": 1.4160069227218628,
      "learning_rate": 5.9183157894736835e-05,
      "loss": 0.0115,
      "step": 88
    },
    {
      "epoch": 0.023216381896439285,
      "grad_norm": 1.5838091373443604,
      "learning_rate": 5.8654736842105267e-05,
      "loss": 0.0009,
      "step": 89
    },
    {
      "epoch": 0.02347724011999478,
      "grad_norm": 2.2831833362579346,
      "learning_rate": 5.8126315789473684e-05,
      "loss": 0.0095,
      "step": 90
    },
    {
      "epoch": 0.02373809834355028,
      "grad_norm": 1.3635400533676147,
      "learning_rate": 5.759789473684211e-05,
      "loss": 0.0018,
      "step": 91
    },
    {
      "epoch": 0.023998956567105777,
      "grad_norm": 0.0419996902346611,
      "learning_rate": 5.706947368421053e-05,
      "loss": 0.0002,
      "step": 92
    },
    {
      "epoch": 0.024259814790661277,
      "grad_norm": 0.029113048687577248,
      "learning_rate": 5.6541052631578945e-05,
      "loss": 0.0003,
      "step": 93
    },
    {
      "epoch": 0.024520673014216773,
      "grad_norm": 1.1532421112060547,
      "learning_rate": 5.601263157894736e-05,
      "loss": 0.0072,
      "step": 94
    },
    {
      "epoch": 0.02478153123777227,
      "grad_norm": 0.03277314826846123,
      "learning_rate": 5.5484210526315794e-05,
      "loss": 0.0004,
      "step": 95
    },
    {
      "epoch": 0.02504238946132777,
      "grad_norm": 0.19502609968185425,
      "learning_rate": 5.495578947368421e-05,
      "loss": 0.0008,
      "step": 96
    },
    {
      "epoch": 0.025303247684883266,
      "grad_norm": 1.1677289009094238,
      "learning_rate": 5.442736842105264e-05,
      "loss": 0.0045,
      "step": 97
    },
    {
      "epoch": 0.025564105908438762,
      "grad_norm": 0.24581104516983032,
      "learning_rate": 5.3898947368421055e-05,
      "loss": 0.0016,
      "step": 98
    },
    {
      "epoch": 0.02582496413199426,
      "grad_norm": 2.174943685531616,
      "learning_rate": 5.337052631578947e-05,
      "loss": 0.0116,
      "step": 99
    },
    {
      "epoch": 0.026085822355549758,
      "grad_norm": 0.24700959026813507,
      "learning_rate": 5.284210526315789e-05,
      "loss": 0.0014,
      "step": 100
    },
    {
      "epoch": 0.026085822355549758,
      "eval_loss": 0.12110025435686111,
      "eval_runtime": 358.6905,
      "eval_samples_per_second": 4.5,
      "eval_steps_per_second": 1.126,
      "step": 100
    },
    {
      "epoch": 0.026346680579105258,
      "grad_norm": 5.076243877410889,
      "learning_rate": 5.231368421052631e-05,
      "loss": 0.4922,
      "step": 101
    },
    {
      "epoch": 0.026607538802660754,
      "grad_norm": 1.472834825515747,
      "learning_rate": 5.178526315789474e-05,
      "loss": 0.3784,
      "step": 102
    },
    {
      "epoch": 0.02686839702621625,
      "grad_norm": 0.9007657766342163,
      "learning_rate": 5.1256842105263165e-05,
      "loss": 0.381,
      "step": 103
    },
    {
      "epoch": 0.02712925524977175,
      "grad_norm": 0.9710745215415955,
      "learning_rate": 5.072842105263158e-05,
      "loss": 0.3776,
      "step": 104
    },
    {
      "epoch": 0.027390113473327246,
      "grad_norm": 0.6388360857963562,
      "learning_rate": 5.02e-05,
      "loss": 0.3505,
      "step": 105
    },
    {
      "epoch": 0.027650971696882746,
      "grad_norm": 0.8697831034660339,
      "learning_rate": 4.967157894736842e-05,
      "loss": 0.3618,
      "step": 106
    },
    {
      "epoch": 0.027911829920438242,
      "grad_norm": 0.6823816895484924,
      "learning_rate": 4.914315789473684e-05,
      "loss": 0.3559,
      "step": 107
    },
    {
      "epoch": 0.02817268814399374,
      "grad_norm": 0.6802661418914795,
      "learning_rate": 4.861473684210526e-05,
      "loss": 0.3502,
      "step": 108
    },
    {
      "epoch": 0.028433546367549238,
      "grad_norm": 0.7415322065353394,
      "learning_rate": 4.8086315789473686e-05,
      "loss": 0.3634,
      "step": 109
    },
    {
      "epoch": 0.028694404591104734,
      "grad_norm": 0.7177245020866394,
      "learning_rate": 4.7557894736842104e-05,
      "loss": 0.3371,
      "step": 110
    },
    {
      "epoch": 0.02895526281466023,
      "grad_norm": 0.7753718495368958,
      "learning_rate": 4.702947368421053e-05,
      "loss": 0.3397,
      "step": 111
    },
    {
      "epoch": 0.02921612103821573,
      "grad_norm": 0.6841831207275391,
      "learning_rate": 4.6501052631578946e-05,
      "loss": 0.3363,
      "step": 112
    },
    {
      "epoch": 0.029476979261771227,
      "grad_norm": 0.7129581570625305,
      "learning_rate": 4.5972631578947364e-05,
      "loss": 0.3124,
      "step": 113
    },
    {
      "epoch": 0.029737837485326726,
      "grad_norm": 0.6826345324516296,
      "learning_rate": 4.544421052631579e-05,
      "loss": 0.3518,
      "step": 114
    },
    {
      "epoch": 0.029998695708882223,
      "grad_norm": 0.7089869976043701,
      "learning_rate": 4.4915789473684213e-05,
      "loss": 0.3721,
      "step": 115
    },
    {
      "epoch": 0.03025955393243772,
      "grad_norm": 0.7004755139350891,
      "learning_rate": 4.438736842105263e-05,
      "loss": 0.3917,
      "step": 116
    },
    {
      "epoch": 0.03052041215599322,
      "grad_norm": 0.705506443977356,
      "learning_rate": 4.3858947368421056e-05,
      "loss": 0.3312,
      "step": 117
    },
    {
      "epoch": 0.030781270379548715,
      "grad_norm": 0.6989331841468811,
      "learning_rate": 4.3330526315789474e-05,
      "loss": 0.3982,
      "step": 118
    },
    {
      "epoch": 0.031042128603104215,
      "grad_norm": 0.6170200705528259,
      "learning_rate": 4.280210526315789e-05,
      "loss": 0.346,
      "step": 119
    },
    {
      "epoch": 0.03130298682665971,
      "grad_norm": 1.072172999382019,
      "learning_rate": 4.2273684210526317e-05,
      "loss": 0.3726,
      "step": 120
    },
    {
      "epoch": 0.03156384505021521,
      "grad_norm": 0.754610002040863,
      "learning_rate": 4.174526315789474e-05,
      "loss": 0.3519,
      "step": 121
    },
    {
      "epoch": 0.0318247032737707,
      "grad_norm": 0.9958357810974121,
      "learning_rate": 4.121684210526316e-05,
      "loss": 0.2739,
      "step": 122
    },
    {
      "epoch": 0.03208556149732621,
      "grad_norm": 0.8653532266616821,
      "learning_rate": 4.068842105263158e-05,
      "loss": 0.155,
      "step": 123
    },
    {
      "epoch": 0.0323464197208817,
      "grad_norm": 1.0545744895935059,
      "learning_rate": 4.016e-05,
      "loss": 0.0706,
      "step": 124
    },
    {
      "epoch": 0.0326072779444372,
      "grad_norm": 0.9402056932449341,
      "learning_rate": 3.963157894736842e-05,
      "loss": 0.0434,
      "step": 125
    },
    {
      "epoch": 0.032868136167992695,
      "grad_norm": 0.894481897354126,
      "learning_rate": 3.9103157894736844e-05,
      "loss": 0.0202,
      "step": 126
    },
    {
      "epoch": 0.03312899439154819,
      "grad_norm": 0.7376012206077576,
      "learning_rate": 3.857473684210526e-05,
      "loss": 0.0084,
      "step": 127
    },
    {
      "epoch": 0.03338985261510369,
      "grad_norm": 1.427925705909729,
      "learning_rate": 3.804631578947369e-05,
      "loss": 0.0072,
      "step": 128
    },
    {
      "epoch": 0.03365071083865919,
      "grad_norm": 0.9831644296646118,
      "learning_rate": 3.7517894736842105e-05,
      "loss": 0.0031,
      "step": 129
    },
    {
      "epoch": 0.03391156906221469,
      "grad_norm": 0.8595508337020874,
      "learning_rate": 3.698947368421052e-05,
      "loss": 0.0033,
      "step": 130
    },
    {
      "epoch": 0.034172427285770184,
      "grad_norm": 0.06450106203556061,
      "learning_rate": 3.646105263157895e-05,
      "loss": 0.0004,
      "step": 131
    },
    {
      "epoch": 0.03443328550932568,
      "grad_norm": 0.9597758650779724,
      "learning_rate": 3.593263157894737e-05,
      "loss": 0.0023,
      "step": 132
    },
    {
      "epoch": 0.034694143732881176,
      "grad_norm": 0.7004671692848206,
      "learning_rate": 3.540421052631579e-05,
      "loss": 0.0111,
      "step": 133
    },
    {
      "epoch": 0.03495500195643668,
      "grad_norm": 0.9372411966323853,
      "learning_rate": 3.4875789473684215e-05,
      "loss": 0.004,
      "step": 134
    },
    {
      "epoch": 0.035215860179992176,
      "grad_norm": 0.9186482429504395,
      "learning_rate": 3.434736842105263e-05,
      "loss": 0.0082,
      "step": 135
    },
    {
      "epoch": 0.03547671840354767,
      "grad_norm": 0.7771823406219482,
      "learning_rate": 3.381894736842105e-05,
      "loss": 0.0077,
      "step": 136
    },
    {
      "epoch": 0.03573757662710317,
      "grad_norm": 0.15932439267635345,
      "learning_rate": 3.329052631578947e-05,
      "loss": 0.0013,
      "step": 137
    },
    {
      "epoch": 0.035998434850658664,
      "grad_norm": 1.1858546733856201,
      "learning_rate": 3.27621052631579e-05,
      "loss": 0.003,
      "step": 138
    },
    {
      "epoch": 0.03625929307421417,
      "grad_norm": 1.5219371318817139,
      "learning_rate": 3.223368421052632e-05,
      "loss": 0.0174,
      "step": 139
    },
    {
      "epoch": 0.036520151297769664,
      "grad_norm": 0.7560831904411316,
      "learning_rate": 3.1705263157894736e-05,
      "loss": 0.0028,
      "step": 140
    },
    {
      "epoch": 0.03678100952132516,
      "grad_norm": 2.4772675037384033,
      "learning_rate": 3.117684210526316e-05,
      "loss": 0.0093,
      "step": 141
    },
    {
      "epoch": 0.037041867744880656,
      "grad_norm": 0.05707191303372383,
      "learning_rate": 3.064842105263158e-05,
      "loss": 0.0004,
      "step": 142
    },
    {
      "epoch": 0.03730272596843615,
      "grad_norm": 0.03188670799136162,
      "learning_rate": 3.012e-05,
      "loss": 0.0003,
      "step": 143
    },
    {
      "epoch": 0.037563584191991656,
      "grad_norm": 0.041902583092451096,
      "learning_rate": 2.9591578947368418e-05,
      "loss": 0.0004,
      "step": 144
    },
    {
      "epoch": 0.03782444241554715,
      "grad_norm": 1.0179723501205444,
      "learning_rate": 2.9063157894736842e-05,
      "loss": 0.0087,
      "step": 145
    },
    {
      "epoch": 0.03808530063910265,
      "grad_norm": 1.2331745624542236,
      "learning_rate": 2.8534736842105264e-05,
      "loss": 0.0101,
      "step": 146
    },
    {
      "epoch": 0.038346158862658145,
      "grad_norm": 0.06799151748418808,
      "learning_rate": 2.800631578947368e-05,
      "loss": 0.0008,
      "step": 147
    },
    {
      "epoch": 0.03860701708621364,
      "grad_norm": 0.06529802829027176,
      "learning_rate": 2.7477894736842106e-05,
      "loss": 0.0007,
      "step": 148
    },
    {
      "epoch": 0.03886787530976914,
      "grad_norm": 2.640516757965088,
      "learning_rate": 2.6949473684210527e-05,
      "loss": 0.011,
      "step": 149
    },
    {
      "epoch": 0.03912873353332464,
      "grad_norm": 0.2220402956008911,
      "learning_rate": 2.6421052631578945e-05,
      "loss": 0.0021,
      "step": 150
    },
    {
      "epoch": 0.03912873353332464,
      "eval_loss": 0.12124448269605637,
      "eval_runtime": 357.9478,
      "eval_samples_per_second": 4.509,
      "eval_steps_per_second": 1.129,
      "step": 150
    },
    {
      "epoch": 0.039389591756880137,
      "grad_norm": 3.797510862350464,
      "learning_rate": 2.589263157894737e-05,
      "loss": 0.5013,
      "step": 151
    },
    {
      "epoch": 0.03965044998043563,
      "grad_norm": 3.3140711784362793,
      "learning_rate": 2.536421052631579e-05,
      "loss": 0.4598,
      "step": 152
    },
    {
      "epoch": 0.03991130820399113,
      "grad_norm": 1.1835687160491943,
      "learning_rate": 2.483578947368421e-05,
      "loss": 0.3732,
      "step": 153
    },
    {
      "epoch": 0.040172166427546625,
      "grad_norm": 0.5971368551254272,
      "learning_rate": 2.430736842105263e-05,
      "loss": 0.3361,
      "step": 154
    },
    {
      "epoch": 0.04043302465110213,
      "grad_norm": 0.7615634799003601,
      "learning_rate": 2.3778947368421052e-05,
      "loss": 0.317,
      "step": 155
    },
    {
      "epoch": 0.040693882874657625,
      "grad_norm": 0.589562177658081,
      "learning_rate": 2.3250526315789473e-05,
      "loss": 0.3424,
      "step": 156
    },
    {
      "epoch": 0.04095474109821312,
      "grad_norm": 0.5603517293930054,
      "learning_rate": 2.2722105263157894e-05,
      "loss": 0.3523,
      "step": 157
    },
    {
      "epoch": 0.04121559932176862,
      "grad_norm": 0.6157901883125305,
      "learning_rate": 2.2193684210526316e-05,
      "loss": 0.3667,
      "step": 158
    },
    {
      "epoch": 0.041476457545324114,
      "grad_norm": 0.5660136342048645,
      "learning_rate": 2.1665263157894737e-05,
      "loss": 0.2797,
      "step": 159
    },
    {
      "epoch": 0.04173731576887962,
      "grad_norm": 0.5905348658561707,
      "learning_rate": 2.1136842105263158e-05,
      "loss": 0.3196,
      "step": 160
    },
    {
      "epoch": 0.04199817399243511,
      "grad_norm": 0.6353060603141785,
      "learning_rate": 2.060842105263158e-05,
      "loss": 0.3449,
      "step": 161
    },
    {
      "epoch": 0.04225903221599061,
      "grad_norm": 0.6922320127487183,
      "learning_rate": 2.008e-05,
      "loss": 0.3504,
      "step": 162
    },
    {
      "epoch": 0.042519890439546106,
      "grad_norm": 0.5601292252540588,
      "learning_rate": 1.9551578947368422e-05,
      "loss": 0.3378,
      "step": 163
    },
    {
      "epoch": 0.0427807486631016,
      "grad_norm": 0.5565058588981628,
      "learning_rate": 1.9023157894736843e-05,
      "loss": 0.3541,
      "step": 164
    },
    {
      "epoch": 0.043041606886657105,
      "grad_norm": 0.5381285548210144,
      "learning_rate": 1.849473684210526e-05,
      "loss": 0.2956,
      "step": 165
    },
    {
      "epoch": 0.0433024651102126,
      "grad_norm": 0.8170627951622009,
      "learning_rate": 1.7966315789473686e-05,
      "loss": 0.3399,
      "step": 166
    },
    {
      "epoch": 0.0435633233337681,
      "grad_norm": 0.7445321083068848,
      "learning_rate": 1.7437894736842107e-05,
      "loss": 0.3516,
      "step": 167
    },
    {
      "epoch": 0.043824181557323594,
      "grad_norm": 0.6222259998321533,
      "learning_rate": 1.6909473684210525e-05,
      "loss": 0.3152,
      "step": 168
    },
    {
      "epoch": 0.04408503978087909,
      "grad_norm": 0.6484302878379822,
      "learning_rate": 1.638105263157895e-05,
      "loss": 0.3919,
      "step": 169
    },
    {
      "epoch": 0.04434589800443459,
      "grad_norm": 0.6885114908218384,
      "learning_rate": 1.5852631578947368e-05,
      "loss": 0.3483,
      "step": 170
    },
    {
      "epoch": 0.04460675622799009,
      "grad_norm": 0.6635403633117676,
      "learning_rate": 1.532421052631579e-05,
      "loss": 0.2702,
      "step": 171
    },
    {
      "epoch": 0.044867614451545586,
      "grad_norm": 0.7835387587547302,
      "learning_rate": 1.4795789473684209e-05,
      "loss": 0.3227,
      "step": 172
    },
    {
      "epoch": 0.04512847267510108,
      "grad_norm": 0.6229288578033447,
      "learning_rate": 1.4267368421052632e-05,
      "loss": 0.288,
      "step": 173
    },
    {
      "epoch": 0.04538933089865658,
      "grad_norm": 0.7214296460151672,
      "learning_rate": 1.3738947368421053e-05,
      "loss": 0.358,
      "step": 174
    },
    {
      "epoch": 0.045650189122212075,
      "grad_norm": 0.6343803405761719,
      "learning_rate": 1.3210526315789473e-05,
      "loss": 0.1716,
      "step": 175
    },
    {
      "epoch": 0.04591104734576758,
      "grad_norm": 0.470270037651062,
      "learning_rate": 1.2682105263157896e-05,
      "loss": 0.0489,
      "step": 176
    },
    {
      "epoch": 0.046171905569323074,
      "grad_norm": 0.3558203876018524,
      "learning_rate": 1.2153684210526315e-05,
      "loss": 0.0174,
      "step": 177
    },
    {
      "epoch": 0.04643276379287857,
      "grad_norm": 0.4911562502384186,
      "learning_rate": 1.1625263157894737e-05,
      "loss": 0.0214,
      "step": 178
    },
    {
      "epoch": 0.046693622016434067,
      "grad_norm": 0.5593497157096863,
      "learning_rate": 1.1096842105263158e-05,
      "loss": 0.022,
      "step": 179
    },
    {
      "epoch": 0.04695448023998956,
      "grad_norm": 0.4090302288532257,
      "learning_rate": 1.0568421052631579e-05,
      "loss": 0.0197,
      "step": 180
    },
    {
      "epoch": 0.047215338463545066,
      "grad_norm": 0.44141674041748047,
      "learning_rate": 1.004e-05,
      "loss": 0.013,
      "step": 181
    },
    {
      "epoch": 0.04747619668710056,
      "grad_norm": 0.38881024718284607,
      "learning_rate": 9.511578947368422e-06,
      "loss": 0.0129,
      "step": 182
    },
    {
      "epoch": 0.04773705491065606,
      "grad_norm": 0.3517584502696991,
      "learning_rate": 8.983157894736843e-06,
      "loss": 0.013,
      "step": 183
    },
    {
      "epoch": 0.047997913134211555,
      "grad_norm": 0.30295777320861816,
      "learning_rate": 8.454736842105263e-06,
      "loss": 0.0068,
      "step": 184
    },
    {
      "epoch": 0.04825877135776705,
      "grad_norm": 0.22888092696666718,
      "learning_rate": 7.926315789473684e-06,
      "loss": 0.0032,
      "step": 185
    },
    {
      "epoch": 0.048519629581322554,
      "grad_norm": 0.24423319101333618,
      "learning_rate": 7.397894736842104e-06,
      "loss": 0.0032,
      "step": 186
    },
    {
      "epoch": 0.04878048780487805,
      "grad_norm": 0.5701403021812439,
      "learning_rate": 6.8694736842105265e-06,
      "loss": 0.004,
      "step": 187
    },
    {
      "epoch": 0.04904134602843355,
      "grad_norm": 0.7065002918243408,
      "learning_rate": 6.341052631578948e-06,
      "loss": 0.005,
      "step": 188
    },
    {
      "epoch": 0.04930220425198904,
      "grad_norm": 0.642783522605896,
      "learning_rate": 5.812631578947368e-06,
      "loss": 0.004,
      "step": 189
    },
    {
      "epoch": 0.04956306247554454,
      "grad_norm": 0.04088672995567322,
      "learning_rate": 5.2842105263157896e-06,
      "loss": 0.0003,
      "step": 190
    },
    {
      "epoch": 0.04982392069910004,
      "grad_norm": 0.6026747822761536,
      "learning_rate": 4.755789473684211e-06,
      "loss": 0.0056,
      "step": 191
    },
    {
      "epoch": 0.05008477892265554,
      "grad_norm": 0.15268850326538086,
      "learning_rate": 4.227368421052631e-06,
      "loss": 0.0015,
      "step": 192
    },
    {
      "epoch": 0.050345637146211035,
      "grad_norm": 0.20319561660289764,
      "learning_rate": 3.698947368421052e-06,
      "loss": 0.0008,
      "step": 193
    },
    {
      "epoch": 0.05060649536976653,
      "grad_norm": 0.938482403755188,
      "learning_rate": 3.170526315789474e-06,
      "loss": 0.0017,
      "step": 194
    },
    {
      "epoch": 0.05086735359332203,
      "grad_norm": 0.10921061784029007,
      "learning_rate": 2.6421052631578948e-06,
      "loss": 0.0004,
      "step": 195
    },
    {
      "epoch": 0.051128211816877524,
      "grad_norm": 0.25236430764198303,
      "learning_rate": 2.1136842105263157e-06,
      "loss": 0.002,
      "step": 196
    },
    {
      "epoch": 0.05138907004043303,
      "grad_norm": 0.3114216923713684,
      "learning_rate": 1.585263157894737e-06,
      "loss": 0.0012,
      "step": 197
    },
    {
      "epoch": 0.05164992826398852,
      "grad_norm": 0.4255119264125824,
      "learning_rate": 1.0568421052631578e-06,
      "loss": 0.0071,
      "step": 198
    },
    {
      "epoch": 0.05191078648754402,
      "grad_norm": 0.18521125614643097,
      "learning_rate": 5.284210526315789e-07,
      "loss": 0.0005,
      "step": 199
    },
    {
      "epoch": 0.052171644711099516,
      "grad_norm": 2.783665180206299,
      "learning_rate": 0.0,
      "loss": 0.0279,
      "step": 200
    },
    {
      "epoch": 0.052171644711099516,
      "eval_loss": 0.08230920881032944,
      "eval_runtime": 357.6332,
      "eval_samples_per_second": 4.513,
      "eval_steps_per_second": 1.13,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3057871528853504e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}