{
  "best_metric": 0.4173365831375122,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.0784313725490196,
  "eval_steps": 100,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000784313725490196,
      "grad_norm": 13.154260635375977,
      "learning_rate": 7e-06,
      "loss": 4.2676,
      "step": 1
    },
    {
      "epoch": 0.000784313725490196,
      "eval_loss": 1.2241708040237427,
      "eval_runtime": 167.7852,
      "eval_samples_per_second": 12.802,
      "eval_steps_per_second": 3.201,
      "step": 1
    },
    {
      "epoch": 0.001568627450980392,
      "grad_norm": 15.154508590698242,
      "learning_rate": 1.4e-05,
      "loss": 2.7549,
      "step": 2
    },
    {
      "epoch": 0.002352941176470588,
      "grad_norm": 14.48685073852539,
      "learning_rate": 2.1e-05,
      "loss": 2.7878,
      "step": 3
    },
    {
      "epoch": 0.003137254901960784,
      "grad_norm": 13.144845008850098,
      "learning_rate": 2.8e-05,
      "loss": 2.3061,
      "step": 4
    },
    {
      "epoch": 0.00392156862745098,
      "grad_norm": 9.391257286071777,
      "learning_rate": 3.5e-05,
      "loss": 1.9517,
      "step": 5
    },
    {
      "epoch": 0.004705882352941176,
      "grad_norm": 8.726778984069824,
      "learning_rate": 4.2e-05,
      "loss": 2.1089,
      "step": 6
    },
    {
      "epoch": 0.005490196078431373,
      "grad_norm": 7.819472789764404,
      "learning_rate": 4.899999999999999e-05,
      "loss": 1.7353,
      "step": 7
    },
    {
      "epoch": 0.006274509803921568,
      "grad_norm": 7.163968086242676,
      "learning_rate": 5.6e-05,
      "loss": 1.7352,
      "step": 8
    },
    {
      "epoch": 0.007058823529411765,
      "grad_norm": 7.747889995574951,
      "learning_rate": 6.3e-05,
      "loss": 2.1403,
      "step": 9
    },
    {
      "epoch": 0.00784313725490196,
      "grad_norm": 7.355112075805664,
      "learning_rate": 7e-05,
      "loss": 1.7824,
      "step": 10
    },
    {
      "epoch": 0.008627450980392156,
      "grad_norm": 6.8065619468688965,
      "learning_rate": 6.999886445007847e-05,
      "loss": 1.7958,
      "step": 11
    },
    {
      "epoch": 0.009411764705882352,
      "grad_norm": 7.497786045074463,
      "learning_rate": 6.999545787399811e-05,
      "loss": 1.8139,
      "step": 12
    },
    {
      "epoch": 0.01019607843137255,
      "grad_norm": 8.02735710144043,
      "learning_rate": 6.998978049280675e-05,
      "loss": 1.7139,
      "step": 13
    },
    {
      "epoch": 0.010980392156862745,
      "grad_norm": 7.836209297180176,
      "learning_rate": 6.998183267490152e-05,
      "loss": 1.7008,
      "step": 14
    },
    {
      "epoch": 0.011764705882352941,
      "grad_norm": 6.723918914794922,
      "learning_rate": 6.997161493600493e-05,
      "loss": 1.5095,
      "step": 15
    },
    {
      "epoch": 0.012549019607843137,
      "grad_norm": 7.266279220581055,
      "learning_rate": 6.995912793913143e-05,
      "loss": 1.7114,
      "step": 16
    },
    {
      "epoch": 0.013333333333333334,
      "grad_norm": 8.910981178283691,
      "learning_rate": 6.994437249454433e-05,
      "loss": 1.8847,
      "step": 17
    },
    {
      "epoch": 0.01411764705882353,
      "grad_norm": 7.561214923858643,
      "learning_rate": 6.992734955970331e-05,
      "loss": 1.4478,
      "step": 18
    },
    {
      "epoch": 0.014901960784313726,
      "grad_norm": 11.097957611083984,
      "learning_rate": 6.990806023920219e-05,
      "loss": 1.8268,
      "step": 19
    },
    {
      "epoch": 0.01568627450980392,
      "grad_norm": 8.547746658325195,
      "learning_rate": 6.988650578469734e-05,
      "loss": 1.9715,
      "step": 20
    },
    {
      "epoch": 0.01647058823529412,
      "grad_norm": 7.6952643394470215,
      "learning_rate": 6.986268759482644e-05,
      "loss": 1.3908,
      "step": 21
    },
    {
      "epoch": 0.017254901960784313,
      "grad_norm": 9.783441543579102,
      "learning_rate": 6.983660721511769e-05,
      "loss": 1.6528,
      "step": 22
    },
    {
      "epoch": 0.01803921568627451,
      "grad_norm": 8.756048202514648,
      "learning_rate": 6.980826633788956e-05,
      "loss": 1.3471,
      "step": 23
    },
    {
      "epoch": 0.018823529411764704,
      "grad_norm": 10.131261825561523,
      "learning_rate": 6.977766680214095e-05,
      "loss": 1.6056,
      "step": 24
    },
    {
      "epoch": 0.0196078431372549,
      "grad_norm": 9.784903526306152,
      "learning_rate": 6.974481059343188e-05,
      "loss": 1.349,
      "step": 25
    },
    {
      "epoch": 0.0203921568627451,
      "grad_norm": 10.655691146850586,
      "learning_rate": 6.970969984375466e-05,
      "loss": 1.0902,
      "step": 26
    },
    {
      "epoch": 0.021176470588235293,
      "grad_norm": 10.72864055633545,
      "learning_rate": 6.967233683139552e-05,
      "loss": 1.4288,
      "step": 27
    },
    {
      "epoch": 0.02196078431372549,
      "grad_norm": 7.463002681732178,
      "learning_rate": 6.963272398078678e-05,
      "loss": 1.0672,
      "step": 28
    },
    {
      "epoch": 0.022745098039215685,
      "grad_norm": 11.380589485168457,
      "learning_rate": 6.959086386234956e-05,
      "loss": 1.3336,
      "step": 29
    },
    {
      "epoch": 0.023529411764705882,
      "grad_norm": 9.633968353271484,
      "learning_rate": 6.954675919232694e-05,
      "loss": 0.9633,
      "step": 30
    },
    {
      "epoch": 0.02431372549019608,
      "grad_norm": 8.88882827758789,
      "learning_rate": 6.950041283260778e-05,
      "loss": 1.0786,
      "step": 31
    },
    {
      "epoch": 0.025098039215686273,
      "grad_norm": 14.510672569274902,
      "learning_rate": 6.945182779054092e-05,
      "loss": 1.4122,
      "step": 32
    },
    {
      "epoch": 0.02588235294117647,
      "grad_norm": 15.632862091064453,
      "learning_rate": 6.940100721874013e-05,
      "loss": 2.0142,
      "step": 33
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 12.684489250183105,
      "learning_rate": 6.934795441487947e-05,
      "loss": 1.6836,
      "step": 34
    },
    {
      "epoch": 0.027450980392156862,
      "grad_norm": 13.649406433105469,
      "learning_rate": 6.929267282147936e-05,
      "loss": 1.7083,
      "step": 35
    },
    {
      "epoch": 0.02823529411764706,
      "grad_norm": 10.714078903198242,
      "learning_rate": 6.923516602568319e-05,
      "loss": 1.6407,
      "step": 36
    },
    {
      "epoch": 0.029019607843137254,
      "grad_norm": 10.981186866760254,
      "learning_rate": 6.917543775902452e-05,
      "loss": 1.483,
      "step": 37
    },
    {
      "epoch": 0.02980392156862745,
      "grad_norm": 12.960887908935547,
      "learning_rate": 6.911349189718497e-05,
      "loss": 1.7072,
      "step": 38
    },
    {
      "epoch": 0.03058823529411765,
      "grad_norm": 11.793508529663086,
      "learning_rate": 6.904933245974274e-05,
      "loss": 2.088,
      "step": 39
    },
    {
      "epoch": 0.03137254901960784,
      "grad_norm": 14.335042953491211,
      "learning_rate": 6.898296360991182e-05,
      "loss": 1.5573,
      "step": 40
    },
    {
      "epoch": 0.03215686274509804,
      "grad_norm": 19.681434631347656,
      "learning_rate": 6.891438965427172e-05,
      "loss": 1.7118,
      "step": 41
    },
    {
      "epoch": 0.03294117647058824,
      "grad_norm": 14.27527904510498,
      "learning_rate": 6.884361504248818e-05,
      "loss": 2.44,
      "step": 42
    },
    {
      "epoch": 0.03372549019607843,
      "grad_norm": 16.58424186706543,
      "learning_rate": 6.87706443670243e-05,
      "loss": 1.6355,
      "step": 43
    },
    {
      "epoch": 0.034509803921568626,
      "grad_norm": 20.70366096496582,
      "learning_rate": 6.869548236284267e-05,
      "loss": 2.2336,
      "step": 44
    },
    {
      "epoch": 0.03529411764705882,
      "grad_norm": 13.909195899963379,
      "learning_rate": 6.861813390709803e-05,
      "loss": 1.6299,
      "step": 45
    },
    {
      "epoch": 0.03607843137254902,
      "grad_norm": 18.837871551513672,
      "learning_rate": 6.853860401882081e-05,
      "loss": 2.6585,
      "step": 46
    },
    {
      "epoch": 0.03686274509803922,
      "grad_norm": 32.49602127075195,
      "learning_rate": 6.845689785859151e-05,
      "loss": 2.2312,
      "step": 47
    },
    {
      "epoch": 0.03764705882352941,
      "grad_norm": 22.12024688720703,
      "learning_rate": 6.837302072820576e-05,
      "loss": 2.5807,
      "step": 48
    },
    {
      "epoch": 0.038431372549019606,
      "grad_norm": 32.349609375,
      "learning_rate": 6.828697807033038e-05,
      "loss": 2.6884,
      "step": 49
    },
    {
      "epoch": 0.0392156862745098,
      "grad_norm": 38.483177185058594,
      "learning_rate": 6.819877546815009e-05,
      "loss": 3.4595,
      "step": 50
    },
    {
      "epoch": 0.04,
      "grad_norm": 11.971654891967773,
      "learning_rate": 6.810841864500537e-05,
      "loss": 3.9855,
      "step": 51
    },
    {
      "epoch": 0.0407843137254902,
      "grad_norm": 5.677441120147705,
      "learning_rate": 6.8015913464021e-05,
      "loss": 2.2597,
      "step": 52
    },
    {
      "epoch": 0.04156862745098039,
      "grad_norm": 3.608100652694702,
      "learning_rate": 6.79212659277256e-05,
      "loss": 1.8872,
      "step": 53
    },
    {
      "epoch": 0.042352941176470586,
      "grad_norm": 3.213096857070923,
      "learning_rate": 6.782448217766216e-05,
      "loss": 1.4016,
      "step": 54
    },
    {
      "epoch": 0.043137254901960784,
      "grad_norm": 4.3245744705200195,
      "learning_rate": 6.772556849398952e-05,
      "loss": 1.5969,
      "step": 55
    },
    {
      "epoch": 0.04392156862745098,
      "grad_norm": 4.572765350341797,
      "learning_rate": 6.762453129507487e-05,
      "loss": 1.5966,
      "step": 56
    },
    {
      "epoch": 0.04470588235294118,
      "grad_norm": 2.8827388286590576,
      "learning_rate": 6.752137713707726e-05,
      "loss": 1.0312,
      "step": 57
    },
    {
      "epoch": 0.04549019607843137,
      "grad_norm": 3.4303178787231445,
      "learning_rate": 6.741611271352217e-05,
      "loss": 1.3693,
      "step": 58
    },
    {
      "epoch": 0.04627450980392157,
      "grad_norm": 3.742342948913574,
      "learning_rate": 6.730874485486721e-05,
      "loss": 1.2792,
      "step": 59
    },
    {
      "epoch": 0.047058823529411764,
      "grad_norm": 3.946136713027954,
      "learning_rate": 6.719928052805884e-05,
      "loss": 1.4547,
      "step": 60
    },
    {
      "epoch": 0.04784313725490196,
      "grad_norm": 3.579709768295288,
      "learning_rate": 6.708772683608038e-05,
      "loss": 1.265,
      "step": 61
    },
    {
      "epoch": 0.04862745098039216,
      "grad_norm": 3.937344551086426,
      "learning_rate": 6.697409101749102e-05,
      "loss": 1.475,
      "step": 62
    },
    {
      "epoch": 0.04941176470588235,
      "grad_norm": 4.387577056884766,
      "learning_rate": 6.685838044595621e-05,
      "loss": 1.5258,
      "step": 63
    },
    {
      "epoch": 0.05019607843137255,
      "grad_norm": 5.537537574768066,
      "learning_rate": 6.674060262976909e-05,
      "loss": 1.426,
      "step": 64
    },
    {
      "epoch": 0.050980392156862744,
      "grad_norm": 4.949011325836182,
      "learning_rate": 6.662076521136337e-05,
      "loss": 1.6677,
      "step": 65
    },
    {
      "epoch": 0.05176470588235294,
      "grad_norm": 5.1415114402771,
      "learning_rate": 6.649887596681742e-05,
      "loss": 1.5576,
      "step": 66
    },
    {
      "epoch": 0.05254901960784314,
      "grad_norm": 4.477947235107422,
      "learning_rate": 6.637494280534962e-05,
      "loss": 1.4792,
      "step": 67
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 5.3575758934021,
      "learning_rate": 6.624897376880523e-05,
      "loss": 1.6056,
      "step": 68
    },
    {
      "epoch": 0.05411764705882353,
      "grad_norm": 5.173808574676514,
      "learning_rate": 6.612097703113452e-05,
      "loss": 1.1905,
      "step": 69
    },
    {
      "epoch": 0.054901960784313725,
      "grad_norm": 5.39131498336792,
      "learning_rate": 6.599096089786233e-05,
      "loss": 1.5183,
      "step": 70
    },
    {
      "epoch": 0.05568627450980392,
      "grad_norm": 6.076783180236816,
      "learning_rate": 6.585893380554929e-05,
      "loss": 1.0184,
      "step": 71
    },
    {
      "epoch": 0.05647058823529412,
      "grad_norm": 7.40262508392334,
      "learning_rate": 6.572490432124416e-05,
      "loss": 1.6043,
      "step": 72
    },
    {
      "epoch": 0.05725490196078432,
      "grad_norm": 6.334597587585449,
      "learning_rate": 6.558888114192812e-05,
      "loss": 1.3284,
      "step": 73
    },
    {
      "epoch": 0.05803921568627451,
      "grad_norm": 6.603546619415283,
      "learning_rate": 6.545087309395037e-05,
      "loss": 1.4609,
      "step": 74
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 10.423970222473145,
      "learning_rate": 6.531088913245535e-05,
      "loss": 1.5566,
      "step": 75
    },
    {
      "epoch": 0.0596078431372549,
      "grad_norm": 7.711127758026123,
      "learning_rate": 6.516893834080172e-05,
      "loss": 1.525,
      "step": 76
    },
    {
      "epoch": 0.0603921568627451,
      "grad_norm": 6.958899974822998,
      "learning_rate": 6.502502992997292e-05,
      "loss": 1.6027,
      "step": 77
    },
    {
      "epoch": 0.0611764705882353,
      "grad_norm": 8.05106258392334,
      "learning_rate": 6.487917323797953e-05,
      "loss": 1.5831,
      "step": 78
    },
    {
      "epoch": 0.06196078431372549,
      "grad_norm": 5.530167579650879,
      "learning_rate": 6.473137772925323e-05,
      "loss": 1.0131,
      "step": 79
    },
    {
      "epoch": 0.06274509803921569,
      "grad_norm": 6.529147624969482,
      "learning_rate": 6.458165299403281e-05,
      "loss": 1.026,
      "step": 80
    },
    {
      "epoch": 0.06352941176470588,
      "grad_norm": 4.573212623596191,
      "learning_rate": 6.443000874774177e-05,
      "loss": 0.6019,
      "step": 81
    },
    {
      "epoch": 0.06431372549019608,
      "grad_norm": 6.415572166442871,
      "learning_rate": 6.427645483035793e-05,
      "loss": 1.2427,
      "step": 82
    },
    {
      "epoch": 0.06509803921568627,
      "grad_norm": 12.587393760681152,
      "learning_rate": 6.412100120577495e-05,
      "loss": 0.8412,
      "step": 83
    },
    {
      "epoch": 0.06588235294117648,
      "grad_norm": 8.243964195251465,
      "learning_rate": 6.396365796115574e-05,
      "loss": 1.4314,
      "step": 84
    },
    {
      "epoch": 0.06666666666666667,
      "grad_norm": 10.298484802246094,
      "learning_rate": 6.380443530627796e-05,
      "loss": 1.471,
      "step": 85
    },
    {
      "epoch": 0.06745098039215686,
      "grad_norm": 7.470930099487305,
      "learning_rate": 6.364334357287154e-05,
      "loss": 1.6067,
      "step": 86
    },
    {
      "epoch": 0.06823529411764706,
      "grad_norm": 7.880890846252441,
      "learning_rate": 6.348039321394819e-05,
      "loss": 1.7093,
      "step": 87
    },
    {
      "epoch": 0.06901960784313725,
      "grad_norm": 8.50384521484375,
      "learning_rate": 6.331559480312315e-05,
      "loss": 1.5957,
      "step": 88
    },
    {
      "epoch": 0.06980392156862746,
      "grad_norm": 11.584599494934082,
      "learning_rate": 6.314895903392917e-05,
      "loss": 2.2822,
      "step": 89
    },
    {
      "epoch": 0.07058823529411765,
      "grad_norm": 8.506998062133789,
      "learning_rate": 6.298049671912254e-05,
      "loss": 1.3157,
      "step": 90
    },
    {
      "epoch": 0.07137254901960784,
      "grad_norm": 8.96076488494873,
      "learning_rate": 6.281021878998139e-05,
      "loss": 1.399,
      "step": 91
    },
    {
      "epoch": 0.07215686274509804,
      "grad_norm": 7.781743049621582,
      "learning_rate": 6.263813629559657e-05,
      "loss": 1.3365,
      "step": 92
    },
    {
      "epoch": 0.07294117647058823,
      "grad_norm": 11.301823616027832,
      "learning_rate": 6.246426040215451e-05,
      "loss": 1.931,
      "step": 93
    },
    {
      "epoch": 0.07372549019607844,
      "grad_norm": 14.643918991088867,
      "learning_rate": 6.228860239221277e-05,
      "loss": 1.949,
      "step": 94
    },
    {
      "epoch": 0.07450980392156863,
      "grad_norm": 12.800115585327148,
      "learning_rate": 6.21111736639679e-05,
      "loss": 1.9397,
      "step": 95
    },
    {
      "epoch": 0.07529411764705882,
      "grad_norm": 15.508123397827148,
      "learning_rate": 6.193198573051581e-05,
      "loss": 2.5479,
      "step": 96
    },
    {
      "epoch": 0.07607843137254902,
      "grad_norm": 22.137767791748047,
      "learning_rate": 6.175105021910468e-05,
      "loss": 2.0942,
      "step": 97
    },
    {
      "epoch": 0.07686274509803921,
      "grad_norm": 17.05497169494629,
      "learning_rate": 6.156837887038059e-05,
      "loss": 2.3568,
      "step": 98
    },
    {
      "epoch": 0.07764705882352942,
      "grad_norm": 18.51692771911621,
      "learning_rate": 6.138398353762557e-05,
      "loss": 2.4071,
      "step": 99
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 18.45566177368164,
      "learning_rate": 6.119787618598853e-05,
      "loss": 1.7988,
      "step": 100
    },
    {
      "epoch": 0.0784313725490196,
      "eval_loss": 0.4173365831375122,
      "eval_runtime": 169.3484,
      "eval_samples_per_second": 12.684,
      "eval_steps_per_second": 3.171,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.513282097727406e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}