{
  "best_metric": 0.6681632995605469,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.11302627860977678,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011302627860977678,
      "grad_norm": 793.308837890625,
      "learning_rate": 1e-05,
      "loss": 11.6478,
      "step": 1
    },
    {
      "epoch": 0.0011302627860977678,
      "eval_loss": 3.2303900718688965,
      "eval_runtime": 105.0885,
      "eval_samples_per_second": 14.179,
      "eval_steps_per_second": 3.549,
      "step": 1
    },
    {
      "epoch": 0.0022605255721955355,
      "grad_norm": 1157.48828125,
      "learning_rate": 2e-05,
      "loss": 12.4896,
      "step": 2
    },
    {
      "epoch": 0.003390788358293303,
      "grad_norm": 131.5410919189453,
      "learning_rate": 3e-05,
      "loss": 9.1943,
      "step": 3
    },
    {
      "epoch": 0.004521051144391071,
      "grad_norm": 68.70294952392578,
      "learning_rate": 4e-05,
      "loss": 5.6464,
      "step": 4
    },
    {
      "epoch": 0.005651313930488839,
      "grad_norm": 148.5250244140625,
      "learning_rate": 5e-05,
      "loss": 4.9647,
      "step": 5
    },
    {
      "epoch": 0.006781576716586606,
      "grad_norm": 39.88493728637695,
      "learning_rate": 6e-05,
      "loss": 3.6084,
      "step": 6
    },
    {
      "epoch": 0.007911839502684374,
      "grad_norm": 13.25629711151123,
      "learning_rate": 7e-05,
      "loss": 3.0777,
      "step": 7
    },
    {
      "epoch": 0.009042102288782142,
      "grad_norm": 15.91224193572998,
      "learning_rate": 8e-05,
      "loss": 2.9451,
      "step": 8
    },
    {
      "epoch": 0.010172365074879909,
      "grad_norm": 17.490968704223633,
      "learning_rate": 9e-05,
      "loss": 2.8105,
      "step": 9
    },
    {
      "epoch": 0.011302627860977677,
      "grad_norm": 9.884227752685547,
      "learning_rate": 0.0001,
      "loss": 2.7296,
      "step": 10
    },
    {
      "epoch": 0.012432890647075446,
      "grad_norm": 10.834426879882812,
      "learning_rate": 9.999316524962345e-05,
      "loss": 2.697,
      "step": 11
    },
    {
      "epoch": 0.013563153433173212,
      "grad_norm": 40.39820861816406,
      "learning_rate": 9.997266286704631e-05,
      "loss": 3.3782,
      "step": 12
    },
    {
      "epoch": 0.01469341621927098,
      "grad_norm": 10.876916885375977,
      "learning_rate": 9.993849845741524e-05,
      "loss": 2.8799,
      "step": 13
    },
    {
      "epoch": 0.015823679005368747,
      "grad_norm": 6.255303382873535,
      "learning_rate": 9.989068136093873e-05,
      "loss": 2.7307,
      "step": 14
    },
    {
      "epoch": 0.016953941791466517,
      "grad_norm": 5.4854416847229,
      "learning_rate": 9.98292246503335e-05,
      "loss": 2.6597,
      "step": 15
    },
    {
      "epoch": 0.018084204577564284,
      "grad_norm": 8.040909767150879,
      "learning_rate": 9.975414512725057e-05,
      "loss": 2.669,
      "step": 16
    },
    {
      "epoch": 0.01921446736366205,
      "grad_norm": 5.405959606170654,
      "learning_rate": 9.966546331768191e-05,
      "loss": 2.6124,
      "step": 17
    },
    {
      "epoch": 0.020344730149759818,
      "grad_norm": 6.204880237579346,
      "learning_rate": 9.956320346634876e-05,
      "loss": 2.6543,
      "step": 18
    },
    {
      "epoch": 0.021474992935857588,
      "grad_norm": 5.449324607849121,
      "learning_rate": 9.944739353007344e-05,
      "loss": 2.7599,
      "step": 19
    },
    {
      "epoch": 0.022605255721955354,
      "grad_norm": 5.886956691741943,
      "learning_rate": 9.931806517013612e-05,
      "loss": 2.7869,
      "step": 20
    },
    {
      "epoch": 0.02373551850805312,
      "grad_norm": 4.629397392272949,
      "learning_rate": 9.917525374361912e-05,
      "loss": 2.757,
      "step": 21
    },
    {
      "epoch": 0.02486578129415089,
      "grad_norm": 4.251619815826416,
      "learning_rate": 9.901899829374047e-05,
      "loss": 2.5278,
      "step": 22
    },
    {
      "epoch": 0.025996044080248658,
      "grad_norm": 5.332093715667725,
      "learning_rate": 9.884934153917997e-05,
      "loss": 2.7128,
      "step": 23
    },
    {
      "epoch": 0.027126306866346424,
      "grad_norm": 4.628209590911865,
      "learning_rate": 9.86663298624003e-05,
      "loss": 2.6893,
      "step": 24
    },
    {
      "epoch": 0.028256569652444195,
      "grad_norm": 5.050840377807617,
      "learning_rate": 9.847001329696653e-05,
      "loss": 2.873,
      "step": 25
    },
    {
      "epoch": 0.02938683243854196,
      "grad_norm": 15.831549644470215,
      "learning_rate": 9.826044551386744e-05,
      "loss": 2.764,
      "step": 26
    },
    {
      "epoch": 0.030517095224639728,
      "grad_norm": 5.640730857849121,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.8243,
      "step": 27
    },
    {
      "epoch": 0.031647358010737495,
      "grad_norm": 4.622921943664551,
      "learning_rate": 9.780178907671789e-05,
      "loss": 2.8895,
      "step": 28
    },
    {
      "epoch": 0.032777620796835265,
      "grad_norm": 3.4673752784729004,
      "learning_rate": 9.755282581475769e-05,
      "loss": 2.6753,
      "step": 29
    },
    {
      "epoch": 0.033907883582933035,
      "grad_norm": 4.400412559509277,
      "learning_rate": 9.729086208503174e-05,
      "loss": 2.918,
      "step": 30
    },
    {
      "epoch": 0.0350381463690308,
      "grad_norm": 30.6713809967041,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.8105,
      "step": 31
    },
    {
      "epoch": 0.03616840915512857,
      "grad_norm": 4.252901077270508,
      "learning_rate": 9.672822322997305e-05,
      "loss": 2.8125,
      "step": 32
    },
    {
      "epoch": 0.03729867194122634,
      "grad_norm": 4.446712970733643,
      "learning_rate": 9.642770192448536e-05,
      "loss": 2.7987,
      "step": 33
    },
    {
      "epoch": 0.0384289347273241,
      "grad_norm": 4.5988688468933105,
      "learning_rate": 9.611448774886924e-05,
      "loss": 2.8463,
      "step": 34
    },
    {
      "epoch": 0.03955919751342187,
      "grad_norm": 3.57090163230896,
      "learning_rate": 9.578866633275288e-05,
      "loss": 2.8395,
      "step": 35
    },
    {
      "epoch": 0.040689460299519635,
      "grad_norm": 5.003127574920654,
      "learning_rate": 9.545032675245813e-05,
      "loss": 2.8054,
      "step": 36
    },
    {
      "epoch": 0.041819723085617405,
      "grad_norm": 4.430306911468506,
      "learning_rate": 9.509956150664796e-05,
      "loss": 2.9095,
      "step": 37
    },
    {
      "epoch": 0.042949985871715175,
      "grad_norm": 3.776151180267334,
      "learning_rate": 9.473646649103818e-05,
      "loss": 2.7794,
      "step": 38
    },
    {
      "epoch": 0.04408024865781294,
      "grad_norm": 4.0163373947143555,
      "learning_rate": 9.43611409721806e-05,
      "loss": 2.8656,
      "step": 39
    },
    {
      "epoch": 0.04521051144391071,
      "grad_norm": 3.8399384021759033,
      "learning_rate": 9.397368756032445e-05,
      "loss": 2.8012,
      "step": 40
    },
    {
      "epoch": 0.04634077423000848,
      "grad_norm": 3.859337568283081,
      "learning_rate": 9.357421218136386e-05,
      "loss": 2.9344,
      "step": 41
    },
    {
      "epoch": 0.04747103701610624,
      "grad_norm": 3.8300116062164307,
      "learning_rate": 9.316282404787871e-05,
      "loss": 2.8931,
      "step": 42
    },
    {
      "epoch": 0.04860129980220401,
      "grad_norm": 3.786048412322998,
      "learning_rate": 9.273963562927695e-05,
      "loss": 2.8088,
      "step": 43
    },
    {
      "epoch": 0.04973156258830178,
      "grad_norm": 3.4179418087005615,
      "learning_rate": 9.230476262104677e-05,
      "loss": 2.8516,
      "step": 44
    },
    {
      "epoch": 0.050861825374399545,
      "grad_norm": 3.5727052688598633,
      "learning_rate": 9.185832391312644e-05,
      "loss": 2.7993,
      "step": 45
    },
    {
      "epoch": 0.051992088160497316,
      "grad_norm": 3.7449238300323486,
      "learning_rate": 9.140044155740101e-05,
      "loss": 2.1631,
      "step": 46
    },
    {
      "epoch": 0.053122350946595086,
      "grad_norm": 3.2603912353515625,
      "learning_rate": 9.093124073433463e-05,
      "loss": 1.8704,
      "step": 47
    },
    {
      "epoch": 0.05425261373269285,
      "grad_norm": 2.6630172729492188,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.6844,
      "step": 48
    },
    {
      "epoch": 0.05538287651879062,
      "grad_norm": 3.6026859283447266,
      "learning_rate": 8.995939984474624e-05,
      "loss": 2.0858,
      "step": 49
    },
    {
      "epoch": 0.05651313930488839,
      "grad_norm": 3.2763783931732178,
      "learning_rate": 8.945702546981969e-05,
      "loss": 1.8216,
      "step": 50
    },
    {
      "epoch": 0.05651313930488839,
      "eval_loss": 0.6686553955078125,
      "eval_runtime": 106.943,
      "eval_samples_per_second": 13.933,
      "eval_steps_per_second": 3.488,
      "step": 50
    },
    {
      "epoch": 0.05764340209098615,
      "grad_norm": 7.047059535980225,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.3943,
      "step": 51
    },
    {
      "epoch": 0.05877366487708392,
      "grad_norm": 3.35186767578125,
      "learning_rate": 8.842005554284296e-05,
      "loss": 2.1352,
      "step": 52
    },
    {
      "epoch": 0.05990392766318169,
      "grad_norm": 3.5137863159179688,
      "learning_rate": 8.788574348801675e-05,
      "loss": 2.2424,
      "step": 53
    },
    {
      "epoch": 0.061034190449279456,
      "grad_norm": 3.1273257732391357,
      "learning_rate": 8.73410738492077e-05,
      "loss": 2.1845,
      "step": 54
    },
    {
      "epoch": 0.062164453235377226,
      "grad_norm": 3.170482635498047,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.2313,
      "step": 55
    },
    {
      "epoch": 0.06329471602147499,
      "grad_norm": 2.9466161727905273,
      "learning_rate": 8.622126023955446e-05,
      "loss": 2.2675,
      "step": 56
    },
    {
      "epoch": 0.06442497880757277,
      "grad_norm": 3.1644484996795654,
      "learning_rate": 8.564642241456986e-05,
      "loss": 2.277,
      "step": 57
    },
    {
      "epoch": 0.06555524159367053,
      "grad_norm": 2.9599294662475586,
      "learning_rate": 8.506183921362443e-05,
      "loss": 2.2118,
      "step": 58
    },
    {
      "epoch": 0.06668550437976829,
      "grad_norm": 4.45035982131958,
      "learning_rate": 8.44676704559283e-05,
      "loss": 2.45,
      "step": 59
    },
    {
      "epoch": 0.06781576716586607,
      "grad_norm": 3.307894468307495,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.4022,
      "step": 60
    },
    {
      "epoch": 0.06894602995196383,
      "grad_norm": 3.315178155899048,
      "learning_rate": 8.32512286056924e-05,
      "loss": 2.3148,
      "step": 61
    },
    {
      "epoch": 0.0700762927380616,
      "grad_norm": 3.3259332180023193,
      "learning_rate": 8.262928807620843e-05,
      "loss": 2.3667,
      "step": 62
    },
    {
      "epoch": 0.07120655552415937,
      "grad_norm": 3.088707208633423,
      "learning_rate": 8.199842702516583e-05,
      "loss": 2.2734,
      "step": 63
    },
    {
      "epoch": 0.07233681831025714,
      "grad_norm": 4.132589340209961,
      "learning_rate": 8.135881792367686e-05,
      "loss": 2.4403,
      "step": 64
    },
    {
      "epoch": 0.0734670810963549,
      "grad_norm": 4.20663595199585,
      "learning_rate": 8.07106356344834e-05,
      "loss": 2.4266,
      "step": 65
    },
    {
      "epoch": 0.07459734388245268,
      "grad_norm": 3.900576114654541,
      "learning_rate": 8.005405736415126e-05,
      "loss": 2.4775,
      "step": 66
    },
    {
      "epoch": 0.07572760666855044,
      "grad_norm": 3.7162678241729736,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.4847,
      "step": 67
    },
    {
      "epoch": 0.0768578694546482,
      "grad_norm": 3.464790105819702,
      "learning_rate": 7.871643313414718e-05,
      "loss": 2.3735,
      "step": 68
    },
    {
      "epoch": 0.07798813224074597,
      "grad_norm": 3.6781411170959473,
      "learning_rate": 7.803575286758364e-05,
      "loss": 2.4899,
      "step": 69
    },
    {
      "epoch": 0.07911839502684374,
      "grad_norm": 3.7448372840881348,
      "learning_rate": 7.734740790612136e-05,
      "loss": 2.4988,
      "step": 70
    },
    {
      "epoch": 0.0802486578129415,
      "grad_norm": 3.722891330718994,
      "learning_rate": 7.66515864363997e-05,
      "loss": 2.5031,
      "step": 71
    },
    {
      "epoch": 0.08137892059903927,
      "grad_norm": 3.3679347038269043,
      "learning_rate": 7.594847868906076e-05,
      "loss": 2.4905,
      "step": 72
    },
    {
      "epoch": 0.08250918338513705,
      "grad_norm": 3.2018232345581055,
      "learning_rate": 7.52382768867422e-05,
      "loss": 2.5374,
      "step": 73
    },
    {
      "epoch": 0.08363944617123481,
      "grad_norm": 4.144588470458984,
      "learning_rate": 7.452117519152542e-05,
      "loss": 2.5555,
      "step": 74
    },
    {
      "epoch": 0.08476970895733257,
      "grad_norm": 3.0146422386169434,
      "learning_rate": 7.379736965185368e-05,
      "loss": 2.5006,
      "step": 75
    },
    {
      "epoch": 0.08589997174343035,
      "grad_norm": 3.384679079055786,
      "learning_rate": 7.30670581489344e-05,
      "loss": 2.6001,
      "step": 76
    },
    {
      "epoch": 0.08703023452952811,
      "grad_norm": 3.17840576171875,
      "learning_rate": 7.233044034264034e-05,
      "loss": 2.5854,
      "step": 77
    },
    {
      "epoch": 0.08816049731562588,
      "grad_norm": 3.397296905517578,
      "learning_rate": 7.158771761692464e-05,
      "loss": 2.5631,
      "step": 78
    },
    {
      "epoch": 0.08929076010172365,
      "grad_norm": 3.4653842449188232,
      "learning_rate": 7.083909302476453e-05,
      "loss": 2.6416,
      "step": 79
    },
    {
      "epoch": 0.09042102288782142,
      "grad_norm": 3.1254074573516846,
      "learning_rate": 7.008477123264848e-05,
      "loss": 2.5894,
      "step": 80
    },
    {
      "epoch": 0.09155128567391918,
      "grad_norm": 3.4320321083068848,
      "learning_rate": 6.932495846462261e-05,
      "loss": 2.68,
      "step": 81
    },
    {
      "epoch": 0.09268154846001696,
      "grad_norm": 3.4371142387390137,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.544,
      "step": 82
    },
    {
      "epoch": 0.09381181124611472,
      "grad_norm": 3.5839908123016357,
      "learning_rate": 6.778969234612584e-05,
      "loss": 2.7267,
      "step": 83
    },
    {
      "epoch": 0.09494207403221248,
      "grad_norm": 3.330444097518921,
      "learning_rate": 6.701465872208216e-05,
      "loss": 2.6227,
      "step": 84
    },
    {
      "epoch": 0.09607233681831026,
      "grad_norm": 3.0069053173065186,
      "learning_rate": 6.623497346023418e-05,
      "loss": 2.6964,
      "step": 85
    },
    {
      "epoch": 0.09720259960440802,
      "grad_norm": 3.130023956298828,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.7333,
      "step": 86
    },
    {
      "epoch": 0.09833286239050579,
      "grad_norm": 3.0710136890411377,
      "learning_rate": 6.466250186922325e-05,
      "loss": 2.6722,
      "step": 87
    },
    {
      "epoch": 0.09946312517660356,
      "grad_norm": 2.988614320755005,
      "learning_rate": 6.387014543809223e-05,
      "loss": 2.5794,
      "step": 88
    },
    {
      "epoch": 0.10059338796270133,
      "grad_norm": 3.1522057056427,
      "learning_rate": 6.307399704769099e-05,
      "loss": 2.6536,
      "step": 89
    },
    {
      "epoch": 0.10172365074879909,
      "grad_norm": 3.2672934532165527,
      "learning_rate": 6.227427435703997e-05,
      "loss": 2.6735,
      "step": 90
    },
    {
      "epoch": 0.10285391353489687,
      "grad_norm": 3.3788697719573975,
      "learning_rate": 6.147119600233758e-05,
      "loss": 2.6591,
      "step": 91
    },
    {
      "epoch": 0.10398417632099463,
      "grad_norm": 4.3577046394348145,
      "learning_rate": 6.066498153718735e-05,
      "loss": 2.6826,
      "step": 92
    },
    {
      "epoch": 0.1051144391070924,
      "grad_norm": 3.8894002437591553,
      "learning_rate": 5.985585137257401e-05,
      "loss": 2.7146,
      "step": 93
    },
    {
      "epoch": 0.10624470189319017,
      "grad_norm": 3.9112329483032227,
      "learning_rate": 5.90440267166055e-05,
      "loss": 2.4512,
      "step": 94
    },
    {
      "epoch": 0.10737496467928793,
      "grad_norm": 3.968187093734741,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 2.336,
      "step": 95
    },
    {
      "epoch": 0.1085052274653857,
      "grad_norm": 3.755615234375,
      "learning_rate": 5.74131823855921e-05,
      "loss": 1.8332,
      "step": 96
    },
    {
      "epoch": 0.10963549025148348,
      "grad_norm": 3.6911725997924805,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 2.0493,
      "step": 97
    },
    {
      "epoch": 0.11076575303758124,
      "grad_norm": 4.834873199462891,
      "learning_rate": 5.577423184847932e-05,
      "loss": 2.0833,
      "step": 98
    },
    {
      "epoch": 0.111896015823679,
      "grad_norm": 3.7316572666168213,
      "learning_rate": 5.495227651252315e-05,
      "loss": 1.7376,
      "step": 99
    },
    {
      "epoch": 0.11302627860977678,
      "grad_norm": 3.422861099243164,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 1.6723,
      "step": 100
    },
    {
      "epoch": 0.11302627860977678,
      "eval_loss": 0.6681632995605469,
      "eval_runtime": 106.9564,
      "eval_samples_per_second": 13.931,
      "eval_steps_per_second": 3.487,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.431616484278272e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}