{
  "best_metric": 1.1155003309249878,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.022313335006833458,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0001115666750341673,
      "grad_norm": 2.653162956237793,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.3102,
      "step": 1
    },
    {
      "epoch": 0.0001115666750341673,
      "eval_loss": 2.114264488220215,
      "eval_runtime": 1178.428,
      "eval_samples_per_second": 12.811,
      "eval_steps_per_second": 3.203,
      "step": 1
    },
    {
      "epoch": 0.0002231333500683346,
      "grad_norm": 2.449162244796753,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 2.1695,
      "step": 2
    },
    {
      "epoch": 0.0003347000251025019,
      "grad_norm": 2.531275510787964,
      "learning_rate": 2.4e-05,
      "loss": 2.1155,
      "step": 3
    },
    {
      "epoch": 0.0004462667001366692,
      "grad_norm": 2.4609928131103516,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 2.0768,
      "step": 4
    },
    {
      "epoch": 0.0005578333751708364,
      "grad_norm": 2.380425453186035,
      "learning_rate": 4e-05,
      "loss": 1.8721,
      "step": 5
    },
    {
      "epoch": 0.0006694000502050038,
      "grad_norm": 1.5351266860961914,
      "learning_rate": 4.8e-05,
      "loss": 1.7979,
      "step": 6
    },
    {
      "epoch": 0.000780966725239171,
      "grad_norm": 1.5571147203445435,
      "learning_rate": 5.6e-05,
      "loss": 1.7501,
      "step": 7
    },
    {
      "epoch": 0.0008925334002733384,
      "grad_norm": 1.7607723474502563,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.6559,
      "step": 8
    },
    {
      "epoch": 0.0010041000753075056,
      "grad_norm": 1.6330875158309937,
      "learning_rate": 7.2e-05,
      "loss": 1.4968,
      "step": 9
    },
    {
      "epoch": 0.0011156667503416729,
      "grad_norm": 1.3503739833831787,
      "learning_rate": 8e-05,
      "loss": 1.5725,
      "step": 10
    },
    {
      "epoch": 0.0012272334253758403,
      "grad_norm": 1.2828761339187622,
      "learning_rate": 7.999453219969877e-05,
      "loss": 1.4362,
      "step": 11
    },
    {
      "epoch": 0.0013388001004100076,
      "grad_norm": 1.2013221979141235,
      "learning_rate": 7.997813029363704e-05,
      "loss": 1.42,
      "step": 12
    },
    {
      "epoch": 0.0014503667754441748,
      "grad_norm": 1.2638128995895386,
      "learning_rate": 7.99507987659322e-05,
      "loss": 1.5151,
      "step": 13
    },
    {
      "epoch": 0.001561933450478342,
      "grad_norm": 1.1523526906967163,
      "learning_rate": 7.991254508875098e-05,
      "loss": 1.4517,
      "step": 14
    },
    {
      "epoch": 0.0016735001255125093,
      "grad_norm": 1.09688138961792,
      "learning_rate": 7.98633797202668e-05,
      "loss": 1.3846,
      "step": 15
    },
    {
      "epoch": 0.0017850668005466768,
      "grad_norm": 1.146205186843872,
      "learning_rate": 7.980331610180046e-05,
      "loss": 1.3012,
      "step": 16
    },
    {
      "epoch": 0.001896633475580844,
      "grad_norm": 0.9450773000717163,
      "learning_rate": 7.973237065414553e-05,
      "loss": 1.3625,
      "step": 17
    },
    {
      "epoch": 0.0020082001506150113,
      "grad_norm": 0.9548523426055908,
      "learning_rate": 7.965056277307902e-05,
      "loss": 1.4134,
      "step": 18
    },
    {
      "epoch": 0.0021197668256491787,
      "grad_norm": 0.9216581583023071,
      "learning_rate": 7.955791482405875e-05,
      "loss": 1.2652,
      "step": 19
    },
    {
      "epoch": 0.0022313335006833458,
      "grad_norm": 1.0505989789962769,
      "learning_rate": 7.94544521361089e-05,
      "loss": 1.423,
      "step": 20
    },
    {
      "epoch": 0.002342900175717513,
      "grad_norm": 1.0446711778640747,
      "learning_rate": 7.93402029948953e-05,
      "loss": 1.3668,
      "step": 21
    },
    {
      "epoch": 0.0024544668507516807,
      "grad_norm": 0.9442985653877258,
      "learning_rate": 7.921519863499239e-05,
      "loss": 1.3191,
      "step": 22
    },
    {
      "epoch": 0.0025660335257858477,
      "grad_norm": 1.0523488521575928,
      "learning_rate": 7.907947323134398e-05,
      "loss": 1.3794,
      "step": 23
    },
    {
      "epoch": 0.002677600200820015,
      "grad_norm": 1.0296598672866821,
      "learning_rate": 7.893306388992023e-05,
      "loss": 1.2788,
      "step": 24
    },
    {
      "epoch": 0.002789166875854182,
      "grad_norm": 1.1668524742126465,
      "learning_rate": 7.877601063757323e-05,
      "loss": 1.4645,
      "step": 25
    },
    {
      "epoch": 0.0029007335508883497,
      "grad_norm": 1.0953292846679688,
      "learning_rate": 7.860835641109395e-05,
      "loss": 1.3605,
      "step": 26
    },
    {
      "epoch": 0.003012300225922517,
      "grad_norm": 1.152095079421997,
      "learning_rate": 7.843014704547393e-05,
      "loss": 1.379,
      "step": 27
    },
    {
      "epoch": 0.003123866900956684,
      "grad_norm": 1.132886290550232,
      "learning_rate": 7.824143126137431e-05,
      "loss": 1.3073,
      "step": 28
    },
    {
      "epoch": 0.0032354335759908516,
      "grad_norm": 1.0807931423187256,
      "learning_rate": 7.804226065180615e-05,
      "loss": 1.3226,
      "step": 29
    },
    {
      "epoch": 0.0033470002510250186,
      "grad_norm": 1.1306511163711548,
      "learning_rate": 7.783268966802539e-05,
      "loss": 1.3538,
      "step": 30
    },
    {
      "epoch": 0.003458566926059186,
      "grad_norm": 0.9918445348739624,
      "learning_rate": 7.761277560464645e-05,
      "loss": 1.2237,
      "step": 31
    },
    {
      "epoch": 0.0035701336010933536,
      "grad_norm": 1.0658776760101318,
      "learning_rate": 7.738257858397844e-05,
      "loss": 1.0874,
      "step": 32
    },
    {
      "epoch": 0.0036817002761275206,
      "grad_norm": 1.0365428924560547,
      "learning_rate": 7.71421615395883e-05,
      "loss": 1.273,
      "step": 33
    },
    {
      "epoch": 0.003793266951161688,
      "grad_norm": 1.19215989112854,
      "learning_rate": 7.68915901990954e-05,
      "loss": 1.2374,
      "step": 34
    },
    {
      "epoch": 0.0039048336261958555,
      "grad_norm": 1.0782256126403809,
      "learning_rate": 7.663093306620231e-05,
      "loss": 1.2026,
      "step": 35
    },
    {
      "epoch": 0.0040164003012300225,
      "grad_norm": 1.217738151550293,
      "learning_rate": 7.636026140196651e-05,
      "loss": 1.4224,
      "step": 36
    },
    {
      "epoch": 0.0041279669762641896,
      "grad_norm": 1.1175963878631592,
      "learning_rate": 7.607964920531837e-05,
      "loss": 1.2956,
      "step": 37
    },
    {
      "epoch": 0.0042395336512983575,
      "grad_norm": 1.1036632061004639,
      "learning_rate": 7.578917319283055e-05,
      "loss": 1.0602,
      "step": 38
    },
    {
      "epoch": 0.0043511003263325245,
      "grad_norm": 1.3284820318222046,
      "learning_rate": 7.548891277774448e-05,
      "loss": 1.3699,
      "step": 39
    },
    {
      "epoch": 0.0044626670013666915,
      "grad_norm": 1.09949791431427,
      "learning_rate": 7.517895004825956e-05,
      "loss": 1.1194,
      "step": 40
    },
    {
      "epoch": 0.004574233676400859,
      "grad_norm": 1.3073527812957764,
      "learning_rate": 7.48593697450911e-05,
      "loss": 1.2744,
      "step": 41
    },
    {
      "epoch": 0.004685800351435026,
      "grad_norm": 1.3388763666152954,
      "learning_rate": 7.453025923830296e-05,
      "loss": 1.257,
      "step": 42
    },
    {
      "epoch": 0.0047973670264691935,
      "grad_norm": 1.5201784372329712,
      "learning_rate": 7.419170850342156e-05,
      "loss": 1.2134,
      "step": 43
    },
    {
      "epoch": 0.004908933701503361,
      "grad_norm": 1.4326601028442383,
      "learning_rate": 7.384381009683742e-05,
      "loss": 1.1861,
      "step": 44
    },
    {
      "epoch": 0.005020500376537528,
      "grad_norm": 1.4766429662704468,
      "learning_rate": 7.348665913050115e-05,
      "loss": 1.279,
      "step": 45
    },
    {
      "epoch": 0.005132067051571695,
      "grad_norm": 1.4081569910049438,
      "learning_rate": 7.312035324592081e-05,
      "loss": 1.1668,
      "step": 46
    },
    {
      "epoch": 0.005243633726605862,
      "grad_norm": 1.439784288406372,
      "learning_rate": 7.274499258746771e-05,
      "loss": 1.1706,
      "step": 47
    },
    {
      "epoch": 0.00535520040164003,
      "grad_norm": 1.4231797456741333,
      "learning_rate": 7.236067977499791e-05,
      "loss": 1.0786,
      "step": 48
    },
    {
      "epoch": 0.005466767076674197,
      "grad_norm": 1.403438925743103,
      "learning_rate": 7.196751987579699e-05,
      "loss": 1.0702,
      "step": 49
    },
    {
      "epoch": 0.005578333751708364,
      "grad_norm": 1.865836501121521,
      "learning_rate": 7.156562037585576e-05,
      "loss": 1.0554,
      "step": 50
    },
    {
      "epoch": 0.005578333751708364,
      "eval_loss": 1.2656643390655518,
      "eval_runtime": 1183.4004,
      "eval_samples_per_second": 12.757,
      "eval_steps_per_second": 3.19,
      "step": 50
    },
    {
      "epoch": 0.005689900426742532,
      "grad_norm": 1.388903260231018,
      "learning_rate": 7.11550911504845e-05,
      "loss": 1.8208,
      "step": 51
    },
    {
      "epoch": 0.005801467101776699,
      "grad_norm": 1.0922175645828247,
      "learning_rate": 7.073604443427437e-05,
      "loss": 1.4857,
      "step": 52
    },
    {
      "epoch": 0.005913033776810866,
      "grad_norm": 1.1377652883529663,
      "learning_rate": 7.03085947904134e-05,
      "loss": 1.4642,
      "step": 53
    },
    {
      "epoch": 0.006024600451845034,
      "grad_norm": 1.0178011655807495,
      "learning_rate": 6.987285907936617e-05,
      "loss": 1.3595,
      "step": 54
    },
    {
      "epoch": 0.006136167126879201,
      "grad_norm": 1.0533956289291382,
      "learning_rate": 6.942895642692527e-05,
      "loss": 1.3205,
      "step": 55
    },
    {
      "epoch": 0.006247733801913368,
      "grad_norm": 0.9825334548950195,
      "learning_rate": 6.897700819164357e-05,
      "loss": 1.2667,
      "step": 56
    },
    {
      "epoch": 0.006359300476947536,
      "grad_norm": 0.9113156199455261,
      "learning_rate": 6.851713793165589e-05,
      "loss": 1.2543,
      "step": 57
    },
    {
      "epoch": 0.006470867151981703,
      "grad_norm": 0.8248892426490784,
      "learning_rate": 6.804947137089955e-05,
      "loss": 1.3133,
      "step": 58
    },
    {
      "epoch": 0.00658243382701587,
      "grad_norm": 0.8727787733078003,
      "learning_rate": 6.757413636474263e-05,
      "loss": 1.3125,
      "step": 59
    },
    {
      "epoch": 0.006694000502050037,
      "grad_norm": 0.9488885402679443,
      "learning_rate": 6.709126286502965e-05,
      "loss": 1.327,
      "step": 60
    },
    {
      "epoch": 0.006805567177084205,
      "grad_norm": 0.8989211320877075,
      "learning_rate": 6.660098288455393e-05,
      "loss": 1.1596,
      "step": 61
    },
    {
      "epoch": 0.006917133852118372,
      "grad_norm": 0.904154896736145,
      "learning_rate": 6.610343046096674e-05,
      "loss": 1.1647,
      "step": 62
    },
    {
      "epoch": 0.007028700527152539,
      "grad_norm": 0.8586918115615845,
      "learning_rate": 6.559874162013267e-05,
      "loss": 1.1605,
      "step": 63
    },
    {
      "epoch": 0.007140267202186707,
      "grad_norm": 0.9001997709274292,
      "learning_rate": 6.508705433894149e-05,
      "loss": 1.3327,
      "step": 64
    },
    {
      "epoch": 0.007251833877220874,
      "grad_norm": 0.8475323915481567,
      "learning_rate": 6.456850850758673e-05,
      "loss": 1.1643,
      "step": 65
    },
    {
      "epoch": 0.007363400552255041,
      "grad_norm": 0.9225298762321472,
      "learning_rate": 6.404324589132101e-05,
      "loss": 1.2149,
      "step": 66
    },
    {
      "epoch": 0.007474967227289209,
      "grad_norm": 0.8501834869384766,
      "learning_rate": 6.351141009169893e-05,
      "loss": 1.181,
      "step": 67
    },
    {
      "epoch": 0.007586533902323376,
      "grad_norm": 0.9416823983192444,
      "learning_rate": 6.297314650731775e-05,
      "loss": 1.2624,
      "step": 68
    },
    {
      "epoch": 0.007698100577357543,
      "grad_norm": 0.8258371949195862,
      "learning_rate": 6.242860229406692e-05,
      "loss": 1.1523,
      "step": 69
    },
    {
      "epoch": 0.007809667252391711,
      "grad_norm": 0.8512648344039917,
      "learning_rate": 6.18779263248971e-05,
      "loss": 1.1725,
      "step": 70
    },
    {
      "epoch": 0.007921233927425877,
      "grad_norm": 0.8364467024803162,
      "learning_rate": 6.132126914911976e-05,
      "loss": 1.1696,
      "step": 71
    },
    {
      "epoch": 0.008032800602460045,
      "grad_norm": 0.7922412753105164,
      "learning_rate": 6.075878295124861e-05,
      "loss": 1.0674,
      "step": 72
    },
    {
      "epoch": 0.008144367277494213,
      "grad_norm": 0.8955839276313782,
      "learning_rate": 6.019062150939376e-05,
      "loss": 1.2391,
      "step": 73
    },
    {
      "epoch": 0.008255933952528379,
      "grad_norm": 0.883040726184845,
      "learning_rate": 5.9616940153220336e-05,
      "loss": 1.3788,
      "step": 74
    },
    {
      "epoch": 0.008367500627562547,
      "grad_norm": 0.8692553639411926,
      "learning_rate": 5.903789572148295e-05,
      "loss": 1.1941,
      "step": 75
    },
    {
      "epoch": 0.008479067302596715,
      "grad_norm": 0.9407691955566406,
      "learning_rate": 5.845364651914752e-05,
      "loss": 1.2981,
      "step": 76
    },
    {
      "epoch": 0.008590633977630881,
      "grad_norm": 0.8941450119018555,
      "learning_rate": 5.786435227411227e-05,
      "loss": 1.1009,
      "step": 77
    },
    {
      "epoch": 0.008702200652665049,
      "grad_norm": 0.9692277908325195,
      "learning_rate": 5.727017409353971e-05,
      "loss": 1.3274,
      "step": 78
    },
    {
      "epoch": 0.008813767327699217,
      "grad_norm": 0.9406832456588745,
      "learning_rate": 5.667127441981162e-05,
      "loss": 1.2052,
      "step": 79
    },
    {
      "epoch": 0.008925334002733383,
      "grad_norm": 0.9091105461120605,
      "learning_rate": 5.606781698611879e-05,
      "loss": 1.0985,
      "step": 80
    },
    {
      "epoch": 0.009036900677767551,
      "grad_norm": 0.9770876169204712,
      "learning_rate": 5.5459966771698096e-05,
      "loss": 1.0522,
      "step": 81
    },
    {
      "epoch": 0.009148467352801719,
      "grad_norm": 0.8914808630943298,
      "learning_rate": 5.4847889956728834e-05,
      "loss": 1.1276,
      "step": 82
    },
    {
      "epoch": 0.009260034027835885,
      "grad_norm": 0.9220410585403442,
      "learning_rate": 5.423175387690067e-05,
      "loss": 1.1342,
      "step": 83
    },
    {
      "epoch": 0.009371600702870053,
      "grad_norm": 0.9457625150680542,
      "learning_rate": 5.361172697766573e-05,
      "loss": 1.1892,
      "step": 84
    },
    {
      "epoch": 0.00948316737790422,
      "grad_norm": 0.9678511023521423,
      "learning_rate": 5.298797876818735e-05,
      "loss": 1.0223,
      "step": 85
    },
    {
      "epoch": 0.009594734052938387,
      "grad_norm": 0.9996553659439087,
      "learning_rate": 5.23606797749979e-05,
      "loss": 1.0629,
      "step": 86
    },
    {
      "epoch": 0.009706300727972555,
      "grad_norm": 1.065645456314087,
      "learning_rate": 5.17300014953786e-05,
      "loss": 1.1248,
      "step": 87
    },
    {
      "epoch": 0.009817867403006723,
      "grad_norm": 1.0297746658325195,
      "learning_rate": 5.109611635047379e-05,
      "loss": 1.1184,
      "step": 88
    },
    {
      "epoch": 0.009929434078040889,
      "grad_norm": 0.9987531900405884,
      "learning_rate": 5.04591976381528e-05,
      "loss": 1.1138,
      "step": 89
    },
    {
      "epoch": 0.010041000753075057,
      "grad_norm": 0.9533745050430298,
      "learning_rate": 4.981941948563197e-05,
      "loss": 1.084,
      "step": 90
    },
    {
      "epoch": 0.010152567428109223,
      "grad_norm": 1.1118968725204468,
      "learning_rate": 4.9176956801870065e-05,
      "loss": 1.0032,
      "step": 91
    },
    {
      "epoch": 0.01026413410314339,
      "grad_norm": 1.0484565496444702,
      "learning_rate": 4.853198522974988e-05,
      "loss": 0.9708,
      "step": 92
    },
    {
      "epoch": 0.010375700778177559,
      "grad_norm": 0.941596508026123,
      "learning_rate": 4.788468109805921e-05,
      "loss": 0.9696,
      "step": 93
    },
    {
      "epoch": 0.010487267453211725,
      "grad_norm": 1.13180673122406,
      "learning_rate": 4.7235221373284407e-05,
      "loss": 1.0798,
      "step": 94
    },
    {
      "epoch": 0.010598834128245893,
      "grad_norm": 1.2236862182617188,
      "learning_rate": 4.658378361122936e-05,
      "loss": 1.1903,
      "step": 95
    },
    {
      "epoch": 0.01071040080328006,
      "grad_norm": 1.2033003568649292,
      "learning_rate": 4.593054590847368e-05,
      "loss": 1.0621,
      "step": 96
    },
    {
      "epoch": 0.010821967478314227,
      "grad_norm": 1.0950212478637695,
      "learning_rate": 4.5275686853682765e-05,
      "loss": 0.9034,
      "step": 97
    },
    {
      "epoch": 0.010933534153348395,
      "grad_norm": 1.3348255157470703,
      "learning_rate": 4.4619385478783456e-05,
      "loss": 1.1506,
      "step": 98
    },
    {
      "epoch": 0.011045100828382563,
      "grad_norm": 1.4957432746887207,
      "learning_rate": 4.396182121001852e-05,
      "loss": 1.1369,
      "step": 99
    },
    {
      "epoch": 0.011156667503416729,
      "grad_norm": 2.078827142715454,
      "learning_rate": 4.33031738188933e-05,
      "loss": 0.9903,
      "step": 100
    },
    {
      "epoch": 0.011156667503416729,
      "eval_loss": 1.1675224304199219,
      "eval_runtime": 1187.356,
      "eval_samples_per_second": 12.715,
      "eval_steps_per_second": 3.179,
      "step": 100
    },
    {
      "epoch": 0.011268234178450897,
      "grad_norm": 1.03377366065979,
      "learning_rate": 4.264362337302798e-05,
      "loss": 1.4868,
      "step": 101
    },
    {
      "epoch": 0.011379800853485065,
      "grad_norm": 0.9007754325866699,
      "learning_rate": 4.1983350186928894e-05,
      "loss": 1.5376,
      "step": 102
    },
    {
      "epoch": 0.01149136752851923,
      "grad_norm": 0.8353412747383118,
      "learning_rate": 4.132253477269233e-05,
      "loss": 1.3117,
      "step": 103
    },
    {
      "epoch": 0.011602934203553399,
      "grad_norm": 0.9711883068084717,
      "learning_rate": 4.0661357790654345e-05,
      "loss": 1.1541,
      "step": 104
    },
    {
      "epoch": 0.011714500878587567,
      "grad_norm": 0.7863988280296326,
      "learning_rate": 4e-05,
      "loss": 1.2769,
      "step": 105
    },
    {
      "epoch": 0.011826067553621733,
      "grad_norm": 0.8335726857185364,
      "learning_rate": 3.933864220934566e-05,
      "loss": 1.156,
      "step": 106
    },
    {
      "epoch": 0.0119376342286559,
      "grad_norm": 0.7659847736358643,
      "learning_rate": 3.8677465227307676e-05,
      "loss": 1.2839,
      "step": 107
    },
    {
      "epoch": 0.012049200903690068,
      "grad_norm": 0.8807960152626038,
      "learning_rate": 3.8016649813071106e-05,
      "loss": 1.322,
      "step": 108
    },
    {
      "epoch": 0.012160767578724235,
      "grad_norm": 0.8528708219528198,
      "learning_rate": 3.735637662697203e-05,
      "loss": 1.1657,
      "step": 109
    },
    {
      "epoch": 0.012272334253758403,
      "grad_norm": 0.8083069324493408,
      "learning_rate": 3.669682618110671e-05,
      "loss": 1.2175,
      "step": 110
    },
    {
      "epoch": 0.01238390092879257,
      "grad_norm": 0.7970200777053833,
      "learning_rate": 3.6038178789981494e-05,
      "loss": 1.1687,
      "step": 111
    },
    {
      "epoch": 0.012495467603826737,
      "grad_norm": 0.7781988382339478,
      "learning_rate": 3.538061452121656e-05,
      "loss": 1.136,
      "step": 112
    },
    {
      "epoch": 0.012607034278860904,
      "grad_norm": 0.8137642741203308,
      "learning_rate": 3.472431314631724e-05,
      "loss": 1.2012,
      "step": 113
    },
    {
      "epoch": 0.012718600953895072,
      "grad_norm": 0.8913825154304504,
      "learning_rate": 3.406945409152632e-05,
      "loss": 1.1926,
      "step": 114
    },
    {
      "epoch": 0.012830167628929239,
      "grad_norm": 0.8783978819847107,
      "learning_rate": 3.341621638877064e-05,
      "loss": 1.188,
      "step": 115
    },
    {
      "epoch": 0.012941734303963406,
      "grad_norm": 0.8640679121017456,
      "learning_rate": 3.276477862671562e-05,
      "loss": 1.0542,
      "step": 116
    },
    {
      "epoch": 0.013053300978997573,
      "grad_norm": 0.8714284300804138,
      "learning_rate": 3.21153189019408e-05,
      "loss": 1.2211,
      "step": 117
    },
    {
      "epoch": 0.01316486765403174,
      "grad_norm": 0.8280481696128845,
      "learning_rate": 3.146801477025013e-05,
      "loss": 1.0808,
      "step": 118
    },
    {
      "epoch": 0.013276434329065908,
      "grad_norm": 0.8297896385192871,
      "learning_rate": 3.082304319812994e-05,
      "loss": 1.1227,
      "step": 119
    },
    {
      "epoch": 0.013388001004100075,
      "grad_norm": 0.8418503403663635,
      "learning_rate": 3.0180580514368037e-05,
      "loss": 1.2152,
      "step": 120
    },
    {
      "epoch": 0.013499567679134242,
      "grad_norm": 0.7824004292488098,
      "learning_rate": 2.9540802361847212e-05,
      "loss": 1.1229,
      "step": 121
    },
    {
      "epoch": 0.01361113435416841,
      "grad_norm": 0.8189719915390015,
      "learning_rate": 2.890388364952623e-05,
      "loss": 1.1421,
      "step": 122
    },
    {
      "epoch": 0.013722701029202576,
      "grad_norm": 0.8146119713783264,
      "learning_rate": 2.8269998504621416e-05,
      "loss": 1.0736,
      "step": 123
    },
    {
      "epoch": 0.013834267704236744,
      "grad_norm": 1.066524863243103,
      "learning_rate": 2.7639320225002108e-05,
      "loss": 1.2977,
      "step": 124
    },
    {
      "epoch": 0.013945834379270912,
      "grad_norm": 0.8084144592285156,
      "learning_rate": 2.7012021231812666e-05,
      "loss": 1.201,
      "step": 125
    },
    {
      "epoch": 0.014057401054305078,
      "grad_norm": 0.8299919962882996,
      "learning_rate": 2.638827302233428e-05,
      "loss": 1.1345,
      "step": 126
    },
    {
      "epoch": 0.014168967729339246,
      "grad_norm": 0.8451680541038513,
      "learning_rate": 2.576824612309934e-05,
      "loss": 1.2455,
      "step": 127
    },
    {
      "epoch": 0.014280534404373414,
      "grad_norm": 0.9045851826667786,
      "learning_rate": 2.5152110043271166e-05,
      "loss": 1.2498,
      "step": 128
    },
    {
      "epoch": 0.01439210107940758,
      "grad_norm": 0.9111452698707581,
      "learning_rate": 2.454003322830192e-05,
      "loss": 1.1584,
      "step": 129
    },
    {
      "epoch": 0.014503667754441748,
      "grad_norm": 0.8999844789505005,
      "learning_rate": 2.393218301388123e-05,
      "loss": 1.0248,
      "step": 130
    },
    {
      "epoch": 0.014615234429475916,
      "grad_norm": 1.1039999723434448,
      "learning_rate": 2.3328725580188395e-05,
      "loss": 1.1425,
      "step": 131
    },
    {
      "epoch": 0.014726801104510082,
      "grad_norm": 0.85543292760849,
      "learning_rate": 2.272982590646029e-05,
      "loss": 1.1249,
      "step": 132
    },
    {
      "epoch": 0.01483836777954425,
      "grad_norm": 0.9125653505325317,
      "learning_rate": 2.2135647725887744e-05,
      "loss": 1.0836,
      "step": 133
    },
    {
      "epoch": 0.014949934454578418,
      "grad_norm": 0.9096406698226929,
      "learning_rate": 2.1546353480852495e-05,
      "loss": 1.1764,
      "step": 134
    },
    {
      "epoch": 0.015061501129612584,
      "grad_norm": 0.897408664226532,
      "learning_rate": 2.096210427851706e-05,
      "loss": 1.0352,
      "step": 135
    },
    {
      "epoch": 0.015173067804646752,
      "grad_norm": 1.038303017616272,
      "learning_rate": 2.038305984677969e-05,
      "loss": 1.1442,
      "step": 136
    },
    {
      "epoch": 0.01528463447968092,
      "grad_norm": 0.9297472834587097,
      "learning_rate": 1.9809378490606264e-05,
      "loss": 1.0321,
      "step": 137
    },
    {
      "epoch": 0.015396201154715086,
      "grad_norm": 0.9409924745559692,
      "learning_rate": 1.9241217048751406e-05,
      "loss": 1.0004,
      "step": 138
    },
    {
      "epoch": 0.015507767829749254,
      "grad_norm": 0.9569453597068787,
      "learning_rate": 1.867873085088026e-05,
      "loss": 0.9474,
      "step": 139
    },
    {
      "epoch": 0.015619334504783422,
      "grad_norm": 1.0429985523223877,
      "learning_rate": 1.8122073675102935e-05,
      "loss": 1.0308,
      "step": 140
    },
    {
      "epoch": 0.01573090117981759,
      "grad_norm": 1.1001777648925781,
      "learning_rate": 1.75713977059331e-05,
      "loss": 1.013,
      "step": 141
    },
    {
      "epoch": 0.015842467854851754,
      "grad_norm": 1.0191009044647217,
      "learning_rate": 1.702685349268226e-05,
      "loss": 0.9944,
      "step": 142
    },
    {
      "epoch": 0.015954034529885922,
      "grad_norm": 1.0639243125915527,
      "learning_rate": 1.648858990830108e-05,
      "loss": 1.1787,
      "step": 143
    },
    {
      "epoch": 0.01606560120492009,
      "grad_norm": 0.9979782104492188,
      "learning_rate": 1.5956754108678996e-05,
      "loss": 1.046,
      "step": 144
    },
    {
      "epoch": 0.016177167879954258,
      "grad_norm": 1.1500177383422852,
      "learning_rate": 1.5431491492413288e-05,
      "loss": 1.0585,
      "step": 145
    },
    {
      "epoch": 0.016288734554988426,
      "grad_norm": 1.1362090110778809,
      "learning_rate": 1.491294566105852e-05,
      "loss": 1.1034,
      "step": 146
    },
    {
      "epoch": 0.016400301230022594,
      "grad_norm": 1.2018383741378784,
      "learning_rate": 1.4401258379867335e-05,
      "loss": 1.0853,
      "step": 147
    },
    {
      "epoch": 0.016511867905056758,
      "grad_norm": 1.182171106338501,
      "learning_rate": 1.3896569539033253e-05,
      "loss": 1.1379,
      "step": 148
    },
    {
      "epoch": 0.016623434580090926,
      "grad_norm": 1.328808307647705,
      "learning_rate": 1.3399017115446067e-05,
      "loss": 1.0739,
      "step": 149
    },
    {
      "epoch": 0.016735001255125094,
      "grad_norm": 1.5698626041412354,
      "learning_rate": 1.2908737134970367e-05,
      "loss": 1.1242,
      "step": 150
    },
    {
      "epoch": 0.016735001255125094,
      "eval_loss": 1.1231499910354614,
      "eval_runtime": 1182.7105,
      "eval_samples_per_second": 12.765,
      "eval_steps_per_second": 3.192,
      "step": 150
    },
    {
      "epoch": 0.016846567930159262,
      "grad_norm": 1.347996711730957,
      "learning_rate": 1.242586363525737e-05,
      "loss": 1.6669,
      "step": 151
    },
    {
      "epoch": 0.01695813460519343,
      "grad_norm": 0.7223047018051147,
      "learning_rate": 1.1950528629100457e-05,
      "loss": 1.4154,
      "step": 152
    },
    {
      "epoch": 0.017069701280227598,
      "grad_norm": 0.7790156006813049,
      "learning_rate": 1.1482862068344121e-05,
      "loss": 1.3367,
      "step": 153
    },
    {
      "epoch": 0.017181267955261762,
      "grad_norm": 0.7638139128684998,
      "learning_rate": 1.1022991808356442e-05,
      "loss": 1.0699,
      "step": 154
    },
    {
      "epoch": 0.01729283463029593,
      "grad_norm": 0.8289293646812439,
      "learning_rate": 1.0571043573074737e-05,
      "loss": 1.2266,
      "step": 155
    },
    {
      "epoch": 0.017404401305330098,
      "grad_norm": 0.8301885724067688,
      "learning_rate": 1.0127140920633857e-05,
      "loss": 1.3556,
      "step": 156
    },
    {
      "epoch": 0.017515967980364266,
      "grad_norm": 0.7769188284873962,
      "learning_rate": 9.69140520958662e-06,
      "loss": 1.3362,
      "step": 157
    },
    {
      "epoch": 0.017627534655398434,
      "grad_norm": 0.8720978498458862,
      "learning_rate": 9.263955565725648e-06,
      "loss": 1.0811,
      "step": 158
    },
    {
      "epoch": 0.017739101330432598,
      "grad_norm": 0.7598316073417664,
      "learning_rate": 8.844908849515509e-06,
      "loss": 1.0352,
      "step": 159
    },
    {
      "epoch": 0.017850668005466766,
      "grad_norm": 0.7994357347488403,
      "learning_rate": 8.434379624144261e-06,
      "loss": 1.1984,
      "step": 160
    },
    {
      "epoch": 0.017962234680500934,
      "grad_norm": 0.841153621673584,
      "learning_rate": 8.032480124203013e-06,
      "loss": 1.2358,
      "step": 161
    },
    {
      "epoch": 0.018073801355535102,
      "grad_norm": 0.8118249773979187,
      "learning_rate": 7.639320225002106e-06,
      "loss": 1.0032,
      "step": 162
    },
    {
      "epoch": 0.01818536803056927,
      "grad_norm": 0.753839373588562,
      "learning_rate": 7.255007412532307e-06,
      "loss": 1.1015,
      "step": 163
    },
    {
      "epoch": 0.018296934705603438,
      "grad_norm": 0.7360984086990356,
      "learning_rate": 6.8796467540791986e-06,
      "loss": 1.1351,
      "step": 164
    },
    {
      "epoch": 0.018408501380637602,
      "grad_norm": 0.8430283069610596,
      "learning_rate": 6.513340869498859e-06,
      "loss": 1.1797,
      "step": 165
    },
    {
      "epoch": 0.01852006805567177,
      "grad_norm": 0.7918187975883484,
      "learning_rate": 6.1561899031625794e-06,
      "loss": 1.2193,
      "step": 166
    },
    {
      "epoch": 0.018631634730705938,
      "grad_norm": 0.7992611527442932,
      "learning_rate": 5.808291496578435e-06,
      "loss": 1.268,
      "step": 167
    },
    {
      "epoch": 0.018743201405740106,
      "grad_norm": 0.7855931520462036,
      "learning_rate": 5.469740761697044e-06,
      "loss": 1.178,
      "step": 168
    },
    {
      "epoch": 0.018854768080774274,
      "grad_norm": 0.8650913834571838,
      "learning_rate": 5.140630254908905e-06,
      "loss": 1.2404,
      "step": 169
    },
    {
      "epoch": 0.01896633475580844,
      "grad_norm": 0.8121924996376038,
      "learning_rate": 4.821049951740442e-06,
      "loss": 1.1776,
      "step": 170
    },
    {
      "epoch": 0.019077901430842606,
      "grad_norm": 0.7831167578697205,
      "learning_rate": 4.511087222255528e-06,
      "loss": 1.2594,
      "step": 171
    },
    {
      "epoch": 0.019189468105876774,
      "grad_norm": 0.8442049026489258,
      "learning_rate": 4.2108268071694616e-06,
      "loss": 0.9845,
      "step": 172
    },
    {
      "epoch": 0.01930103478091094,
      "grad_norm": 0.9648184180259705,
      "learning_rate": 3.9203507946816445e-06,
      "loss": 1.1247,
      "step": 173
    },
    {
      "epoch": 0.01941260145594511,
      "grad_norm": 0.814885139465332,
      "learning_rate": 3.6397385980335e-06,
      "loss": 1.3107,
      "step": 174
    },
    {
      "epoch": 0.019524168130979278,
      "grad_norm": 0.8654019236564636,
      "learning_rate": 3.3690669337977e-06,
      "loss": 1.2594,
      "step": 175
    },
    {
      "epoch": 0.019635734806013445,
      "grad_norm": 0.9481486678123474,
      "learning_rate": 3.1084098009046106e-06,
      "loss": 1.1462,
      "step": 176
    },
    {
      "epoch": 0.01974730148104761,
      "grad_norm": 0.7980362772941589,
      "learning_rate": 2.8578384604117217e-06,
      "loss": 1.117,
      "step": 177
    },
    {
      "epoch": 0.019858868156081778,
      "grad_norm": 0.8693552613258362,
      "learning_rate": 2.6174214160215704e-06,
      "loss": 1.1145,
      "step": 178
    },
    {
      "epoch": 0.019970434831115946,
      "grad_norm": 0.8843334317207336,
      "learning_rate": 2.3872243953535535e-06,
      "loss": 1.0491,
      "step": 179
    },
    {
      "epoch": 0.020082001506150114,
      "grad_norm": 0.8390738368034363,
      "learning_rate": 2.1673103319746146e-06,
      "loss": 0.9996,
      "step": 180
    },
    {
      "epoch": 0.02019356818118428,
      "grad_norm": 0.9163740873336792,
      "learning_rate": 1.957739348193859e-06,
      "loss": 1.198,
      "step": 181
    },
    {
      "epoch": 0.020305134856218446,
      "grad_norm": 0.8822169899940491,
      "learning_rate": 1.7585687386256944e-06,
      "loss": 1.0289,
      "step": 182
    },
    {
      "epoch": 0.020416701531252614,
      "grad_norm": 0.8542013168334961,
      "learning_rate": 1.5698529545260744e-06,
      "loss": 0.9726,
      "step": 183
    },
    {
      "epoch": 0.02052826820628678,
      "grad_norm": 0.9263859987258911,
      "learning_rate": 1.3916435889060575e-06,
      "loss": 1.1067,
      "step": 184
    },
    {
      "epoch": 0.02063983488132095,
      "grad_norm": 0.9441400170326233,
      "learning_rate": 1.2239893624267852e-06,
      "loss": 1.1066,
      "step": 185
    },
    {
      "epoch": 0.020751401556355117,
      "grad_norm": 0.9362796545028687,
      "learning_rate": 1.0669361100797704e-06,
      "loss": 1.0002,
      "step": 186
    },
    {
      "epoch": 0.020862968231389285,
      "grad_norm": 0.9713947772979736,
      "learning_rate": 9.205267686560293e-07,
      "loss": 1.1045,
      "step": 187
    },
    {
      "epoch": 0.02097453490642345,
      "grad_norm": 0.9421945214271545,
      "learning_rate": 7.848013650076258e-07,
      "loss": 1.094,
      "step": 188
    },
    {
      "epoch": 0.021086101581457618,
      "grad_norm": 0.9486850500106812,
      "learning_rate": 6.597970051047053e-07,
      "loss": 1.0785,
      "step": 189
    },
    {
      "epoch": 0.021197668256491786,
      "grad_norm": 1.1238409280776978,
      "learning_rate": 5.455478638911071e-07,
      "loss": 1.1079,
      "step": 190
    },
    {
      "epoch": 0.021309234931525953,
      "grad_norm": 1.0330231189727783,
      "learning_rate": 4.420851759412603e-07,
      "loss": 0.9687,
      "step": 191
    },
    {
      "epoch": 0.02142080160656012,
      "grad_norm": 1.064351201057434,
      "learning_rate": 3.4943722692099224e-07,
      "loss": 1.0296,
      "step": 192
    },
    {
      "epoch": 0.02153236828159429,
      "grad_norm": 1.0056447982788086,
      "learning_rate": 2.676293458544743e-07,
      "loss": 0.9839,
      "step": 193
    },
    {
      "epoch": 0.021643934956628454,
      "grad_norm": 1.1110754013061523,
      "learning_rate": 1.9668389819954338e-07,
      "loss": 0.9976,
      "step": 194
    },
    {
      "epoch": 0.02175550163166262,
      "grad_norm": 1.2064679861068726,
      "learning_rate": 1.3662027973320614e-07,
      "loss": 1.1326,
      "step": 195
    },
    {
      "epoch": 0.02186706830669679,
      "grad_norm": 1.1165443658828735,
      "learning_rate": 8.745491124901861e-08,
      "loss": 0.9374,
      "step": 196
    },
    {
      "epoch": 0.021978634981730957,
      "grad_norm": 1.0634437799453735,
      "learning_rate": 4.920123406781052e-08,
      "loss": 0.979,
      "step": 197
    },
    {
      "epoch": 0.022090201656765125,
      "grad_norm": 1.4238533973693848,
      "learning_rate": 2.1869706362958044e-08,
      "loss": 1.0523,
      "step": 198
    },
    {
      "epoch": 0.022201768331799293,
      "grad_norm": 1.357772707939148,
      "learning_rate": 5.467800301239834e-09,
      "loss": 1.0345,
      "step": 199
    },
    {
      "epoch": 0.022313335006833458,
      "grad_norm": 1.6248823404312134,
      "learning_rate": 0.0,
      "loss": 0.9803,
      "step": 200
    },
    {
      "epoch": 0.022313335006833458,
      "eval_loss": 1.1155003309249878,
      "eval_runtime": 1183.9514,
      "eval_samples_per_second": 12.751,
      "eval_steps_per_second": 3.188,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.08868155248214e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|