{
"best_metric": 0.9916771315123634,
"best_model_checkpoint": "convnext-tiny-224-finetuned-eurosat/checkpoint-2490",
"epoch": 2.9987959060806744,
"eval_steps": 500,
"global_step": 2490,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.012040939193257074,
"grad_norm": 13.281408309936523,
"learning_rate": 2.0080321285140564e-06,
"loss": 1.4682,
"step": 10
},
{
"epoch": 0.024081878386514148,
"grad_norm": 11.210328102111816,
"learning_rate": 4.016064257028113e-06,
"loss": 1.4426,
"step": 20
},
{
"epoch": 0.036122817579771226,
"grad_norm": 14.131393432617188,
"learning_rate": 6.024096385542169e-06,
"loss": 1.3941,
"step": 30
},
{
"epoch": 0.048163756773028296,
"grad_norm": 12.672858238220215,
"learning_rate": 8.032128514056226e-06,
"loss": 1.3162,
"step": 40
},
{
"epoch": 0.060204695966285374,
"grad_norm": 12.453862190246582,
"learning_rate": 1.0040160642570281e-05,
"loss": 1.1984,
"step": 50
},
{
"epoch": 0.07224563515954245,
"grad_norm": 18.517236709594727,
"learning_rate": 1.2048192771084338e-05,
"loss": 1.0524,
"step": 60
},
{
"epoch": 0.08428657435279951,
"grad_norm": 16.96401023864746,
"learning_rate": 1.4056224899598394e-05,
"loss": 0.8876,
"step": 70
},
{
"epoch": 0.09632751354605659,
"grad_norm": 41.16529083251953,
"learning_rate": 1.606425702811245e-05,
"loss": 0.7224,
"step": 80
},
{
"epoch": 0.10836845273931367,
"grad_norm": 48.28220748901367,
"learning_rate": 1.8072289156626505e-05,
"loss": 0.6089,
"step": 90
},
{
"epoch": 0.12040939193257075,
"grad_norm": 37.04490661621094,
"learning_rate": 2.0080321285140562e-05,
"loss": 0.5196,
"step": 100
},
{
"epoch": 0.13245033112582782,
"grad_norm": 32.357425689697266,
"learning_rate": 2.208835341365462e-05,
"loss": 0.4334,
"step": 110
},
{
"epoch": 0.1444912703190849,
"grad_norm": 41.277889251708984,
"learning_rate": 2.4096385542168677e-05,
"loss": 0.3515,
"step": 120
},
{
"epoch": 0.15653220951234195,
"grad_norm": 36.72848892211914,
"learning_rate": 2.6104417670682734e-05,
"loss": 0.2723,
"step": 130
},
{
"epoch": 0.16857314870559903,
"grad_norm": 48.640384674072266,
"learning_rate": 2.8112449799196788e-05,
"loss": 0.2383,
"step": 140
},
{
"epoch": 0.1806140878988561,
"grad_norm": 41.72344970703125,
"learning_rate": 3.012048192771085e-05,
"loss": 0.1954,
"step": 150
},
{
"epoch": 0.19265502709211318,
"grad_norm": 26.390892028808594,
"learning_rate": 3.21285140562249e-05,
"loss": 0.1937,
"step": 160
},
{
"epoch": 0.20469596628537026,
"grad_norm": 31.522932052612305,
"learning_rate": 3.413654618473896e-05,
"loss": 0.1657,
"step": 170
},
{
"epoch": 0.21673690547862734,
"grad_norm": 34.83334732055664,
"learning_rate": 3.614457831325301e-05,
"loss": 0.1678,
"step": 180
},
{
"epoch": 0.22877784467188442,
"grad_norm": 17.641130447387695,
"learning_rate": 3.815261044176707e-05,
"loss": 0.1367,
"step": 190
},
{
"epoch": 0.2408187838651415,
"grad_norm": 12.848413467407227,
"learning_rate": 4.0160642570281125e-05,
"loss": 0.1183,
"step": 200
},
{
"epoch": 0.25285972305839854,
"grad_norm": 12.54123306274414,
"learning_rate": 4.2168674698795186e-05,
"loss": 0.1026,
"step": 210
},
{
"epoch": 0.26490066225165565,
"grad_norm": 14.553470611572266,
"learning_rate": 4.417670682730924e-05,
"loss": 0.0932,
"step": 220
},
{
"epoch": 0.2769416014449127,
"grad_norm": 13.884803771972656,
"learning_rate": 4.61847389558233e-05,
"loss": 0.0697,
"step": 230
},
{
"epoch": 0.2889825406381698,
"grad_norm": 19.946374893188477,
"learning_rate": 4.8192771084337354e-05,
"loss": 0.0845,
"step": 240
},
{
"epoch": 0.30102347983142685,
"grad_norm": 25.795095443725586,
"learning_rate": 4.9977688531905406e-05,
"loss": 0.0927,
"step": 250
},
{
"epoch": 0.3130644190246839,
"grad_norm": 17.59208869934082,
"learning_rate": 4.97545738509594e-05,
"loss": 0.0621,
"step": 260
},
{
"epoch": 0.325105358217941,
"grad_norm": 14.659806251525879,
"learning_rate": 4.953145917001339e-05,
"loss": 0.0644,
"step": 270
},
{
"epoch": 0.33714629741119806,
"grad_norm": 15.293780326843262,
"learning_rate": 4.930834448906738e-05,
"loss": 0.0882,
"step": 280
},
{
"epoch": 0.34918723660445516,
"grad_norm": 11.717020988464355,
"learning_rate": 4.908522980812137e-05,
"loss": 0.0594,
"step": 290
},
{
"epoch": 0.3612281757977122,
"grad_norm": 29.984556198120117,
"learning_rate": 4.886211512717537e-05,
"loss": 0.0854,
"step": 300
},
{
"epoch": 0.3732691149909693,
"grad_norm": 10.837246894836426,
"learning_rate": 4.8639000446229364e-05,
"loss": 0.0782,
"step": 310
},
{
"epoch": 0.38531005418422637,
"grad_norm": 11.394307136535645,
"learning_rate": 4.8415885765283355e-05,
"loss": 0.0666,
"step": 320
},
{
"epoch": 0.3973509933774834,
"grad_norm": 31.305824279785156,
"learning_rate": 4.8192771084337354e-05,
"loss": 0.0529,
"step": 330
},
{
"epoch": 0.4093919325707405,
"grad_norm": 31.709115982055664,
"learning_rate": 4.7969656403391346e-05,
"loss": 0.0504,
"step": 340
},
{
"epoch": 0.4214328717639976,
"grad_norm": 3.85288143157959,
"learning_rate": 4.774654172244534e-05,
"loss": 0.0498,
"step": 350
},
{
"epoch": 0.4334738109572547,
"grad_norm": 11.564428329467773,
"learning_rate": 4.7523427041499336e-05,
"loss": 0.0635,
"step": 360
},
{
"epoch": 0.44551475015051173,
"grad_norm": 35.95602798461914,
"learning_rate": 4.730031236055333e-05,
"loss": 0.0464,
"step": 370
},
{
"epoch": 0.45755568934376883,
"grad_norm": 16.262226104736328,
"learning_rate": 4.707719767960732e-05,
"loss": 0.0404,
"step": 380
},
{
"epoch": 0.4695966285370259,
"grad_norm": 15.375874519348145,
"learning_rate": 4.685408299866131e-05,
"loss": 0.0399,
"step": 390
},
{
"epoch": 0.481637567730283,
"grad_norm": 5.388984680175781,
"learning_rate": 4.663096831771531e-05,
"loss": 0.0381,
"step": 400
},
{
"epoch": 0.49367850692354004,
"grad_norm": 1.3834246397018433,
"learning_rate": 4.64078536367693e-05,
"loss": 0.046,
"step": 410
},
{
"epoch": 0.5057194461167971,
"grad_norm": 29.666913986206055,
"learning_rate": 4.61847389558233e-05,
"loss": 0.0549,
"step": 420
},
{
"epoch": 0.5177603853100542,
"grad_norm": 11.310073852539062,
"learning_rate": 4.596162427487729e-05,
"loss": 0.0428,
"step": 430
},
{
"epoch": 0.5298013245033113,
"grad_norm": 14.786700248718262,
"learning_rate": 4.5738509593931284e-05,
"loss": 0.0376,
"step": 440
},
{
"epoch": 0.5418422636965683,
"grad_norm": 28.993574142456055,
"learning_rate": 4.5515394912985275e-05,
"loss": 0.0409,
"step": 450
},
{
"epoch": 0.5538832028898254,
"grad_norm": 12.82999038696289,
"learning_rate": 4.529228023203927e-05,
"loss": 0.0576,
"step": 460
},
{
"epoch": 0.5659241420830825,
"grad_norm": 25.034116744995117,
"learning_rate": 4.506916555109326e-05,
"loss": 0.0384,
"step": 470
},
{
"epoch": 0.5779650812763396,
"grad_norm": 10.026959419250488,
"learning_rate": 4.484605087014726e-05,
"loss": 0.0352,
"step": 480
},
{
"epoch": 0.5900060204695966,
"grad_norm": 4.99667501449585,
"learning_rate": 4.4622936189201256e-05,
"loss": 0.0333,
"step": 490
},
{
"epoch": 0.6020469596628537,
"grad_norm": 29.965518951416016,
"learning_rate": 4.439982150825525e-05,
"loss": 0.0425,
"step": 500
},
{
"epoch": 0.6140878988561108,
"grad_norm": 15.231363296508789,
"learning_rate": 4.417670682730924e-05,
"loss": 0.0414,
"step": 510
},
{
"epoch": 0.6261288380493678,
"grad_norm": 34.312477111816406,
"learning_rate": 4.395359214636323e-05,
"loss": 0.0411,
"step": 520
},
{
"epoch": 0.6381697772426249,
"grad_norm": 60.11940383911133,
"learning_rate": 4.373047746541722e-05,
"loss": 0.0436,
"step": 530
},
{
"epoch": 0.650210716435882,
"grad_norm": 2.1523265838623047,
"learning_rate": 4.350736278447122e-05,
"loss": 0.0364,
"step": 540
},
{
"epoch": 0.6622516556291391,
"grad_norm": 22.606645584106445,
"learning_rate": 4.328424810352521e-05,
"loss": 0.0348,
"step": 550
},
{
"epoch": 0.6742925948223961,
"grad_norm": 15.017701148986816,
"learning_rate": 4.306113342257921e-05,
"loss": 0.0301,
"step": 560
},
{
"epoch": 0.6863335340156532,
"grad_norm": 8.95254898071289,
"learning_rate": 4.2838018741633203e-05,
"loss": 0.0322,
"step": 570
},
{
"epoch": 0.6983744732089103,
"grad_norm": 12.393402099609375,
"learning_rate": 4.2614904060687195e-05,
"loss": 0.0207,
"step": 580
},
{
"epoch": 0.7104154124021673,
"grad_norm": 4.132020950317383,
"learning_rate": 4.239178937974119e-05,
"loss": 0.0479,
"step": 590
},
{
"epoch": 0.7224563515954244,
"grad_norm": 11.15093994140625,
"learning_rate": 4.2168674698795186e-05,
"loss": 0.0195,
"step": 600
},
{
"epoch": 0.7344972907886815,
"grad_norm": 19.265222549438477,
"learning_rate": 4.194556001784918e-05,
"loss": 0.0229,
"step": 610
},
{
"epoch": 0.7465382299819386,
"grad_norm": 14.732630729675293,
"learning_rate": 4.172244533690317e-05,
"loss": 0.0243,
"step": 620
},
{
"epoch": 0.7585791691751956,
"grad_norm": 2.9074995517730713,
"learning_rate": 4.149933065595716e-05,
"loss": 0.0324,
"step": 630
},
{
"epoch": 0.7706201083684527,
"grad_norm": 12.28730583190918,
"learning_rate": 4.127621597501116e-05,
"loss": 0.0341,
"step": 640
},
{
"epoch": 0.7826610475617098,
"grad_norm": 5.263113975524902,
"learning_rate": 4.105310129406515e-05,
"loss": 0.0345,
"step": 650
},
{
"epoch": 0.7947019867549668,
"grad_norm": 20.13115119934082,
"learning_rate": 4.082998661311915e-05,
"loss": 0.0371,
"step": 660
},
{
"epoch": 0.8067429259482239,
"grad_norm": 6.125397205352783,
"learning_rate": 4.060687193217314e-05,
"loss": 0.0218,
"step": 670
},
{
"epoch": 0.818783865141481,
"grad_norm": 4.126222133636475,
"learning_rate": 4.038375725122713e-05,
"loss": 0.0266,
"step": 680
},
{
"epoch": 0.8308248043347382,
"grad_norm": 4.383463382720947,
"learning_rate": 4.0160642570281125e-05,
"loss": 0.0472,
"step": 690
},
{
"epoch": 0.8428657435279951,
"grad_norm": 1.4781919717788696,
"learning_rate": 3.993752788933512e-05,
"loss": 0.0265,
"step": 700
},
{
"epoch": 0.8549066827212523,
"grad_norm": 7.968236446380615,
"learning_rate": 3.9714413208389115e-05,
"loss": 0.0326,
"step": 710
},
{
"epoch": 0.8669476219145094,
"grad_norm": 15.246943473815918,
"learning_rate": 3.949129852744311e-05,
"loss": 0.0256,
"step": 720
},
{
"epoch": 0.8789885611077664,
"grad_norm": 16.984224319458008,
"learning_rate": 3.9268183846497105e-05,
"loss": 0.0128,
"step": 730
},
{
"epoch": 0.8910295003010235,
"grad_norm": 15.068581581115723,
"learning_rate": 3.90450691655511e-05,
"loss": 0.0245,
"step": 740
},
{
"epoch": 0.9030704394942806,
"grad_norm": 0.4837843179702759,
"learning_rate": 3.882195448460509e-05,
"loss": 0.0091,
"step": 750
},
{
"epoch": 0.9151113786875377,
"grad_norm": 21.087526321411133,
"learning_rate": 3.859883980365908e-05,
"loss": 0.036,
"step": 760
},
{
"epoch": 0.9271523178807947,
"grad_norm": 13.440828323364258,
"learning_rate": 3.837572512271307e-05,
"loss": 0.0137,
"step": 770
},
{
"epoch": 0.9391932570740518,
"grad_norm": 4.101161956787109,
"learning_rate": 3.815261044176707e-05,
"loss": 0.0207,
"step": 780
},
{
"epoch": 0.9512341962673089,
"grad_norm": 9.98717975616455,
"learning_rate": 3.792949576082106e-05,
"loss": 0.032,
"step": 790
},
{
"epoch": 0.963275135460566,
"grad_norm": 20.295673370361328,
"learning_rate": 3.770638107987506e-05,
"loss": 0.0288,
"step": 800
},
{
"epoch": 0.975316074653823,
"grad_norm": 4.261445999145508,
"learning_rate": 3.748326639892905e-05,
"loss": 0.0231,
"step": 810
},
{
"epoch": 0.9873570138470801,
"grad_norm": 19.60774803161621,
"learning_rate": 3.7260151717983045e-05,
"loss": 0.0376,
"step": 820
},
{
"epoch": 0.9993979530403372,
"grad_norm": 7.224467754364014,
"learning_rate": 3.7037037037037037e-05,
"loss": 0.0369,
"step": 830
},
{
"epoch": 0.9993979530403372,
"eval_f1": 0.9770145182151784,
"eval_loss": 0.03777425363659859,
"eval_runtime": 102.228,
"eval_samples_per_second": 115.526,
"eval_steps_per_second": 3.619,
"step": 830
},
{
"epoch": 1.0117399157134257,
"grad_norm": 15.622239112854004,
"learning_rate": 3.6813922356091035e-05,
"loss": 0.0179,
"step": 840
},
{
"epoch": 1.0237808549066827,
"grad_norm": 0.41096824407577515,
"learning_rate": 3.659080767514503e-05,
"loss": 0.0076,
"step": 850
},
{
"epoch": 1.03582179409994,
"grad_norm": 18.979677200317383,
"learning_rate": 3.636769299419902e-05,
"loss": 0.024,
"step": 860
},
{
"epoch": 1.047862733293197,
"grad_norm": 9.764080047607422,
"learning_rate": 3.614457831325301e-05,
"loss": 0.0142,
"step": 870
},
{
"epoch": 1.059903672486454,
"grad_norm": 2.4317643642425537,
"learning_rate": 3.592146363230701e-05,
"loss": 0.0265,
"step": 880
},
{
"epoch": 1.0719446116797111,
"grad_norm": 15.766806602478027,
"learning_rate": 3.5698348951361e-05,
"loss": 0.0234,
"step": 890
},
{
"epoch": 1.083985550872968,
"grad_norm": 11.236108779907227,
"learning_rate": 3.5475234270415e-05,
"loss": 0.0142,
"step": 900
},
{
"epoch": 1.096026490066225,
"grad_norm": 2.210698127746582,
"learning_rate": 3.525211958946899e-05,
"loss": 0.0034,
"step": 910
},
{
"epoch": 1.1080674292594823,
"grad_norm": 0.30994459986686707,
"learning_rate": 3.502900490852298e-05,
"loss": 0.0038,
"step": 920
},
{
"epoch": 1.1201083684527393,
"grad_norm": 7.224539279937744,
"learning_rate": 3.4805890227576974e-05,
"loss": 0.0246,
"step": 930
},
{
"epoch": 1.1321493076459963,
"grad_norm": 0.3623636066913605,
"learning_rate": 3.4582775546630966e-05,
"loss": 0.006,
"step": 940
},
{
"epoch": 1.1441902468392535,
"grad_norm": 3.8137762546539307,
"learning_rate": 3.4359660865684965e-05,
"loss": 0.0176,
"step": 950
},
{
"epoch": 1.1562311860325105,
"grad_norm": 0.8698212504386902,
"learning_rate": 3.413654618473896e-05,
"loss": 0.0166,
"step": 960
},
{
"epoch": 1.1682721252257675,
"grad_norm": 14.512166976928711,
"learning_rate": 3.3913431503792955e-05,
"loss": 0.0179,
"step": 970
},
{
"epoch": 1.1803130644190247,
"grad_norm": 23.775272369384766,
"learning_rate": 3.369031682284695e-05,
"loss": 0.0247,
"step": 980
},
{
"epoch": 1.1923540036122817,
"grad_norm": 9.087154388427734,
"learning_rate": 3.346720214190094e-05,
"loss": 0.0212,
"step": 990
},
{
"epoch": 1.2043949428055387,
"grad_norm": 23.993961334228516,
"learning_rate": 3.324408746095493e-05,
"loss": 0.0198,
"step": 1000
},
{
"epoch": 1.216435881998796,
"grad_norm": 5.462221622467041,
"learning_rate": 3.302097278000892e-05,
"loss": 0.0077,
"step": 1010
},
{
"epoch": 1.228476821192053,
"grad_norm": 0.17719963192939758,
"learning_rate": 3.279785809906292e-05,
"loss": 0.0107,
"step": 1020
},
{
"epoch": 1.2405177603853101,
"grad_norm": 19.494287490844727,
"learning_rate": 3.257474341811691e-05,
"loss": 0.0125,
"step": 1030
},
{
"epoch": 1.2525586995785671,
"grad_norm": 7.95407772064209,
"learning_rate": 3.235162873717091e-05,
"loss": 0.0105,
"step": 1040
},
{
"epoch": 1.2645996387718241,
"grad_norm": 5.586263656616211,
"learning_rate": 3.21285140562249e-05,
"loss": 0.0178,
"step": 1050
},
{
"epoch": 1.2766405779650813,
"grad_norm": 0.2848265469074249,
"learning_rate": 3.1905399375278894e-05,
"loss": 0.0095,
"step": 1060
},
{
"epoch": 1.2886815171583383,
"grad_norm": 1.2747416496276855,
"learning_rate": 3.1682284694332886e-05,
"loss": 0.0046,
"step": 1070
},
{
"epoch": 1.3007224563515956,
"grad_norm": 0.1546485871076584,
"learning_rate": 3.1459170013386885e-05,
"loss": 0.0176,
"step": 1080
},
{
"epoch": 1.3127633955448526,
"grad_norm": 0.07813813537359238,
"learning_rate": 3.1236055332440876e-05,
"loss": 0.0153,
"step": 1090
},
{
"epoch": 1.3248043347381095,
"grad_norm": 0.5948330163955688,
"learning_rate": 3.101294065149487e-05,
"loss": 0.0133,
"step": 1100
},
{
"epoch": 1.3368452739313668,
"grad_norm": 26.891010284423828,
"learning_rate": 3.078982597054887e-05,
"loss": 0.026,
"step": 1110
},
{
"epoch": 1.3488862131246238,
"grad_norm": 5.57611083984375,
"learning_rate": 3.056671128960286e-05,
"loss": 0.0129,
"step": 1120
},
{
"epoch": 1.3609271523178808,
"grad_norm": 29.697946548461914,
"learning_rate": 3.034359660865685e-05,
"loss": 0.0136,
"step": 1130
},
{
"epoch": 1.372968091511138,
"grad_norm": 0.9194643497467041,
"learning_rate": 3.012048192771085e-05,
"loss": 0.0018,
"step": 1140
},
{
"epoch": 1.385009030704395,
"grad_norm": 0.1039569079875946,
"learning_rate": 2.989736724676484e-05,
"loss": 0.0158,
"step": 1150
},
{
"epoch": 1.397049969897652,
"grad_norm": 32.51161193847656,
"learning_rate": 2.9674252565818832e-05,
"loss": 0.0085,
"step": 1160
},
{
"epoch": 1.4090909090909092,
"grad_norm": 9.65622329711914,
"learning_rate": 2.9451137884872827e-05,
"loss": 0.0142,
"step": 1170
},
{
"epoch": 1.4211318482841662,
"grad_norm": 0.4631725251674652,
"learning_rate": 2.922802320392682e-05,
"loss": 0.0096,
"step": 1180
},
{
"epoch": 1.4331727874774232,
"grad_norm": 9.818026542663574,
"learning_rate": 2.900490852298081e-05,
"loss": 0.0055,
"step": 1190
},
{
"epoch": 1.4452137266706804,
"grad_norm": 5.643733978271484,
"learning_rate": 2.878179384203481e-05,
"loss": 0.0146,
"step": 1200
},
{
"epoch": 1.4572546658639374,
"grad_norm": 5.967852592468262,
"learning_rate": 2.85586791610888e-05,
"loss": 0.0228,
"step": 1210
},
{
"epoch": 1.4692956050571944,
"grad_norm": 0.1257736086845398,
"learning_rate": 2.8335564480142796e-05,
"loss": 0.003,
"step": 1220
},
{
"epoch": 1.4813365442504516,
"grad_norm": 14.535481452941895,
"learning_rate": 2.8112449799196788e-05,
"loss": 0.0156,
"step": 1230
},
{
"epoch": 1.4933774834437086,
"grad_norm": 1.1483675241470337,
"learning_rate": 2.788933511825078e-05,
"loss": 0.0051,
"step": 1240
},
{
"epoch": 1.5054184226369656,
"grad_norm": 0.11251900345087051,
"learning_rate": 2.7666220437304775e-05,
"loss": 0.0063,
"step": 1250
},
{
"epoch": 1.5174593618302228,
"grad_norm": 0.16821934282779694,
"learning_rate": 2.7443105756358774e-05,
"loss": 0.0066,
"step": 1260
},
{
"epoch": 1.5295003010234798,
"grad_norm": 1.6054164171218872,
"learning_rate": 2.7219991075412765e-05,
"loss": 0.0092,
"step": 1270
},
{
"epoch": 1.5415412402167368,
"grad_norm": 1.5014593601226807,
"learning_rate": 2.6996876394466757e-05,
"loss": 0.008,
"step": 1280
},
{
"epoch": 1.553582179409994,
"grad_norm": 27.260873794555664,
"learning_rate": 2.6773761713520752e-05,
"loss": 0.0277,
"step": 1290
},
{
"epoch": 1.5656231186032512,
"grad_norm": 6.338423728942871,
"learning_rate": 2.6550647032574744e-05,
"loss": 0.0202,
"step": 1300
},
{
"epoch": 1.577664057796508,
"grad_norm": 18.155574798583984,
"learning_rate": 2.6327532351628736e-05,
"loss": 0.0069,
"step": 1310
},
{
"epoch": 1.5897049969897652,
"grad_norm": 0.7306817770004272,
"learning_rate": 2.6104417670682734e-05,
"loss": 0.0074,
"step": 1320
},
{
"epoch": 1.6017459361830224,
"grad_norm": 6.356385231018066,
"learning_rate": 2.5881302989736726e-05,
"loss": 0.0125,
"step": 1330
},
{
"epoch": 1.6137868753762792,
"grad_norm": 16.603483200073242,
"learning_rate": 2.565818830879072e-05,
"loss": 0.0264,
"step": 1340
},
{
"epoch": 1.6258278145695364,
"grad_norm": 10.486541748046875,
"learning_rate": 2.5435073627844713e-05,
"loss": 0.0148,
"step": 1350
},
{
"epoch": 1.6378687537627936,
"grad_norm": 0.5101543664932251,
"learning_rate": 2.5211958946898705e-05,
"loss": 0.0158,
"step": 1360
},
{
"epoch": 1.6499096929560506,
"grad_norm": 19.748857498168945,
"learning_rate": 2.4988844265952703e-05,
"loss": 0.0132,
"step": 1370
},
{
"epoch": 1.6619506321493076,
"grad_norm": 0.3710501194000244,
"learning_rate": 2.4765729585006695e-05,
"loss": 0.0153,
"step": 1380
},
{
"epoch": 1.6739915713425648,
"grad_norm": 0.0527074858546257,
"learning_rate": 2.4542614904060687e-05,
"loss": 0.0119,
"step": 1390
},
{
"epoch": 1.6860325105358218,
"grad_norm": 1.91316556930542,
"learning_rate": 2.4319500223114682e-05,
"loss": 0.0104,
"step": 1400
},
{
"epoch": 1.6980734497290788,
"grad_norm": 2.1063218116760254,
"learning_rate": 2.4096385542168677e-05,
"loss": 0.0055,
"step": 1410
},
{
"epoch": 1.710114388922336,
"grad_norm": 9.150762557983398,
"learning_rate": 2.387327086122267e-05,
"loss": 0.0104,
"step": 1420
},
{
"epoch": 1.722155328115593,
"grad_norm": 0.08365010470151901,
"learning_rate": 2.3650156180276664e-05,
"loss": 0.0107,
"step": 1430
},
{
"epoch": 1.73419626730885,
"grad_norm": 8.636667251586914,
"learning_rate": 2.3427041499330656e-05,
"loss": 0.0099,
"step": 1440
},
{
"epoch": 1.7462372065021072,
"grad_norm": 1.0073809623718262,
"learning_rate": 2.320392681838465e-05,
"loss": 0.0171,
"step": 1450
},
{
"epoch": 1.7582781456953642,
"grad_norm": 5.5500969886779785,
"learning_rate": 2.2980812137438646e-05,
"loss": 0.0031,
"step": 1460
},
{
"epoch": 1.7703190848886212,
"grad_norm": 9.417837142944336,
"learning_rate": 2.2757697456492638e-05,
"loss": 0.01,
"step": 1470
},
{
"epoch": 1.7823600240818784,
"grad_norm": 18.310321807861328,
"learning_rate": 2.253458277554663e-05,
"loss": 0.0113,
"step": 1480
},
{
"epoch": 1.7944009632751354,
"grad_norm": 1.6077951192855835,
"learning_rate": 2.2311468094600628e-05,
"loss": 0.0136,
"step": 1490
},
{
"epoch": 1.8064419024683924,
"grad_norm": 2.8965089321136475,
"learning_rate": 2.208835341365462e-05,
"loss": 0.0109,
"step": 1500
},
{
"epoch": 1.8184828416616496,
"grad_norm": 1.5971770286560059,
"learning_rate": 2.186523873270861e-05,
"loss": 0.0016,
"step": 1510
},
{
"epoch": 1.8305237808549066,
"grad_norm": 5.2004170417785645,
"learning_rate": 2.1642124051762607e-05,
"loss": 0.0035,
"step": 1520
},
{
"epoch": 1.8425647200481636,
"grad_norm": 0.644066572189331,
"learning_rate": 2.1419009370816602e-05,
"loss": 0.0108,
"step": 1530
},
{
"epoch": 1.8546056592414208,
"grad_norm": 7.4833760261535645,
"learning_rate": 2.1195894689870593e-05,
"loss": 0.0047,
"step": 1540
},
{
"epoch": 1.866646598434678,
"grad_norm": 1.3332066535949707,
"learning_rate": 2.097278000892459e-05,
"loss": 0.0114,
"step": 1550
},
{
"epoch": 1.8786875376279348,
"grad_norm": 22.227462768554688,
"learning_rate": 2.074966532797858e-05,
"loss": 0.007,
"step": 1560
},
{
"epoch": 1.890728476821192,
"grad_norm": 0.06626732647418976,
"learning_rate": 2.0526550647032576e-05,
"loss": 0.0034,
"step": 1570
},
{
"epoch": 1.9027694160144493,
"grad_norm": 0.06219644472002983,
"learning_rate": 2.030343596608657e-05,
"loss": 0.0161,
"step": 1580
},
{
"epoch": 1.914810355207706,
"grad_norm": 14.115644454956055,
"learning_rate": 2.0080321285140562e-05,
"loss": 0.0213,
"step": 1590
},
{
"epoch": 1.9268512944009633,
"grad_norm": 0.06290453672409058,
"learning_rate": 1.9857206604194558e-05,
"loss": 0.0196,
"step": 1600
},
{
"epoch": 1.9388922335942205,
"grad_norm": 0.5245229601860046,
"learning_rate": 1.9634091923248553e-05,
"loss": 0.0067,
"step": 1610
},
{
"epoch": 1.9509331727874775,
"grad_norm": 2.8521981239318848,
"learning_rate": 1.9410977242302544e-05,
"loss": 0.0193,
"step": 1620
},
{
"epoch": 1.9629741119807345,
"grad_norm": 0.47254133224487305,
"learning_rate": 1.9187862561356536e-05,
"loss": 0.0199,
"step": 1630
},
{
"epoch": 1.9750150511739917,
"grad_norm": 0.11533841490745544,
"learning_rate": 1.896474788041053e-05,
"loss": 0.0031,
"step": 1640
},
{
"epoch": 1.9870559903672487,
"grad_norm": 12.329407691955566,
"learning_rate": 1.8741633199464527e-05,
"loss": 0.012,
"step": 1650
},
{
"epoch": 1.9990969295605057,
"grad_norm": 4.933974742889404,
"learning_rate": 1.8518518518518518e-05,
"loss": 0.0152,
"step": 1660
},
{
"epoch": 1.9990969295605057,
"eval_f1": 0.9903039700741034,
"eval_loss": 0.020218143239617348,
"eval_runtime": 101.1159,
"eval_samples_per_second": 116.797,
"eval_steps_per_second": 3.659,
"step": 1660
},
{
"epoch": 2.011438892233594,
"grad_norm": 0.7306141257286072,
"learning_rate": 1.8295403837572513e-05,
"loss": 0.0078,
"step": 1670
},
{
"epoch": 2.0234798314268514,
"grad_norm": 0.0547499842941761,
"learning_rate": 1.8072289156626505e-05,
"loss": 0.0102,
"step": 1680
},
{
"epoch": 2.035520770620108,
"grad_norm": 1.1440118551254272,
"learning_rate": 1.78491744756805e-05,
"loss": 0.0011,
"step": 1690
},
{
"epoch": 2.0475617098133654,
"grad_norm": 0.14842332899570465,
"learning_rate": 1.7626059794734495e-05,
"loss": 0.0087,
"step": 1700
},
{
"epoch": 2.0596026490066226,
"grad_norm": 0.18685872852802277,
"learning_rate": 1.7402945113788487e-05,
"loss": 0.002,
"step": 1710
},
{
"epoch": 2.07164358819988,
"grad_norm": 13.721619606018066,
"learning_rate": 1.7179830432842482e-05,
"loss": 0.0052,
"step": 1720
},
{
"epoch": 2.0836845273931366,
"grad_norm": 1.7337596416473389,
"learning_rate": 1.6956715751896478e-05,
"loss": 0.0007,
"step": 1730
},
{
"epoch": 2.095725466586394,
"grad_norm": 0.023796595633029938,
"learning_rate": 1.673360107095047e-05,
"loss": 0.0006,
"step": 1740
},
{
"epoch": 2.107766405779651,
"grad_norm": 0.21926841139793396,
"learning_rate": 1.651048639000446e-05,
"loss": 0.0002,
"step": 1750
},
{
"epoch": 2.119807344972908,
"grad_norm": 0.08207955211400986,
"learning_rate": 1.6287371709058456e-05,
"loss": 0.0006,
"step": 1760
},
{
"epoch": 2.131848284166165,
"grad_norm": 0.06928452104330063,
"learning_rate": 1.606425702811245e-05,
"loss": 0.0004,
"step": 1770
},
{
"epoch": 2.1438892233594222,
"grad_norm": 0.028248103335499763,
"learning_rate": 1.5841142347166443e-05,
"loss": 0.0007,
"step": 1780
},
{
"epoch": 2.155930162552679,
"grad_norm": 0.13549260795116425,
"learning_rate": 1.5618027666220438e-05,
"loss": 0.0012,
"step": 1790
},
{
"epoch": 2.167971101745936,
"grad_norm": 0.24678213894367218,
"learning_rate": 1.5394912985274433e-05,
"loss": 0.0002,
"step": 1800
},
{
"epoch": 2.1800120409391934,
"grad_norm": 0.043063148856163025,
"learning_rate": 1.5171798304328425e-05,
"loss": 0.0103,
"step": 1810
},
{
"epoch": 2.19205298013245,
"grad_norm": 7.088438510894775,
"learning_rate": 1.494868362338242e-05,
"loss": 0.0161,
"step": 1820
},
{
"epoch": 2.2040939193257074,
"grad_norm": 2.1872127056121826,
"learning_rate": 1.4725568942436414e-05,
"loss": 0.0127,
"step": 1830
},
{
"epoch": 2.2161348585189646,
"grad_norm": 2.7211828231811523,
"learning_rate": 1.4502454261490405e-05,
"loss": 0.0133,
"step": 1840
},
{
"epoch": 2.2281757977122214,
"grad_norm": 0.6726425290107727,
"learning_rate": 1.42793395805444e-05,
"loss": 0.0004,
"step": 1850
},
{
"epoch": 2.2402167369054786,
"grad_norm": 0.05588046833872795,
"learning_rate": 1.4056224899598394e-05,
"loss": 0.0003,
"step": 1860
},
{
"epoch": 2.252257676098736,
"grad_norm": 0.2764362394809723,
"learning_rate": 1.3833110218652387e-05,
"loss": 0.0008,
"step": 1870
},
{
"epoch": 2.2642986152919926,
"grad_norm": 0.05939013138413429,
"learning_rate": 1.3609995537706383e-05,
"loss": 0.0017,
"step": 1880
},
{
"epoch": 2.27633955448525,
"grad_norm": 43.3293342590332,
"learning_rate": 1.3386880856760376e-05,
"loss": 0.0015,
"step": 1890
},
{
"epoch": 2.288380493678507,
"grad_norm": 0.021860118955373764,
"learning_rate": 1.3163766175814368e-05,
"loss": 0.0093,
"step": 1900
},
{
"epoch": 2.300421432871764,
"grad_norm": 0.5367532968521118,
"learning_rate": 1.2940651494868363e-05,
"loss": 0.0049,
"step": 1910
},
{
"epoch": 2.312462372065021,
"grad_norm": 0.046667639166116714,
"learning_rate": 1.2717536813922356e-05,
"loss": 0.001,
"step": 1920
},
{
"epoch": 2.3245033112582782,
"grad_norm": 0.019831106066703796,
"learning_rate": 1.2494422132976352e-05,
"loss": 0.0063,
"step": 1930
},
{
"epoch": 2.336544250451535,
"grad_norm": 0.6081582903862,
"learning_rate": 1.2271307452030343e-05,
"loss": 0.006,
"step": 1940
},
{
"epoch": 2.3485851896447922,
"grad_norm": 18.08721351623535,
"learning_rate": 1.2048192771084338e-05,
"loss": 0.0008,
"step": 1950
},
{
"epoch": 2.3606261288380495,
"grad_norm": 4.2922892570495605,
"learning_rate": 1.1825078090138332e-05,
"loss": 0.0051,
"step": 1960
},
{
"epoch": 2.3726670680313067,
"grad_norm": 0.017288248986005783,
"learning_rate": 1.1601963409192325e-05,
"loss": 0.0002,
"step": 1970
},
{
"epoch": 2.3847080072245634,
"grad_norm": 0.011165632866322994,
"learning_rate": 1.1378848728246319e-05,
"loss": 0.0006,
"step": 1980
},
{
"epoch": 2.3967489464178207,
"grad_norm": 0.038627851754426956,
"learning_rate": 1.1155734047300314e-05,
"loss": 0.0048,
"step": 1990
},
{
"epoch": 2.4087898856110774,
"grad_norm": 0.03750608116388321,
"learning_rate": 1.0932619366354306e-05,
"loss": 0.0024,
"step": 2000
},
{
"epoch": 2.4208308248043346,
"grad_norm": 0.14723530411720276,
"learning_rate": 1.0709504685408301e-05,
"loss": 0.0004,
"step": 2010
},
{
"epoch": 2.432871763997592,
"grad_norm": 0.020417124032974243,
"learning_rate": 1.0486390004462294e-05,
"loss": 0.0006,
"step": 2020
},
{
"epoch": 2.444912703190849,
"grad_norm": 0.12044423818588257,
"learning_rate": 1.0263275323516288e-05,
"loss": 0.0279,
"step": 2030
},
{
"epoch": 2.456953642384106,
"grad_norm": 11.781253814697266,
"learning_rate": 1.0040160642570281e-05,
"loss": 0.0117,
"step": 2040
},
{
"epoch": 2.468994581577363,
"grad_norm": 3.7121262550354004,
"learning_rate": 9.817045961624276e-06,
"loss": 0.0065,
"step": 2050
},
{
"epoch": 2.4810355207706203,
"grad_norm": 0.06580865383148193,
"learning_rate": 9.593931280678268e-06,
"loss": 0.0033,
"step": 2060
},
{
"epoch": 2.493076459963877,
"grad_norm": 0.04538366571068764,
"learning_rate": 9.370816599732263e-06,
"loss": 0.0013,
"step": 2070
},
{
"epoch": 2.5051173991571343,
"grad_norm": 0.024890244007110596,
"learning_rate": 9.147701918786257e-06,
"loss": 0.0104,
"step": 2080
},
{
"epoch": 2.5171583383503915,
"grad_norm": 0.15436908602714539,
"learning_rate": 8.92458723784025e-06,
"loss": 0.012,
"step": 2090
},
{
"epoch": 2.5291992775436483,
"grad_norm": 0.18890848755836487,
"learning_rate": 8.701472556894244e-06,
"loss": 0.0006,
"step": 2100
},
{
"epoch": 2.5412402167369055,
"grad_norm": 0.014392940327525139,
"learning_rate": 8.478357875948239e-06,
"loss": 0.0084,
"step": 2110
},
{
"epoch": 2.5532811559301627,
"grad_norm": 0.05412567779421806,
"learning_rate": 8.25524319500223e-06,
"loss": 0.0082,
"step": 2120
},
{
"epoch": 2.5653220951234195,
"grad_norm": 0.08901382237672806,
"learning_rate": 8.032128514056226e-06,
"loss": 0.0131,
"step": 2130
},
{
"epoch": 2.5773630343166767,
"grad_norm": 0.03455715253949165,
"learning_rate": 7.809013833110219e-06,
"loss": 0.0002,
"step": 2140
},
{
"epoch": 2.589403973509934,
"grad_norm": 11.11595630645752,
"learning_rate": 7.5858991521642126e-06,
"loss": 0.0113,
"step": 2150
},
{
"epoch": 2.601444912703191,
"grad_norm": 3.2087442874908447,
"learning_rate": 7.362784471218207e-06,
"loss": 0.0094,
"step": 2160
},
{
"epoch": 2.613485851896448,
"grad_norm": 26.5289306640625,
"learning_rate": 7.1396697902722e-06,
"loss": 0.0035,
"step": 2170
},
{
"epoch": 2.625526791089705,
"grad_norm": 0.03234562277793884,
"learning_rate": 6.916555109326194e-06,
"loss": 0.0028,
"step": 2180
},
{
"epoch": 2.637567730282962,
"grad_norm": 0.032831065356731415,
"learning_rate": 6.693440428380188e-06,
"loss": 0.0025,
"step": 2190
},
{
"epoch": 2.649608669476219,
"grad_norm": 1.0490331649780273,
"learning_rate": 6.4703257474341815e-06,
"loss": 0.0024,
"step": 2200
},
{
"epoch": 2.6616496086694763,
"grad_norm": 0.14360417425632477,
"learning_rate": 6.247211066488176e-06,
"loss": 0.0015,
"step": 2210
},
{
"epoch": 2.6736905478627335,
"grad_norm": 5.024721622467041,
"learning_rate": 6.024096385542169e-06,
"loss": 0.0042,
"step": 2220
},
{
"epoch": 2.6857314870559903,
"grad_norm": 27.92305564880371,
"learning_rate": 5.800981704596163e-06,
"loss": 0.0163,
"step": 2230
},
{
"epoch": 2.6977724262492475,
"grad_norm": 0.07593529671430588,
"learning_rate": 5.577867023650157e-06,
"loss": 0.0015,
"step": 2240
},
{
"epoch": 2.7098133654425043,
"grad_norm": 0.033851731568574905,
"learning_rate": 5.3547523427041504e-06,
"loss": 0.0002,
"step": 2250
},
{
"epoch": 2.7218543046357615,
"grad_norm": 0.08031222969293594,
"learning_rate": 5.131637661758144e-06,
"loss": 0.0013,
"step": 2260
},
{
"epoch": 2.7338952438290187,
"grad_norm": 4.381354808807373,
"learning_rate": 4.908522980812138e-06,
"loss": 0.008,
"step": 2270
},
{
"epoch": 2.745936183022276,
"grad_norm": 0.03481106460094452,
"learning_rate": 4.685408299866132e-06,
"loss": 0.0015,
"step": 2280
},
{
"epoch": 2.7579771222155327,
"grad_norm": 20.805809020996094,
"learning_rate": 4.462293618920125e-06,
"loss": 0.0034,
"step": 2290
},
{
"epoch": 2.77001806140879,
"grad_norm": 0.6730156540870667,
"learning_rate": 4.239178937974119e-06,
"loss": 0.0005,
"step": 2300
},
{
"epoch": 2.7820590006020467,
"grad_norm": 0.05091029778122902,
"learning_rate": 4.016064257028113e-06,
"loss": 0.0083,
"step": 2310
},
{
"epoch": 2.794099939795304,
"grad_norm": 10.03028392791748,
"learning_rate": 3.7929495760821063e-06,
"loss": 0.0087,
"step": 2320
},
{
"epoch": 2.806140878988561,
"grad_norm": 0.1488943099975586,
"learning_rate": 3.5698348951361e-06,
"loss": 0.0083,
"step": 2330
},
{
"epoch": 2.8181818181818183,
"grad_norm": 0.9400956034660339,
"learning_rate": 3.346720214190094e-06,
"loss": 0.0005,
"step": 2340
},
{
"epoch": 2.830222757375075,
"grad_norm": 0.054990120232105255,
"learning_rate": 3.123605533244088e-06,
"loss": 0.0022,
"step": 2350
},
{
"epoch": 2.8422636965683323,
"grad_norm": 0.18718010187149048,
"learning_rate": 2.9004908522980813e-06,
"loss": 0.0059,
"step": 2360
},
{
"epoch": 2.8543046357615895,
"grad_norm": 0.06312959641218185,
"learning_rate": 2.6773761713520752e-06,
"loss": 0.007,
"step": 2370
},
{
"epoch": 2.8663455749548463,
"grad_norm": 0.41855642199516296,
"learning_rate": 2.454261490406069e-06,
"loss": 0.0062,
"step": 2380
},
{
"epoch": 2.8783865141481035,
"grad_norm": 0.06557010114192963,
"learning_rate": 2.2311468094600625e-06,
"loss": 0.0047,
"step": 2390
},
{
"epoch": 2.8904274533413608,
"grad_norm": 0.06863750517368317,
"learning_rate": 2.0080321285140564e-06,
"loss": 0.0003,
"step": 2400
},
{
"epoch": 2.902468392534618,
"grad_norm": 0.14578412473201752,
"learning_rate": 1.78491744756805e-06,
"loss": 0.0072,
"step": 2410
},
{
"epoch": 2.9145093317278747,
"grad_norm": 0.020962489768862724,
"learning_rate": 1.561802766622044e-06,
"loss": 0.0032,
"step": 2420
},
{
"epoch": 2.926550270921132,
"grad_norm": 0.07511858642101288,
"learning_rate": 1.3386880856760376e-06,
"loss": 0.0161,
"step": 2430
},
{
"epoch": 2.9385912101143887,
"grad_norm": 12.450850486755371,
"learning_rate": 1.1155734047300313e-06,
"loss": 0.0074,
"step": 2440
},
{
"epoch": 2.950632149307646,
"grad_norm": 0.02944479137659073,
"learning_rate": 8.92458723784025e-07,
"loss": 0.0002,
"step": 2450
},
{
"epoch": 2.962673088500903,
"grad_norm": 0.387787789106369,
"learning_rate": 6.693440428380188e-07,
"loss": 0.0018,
"step": 2460
},
{
"epoch": 2.9747140276941604,
"grad_norm": 0.2808152139186859,
"learning_rate": 4.462293618920125e-07,
"loss": 0.0055,
"step": 2470
},
{
"epoch": 2.986754966887417,
"grad_norm": 0.053598176687955856,
"learning_rate": 2.2311468094600626e-07,
"loss": 0.0051,
"step": 2480
},
{
"epoch": 2.9987959060806744,
"grad_norm": 0.3022330403327942,
"learning_rate": 0.0,
"loss": 0.0003,
"step": 2490
},
{
"epoch": 2.9987959060806744,
"eval_f1": 0.9916771315123634,
"eval_loss": 0.011577575467526913,
"eval_runtime": 101.2234,
"eval_samples_per_second": 116.673,
"eval_steps_per_second": 3.655,
"step": 2490
},
{
"epoch": 2.9987959060806744,
"step": 2490,
"total_flos": 1.3193601009550295e+19,
"train_loss": 0.06868853225451277,
"train_runtime": 5741.3098,
"train_samples_per_second": 55.537,
"train_steps_per_second": 0.434
}
],
"logging_steps": 10,
"max_steps": 2490,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.3193601009550295e+19,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}