jssky committed (verified) in commit 7593141 · Parent(s): 3755aca

Training in progress, step 279, checkpoint

last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f7183e64a109a343469dca147a8c0f81c155762d2008823f579ca9d3c894683e
+ oid sha256:710f323d2d148d19a77874e7041b6682ca6fdf00fb8ff65d993905368b01d329
  size 80013120
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:05740b4cc52050b284c0a5ae3bb1a0c79e4e11d33baa793c0b7886b2990dd202
- size 41119636
+ oid sha256:d28277b8c5696e9609288685953a64988fc3bd9fe09e1d48c95ba342438e1db6
+ size 41120084
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:74d5b0d31a7bfc657111f3d8a8c89bd9f54c57945ce1f937d44749b81c417e07
+ oid sha256:d290d0f18d2c63d334eda98204765110cec7c5f5c7d088e8f0e88675b235ebea
  size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:51627bede7e359d4449d36ee1f729a3d0065d65146d217ca0847d4a1da7e2115
+ oid sha256:bed55d74992475f85034de9808b502db25c265c1fbe10ac1ead4e6ef3743a36b
  size 1064
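
The four files above are stored through Git LFS, so each diff only touches the three-line pointer file (spec version, sha256 oid, byte size), never the binary blob itself. A minimal sketch for re-verifying a downloaded blob against its pointer, assuming the checkpoint has already been fetched locally (e.g. via `git lfs pull`) and that the paths match this repo's layout:

```python
import hashlib
from pathlib import Path

def parse_pointer(text: str) -> dict:
    # A git-lfs pointer is three "key value" lines: version, oid, size.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"),
            "size": int(fields["size"])}

def verify(blob_path: Path, pointer_text: str) -> bool:
    # Compare the blob's sha256 digest and byte length to the pointer.
    want = parse_pointer(pointer_text)
    data = blob_path.read_bytes()  # fine for ~80 MB; stream for larger files
    return (hashlib.sha256(data).hexdigest() == want["oid"]
            and len(data) == want["size"])

# Checked against the new adapter pointer from this commit:
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:710f323d2d148d19a77874e7041b6682ca6fdf00fb8ff65d993905368b01d329
size 80013120
"""
print(verify(Path("last-checkpoint/adapter_model.safetensors"), pointer))
```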
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 0.5003362474781439,
+ "epoch": 0.7505043712172159,
  "eval_steps": 93,
- "global_step": 186,
+ "global_step": 279,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -1325,6 +1325,665 @@
  "eval_samples_per_second": 14.676,
  "eval_steps_per_second": 7.385,
  "step": 186
+ },
+ {
+ "epoch": 0.5030262273032953,
+ "grad_norm": 4.27438497543335,
+ "learning_rate": 0.0001034706759475182,
+ "loss": 0.7565,
+ "step": 187
+ },
+ {
+ "epoch": 0.5057162071284466,
+ "grad_norm": 6.716769218444824,
+ "learning_rate": 0.0001026032357053512,
+ "loss": 0.8371,
+ "step": 188
+ },
+ {
+ "epoch": 0.5084061869535978,
+ "grad_norm": 2.8589985370635986,
+ "learning_rate": 0.0001017355994012102,
+ "loss": 0.2835,
+ "step": 189
+ },
+ {
+ "epoch": 0.5110961667787491,
+ "grad_norm": 2.675102949142456,
+ "learning_rate": 0.00010086783238088244,
+ "loss": 0.2332,
+ "step": 190
+ },
+ {
+ "epoch": 0.5137861466039004,
+ "grad_norm": 5.922438144683838,
+ "learning_rate": 0.0001,
+ "loss": 0.3974,
+ "step": 191
+ },
+ {
+ "epoch": 0.5164761264290518,
+ "grad_norm": 9.693557739257812,
+ "learning_rate": 9.913216761911755e-05,
+ "loss": 0.8956,
+ "step": 192
+ },
+ {
+ "epoch": 0.5191661062542031,
+ "grad_norm": 5.175564765930176,
+ "learning_rate": 9.826440059878982e-05,
+ "loss": 0.4924,
+ "step": 193
+ },
+ {
+ "epoch": 0.5218560860793544,
+ "grad_norm": 4.789131164550781,
+ "learning_rate": 9.739676429464881e-05,
+ "loss": 0.6214,
+ "step": 194
+ },
+ {
+ "epoch": 0.5245460659045057,
+ "grad_norm": 4.584465026855469,
+ "learning_rate": 9.652932405248181e-05,
+ "loss": 0.6807,
+ "step": 195
+ },
+ {
+ "epoch": 0.527236045729657,
+ "grad_norm": 8.758447647094727,
+ "learning_rate": 9.566214520330966e-05,
+ "loss": 1.2434,
+ "step": 196
+ },
+ {
+ "epoch": 0.5299260255548084,
+ "grad_norm": 8.869791984558105,
+ "learning_rate": 9.479529305846652e-05,
+ "loss": 0.903,
+ "step": 197
+ },
+ {
+ "epoch": 0.5326160053799597,
+ "grad_norm": 3.716257333755493,
+ "learning_rate": 9.392883290468083e-05,
+ "loss": 0.3255,
+ "step": 198
+ },
+ {
+ "epoch": 0.535305985205111,
+ "grad_norm": 5.400476455688477,
+ "learning_rate": 9.306282999915839e-05,
+ "loss": 0.4036,
+ "step": 199
+ },
+ {
+ "epoch": 0.5379959650302623,
+ "grad_norm": 3.5948078632354736,
+ "learning_rate": 9.219734956466752e-05,
+ "loss": 0.1489,
+ "step": 200
+ },
+ {
+ "epoch": 0.5406859448554135,
+ "grad_norm": 3.1145272254943848,
+ "learning_rate": 9.133245678462663e-05,
+ "loss": 1.6383,
+ "step": 201
+ },
+ {
+ "epoch": 0.543375924680565,
+ "grad_norm": 3.916707754135132,
+ "learning_rate": 9.046821679819527e-05,
+ "loss": 1.7301,
+ "step": 202
+ },
+ {
+ "epoch": 0.5460659045057162,
+ "grad_norm": 3.5734024047851562,
+ "learning_rate": 8.960469469536786e-05,
+ "loss": 1.1493,
+ "step": 203
+ },
+ {
+ "epoch": 0.5487558843308675,
+ "grad_norm": 3.514202356338501,
+ "learning_rate": 8.874195551207174e-05,
+ "loss": 1.2045,
+ "step": 204
+ },
+ {
+ "epoch": 0.5514458641560188,
+ "grad_norm": 3.9733996391296387,
+ "learning_rate": 8.788006422526881e-05,
+ "loss": 1.8557,
+ "step": 205
+ },
+ {
+ "epoch": 0.5541358439811701,
+ "grad_norm": 4.543676853179932,
+ "learning_rate": 8.701908574806197e-05,
+ "loss": 1.3852,
+ "step": 206
+ },
+ {
+ "epoch": 0.5568258238063215,
+ "grad_norm": 3.916227102279663,
+ "learning_rate": 8.615908492480598e-05,
+ "loss": 1.1968,
+ "step": 207
+ },
+ {
+ "epoch": 0.5595158036314728,
+ "grad_norm": 4.097233295440674,
+ "learning_rate": 8.530012652622397e-05,
+ "loss": 1.2521,
+ "step": 208
+ },
+ {
+ "epoch": 0.5622057834566241,
+ "grad_norm": 3.402857780456543,
+ "learning_rate": 8.444227524452918e-05,
+ "loss": 0.8308,
+ "step": 209
+ },
+ {
+ "epoch": 0.5648957632817754,
+ "grad_norm": 4.228405952453613,
+ "learning_rate": 8.358559568855249e-05,
+ "loss": 1.1485,
+ "step": 210
+ },
+ {
+ "epoch": 0.5675857431069267,
+ "grad_norm": 3.699495553970337,
+ "learning_rate": 8.273015237887673e-05,
+ "loss": 0.8204,
+ "step": 211
+ },
+ {
+ "epoch": 0.570275722932078,
+ "grad_norm": 3.4216742515563965,
+ "learning_rate": 8.187600974297714e-05,
+ "loss": 0.7515,
+ "step": 212
+ },
+ {
+ "epoch": 0.5729657027572294,
+ "grad_norm": 3.6286754608154297,
+ "learning_rate": 8.102323211036904e-05,
+ "loss": 0.6262,
+ "step": 213
+ },
+ {
+ "epoch": 0.5756556825823806,
+ "grad_norm": 4.869045734405518,
+ "learning_rate": 8.017188370776292e-05,
+ "loss": 0.9851,
+ "step": 214
+ },
+ {
+ "epoch": 0.5783456624075319,
+ "grad_norm": 5.106837749481201,
+ "learning_rate": 7.932202865422726e-05,
+ "loss": 1.122,
+ "step": 215
+ },
+ {
+ "epoch": 0.5810356422326832,
+ "grad_norm": 4.490080833435059,
+ "learning_rate": 7.847373095635937e-05,
+ "loss": 0.8354,
+ "step": 216
+ },
+ {
+ "epoch": 0.5837256220578345,
+ "grad_norm": 3.4671199321746826,
+ "learning_rate": 7.762705450346462e-05,
+ "loss": 0.8034,
+ "step": 217
+ },
+ {
+ "epoch": 0.5864156018829859,
+ "grad_norm": 5.198472499847412,
+ "learning_rate": 7.678206306274495e-05,
+ "loss": 0.9827,
+ "step": 218
+ },
+ {
+ "epoch": 0.5891055817081372,
+ "grad_norm": 3.910879135131836,
+ "learning_rate": 7.59388202744959e-05,
+ "loss": 0.5839,
+ "step": 219
+ },
+ {
+ "epoch": 0.5917955615332885,
+ "grad_norm": 3.6499719619750977,
+ "learning_rate": 7.509738964731389e-05,
+ "loss": 0.4724,
+ "step": 220
+ },
+ {
+ "epoch": 0.5944855413584398,
+ "grad_norm": 4.461284160614014,
+ "learning_rate": 7.425783455331281e-05,
+ "loss": 0.6628,
+ "step": 221
+ },
+ {
+ "epoch": 0.5971755211835911,
+ "grad_norm": 3.570058584213257,
+ "learning_rate": 7.342021822335143e-05,
+ "loss": 0.455,
+ "step": 222
+ },
+ {
+ "epoch": 0.5998655010087425,
+ "grad_norm": 4.93247652053833,
+ "learning_rate": 7.258460374227085e-05,
+ "loss": 1.1265,
+ "step": 223
+ },
+ {
+ "epoch": 0.6025554808338938,
+ "grad_norm": 3.4849631786346436,
+ "learning_rate": 7.175105404414362e-05,
+ "loss": 0.3686,
+ "step": 224
+ },
+ {
+ "epoch": 0.605245460659045,
+ "grad_norm": 2.503511428833008,
+ "learning_rate": 7.091963190753376e-05,
+ "loss": 0.3223,
+ "step": 225
+ },
+ {
+ "epoch": 0.6079354404841963,
+ "grad_norm": 4.293323040008545,
+ "learning_rate": 7.009039995076844e-05,
+ "loss": 0.5115,
+ "step": 226
+ },
+ {
+ "epoch": 0.6106254203093476,
+ "grad_norm": 4.681167125701904,
+ "learning_rate": 6.926342062722223e-05,
+ "loss": 0.8861,
+ "step": 227
+ },
+ {
+ "epoch": 0.613315400134499,
+ "grad_norm": 4.363972187042236,
+ "learning_rate": 6.843875622061304e-05,
+ "loss": 0.714,
+ "step": 228
+ },
+ {
+ "epoch": 0.6160053799596503,
+ "grad_norm": 4.711676597595215,
+ "learning_rate": 6.761646884031164e-05,
+ "loss": 0.9218,
+ "step": 229
+ },
+ {
+ "epoch": 0.6186953597848016,
+ "grad_norm": 5.378860950469971,
+ "learning_rate": 6.679662041666362e-05,
+ "loss": 0.8466,
+ "step": 230
+ },
+ {
+ "epoch": 0.6213853396099529,
+ "grad_norm": 4.945761203765869,
+ "learning_rate": 6.597927269632526e-05,
+ "loss": 0.8303,
+ "step": 231
+ },
+ {
+ "epoch": 0.6240753194351042,
+ "grad_norm": 5.643571853637695,
+ "learning_rate": 6.516448723761315e-05,
+ "loss": 0.9418,
+ "step": 232
+ },
+ {
+ "epoch": 0.6267652992602556,
+ "grad_norm": 3.7865352630615234,
+ "learning_rate": 6.435232540586763e-05,
+ "loss": 0.447,
+ "step": 233
+ },
+ {
+ "epoch": 0.6294552790854069,
+ "grad_norm": 4.775557994842529,
+ "learning_rate": 6.354284836883156e-05,
+ "loss": 0.7068,
+ "step": 234
+ },
+ {
+ "epoch": 0.6321452589105582,
+ "grad_norm": 4.313781261444092,
+ "learning_rate": 6.273611709204304e-05,
+ "loss": 0.4621,
+ "step": 235
+ },
+ {
+ "epoch": 0.6348352387357095,
+ "grad_norm": 3.1749980449676514,
+ "learning_rate": 6.193219233424414e-05,
+ "loss": 0.2996,
+ "step": 236
+ },
+ {
+ "epoch": 0.6375252185608608,
+ "grad_norm": 5.823219299316406,
+ "learning_rate": 6.11311346428046e-05,
+ "loss": 0.7987,
+ "step": 237
+ },
+ {
+ "epoch": 0.6402151983860122,
+ "grad_norm": 4.825180530548096,
+ "learning_rate": 6.033300434916203e-05,
+ "loss": 0.9309,
+ "step": 238
+ },
+ {
+ "epoch": 0.6429051782111634,
+ "grad_norm": 7.609663486480713,
+ "learning_rate": 5.9537861564277654e-05,
+ "loss": 0.6973,
+ "step": 239
+ },
+ {
+ "epoch": 0.6455951580363147,
+ "grad_norm": 3.5007476806640625,
+ "learning_rate": 5.8745766174109495e-05,
+ "loss": 0.5831,
+ "step": 240
+ },
+ {
+ "epoch": 0.648285137861466,
+ "grad_norm": 4.732518196105957,
+ "learning_rate": 5.795677783510187e-05,
+ "loss": 0.447,
+ "step": 241
+ },
+ {
+ "epoch": 0.6509751176866173,
+ "grad_norm": 4.874922752380371,
+ "learning_rate": 5.7170955969692265e-05,
+ "loss": 0.429,
+ "step": 242
+ },
+ {
+ "epoch": 0.6536650975117687,
+ "grad_norm": 2.738624095916748,
+ "learning_rate": 5.638835976183627e-05,
+ "loss": 0.316,
+ "step": 243
+ },
+ {
+ "epoch": 0.65635507733692,
+ "grad_norm": 4.868707180023193,
+ "learning_rate": 5.5609048152549794e-05,
+ "loss": 0.528,
+ "step": 244
+ },
+ {
+ "epoch": 0.6590450571620713,
+ "grad_norm": 2.545112371444702,
+ "learning_rate": 5.483307983547026e-05,
+ "loss": 0.2108,
+ "step": 245
+ },
+ {
+ "epoch": 0.6617350369872226,
+ "grad_norm": 5.225026607513428,
+ "learning_rate": 5.406051325243586e-05,
+ "loss": 0.6883,
+ "step": 246
+ },
+ {
+ "epoch": 0.6644250168123739,
+ "grad_norm": 6.781031608581543,
+ "learning_rate": 5.329140658908423e-05,
+ "loss": 0.7684,
+ "step": 247
+ },
+ {
+ "epoch": 0.6671149966375253,
+ "grad_norm": 4.9204630851745605,
+ "learning_rate": 5.2525817770470084e-05,
+ "loss": 0.529,
+ "step": 248
+ },
+ {
+ "epoch": 0.6698049764626766,
+ "grad_norm": 4.677596092224121,
+ "learning_rate": 5.1763804456702545e-05,
+ "loss": 0.1916,
+ "step": 249
+ },
+ {
+ "epoch": 0.6724949562878278,
+ "grad_norm": 5.289106369018555,
+ "learning_rate": 5.1005424038602724e-05,
+ "loss": 0.5018,
+ "step": 250
+ },
+ {
+ "epoch": 0.6751849361129791,
+ "grad_norm": 3.4332501888275146,
+ "learning_rate": 5.025073363338111e-05,
+ "loss": 1.4558,
+ "step": 251
+ },
+ {
+ "epoch": 0.6778749159381304,
+ "grad_norm": 3.6167006492614746,
+ "learning_rate": 4.949979008033596e-05,
+ "loss": 1.3181,
+ "step": 252
+ },
+ {
+ "epoch": 0.6805648957632818,
+ "grad_norm": 4.852591514587402,
+ "learning_rate": 4.8752649936572304e-05,
+ "loss": 1.5339,
+ "step": 253
+ },
+ {
+ "epoch": 0.6832548755884331,
+ "grad_norm": 4.116457462310791,
+ "learning_rate": 4.800936947274255e-05,
+ "loss": 1.8746,
+ "step": 254
+ },
+ {
+ "epoch": 0.6859448554135844,
+ "grad_norm": 3.9800620079040527,
+ "learning_rate": 4.7270004668808397e-05,
+ "loss": 1.8474,
+ "step": 255
+ },
+ {
+ "epoch": 0.6886348352387357,
+ "grad_norm": 3.9870662689208984,
+ "learning_rate": 4.65346112098246e-05,
+ "loss": 1.148,
+ "step": 256
+ },
+ {
+ "epoch": 0.691324815063887,
+ "grad_norm": 4.0448503494262695,
+ "learning_rate": 4.5803244481745275e-05,
+ "loss": 1.0557,
+ "step": 257
+ },
+ {
+ "epoch": 0.6940147948890383,
+ "grad_norm": 4.282130241394043,
+ "learning_rate": 4.5075959567252335e-05,
+ "loss": 1.2731,
+ "step": 258
+ },
+ {
+ "epoch": 0.6967047747141897,
+ "grad_norm": 4.488638401031494,
+ "learning_rate": 4.435281124160715e-05,
+ "loss": 1.3722,
+ "step": 259
+ },
+ {
+ "epoch": 0.699394754539341,
+ "grad_norm": 3.9812207221984863,
+ "learning_rate": 4.363385396852491e-05,
+ "loss": 1.3536,
+ "step": 260
+ },
+ {
+ "epoch": 0.7020847343644923,
+ "grad_norm": 5.047592639923096,
+ "learning_rate": 4.291914189607297e-05,
+ "loss": 0.7947,
+ "step": 261
+ },
+ {
+ "epoch": 0.7047747141896435,
+ "grad_norm": 4.314056396484375,
+ "learning_rate": 4.220872885259247e-05,
+ "loss": 0.6368,
+ "step": 262
+ },
+ {
+ "epoch": 0.7074646940147948,
+ "grad_norm": 4.209965705871582,
+ "learning_rate": 4.1502668342644455e-05,
+ "loss": 0.7786,
+ "step": 263
+ },
+ {
+ "epoch": 0.7101546738399462,
+ "grad_norm": 5.0463151931762695,
+ "learning_rate": 4.080101354298016e-05,
+ "loss": 0.5957,
+ "step": 264
+ },
+ {
+ "epoch": 0.7128446536650975,
+ "grad_norm": 5.0057373046875,
+ "learning_rate": 4.0103817298535794e-05,
+ "loss": 1.1632,
+ "step": 265
+ },
+ {
+ "epoch": 0.7155346334902488,
+ "grad_norm": 4.640236854553223,
+ "learning_rate": 3.9411132118452896e-05,
+ "loss": 1.523,
+ "step": 266
+ },
+ {
+ "epoch": 0.7182246133154001,
+ "grad_norm": 3.843372344970703,
+ "learning_rate": 3.872301017212337e-05,
+ "loss": 1.0536,
+ "step": 267
+ },
+ {
+ "epoch": 0.7209145931405514,
+ "grad_norm": 4.180513858795166,
+ "learning_rate": 3.8039503285260506e-05,
+ "loss": 0.7683,
+ "step": 268
+ },
+ {
+ "epoch": 0.7236045729657028,
+ "grad_norm": 7.624268054962158,
+ "learning_rate": 3.73606629359955e-05,
+ "loss": 0.817,
+ "step": 269
+ },
+ {
+ "epoch": 0.7262945527908541,
+ "grad_norm": 5.006471633911133,
+ "learning_rate": 3.6686540251000756e-05,
+ "loss": 0.899,
+ "step": 270
+ },
+ {
+ "epoch": 0.7289845326160054,
+ "grad_norm": 4.611303806304932,
+ "learning_rate": 3.6017186001639036e-05,
+ "loss": 0.3317,
+ "step": 271
+ },
+ {
+ "epoch": 0.7316745124411567,
+ "grad_norm": 4.701048374176025,
+ "learning_rate": 3.535265060013965e-05,
+ "loss": 0.9114,
+ "step": 272
+ },
+ {
+ "epoch": 0.734364492266308,
+ "grad_norm": 5.010507583618164,
+ "learning_rate": 3.4692984095801796e-05,
+ "loss": 0.6747,
+ "step": 273
+ },
+ {
+ "epoch": 0.7370544720914594,
+ "grad_norm": 4.114429473876953,
+ "learning_rate": 3.4038236171224946e-05,
+ "loss": 0.5408,
+ "step": 274
+ },
+ {
+ "epoch": 0.7397444519166106,
+ "grad_norm": 5.027583122253418,
+ "learning_rate": 3.3388456138567225e-05,
+ "loss": 0.4589,
+ "step": 275
+ },
+ {
+ "epoch": 0.7424344317417619,
+ "grad_norm": 5.864731788635254,
+ "learning_rate": 3.274369293583121e-05,
+ "loss": 0.9165,
+ "step": 276
+ },
+ {
+ "epoch": 0.7451244115669132,
+ "grad_norm": 4.747946739196777,
+ "learning_rate": 3.210399512317849e-05,
+ "loss": 0.5087,
+ "step": 277
+ },
+ {
+ "epoch": 0.7478143913920645,
+ "grad_norm": 3.576078414916992,
+ "learning_rate": 3.146941087927203e-05,
+ "loss": 0.5728,
+ "step": 278
+ },
+ {
+ "epoch": 0.7505043712172159,
+ "grad_norm": 2.740264892578125,
+ "learning_rate": 3.0839987997647935e-05,
+ "loss": 0.2307,
+ "step": 279
+ },
+ {
+ "epoch": 0.7505043712172159,
+ "eval_loss": 0.7389675974845886,
+ "eval_runtime": 10.7313,
+ "eval_samples_per_second": 14.63,
+ "eval_steps_per_second": 7.362,
+ "step": 279
  }
  ],
  "logging_steps": 1,
@@ -1344,7 +2003,7 @@
  "attributes": {}
  }
  },
- "total_flos": 6.05894718038999e+16,
+ "total_flos": 9.088420770584986e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
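
The updated trainer_state.json carries the full per-step log (loss, grad_norm, learning_rate for steps 187–279, plus the eval entry at step 279 with eval_loss ≈ 0.739). A minimal sketch for inspecting the checkpoint offline, assuming the standard Hugging Face `Trainer` layout where these records live under the `log_history` key:

```python
import json
from pathlib import Path

# Load the checkpoint's trainer state; per-step records are assumed to
# sit under "log_history", as Trainer writes them.
state = json.loads(Path("last-checkpoint/trainer_state.json").read_text())

train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"epoch {state['epoch']:.4f}, global_step {state['global_step']}")
print(f"last train loss: {train_logs[-1]['loss']} at step {train_logs[-1]['step']}")
for e in eval_logs:
    print(f"eval_loss {e['eval_loss']:.4f} at step {e['step']}")
```

With the same assumption, training can be resumed from this directory via `trainer.train(resume_from_checkpoint="last-checkpoint")`, which restores the adapter weights, optimizer, scheduler, and RNG state committed above.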