alchemist69
committed on
Training in progress, step 300, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:92538451a6996f1a7e3b9b83864404bb3ccb343d832ec9907e58812c5240d308
 size 42487072
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8ce84646d34527628034cf61a4ed81334ebb0c33edbbc6952de45d82335f4167
+size 21735482
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ce57a800b7b8769229b1b1de3215a90f18a87e85a9d6a01a672a395af1e6a9b6
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ebdb14d51e77eb18f9d6184de19bfac710da5493717593749289db85474b6091
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 3.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 3.1531736850738525,
+  "best_model_checkpoint": "miner_id_24/checkpoint-300",
+  "epoch": 0.06139049470506983,
   "eval_steps": 100,
-  "global_step":
+  "global_step": 300,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -1431,6 +1431,714 @@
       "eval_samples_per_second": 173.872,
       "eval_steps_per_second": 43.479,
       "step": 200
+    },
+    {
+      "epoch": 0.04113163145239679,
+      "grad_norm": 3.9187700748443604,
+      "learning_rate": 6.883142508466054e-05,
+      "loss": 13.2198,
+      "step": 201
+    },
+    {
+      "epoch": 0.04133626643474702,
+      "grad_norm": 5.088418006896973,
+      "learning_rate": 6.852787187549182e-05,
+      "loss": 12.934,
+      "step": 202
+    },
+    {
+      "epoch": 0.04154090141709725,
+      "grad_norm": 4.74566125869751,
+      "learning_rate": 6.82235249939575e-05,
+      "loss": 13.3393,
+      "step": 203
+    },
+    {
+      "epoch": 0.04174553639944749,
+      "grad_norm": 4.898460865020752,
+      "learning_rate": 6.7918397477265e-05,
+      "loss": 13.5989,
+      "step": 204
+    },
+    {
+      "epoch": 0.04195017138179772,
+      "grad_norm": 4.619757652282715,
+      "learning_rate": 6.761250239606169e-05,
+      "loss": 13.0342,
+      "step": 205
+    },
+    {
+      "epoch": 0.04215480636414795,
+      "grad_norm": 4.482340335845947,
+      "learning_rate": 6.730585285387465e-05,
+      "loss": 13.0489,
+      "step": 206
+    },
+    {
+      "epoch": 0.04235944134649818,
+      "grad_norm": 4.1753644943237305,
+      "learning_rate": 6.699846198654971e-05,
+      "loss": 13.2165,
+      "step": 207
+    },
+    {
+      "epoch": 0.04256407632884842,
+      "grad_norm": 3.9566304683685303,
+      "learning_rate": 6.669034296168855e-05,
+      "loss": 13.2601,
+      "step": 208
+    },
+    {
+      "epoch": 0.04276871131119865,
+      "grad_norm": 4.045615196228027,
+      "learning_rate": 6.638150897808468e-05,
+      "loss": 13.0854,
+      "step": 209
+    },
+    {
+      "epoch": 0.04297334629354888,
+      "grad_norm": 3.9672138690948486,
+      "learning_rate": 6.607197326515808e-05,
+      "loss": 13.5277,
+      "step": 210
+    },
+    {
+      "epoch": 0.04317798127589911,
+      "grad_norm": 3.8964602947235107,
+      "learning_rate": 6.57617490823885e-05,
+      "loss": 13.3445,
+      "step": 211
+    },
+    {
+      "epoch": 0.043382616258249344,
+      "grad_norm": 3.9119648933410645,
+      "learning_rate": 6.545084971874738e-05,
+      "loss": 12.763,
+      "step": 212
+    },
+    {
+      "epoch": 0.04358725124059958,
+      "grad_norm": 4.09339714050293,
+      "learning_rate": 6.513928849212873e-05,
+      "loss": 13.1653,
+      "step": 213
+    },
+    {
+      "epoch": 0.043791886222949813,
+      "grad_norm": 4.33394193649292,
+      "learning_rate": 6.482707874877854e-05,
+      "loss": 13.1689,
+      "step": 214
+    },
+    {
+      "epoch": 0.043996521205300045,
+      "grad_norm": 4.071203231811523,
+      "learning_rate": 6.451423386272312e-05,
+      "loss": 12.9157,
+      "step": 215
+    },
+    {
+      "epoch": 0.044201156187650276,
+      "grad_norm": 4.155096054077148,
+      "learning_rate": 6.420076723519614e-05,
+      "loss": 12.9944,
+      "step": 216
+    },
+    {
+      "epoch": 0.044405791170000514,
+      "grad_norm": 4.474510669708252,
+      "learning_rate": 6.388669229406462e-05,
+      "loss": 12.9211,
+      "step": 217
+    },
+    {
+      "epoch": 0.044610426152350745,
+      "grad_norm": 4.203741550445557,
+      "learning_rate": 6.357202249325371e-05,
+      "loss": 12.5732,
+      "step": 218
+    },
+    {
+      "epoch": 0.044815061134700976,
+      "grad_norm": 4.361083984375,
+      "learning_rate": 6.32567713121704e-05,
+      "loss": 13.1363,
+      "step": 219
+    },
+    {
+      "epoch": 0.04501969611705121,
+      "grad_norm": 4.626219749450684,
+      "learning_rate": 6.294095225512603e-05,
+      "loss": 13.1044,
+      "step": 220
+    },
+    {
+      "epoch": 0.045224331099401445,
+      "grad_norm": 4.69849967956543,
+      "learning_rate": 6.26245788507579e-05,
+      "loss": 12.7933,
+      "step": 221
+    },
+    {
+      "epoch": 0.045428966081751676,
+      "grad_norm": 4.679666996002197,
+      "learning_rate": 6.230766465144967e-05,
+      "loss": 13.1581,
+      "step": 222
+    },
+    {
+      "epoch": 0.04563360106410191,
+      "grad_norm": 4.953638553619385,
+      "learning_rate": 6.199022323275083e-05,
+      "loss": 13.0212,
+      "step": 223
+    },
+    {
+      "epoch": 0.04583823604645214,
+      "grad_norm": 4.850236415863037,
+      "learning_rate": 6.167226819279528e-05,
+      "loss": 13.0019,
+      "step": 224
+    },
+    {
+      "epoch": 0.046042871028802376,
+      "grad_norm": 4.989190578460693,
+      "learning_rate": 6.135381315171867e-05,
+      "loss": 12.2553,
+      "step": 225
+    },
+    {
+      "epoch": 0.04624750601115261,
+      "grad_norm": 4.897017478942871,
+      "learning_rate": 6.103487175107507e-05,
+      "loss": 13.0096,
+      "step": 226
+    },
+    {
+      "epoch": 0.04645214099350284,
+      "grad_norm": 4.998581886291504,
+      "learning_rate": 6.071545765325254e-05,
+      "loss": 12.4746,
+      "step": 227
+    },
+    {
+      "epoch": 0.04665677597585307,
+      "grad_norm": 5.277119159698486,
+      "learning_rate": 6.0395584540887963e-05,
+      "loss": 12.6303,
+      "step": 228
+    },
+    {
+      "epoch": 0.04686141095820331,
+      "grad_norm": 5.548853874206543,
+      "learning_rate": 6.007526611628086e-05,
+      "loss": 12.8215,
+      "step": 229
+    },
+    {
+      "epoch": 0.04706604594055354,
+      "grad_norm": 5.378997325897217,
+      "learning_rate": 5.9754516100806423e-05,
+      "loss": 13.1972,
+      "step": 230
+    },
+    {
+      "epoch": 0.04727068092290377,
+      "grad_norm": 5.815462589263916,
+      "learning_rate": 5.9433348234327765e-05,
+      "loss": 13.2821,
+      "step": 231
+    },
+    {
+      "epoch": 0.047475315905254,
+      "grad_norm": 5.872306823730469,
+      "learning_rate": 5.911177627460739e-05,
+      "loss": 13.199,
+      "step": 232
+    },
+    {
+      "epoch": 0.04767995088760424,
+      "grad_norm": 5.859600067138672,
+      "learning_rate": 5.8789813996717736e-05,
+      "loss": 12.6879,
+      "step": 233
+    },
+    {
+      "epoch": 0.04788458586995447,
+      "grad_norm": 6.172786712646484,
+      "learning_rate": 5.8467475192451226e-05,
+      "loss": 13.2259,
+      "step": 234
+    },
+    {
+      "epoch": 0.0480892208523047,
+      "grad_norm": 6.31078577041626,
+      "learning_rate": 5.814477366972945e-05,
+      "loss": 12.3852,
+      "step": 235
+    },
+    {
+      "epoch": 0.04829385583465493,
+      "grad_norm": 6.534313201904297,
+      "learning_rate": 5.782172325201155e-05,
+      "loss": 13.3609,
+      "step": 236
+    },
+    {
+      "epoch": 0.04849849081700517,
+      "grad_norm": 6.585941314697266,
+      "learning_rate": 5.749833777770225e-05,
+      "loss": 12.9351,
+      "step": 237
+    },
+    {
+      "epoch": 0.0487031257993554,
+      "grad_norm": 6.7909159660339355,
+      "learning_rate": 5.717463109955896e-05,
+      "loss": 12.2785,
+      "step": 238
+    },
+    {
+      "epoch": 0.04890776078170563,
+      "grad_norm": 7.011481761932373,
+      "learning_rate": 5.685061708409841e-05,
+      "loss": 13.0815,
+      "step": 239
+    },
+    {
+      "epoch": 0.04911239576405586,
+      "grad_norm": 7.400633335113525,
+      "learning_rate": 5.6526309611002594e-05,
+      "loss": 12.5917,
+      "step": 240
+    },
+    {
+      "epoch": 0.0493170307464061,
+      "grad_norm": 6.944792747497559,
+      "learning_rate": 5.6201722572524275e-05,
+      "loss": 12.8307,
+      "step": 241
+    },
+    {
+      "epoch": 0.04952166572875633,
+      "grad_norm": 9.408550262451172,
+      "learning_rate": 5.587686987289189e-05,
+      "loss": 13.0507,
+      "step": 242
+    },
+    {
+      "epoch": 0.049726300711106564,
+      "grad_norm": 8.005476951599121,
+      "learning_rate": 5.5551765427713884e-05,
+      "loss": 12.5769,
+      "step": 243
+    },
+    {
+      "epoch": 0.049930935693456795,
+      "grad_norm": 8.190591812133789,
+      "learning_rate": 5.522642316338268e-05,
+      "loss": 12.2517,
+      "step": 244
+    },
+    {
+      "epoch": 0.050135570675807026,
+      "grad_norm": 9.257965087890625,
+      "learning_rate": 5.490085701647805e-05,
+      "loss": 13.0658,
+      "step": 245
+    },
+    {
+      "epoch": 0.050340205658157264,
+      "grad_norm": 9.173686027526855,
+      "learning_rate": 5.457508093317013e-05,
+      "loss": 12.6359,
+      "step": 246
+    },
+    {
+      "epoch": 0.050544840640507495,
+      "grad_norm": 9.59607982635498,
+      "learning_rate": 5.4249108868622086e-05,
+      "loss": 13.0733,
+      "step": 247
+    },
+    {
+      "epoch": 0.050749475622857726,
+      "grad_norm": 10.842556953430176,
+      "learning_rate": 5.392295478639225e-05,
+      "loss": 12.6057,
+      "step": 248
+    },
+    {
+      "epoch": 0.05095411060520796,
+      "grad_norm": 11.467256546020508,
+      "learning_rate": 5.359663265783598e-05,
+      "loss": 12.7772,
+      "step": 249
+    },
+    {
+      "epoch": 0.051158745587558195,
+      "grad_norm": 16.473846435546875,
+      "learning_rate": 5.327015646150716e-05,
+      "loss": 12.2446,
+      "step": 250
+    },
+    {
+      "epoch": 0.051363380569908426,
+      "grad_norm": 2.952035903930664,
+      "learning_rate": 5.294354018255945e-05,
+      "loss": 13.1735,
+      "step": 251
+    },
+    {
+      "epoch": 0.05156801555225866,
+      "grad_norm": 3.22007417678833,
+      "learning_rate": 5.26167978121472e-05,
+      "loss": 13.0792,
+      "step": 252
+    },
+    {
+      "epoch": 0.05177265053460889,
+      "grad_norm": 3.549088716506958,
+      "learning_rate": 5.228994334682604e-05,
+      "loss": 12.7948,
+      "step": 253
+    },
+    {
+      "epoch": 0.051977285516959126,
+      "grad_norm": 3.728848695755005,
+      "learning_rate": 5.196299078795344e-05,
+      "loss": 13.0125,
+      "step": 254
+    },
+    {
+      "epoch": 0.05218192049930936,
+      "grad_norm": 3.749281167984009,
+      "learning_rate": 5.1635954141088813e-05,
+      "loss": 13.2622,
+      "step": 255
+    },
+    {
+      "epoch": 0.05238655548165959,
+      "grad_norm": 3.824709415435791,
+      "learning_rate": 5.1308847415393666e-05,
+      "loss": 12.9234,
+      "step": 256
+    },
+    {
+      "epoch": 0.05259119046400982,
+      "grad_norm": 3.764427661895752,
+      "learning_rate": 5.0981684623031415e-05,
+      "loss": 13.2274,
+      "step": 257
+    },
+    {
+      "epoch": 0.05279582544636006,
+      "grad_norm": 3.7923285961151123,
+      "learning_rate": 5.0654479778567223e-05,
+      "loss": 13.1988,
+      "step": 258
+    },
+    {
+      "epoch": 0.05300046042871029,
+      "grad_norm": 3.733365535736084,
+      "learning_rate": 5.0327246898367597e-05,
+      "loss": 12.5384,
+      "step": 259
+    },
+    {
+      "epoch": 0.05320509541106052,
+      "grad_norm": 3.8030307292938232,
+      "learning_rate": 5e-05,
+      "loss": 12.991,
+      "step": 260
+    },
+    {
+      "epoch": 0.05340973039341075,
+      "grad_norm": 3.7069780826568604,
+      "learning_rate": 4.9672753101632415e-05,
+      "loss": 12.7534,
+      "step": 261
+    },
+    {
+      "epoch": 0.05361436537576099,
+      "grad_norm": 3.764336109161377,
+      "learning_rate": 4.934552022143279e-05,
+      "loss": 12.8254,
+      "step": 262
+    },
+    {
+      "epoch": 0.05381900035811122,
+      "grad_norm": 3.753891944885254,
+      "learning_rate": 4.901831537696859e-05,
+      "loss": 12.6035,
+      "step": 263
+    },
+    {
+      "epoch": 0.05402363534046145,
+      "grad_norm": 3.9714443683624268,
+      "learning_rate": 4.869115258460635e-05,
+      "loss": 12.6499,
+      "step": 264
+    },
+    {
+      "epoch": 0.05422827032281168,
+      "grad_norm": 3.999743938446045,
+      "learning_rate": 4.83640458589112e-05,
+      "loss": 13.0991,
+      "step": 265
+    },
+    {
+      "epoch": 0.05443290530516192,
+      "grad_norm": 3.9809932708740234,
+      "learning_rate": 4.8037009212046586e-05,
+      "loss": 12.6208,
+      "step": 266
+    },
+    {
+      "epoch": 0.05463754028751215,
+      "grad_norm": 4.165307521820068,
+      "learning_rate": 4.7710056653173976e-05,
+      "loss": 13.0348,
+      "step": 267
+    },
+    {
+      "epoch": 0.05484217526986238,
+      "grad_norm": 4.428051948547363,
+      "learning_rate": 4.738320218785281e-05,
+      "loss": 13.4851,
+      "step": 268
+    },
+    {
+      "epoch": 0.055046810252212613,
+      "grad_norm": 4.276752948760986,
+      "learning_rate": 4.7056459817440544e-05,
+      "loss": 12.8883,
+      "step": 269
+    },
+    {
+      "epoch": 0.05525144523456285,
+      "grad_norm": 4.741238594055176,
+      "learning_rate": 4.6729843538492847e-05,
+      "loss": 13.3597,
+      "step": 270
+    },
+    {
+      "epoch": 0.05545608021691308,
+      "grad_norm": 4.348086833953857,
+      "learning_rate": 4.640336734216403e-05,
+      "loss": 12.9206,
+      "step": 271
+    },
+    {
+      "epoch": 0.055660715199263314,
+      "grad_norm": 4.487641334533691,
+      "learning_rate": 4.607704521360776e-05,
+      "loss": 12.7532,
+      "step": 272
+    },
+    {
+      "epoch": 0.055865350181613545,
+      "grad_norm": 4.975533485412598,
+      "learning_rate": 4.575089113137792e-05,
+      "loss": 12.5924,
+      "step": 273
+    },
+    {
+      "epoch": 0.05606998516396378,
+      "grad_norm": 4.721080303192139,
+      "learning_rate": 4.542491906682989e-05,
+      "loss": 12.9426,
+      "step": 274
+    },
+    {
+      "epoch": 0.056274620146314014,
+      "grad_norm": 4.957543849945068,
+      "learning_rate": 4.509914298352197e-05,
+      "loss": 12.5506,
+      "step": 275
+    },
+    {
+      "epoch": 0.056479255128664245,
+      "grad_norm": 4.958243370056152,
+      "learning_rate": 4.477357683661734e-05,
+      "loss": 12.7699,
+      "step": 276
+    },
+    {
+      "epoch": 0.056683890111014476,
+      "grad_norm": 5.35684061050415,
+      "learning_rate": 4.444823457228612e-05,
+      "loss": 12.8455,
+      "step": 277
+    },
+    {
+      "epoch": 0.056888525093364714,
+      "grad_norm": 5.440086364746094,
+      "learning_rate": 4.412313012710813e-05,
+      "loss": 13.7743,
+      "step": 278
+    },
+    {
+      "epoch": 0.057093160075714945,
+      "grad_norm": 5.20829439163208,
+      "learning_rate": 4.379827742747575e-05,
+      "loss": 13.318,
+      "step": 279
+    },
+    {
+      "epoch": 0.057297795058065176,
+      "grad_norm": 5.2258405685424805,
+      "learning_rate": 4.347369038899744e-05,
+      "loss": 13.0874,
+      "step": 280
+    },
+    {
+      "epoch": 0.05750243004041541,
+      "grad_norm": 5.654691219329834,
+      "learning_rate": 4.3149382915901606e-05,
+      "loss": 12.4869,
+      "step": 281
+    },
+    {
+      "epoch": 0.05770706502276564,
+      "grad_norm": 5.957024097442627,
+      "learning_rate": 4.282536890044104e-05,
+      "loss": 12.8174,
+      "step": 282
+    },
+    {
+      "epoch": 0.057911700005115876,
+      "grad_norm": 6.341736316680908,
+      "learning_rate": 4.250166222229774e-05,
+      "loss": 12.6841,
+      "step": 283
+    },
+    {
+      "epoch": 0.05811633498746611,
+      "grad_norm": 6.56013822555542,
+      "learning_rate": 4.2178276747988446e-05,
+      "loss": 13.0789,
+      "step": 284
+    },
+    {
+      "epoch": 0.05832096996981634,
+      "grad_norm": 6.450329780578613,
+      "learning_rate": 4.185522633027057e-05,
+      "loss": 12.6028,
+      "step": 285
+    },
+    {
+      "epoch": 0.05852560495216657,
+      "grad_norm": 6.356710433959961,
+      "learning_rate": 4.153252480754877e-05,
+      "loss": 13.0871,
+      "step": 286
+    },
+    {
+      "epoch": 0.05873023993451681,
+      "grad_norm": 6.647814750671387,
+      "learning_rate": 4.1210186003282275e-05,
+      "loss": 12.6458,
+      "step": 287
+    },
+    {
+      "epoch": 0.05893487491686704,
+      "grad_norm": 6.441559314727783,
+      "learning_rate": 4.088822372539263e-05,
+      "loss": 12.2483,
+      "step": 288
+    },
+    {
+      "epoch": 0.05913950989921727,
+      "grad_norm": 8.019023895263672,
+      "learning_rate": 4.0566651765672246e-05,
+      "loss": 12.7241,
+      "step": 289
+    },
+    {
+      "epoch": 0.0593441448815675,
+      "grad_norm": 7.507869720458984,
+      "learning_rate": 4.0245483899193595e-05,
+      "loss": 13.3737,
+      "step": 290
+    },
+    {
+      "epoch": 0.05954877986391774,
+      "grad_norm": 7.296957015991211,
+      "learning_rate": 3.992473388371915e-05,
+      "loss": 12.6952,
+      "step": 291
+    },
+    {
+      "epoch": 0.05975341484626797,
+      "grad_norm": 8.110812187194824,
+      "learning_rate": 3.960441545911204e-05,
+      "loss": 12.2771,
+      "step": 292
+    },
+    {
+      "epoch": 0.0599580498286182,
+      "grad_norm": 8.923057556152344,
+      "learning_rate": 3.928454234674747e-05,
+      "loss": 12.7108,
+      "step": 293
+    },
+    {
+      "epoch": 0.06016268481096843,
+      "grad_norm": 10.090682983398438,
+      "learning_rate": 3.896512824892495e-05,
+      "loss": 13.1477,
+      "step": 294
+    },
+    {
+      "epoch": 0.06036731979331867,
+      "grad_norm": 8.957847595214844,
+      "learning_rate": 3.864618684828134e-05,
+      "loss": 11.7159,
+      "step": 295
+    },
+    {
+      "epoch": 0.0605719547756689,
+      "grad_norm": 10.131745338439941,
+      "learning_rate": 3.832773180720475e-05,
+      "loss": 12.191,
+      "step": 296
+    },
+    {
+      "epoch": 0.06077658975801913,
+      "grad_norm": 10.587480545043945,
+      "learning_rate": 3.800977676724919e-05,
+      "loss": 12.6343,
+      "step": 297
+    },
+    {
+      "epoch": 0.060981224740369364,
+      "grad_norm": 11.739582061767578,
+      "learning_rate": 3.769233534855035e-05,
+      "loss": 12.5622,
+      "step": 298
+    },
+    {
+      "epoch": 0.0611858597227196,
+      "grad_norm": 14.421760559082031,
+      "learning_rate": 3.73754211492421e-05,
+      "loss": 12.8323,
+      "step": 299
+    },
+    {
+      "epoch": 0.06139049470506983,
+      "grad_norm": 17.0710391998291,
+      "learning_rate": 3.705904774487396e-05,
+      "loss": 11.9398,
+      "step": 300
+    },
+    {
+      "epoch": 0.06139049470506983,
+      "eval_loss": 3.1531736850738525,
+      "eval_runtime": 47.3782,
+      "eval_samples_per_second": 173.709,
+      "eval_steps_per_second": 43.438,
+      "step": 300
     }
   ],
   "logging_steps": 1,
@@ -1459,7 +2167,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 5661823290310656.0,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null