CocoRoF committed
Commit e8dfede · verified · 1 Parent(s): 2dd86cc

Training in progress, step 5000, checkpoint

last-checkpoint/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3dc83da414bac75b82e0faabe85965bc1ed69e5ee0e1802d37cfd4c377a0bf0c
+oid sha256:f673b823be36f3eb97a0d1d83dac231599658a1bf8bcffea23f67cafcc109b48
 size 368988278
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:579b6a1a47db2bcdaf49967b9a546edf75edc79ff1ff2dcb1a994076fe643add
+oid sha256:fe2f2bef345acda275dcd6003cb182f7f1e032680f3a4edeefc1b29e48691847
 size 1107079290
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:78d3f197f6c6558fa8056324f1563ab9e957255f5a1a959362aa4eed7a9545db
+oid sha256:74386f26f36ed67f56395205881e5db2d0c28ffcbeed50dd95b28771d2dac588
 size 15984
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1c1a9c65c2869356282cad6b4a0f7dff7f4dd68ab3d9d216c72b7d6cb524f860
+oid sha256:41c88f9de084200454883a13c3717941ea3fd433e2f8735507fc30611f9c5501
 size 15984
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:896febe768e17bae5022a95960c041f6425783774ec8859d99d3b149063b1bf9
+oid sha256:965b00d4cb4710ebab57c8787b9925bb3f77b8eeba94a186ec4bc1c2f326ef3f
 size 15984
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:eac482d57e966585467c8ef44dae2869bf7e5d92886f69c11ed7bccc34c07efe
+oid sha256:d5dc374b8b9a4c45c950f9d136feab85a767081fa59f0c7d68ed3a62060c4949
 size 15984
last-checkpoint/rng_state_4.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e1f27d227a20dc320ac283e0938fb2f6e5b475829a583f8c44d1a16a8c828307
+oid sha256:5c7c212fb779217f1edac0baf44f67b608eefc1e0e4e3f5a9dd7eb557032c1bc
 size 15984
last-checkpoint/rng_state_5.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d05a7106aaeaec4b81704e3f4a998b5123cf9342a6733bd9fd2d578e99108c3b
+oid sha256:86e1effd626ce1e95dd68a0c8089fe19218f2b24dfe9e45ed2cab1c0ebc10ba1
 size 15984
last-checkpoint/rng_state_6.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b94120d8d88502ec8d8b623ec7550315caca003b44fcffbb5767ab0de91baefe
+oid sha256:799cc83f60dfc1c4243cfd6403592112414a2eec494e6832f10221c96ff62c20
 size 15984
last-checkpoint/rng_state_7.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:332e4d901be380f740b5d8578f7b80ef1865c7fba83bc288c8a35852205cc668
+oid sha256:586777c398770c3255d3a1f48c7fef44ea9d89117c627c9ea490e16bfd9a49ba
 size 15984
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e442211737ccc9094723c53ccf5cc299f428d2455400cdd11b257d7edd84528c
+oid sha256:34fccfe9285534cc1461fe7c44d5b307057be9ab740b9c40800411e6c74ade04
 size 1000
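
Each entry above is a Git LFS pointer (version, oid sha256, size) rather than the binary itself; between checkpoints only the sha256 changes while the sizes stay fixed. As a quick integrity check, the digest of a downloaded file can be recomputed and compared against its pointer. A minimal sketch, assuming the checkpoint was fetched into a local `last-checkpoint/` directory (the path and the chosen file are illustrative, not part of this commit):

```python
import hashlib
from pathlib import Path

def lfs_sha256(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its sha256 hex digest (the LFS 'oid')."""
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected oid taken from the model.safetensors pointer diff above (new version).
expected = "f673b823be36f3eb97a0d1d83dac231599658a1bf8bcffea23f67cafcc109b48"
path = Path("last-checkpoint/model.safetensors")  # assumed local download location
actual = lfs_sha256(path)
print("match" if actual == expected else f"mismatch: {actual}")
```

The same check applies to optimizer.pt, the rng_state_*.pth files, and scheduler.pt by swapping in the corresponding path and expected digest.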
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
- "epoch": 0.1808154778049001,
 "eval_steps": 2500,
- "global_step": 2500,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -1765,6 +1765,1764 @@
 "eval_samples_per_second": 2008.956,
 "eval_steps_per_second": 31.391,
 "step": 2500
 }
 ],
 "logging_steps": 10,
@@ -1784,7 +3542,7 @@
 "attributes": {}
 }
 },
- "total_flos": 3.492321391804416e+18,
 "train_batch_size": 16,
 "trial_name": null,
 "trial_params": null
 
 {
 "best_metric": null,
 "best_model_checkpoint": null,
+ "epoch": 0.3616309556098002,
 "eval_steps": 2500,
+ "global_step": 5000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,

 "eval_samples_per_second": 2008.956,
 "eval_steps_per_second": 31.391,
 "step": 2500
1768
+ },
1769
+ {
1770
+ "epoch": 0.1815387397161197,
1771
+ "grad_norm": 15.7265625,
1772
+ "learning_rate": 9.985817342056891e-07,
1773
+ "loss": 24.7492,
1774
+ "step": 2510
1775
+ },
1776
+ {
1777
+ "epoch": 0.1822620016273393,
1778
+ "grad_norm": 16.046875,
1779
+ "learning_rate": 9.985760837443573e-07,
1780
+ "loss": 24.7608,
1781
+ "step": 2520
1782
+ },
1783
+ {
1784
+ "epoch": 0.1829852635385589,
1785
+ "grad_norm": 16.90625,
1786
+ "learning_rate": 9.985704332830253e-07,
1787
+ "loss": 24.9355,
1788
+ "step": 2530
1789
+ },
1790
+ {
1791
+ "epoch": 0.1837085254497785,
1792
+ "grad_norm": 15.5625,
1793
+ "learning_rate": 9.985647828216935e-07,
1794
+ "loss": 25.3485,
1795
+ "step": 2540
1796
+ },
1797
+ {
1798
+ "epoch": 0.1844317873609981,
1799
+ "grad_norm": 15.8671875,
1800
+ "learning_rate": 9.985591323603615e-07,
1801
+ "loss": 25.1266,
1802
+ "step": 2550
1803
+ },
1804
+ {
1805
+ "epoch": 0.1851550492722177,
1806
+ "grad_norm": 17.3125,
1807
+ "learning_rate": 9.985534818990295e-07,
1808
+ "loss": 25.1881,
1809
+ "step": 2560
1810
+ },
1811
+ {
1812
+ "epoch": 0.1858783111834373,
1813
+ "grad_norm": 16.53125,
1814
+ "learning_rate": 9.985478314376977e-07,
1815
+ "loss": 24.9183,
1816
+ "step": 2570
1817
+ },
1818
+ {
1819
+ "epoch": 0.18660157309465691,
1820
+ "grad_norm": 15.8359375,
1821
+ "learning_rate": 9.985421809763657e-07,
1822
+ "loss": 25.1384,
1823
+ "step": 2580
1824
+ },
1825
+ {
1826
+ "epoch": 0.18732483500587652,
1827
+ "grad_norm": 17.84375,
1828
+ "learning_rate": 9.98536530515034e-07,
1829
+ "loss": 24.6692,
1830
+ "step": 2590
1831
+ },
1832
+ {
1833
+ "epoch": 0.1880480969170961,
1834
+ "grad_norm": 16.8125,
1835
+ "learning_rate": 9.98530880053702e-07,
1836
+ "loss": 24.7989,
1837
+ "step": 2600
1838
+ },
1839
+ {
1840
+ "epoch": 0.1887713588283157,
1841
+ "grad_norm": 16.078125,
1842
+ "learning_rate": 9.985252295923701e-07,
1843
+ "loss": 24.9476,
1844
+ "step": 2610
1845
+ },
1846
+ {
1847
+ "epoch": 0.1894946207395353,
1848
+ "grad_norm": 15.9765625,
1849
+ "learning_rate": 9.985195791310381e-07,
1850
+ "loss": 25.065,
1851
+ "step": 2620
1852
+ },
1853
+ {
1854
+ "epoch": 0.1902178826507549,
1855
+ "grad_norm": 16.171875,
1856
+ "learning_rate": 9.985139286697061e-07,
1857
+ "loss": 24.9492,
1858
+ "step": 2630
1859
+ },
1860
+ {
1861
+ "epoch": 0.1909411445619745,
1862
+ "grad_norm": 16.5,
1863
+ "learning_rate": 9.985082782083741e-07,
1864
+ "loss": 25.2619,
1865
+ "step": 2640
1866
+ },
1867
+ {
1868
+ "epoch": 0.1916644064731941,
1869
+ "grad_norm": 16.03125,
1870
+ "learning_rate": 9.985026277470424e-07,
1871
+ "loss": 25.2593,
1872
+ "step": 2650
1873
+ },
1874
+ {
1875
+ "epoch": 0.1923876683844137,
1876
+ "grad_norm": 15.625,
1877
+ "learning_rate": 9.984969772857106e-07,
1878
+ "loss": 24.9816,
1879
+ "step": 2660
1880
+ },
1881
+ {
1882
+ "epoch": 0.1931109302956333,
1883
+ "grad_norm": 15.890625,
1884
+ "learning_rate": 9.984913268243786e-07,
1885
+ "loss": 25.0609,
1886
+ "step": 2670
1887
+ },
1888
+ {
1889
+ "epoch": 0.1938341922068529,
1890
+ "grad_norm": 16.015625,
1891
+ "learning_rate": 9.984856763630466e-07,
1892
+ "loss": 25.2305,
1893
+ "step": 2680
1894
+ },
1895
+ {
1896
+ "epoch": 0.19455745411807251,
1897
+ "grad_norm": 19.59375,
1898
+ "learning_rate": 9.984800259017146e-07,
1899
+ "loss": 24.9294,
1900
+ "step": 2690
1901
+ },
1902
+ {
1903
+ "epoch": 0.19528071602929212,
1904
+ "grad_norm": 17.140625,
1905
+ "learning_rate": 9.984743754403828e-07,
1906
+ "loss": 25.3927,
1907
+ "step": 2700
1908
+ },
1909
+ {
1910
+ "epoch": 0.19600397794051172,
1911
+ "grad_norm": 17.21875,
1912
+ "learning_rate": 9.98468724979051e-07,
1913
+ "loss": 25.1831,
1914
+ "step": 2710
1915
+ },
1916
+ {
1917
+ "epoch": 0.1967272398517313,
1918
+ "grad_norm": 16.4375,
1919
+ "learning_rate": 9.98463074517719e-07,
1920
+ "loss": 25.128,
1921
+ "step": 2720
1922
+ },
1923
+ {
1924
+ "epoch": 0.1974505017629509,
1925
+ "grad_norm": 16.953125,
1926
+ "learning_rate": 9.98457424056387e-07,
1927
+ "loss": 25.0455,
1928
+ "step": 2730
1929
+ },
1930
+ {
1931
+ "epoch": 0.1981737636741705,
1932
+ "grad_norm": 16.5,
1933
+ "learning_rate": 9.984517735950552e-07,
1934
+ "loss": 24.7651,
1935
+ "step": 2740
1936
+ },
1937
+ {
1938
+ "epoch": 0.1988970255853901,
1939
+ "grad_norm": 16.40625,
1940
+ "learning_rate": 9.984461231337232e-07,
1941
+ "loss": 25.3457,
1942
+ "step": 2750
1943
+ },
1944
+ {
1945
+ "epoch": 0.1996202874966097,
1946
+ "grad_norm": 16.171875,
1947
+ "learning_rate": 9.984404726723914e-07,
1948
+ "loss": 25.3497,
1949
+ "step": 2760
1950
+ },
1951
+ {
1952
+ "epoch": 0.2003435494078293,
1953
+ "grad_norm": 16.125,
1954
+ "learning_rate": 9.984348222110594e-07,
1955
+ "loss": 25.1117,
1956
+ "step": 2770
1957
+ },
1958
+ {
1959
+ "epoch": 0.2010668113190489,
1960
+ "grad_norm": 16.09375,
1961
+ "learning_rate": 9.984291717497276e-07,
1962
+ "loss": 25.2813,
1963
+ "step": 2780
1964
+ },
1965
+ {
1966
+ "epoch": 0.2017900732302685,
1967
+ "grad_norm": 15.5078125,
1968
+ "learning_rate": 9.984235212883956e-07,
1969
+ "loss": 25.4148,
1970
+ "step": 2790
1971
+ },
1972
+ {
1973
+ "epoch": 0.20251333514148812,
1974
+ "grad_norm": 16.125,
1975
+ "learning_rate": 9.984178708270636e-07,
1976
+ "loss": 25.2476,
1977
+ "step": 2800
1978
+ },
1979
+ {
1980
+ "epoch": 0.20323659705270772,
1981
+ "grad_norm": 16.375,
1982
+ "learning_rate": 9.984122203657318e-07,
1983
+ "loss": 25.1569,
1984
+ "step": 2810
1985
+ },
1986
+ {
1987
+ "epoch": 0.20395985896392732,
1988
+ "grad_norm": 15.3984375,
1989
+ "learning_rate": 9.984065699043998e-07,
1990
+ "loss": 24.9938,
1991
+ "step": 2820
1992
+ },
1993
+ {
1994
+ "epoch": 0.20468312087514692,
1995
+ "grad_norm": 15.8984375,
1996
+ "learning_rate": 9.98400919443068e-07,
1997
+ "loss": 25.0097,
1998
+ "step": 2830
1999
+ },
2000
+ {
2001
+ "epoch": 0.20540638278636653,
2002
+ "grad_norm": 16.640625,
2003
+ "learning_rate": 9.98395268981736e-07,
2004
+ "loss": 25.2562,
2005
+ "step": 2840
2006
+ },
2007
+ {
2008
+ "epoch": 0.2061296446975861,
2009
+ "grad_norm": 15.3828125,
2010
+ "learning_rate": 9.98389618520404e-07,
2011
+ "loss": 24.8865,
2012
+ "step": 2850
2013
+ },
2014
+ {
2015
+ "epoch": 0.2068529066088057,
2016
+ "grad_norm": 15.5703125,
2017
+ "learning_rate": 9.983839680590722e-07,
2018
+ "loss": 25.2523,
2019
+ "step": 2860
2020
+ },
2021
+ {
2022
+ "epoch": 0.2075761685200253,
2023
+ "grad_norm": 15.90625,
2024
+ "learning_rate": 9.983783175977402e-07,
2025
+ "loss": 25.2208,
2026
+ "step": 2870
2027
+ },
2028
+ {
2029
+ "epoch": 0.2082994304312449,
2030
+ "grad_norm": 15.53125,
2031
+ "learning_rate": 9.983726671364084e-07,
2032
+ "loss": 25.2277,
2033
+ "step": 2880
2034
+ },
2035
+ {
2036
+ "epoch": 0.2090226923424645,
2037
+ "grad_norm": 15.375,
2038
+ "learning_rate": 9.983670166750764e-07,
2039
+ "loss": 24.9516,
2040
+ "step": 2890
2041
+ },
2042
+ {
2043
+ "epoch": 0.2097459542536841,
2044
+ "grad_norm": 17.4375,
2045
+ "learning_rate": 9.983613662137444e-07,
2046
+ "loss": 25.5087,
2047
+ "step": 2900
2048
+ },
2049
+ {
2050
+ "epoch": 0.21046921616490372,
2051
+ "grad_norm": 16.28125,
2052
+ "learning_rate": 9.983557157524126e-07,
2053
+ "loss": 25.1188,
2054
+ "step": 2910
2055
+ },
2056
+ {
2057
+ "epoch": 0.21119247807612332,
2058
+ "grad_norm": 16.390625,
2059
+ "learning_rate": 9.983500652910806e-07,
2060
+ "loss": 25.2662,
2061
+ "step": 2920
2062
+ },
2063
+ {
2064
+ "epoch": 0.21191573998734292,
2065
+ "grad_norm": 16.03125,
2066
+ "learning_rate": 9.983444148297488e-07,
2067
+ "loss": 24.9432,
2068
+ "step": 2930
2069
+ },
2070
+ {
2071
+ "epoch": 0.21263900189856252,
2072
+ "grad_norm": 17.234375,
2073
+ "learning_rate": 9.983387643684168e-07,
2074
+ "loss": 25.2459,
2075
+ "step": 2940
2076
+ },
2077
+ {
2078
+ "epoch": 0.21336226380978213,
2079
+ "grad_norm": 15.28125,
2080
+ "learning_rate": 9.983331139070848e-07,
2081
+ "loss": 24.9326,
2082
+ "step": 2950
2083
+ },
2084
+ {
2085
+ "epoch": 0.21408552572100173,
2086
+ "grad_norm": 18.0,
2087
+ "learning_rate": 9.98327463445753e-07,
2088
+ "loss": 25.2388,
2089
+ "step": 2960
2090
+ },
2091
+ {
2092
+ "epoch": 0.21480878763222133,
2093
+ "grad_norm": 17.1875,
2094
+ "learning_rate": 9.98321812984421e-07,
2095
+ "loss": 25.6366,
2096
+ "step": 2970
2097
+ },
2098
+ {
2099
+ "epoch": 0.2155320495434409,
2100
+ "grad_norm": 16.203125,
2101
+ "learning_rate": 9.983161625230892e-07,
2102
+ "loss": 25.1225,
2103
+ "step": 2980
2104
+ },
2105
+ {
2106
+ "epoch": 0.2162553114546605,
2107
+ "grad_norm": 16.828125,
2108
+ "learning_rate": 9.983105120617572e-07,
2109
+ "loss": 25.1165,
2110
+ "step": 2990
2111
+ },
2112
+ {
2113
+ "epoch": 0.2169785733658801,
2114
+ "grad_norm": 16.625,
2115
+ "learning_rate": 9.983048616004254e-07,
2116
+ "loss": 25.2762,
2117
+ "step": 3000
2118
+ },
2119
+ {
2120
+ "epoch": 0.21770183527709971,
2121
+ "grad_norm": 16.1875,
2122
+ "learning_rate": 9.982992111390934e-07,
2123
+ "loss": 24.9826,
2124
+ "step": 3010
2125
+ },
2126
+ {
2127
+ "epoch": 0.21842509718831932,
2128
+ "grad_norm": 16.890625,
2129
+ "learning_rate": 9.982935606777614e-07,
2130
+ "loss": 24.7336,
2131
+ "step": 3020
2132
+ },
2133
+ {
2134
+ "epoch": 0.21914835909953892,
2135
+ "grad_norm": 15.765625,
2136
+ "learning_rate": 9.982879102164296e-07,
2137
+ "loss": 25.1037,
2138
+ "step": 3030
2139
+ },
2140
+ {
2141
+ "epoch": 0.21987162101075852,
2142
+ "grad_norm": 16.734375,
2143
+ "learning_rate": 9.982822597550976e-07,
2144
+ "loss": 24.8429,
2145
+ "step": 3040
2146
+ },
2147
+ {
2148
+ "epoch": 0.22059488292197812,
2149
+ "grad_norm": 15.96875,
2150
+ "learning_rate": 9.982766092937658e-07,
2151
+ "loss": 25.369,
2152
+ "step": 3050
2153
+ },
2154
+ {
2155
+ "epoch": 0.22131814483319773,
2156
+ "grad_norm": 15.46875,
2157
+ "learning_rate": 9.982709588324338e-07,
2158
+ "loss": 25.6081,
2159
+ "step": 3060
2160
+ },
2161
+ {
2162
+ "epoch": 0.22204140674441733,
2163
+ "grad_norm": 16.59375,
2164
+ "learning_rate": 9.982653083711018e-07,
2165
+ "loss": 25.4421,
2166
+ "step": 3070
2167
+ },
2168
+ {
2169
+ "epoch": 0.22276466865563693,
2170
+ "grad_norm": 15.734375,
2171
+ "learning_rate": 9.9825965790977e-07,
2172
+ "loss": 24.9267,
2173
+ "step": 3080
2174
+ },
2175
+ {
2176
+ "epoch": 0.22348793056685654,
2177
+ "grad_norm": 15.7421875,
2178
+ "learning_rate": 9.98254007448438e-07,
2179
+ "loss": 25.1702,
2180
+ "step": 3090
2181
+ },
2182
+ {
2183
+ "epoch": 0.2242111924780761,
2184
+ "grad_norm": 16.34375,
2185
+ "learning_rate": 9.982483569871063e-07,
2186
+ "loss": 24.97,
2187
+ "step": 3100
2188
+ },
2189
+ {
2190
+ "epoch": 0.2249344543892957,
2191
+ "grad_norm": 16.328125,
2192
+ "learning_rate": 9.982427065257743e-07,
2193
+ "loss": 25.1489,
2194
+ "step": 3110
2195
+ },
2196
+ {
2197
+ "epoch": 0.22565771630051532,
2198
+ "grad_norm": 16.46875,
2199
+ "learning_rate": 9.982370560644423e-07,
2200
+ "loss": 25.291,
2201
+ "step": 3120
2202
+ },
2203
+ {
2204
+ "epoch": 0.22638097821173492,
2205
+ "grad_norm": 16.859375,
2206
+ "learning_rate": 9.982314056031105e-07,
2207
+ "loss": 25.1147,
2208
+ "step": 3130
2209
+ },
2210
+ {
2211
+ "epoch": 0.22710424012295452,
2212
+ "grad_norm": 16.1875,
2213
+ "learning_rate": 9.982257551417785e-07,
2214
+ "loss": 25.0294,
2215
+ "step": 3140
2216
+ },
2217
+ {
2218
+ "epoch": 0.22782750203417412,
2219
+ "grad_norm": 16.609375,
2220
+ "learning_rate": 9.982201046804467e-07,
2221
+ "loss": 24.9066,
2222
+ "step": 3150
2223
+ },
2224
+ {
2225
+ "epoch": 0.22855076394539373,
2226
+ "grad_norm": 16.296875,
2227
+ "learning_rate": 9.982144542191147e-07,
2228
+ "loss": 25.5522,
2229
+ "step": 3160
2230
+ },
2231
+ {
2232
+ "epoch": 0.22927402585661333,
2233
+ "grad_norm": 16.875,
2234
+ "learning_rate": 9.982088037577827e-07,
2235
+ "loss": 25.046,
2236
+ "step": 3170
2237
+ },
2238
+ {
2239
+ "epoch": 0.22999728776783293,
2240
+ "grad_norm": 17.34375,
2241
+ "learning_rate": 9.982031532964509e-07,
2242
+ "loss": 25.0408,
2243
+ "step": 3180
2244
+ },
2245
+ {
2246
+ "epoch": 0.23072054967905253,
2247
+ "grad_norm": 16.078125,
2248
+ "learning_rate": 9.981975028351189e-07,
2249
+ "loss": 25.6239,
2250
+ "step": 3190
2251
+ },
2252
+ {
2253
+ "epoch": 0.23144381159027214,
2254
+ "grad_norm": 14.75,
2255
+ "learning_rate": 9.98191852373787e-07,
2256
+ "loss": 24.7332,
2257
+ "step": 3200
2258
+ },
2259
+ {
2260
+ "epoch": 0.23216707350149174,
2261
+ "grad_norm": 16.265625,
2262
+ "learning_rate": 9.98186201912455e-07,
2263
+ "loss": 25.0991,
2264
+ "step": 3210
2265
+ },
2266
+ {
2267
+ "epoch": 0.23289033541271134,
2268
+ "grad_norm": 16.609375,
2269
+ "learning_rate": 9.981805514511233e-07,
2270
+ "loss": 24.9936,
2271
+ "step": 3220
2272
+ },
2273
+ {
2274
+ "epoch": 0.23361359732393092,
2275
+ "grad_norm": 17.03125,
2276
+ "learning_rate": 9.981749009897913e-07,
2277
+ "loss": 25.4223,
2278
+ "step": 3230
2279
+ },
2280
+ {
2281
+ "epoch": 0.23433685923515052,
2282
+ "grad_norm": 16.625,
2283
+ "learning_rate": 9.981692505284593e-07,
2284
+ "loss": 25.1375,
2285
+ "step": 3240
2286
+ },
2287
+ {
2288
+ "epoch": 0.23506012114637012,
2289
+ "grad_norm": 16.71875,
2290
+ "learning_rate": 9.981636000671275e-07,
2291
+ "loss": 25.1016,
2292
+ "step": 3250
2293
+ },
2294
+ {
2295
+ "epoch": 0.23578338305758972,
2296
+ "grad_norm": 16.15625,
2297
+ "learning_rate": 9.981579496057955e-07,
2298
+ "loss": 24.9417,
2299
+ "step": 3260
2300
+ },
2301
+ {
2302
+ "epoch": 0.23650664496880933,
2303
+ "grad_norm": 16.84375,
2304
+ "learning_rate": 9.981522991444637e-07,
2305
+ "loss": 25.2421,
2306
+ "step": 3270
2307
+ },
2308
+ {
2309
+ "epoch": 0.23722990688002893,
2310
+ "grad_norm": 15.8359375,
2311
+ "learning_rate": 9.981466486831317e-07,
2312
+ "loss": 25.1771,
2313
+ "step": 3280
2314
+ },
2315
+ {
2316
+ "epoch": 0.23795316879124853,
2317
+ "grad_norm": 16.65625,
2318
+ "learning_rate": 9.981409982217997e-07,
2319
+ "loss": 25.3145,
2320
+ "step": 3290
2321
+ },
2322
+ {
2323
+ "epoch": 0.23867643070246813,
2324
+ "grad_norm": 16.984375,
2325
+ "learning_rate": 9.98135347760468e-07,
2326
+ "loss": 24.93,
2327
+ "step": 3300
2328
+ },
2329
+ {
2330
+ "epoch": 0.23939969261368774,
2331
+ "grad_norm": 16.6875,
2332
+ "learning_rate": 9.98129697299136e-07,
2333
+ "loss": 25.5047,
2334
+ "step": 3310
2335
+ },
2336
+ {
2337
+ "epoch": 0.24012295452490734,
2338
+ "grad_norm": 16.28125,
2339
+ "learning_rate": 9.98124046837804e-07,
2340
+ "loss": 25.0701,
2341
+ "step": 3320
2342
+ },
2343
+ {
2344
+ "epoch": 0.24084621643612694,
2345
+ "grad_norm": 15.265625,
2346
+ "learning_rate": 9.98118396376472e-07,
2347
+ "loss": 25.1109,
2348
+ "step": 3330
2349
+ },
2350
+ {
2351
+ "epoch": 0.24156947834734654,
2352
+ "grad_norm": 16.5625,
2353
+ "learning_rate": 9.9811274591514e-07,
2354
+ "loss": 25.2658,
2355
+ "step": 3340
2356
+ },
2357
+ {
2358
+ "epoch": 0.24229274025856612,
2359
+ "grad_norm": 16.140625,
2360
+ "learning_rate": 9.981070954538083e-07,
2361
+ "loss": 24.9092,
2362
+ "step": 3350
2363
+ },
2364
+ {
2365
+ "epoch": 0.24301600216978572,
2366
+ "grad_norm": 16.71875,
2367
+ "learning_rate": 9.981014449924763e-07,
2368
+ "loss": 25.0737,
2369
+ "step": 3360
2370
+ },
2371
+ {
2372
+ "epoch": 0.24373926408100532,
2373
+ "grad_norm": 17.578125,
2374
+ "learning_rate": 9.980957945311445e-07,
2375
+ "loss": 24.9847,
2376
+ "step": 3370
2377
+ },
2378
+ {
2379
+ "epoch": 0.24446252599222493,
2380
+ "grad_norm": 16.125,
2381
+ "learning_rate": 9.980901440698125e-07,
2382
+ "loss": 25.1988,
2383
+ "step": 3380
2384
+ },
2385
+ {
2386
+ "epoch": 0.24518578790344453,
2387
+ "grad_norm": 17.296875,
2388
+ "learning_rate": 9.980844936084805e-07,
2389
+ "loss": 25.0352,
2390
+ "step": 3390
2391
+ },
2392
+ {
2393
+ "epoch": 0.24590904981466413,
2394
+ "grad_norm": 15.0078125,
2395
+ "learning_rate": 9.980788431471487e-07,
2396
+ "loss": 25.0363,
2397
+ "step": 3400
2398
+ },
2399
+ {
2400
+ "epoch": 0.24663231172588373,
2401
+ "grad_norm": 17.265625,
2402
+ "learning_rate": 9.980731926858167e-07,
2403
+ "loss": 24.8165,
2404
+ "step": 3410
2405
+ },
2406
+ {
2407
+ "epoch": 0.24735557363710334,
2408
+ "grad_norm": 18.09375,
2409
+ "learning_rate": 9.98067542224485e-07,
2410
+ "loss": 25.1422,
2411
+ "step": 3420
2412
+ },
2413
+ {
2414
+ "epoch": 0.24807883554832294,
2415
+ "grad_norm": 16.203125,
2416
+ "learning_rate": 9.98061891763153e-07,
2417
+ "loss": 24.8054,
2418
+ "step": 3430
2419
+ },
2420
+ {
2421
+ "epoch": 0.24880209745954254,
2422
+ "grad_norm": 16.703125,
2423
+ "learning_rate": 9.980562413018211e-07,
2424
+ "loss": 25.1454,
2425
+ "step": 3440
2426
+ },
2427
+ {
2428
+ "epoch": 0.24952535937076215,
2429
+ "grad_norm": 15.359375,
2430
+ "learning_rate": 9.980505908404891e-07,
2431
+ "loss": 25.3514,
2432
+ "step": 3450
2433
+ },
2434
+ {
2435
+ "epoch": 0.2502486212819817,
2436
+ "grad_norm": 15.3984375,
2437
+ "learning_rate": 9.980449403791571e-07,
2438
+ "loss": 25.141,
2439
+ "step": 3460
2440
+ },
2441
+ {
2442
+ "epoch": 0.25097188319320135,
2443
+ "grad_norm": 16.078125,
2444
+ "learning_rate": 9.980392899178253e-07,
2445
+ "loss": 24.8799,
2446
+ "step": 3470
2447
+ },
2448
+ {
2449
+ "epoch": 0.2516951451044209,
2450
+ "grad_norm": 16.515625,
2451
+ "learning_rate": 9.980336394564933e-07,
2452
+ "loss": 24.9967,
2453
+ "step": 3480
2454
+ },
2455
+ {
2456
+ "epoch": 0.25241840701564056,
2457
+ "grad_norm": 16.46875,
2458
+ "learning_rate": 9.980279889951615e-07,
2459
+ "loss": 25.1666,
2460
+ "step": 3490
2461
+ },
2462
+ {
2463
+ "epoch": 0.25314166892686013,
2464
+ "grad_norm": 16.25,
2465
+ "learning_rate": 9.980223385338295e-07,
2466
+ "loss": 25.3913,
2467
+ "step": 3500
2468
+ },
2469
+ {
2470
+ "epoch": 0.25386493083807976,
2471
+ "grad_norm": 15.6640625,
2472
+ "learning_rate": 9.980166880724975e-07,
2473
+ "loss": 25.0079,
2474
+ "step": 3510
2475
+ },
2476
+ {
2477
+ "epoch": 0.25458819274929934,
2478
+ "grad_norm": 16.015625,
2479
+ "learning_rate": 9.980110376111657e-07,
2480
+ "loss": 25.0063,
2481
+ "step": 3520
2482
+ },
2483
+ {
2484
+ "epoch": 0.25531145466051897,
2485
+ "grad_norm": 15.8515625,
2486
+ "learning_rate": 9.980053871498337e-07,
2487
+ "loss": 25.5363,
2488
+ "step": 3530
2489
+ },
2490
+ {
2491
+ "epoch": 0.25603471657173854,
2492
+ "grad_norm": 16.984375,
2493
+ "learning_rate": 9.97999736688502e-07,
2494
+ "loss": 25.1561,
2495
+ "step": 3540
2496
+ },
2497
+ {
2498
+ "epoch": 0.2567579784829581,
2499
+ "grad_norm": 16.640625,
2500
+ "learning_rate": 9.9799408622717e-07,
2501
+ "loss": 25.1949,
2502
+ "step": 3550
2503
+ },
2504
+ {
2505
+ "epoch": 0.25748124039417775,
2506
+ "grad_norm": 16.0,
2507
+ "learning_rate": 9.97988435765838e-07,
2508
+ "loss": 24.992,
2509
+ "step": 3560
2510
+ },
2511
+ {
2512
+ "epoch": 0.2582045023053973,
2513
+ "grad_norm": 16.375,
2514
+ "learning_rate": 9.979827853045062e-07,
2515
+ "loss": 25.1319,
2516
+ "step": 3570
2517
+ },
2518
+ {
2519
+ "epoch": 0.25892776421661695,
2520
+ "grad_norm": 15.9296875,
2521
+ "learning_rate": 9.979771348431742e-07,
2522
+ "loss": 25.0064,
2523
+ "step": 3580
2524
+ },
2525
+ {
2526
+ "epoch": 0.2596510261278365,
2527
+ "grad_norm": 16.265625,
2528
+ "learning_rate": 9.979714843818424e-07,
2529
+ "loss": 24.9227,
2530
+ "step": 3590
2531
+ },
2532
+ {
2533
+ "epoch": 0.26037428803905616,
2534
+ "grad_norm": 16.578125,
2535
+ "learning_rate": 9.979658339205104e-07,
2536
+ "loss": 25.1148,
2537
+ "step": 3600
2538
+ },
2539
+ {
2540
+ "epoch": 0.26109754995027573,
2541
+ "grad_norm": 18.4375,
2542
+ "learning_rate": 9.979601834591784e-07,
2543
+ "loss": 24.9109,
2544
+ "step": 3610
2545
+ },
2546
+ {
2547
+ "epoch": 0.26182081186149536,
2548
+ "grad_norm": 17.640625,
2549
+ "learning_rate": 9.979545329978466e-07,
2550
+ "loss": 25.1381,
2551
+ "step": 3620
2552
+ },
2553
+ {
2554
+ "epoch": 0.26254407377271494,
2555
+ "grad_norm": 15.78125,
2556
+ "learning_rate": 9.979488825365146e-07,
2557
+ "loss": 25.0138,
2558
+ "step": 3630
2559
+ },
2560
+ {
2561
+ "epoch": 0.26326733568393457,
2562
+ "grad_norm": 16.65625,
2563
+ "learning_rate": 9.979432320751828e-07,
2564
+ "loss": 25.0708,
2565
+ "step": 3640
2566
+ },
2567
+ {
2568
+ "epoch": 0.26399059759515414,
2569
+ "grad_norm": 15.875,
2570
+ "learning_rate": 9.979375816138508e-07,
2571
+ "loss": 25.0401,
2572
+ "step": 3650
2573
+ },
2574
+ {
2575
+ "epoch": 0.26471385950637377,
2576
+ "grad_norm": 16.359375,
2577
+ "learning_rate": 9.97931931152519e-07,
2578
+ "loss": 25.1882,
2579
+ "step": 3660
2580
+ },
2581
+ {
2582
+ "epoch": 0.26543712141759335,
2583
+ "grad_norm": 15.5625,
2584
+ "learning_rate": 9.97926280691187e-07,
2585
+ "loss": 25.2344,
2586
+ "step": 3670
2587
+ },
2588
+ {
2589
+ "epoch": 0.2661603833288129,
2590
+ "grad_norm": 15.921875,
2591
+ "learning_rate": 9.97920630229855e-07,
2592
+ "loss": 25.0361,
2593
+ "step": 3680
2594
+ },
2595
+ {
2596
+ "epoch": 0.26688364524003255,
2597
+ "grad_norm": 15.7890625,
2598
+ "learning_rate": 9.979149797685232e-07,
2599
+ "loss": 25.2095,
2600
+ "step": 3690
2601
+ },
2602
+ {
2603
+ "epoch": 0.2676069071512521,
2604
+ "grad_norm": 16.25,
2605
+ "learning_rate": 9.979093293071914e-07,
2606
+ "loss": 25.4166,
2607
+ "step": 3700
2608
+ },
2609
+ {
2610
+ "epoch": 0.26833016906247176,
2611
+ "grad_norm": 17.03125,
2612
+ "learning_rate": 9.979036788458594e-07,
2613
+ "loss": 25.1349,
2614
+ "step": 3710
2615
+ },
2616
+ {
2617
+ "epoch": 0.26905343097369133,
2618
+ "grad_norm": 15.703125,
2619
+ "learning_rate": 9.978980283845274e-07,
2620
+ "loss": 24.6449,
2621
+ "step": 3720
2622
+ },
2623
+ {
2624
+ "epoch": 0.26977669288491096,
2625
+ "grad_norm": 15.625,
2626
+ "learning_rate": 9.978923779231954e-07,
2627
+ "loss": 25.4002,
2628
+ "step": 3730
2629
+ },
2630
+ {
2631
+ "epoch": 0.27049995479613054,
2632
+ "grad_norm": 15.8671875,
2633
+ "learning_rate": 9.978867274618636e-07,
2634
+ "loss": 24.9894,
2635
+ "step": 3740
2636
+ },
2637
+ {
2638
+ "epoch": 0.27122321670735017,
2639
+ "grad_norm": 17.234375,
2640
+ "learning_rate": 9.978810770005318e-07,
2641
+ "loss": 25.3439,
2642
+ "step": 3750
2643
+ },
2644
+ {
2645
+ "epoch": 0.27194647861856974,
2646
+ "grad_norm": 16.171875,
2647
+ "learning_rate": 9.978754265391998e-07,
2648
+ "loss": 25.1633,
2649
+ "step": 3760
2650
+ },
2651
+ {
2652
+ "epoch": 0.2726697405297894,
2653
+ "grad_norm": 16.875,
2654
+ "learning_rate": 9.978697760778678e-07,
2655
+ "loss": 25.0982,
2656
+ "step": 3770
2657
+ },
2658
+ {
2659
+ "epoch": 0.27339300244100895,
2660
+ "grad_norm": 16.71875,
2661
+ "learning_rate": 9.978641256165358e-07,
2662
+ "loss": 25.2635,
2663
+ "step": 3780
2664
+ },
2665
+ {
2666
+ "epoch": 0.2741162643522286,
2667
+ "grad_norm": 16.59375,
2668
+ "learning_rate": 9.97858475155204e-07,
2669
+ "loss": 25.4958,
2670
+ "step": 3790
2671
+ },
2672
+ {
2673
+ "epoch": 0.27483952626344815,
2674
+ "grad_norm": 15.9921875,
2675
+ "learning_rate": 9.97852824693872e-07,
2676
+ "loss": 24.7305,
2677
+ "step": 3800
2678
+ },
2679
+ {
2680
+ "epoch": 0.2755627881746677,
2681
+ "grad_norm": 16.46875,
2682
+ "learning_rate": 9.978471742325402e-07,
2683
+ "loss": 25.187,
2684
+ "step": 3810
2685
+ },
2686
+ {
2687
+ "epoch": 0.27628605008588736,
2688
+ "grad_norm": 15.8984375,
2689
+ "learning_rate": 9.978415237712082e-07,
2690
+ "loss": 25.1958,
2691
+ "step": 3820
2692
+ },
2693
+ {
2694
+ "epoch": 0.27700931199710693,
2695
+ "grad_norm": 16.109375,
2696
+ "learning_rate": 9.978358733098762e-07,
2697
+ "loss": 25.5358,
2698
+ "step": 3830
2699
+ },
2700
+ {
2701
+ "epoch": 0.27773257390832656,
2702
+ "grad_norm": 15.5,
2703
+ "learning_rate": 9.978302228485444e-07,
2704
+ "loss": 24.9236,
2705
+ "step": 3840
2706
+ },
2707
+ {
2708
+ "epoch": 0.27845583581954614,
2709
+ "grad_norm": 16.375,
2710
+ "learning_rate": 9.978245723872124e-07,
2711
+ "loss": 25.3469,
2712
+ "step": 3850
2713
+ },
2714
+ {
2715
+ "epoch": 0.27917909773076577,
2716
+ "grad_norm": 16.15625,
2717
+ "learning_rate": 9.978189219258806e-07,
2718
+ "loss": 24.9141,
2719
+ "step": 3860
2720
+ },
2721
+ {
2722
+ "epoch": 0.27990235964198534,
2723
+ "grad_norm": 17.125,
2724
+ "learning_rate": 9.978132714645486e-07,
2725
+ "loss": 25.0336,
2726
+ "step": 3870
2727
+ },
2728
+ {
2729
+ "epoch": 0.280625621553205,
2730
+ "grad_norm": 15.640625,
2731
+ "learning_rate": 9.978076210032168e-07,
2732
+ "loss": 25.2943,
2733
+ "step": 3880
2734
+ },
2735
+ {
2736
+ "epoch": 0.28134888346442455,
2737
+ "grad_norm": 15.8984375,
2738
+ "learning_rate": 9.978019705418848e-07,
2739
+ "loss": 24.9711,
2740
+ "step": 3890
2741
+ },
2742
+ {
2743
+ "epoch": 0.2820721453756442,
2744
+ "grad_norm": 16.09375,
2745
+ "learning_rate": 9.977963200805528e-07,
2746
+ "loss": 25.2311,
2747
+ "step": 3900
2748
+ },
2749
+ {
2750
+ "epoch": 0.28279540728686375,
2751
+ "grad_norm": 16.578125,
2752
+ "learning_rate": 9.97790669619221e-07,
2753
+ "loss": 25.4184,
2754
+ "step": 3910
2755
+ },
2756
+ {
2757
+ "epoch": 0.28351866919808333,
2758
+ "grad_norm": 15.890625,
2759
+ "learning_rate": 9.977850191578892e-07,
2760
+ "loss": 25.2003,
2761
+ "step": 3920
2762
+ },
2763
+ {
2764
+ "epoch": 0.28424193110930296,
2765
+ "grad_norm": 17.75,
2766
+ "learning_rate": 9.977793686965572e-07,
2767
+ "loss": 25.3082,
2768
+ "step": 3930
2769
+ },
2770
+ {
2771
+ "epoch": 0.28496519302052253,
2772
+ "grad_norm": 16.03125,
2773
+ "learning_rate": 9.977737182352252e-07,
2774
+ "loss": 25.265,
2775
+ "step": 3940
2776
+ },
2777
+ {
2778
+ "epoch": 0.28568845493174216,
2779
+ "grad_norm": 16.65625,
2780
+ "learning_rate": 9.977680677738932e-07,
2781
+ "loss": 25.6086,
2782
+ "step": 3950
2783
+ },
2784
+ {
2785
+ "epoch": 0.28641171684296174,
2786
+ "grad_norm": 15.8671875,
2787
+ "learning_rate": 9.977624173125614e-07,
2788
+ "loss": 25.6748,
2789
+ "step": 3960
2790
+ },
2791
+ {
2792
+ "epoch": 0.28713497875418137,
2793
+ "grad_norm": 16.5625,
2794
+ "learning_rate": 9.977567668512297e-07,
2795
+ "loss": 25.3439,
2796
+ "step": 3970
2797
+ },
2798
+ {
2799
+ "epoch": 0.28785824066540094,
2800
+ "grad_norm": 16.78125,
2801
+ "learning_rate": 9.977511163898977e-07,
2802
+ "loss": 24.9551,
2803
+ "step": 3980
2804
+ },
2805
+ {
2806
+ "epoch": 0.2885815025766206,
2807
+ "grad_norm": 16.546875,
2808
+ "learning_rate": 9.977454659285656e-07,
2809
+ "loss": 25.1372,
2810
+ "step": 3990
2811
+ },
2812
+ {
2813
+ "epoch": 0.28930476448784015,
2814
+ "grad_norm": 15.8515625,
2815
+ "learning_rate": 9.977398154672336e-07,
2816
+ "loss": 25.5988,
2817
+ "step": 4000
2818
+ },
2819
+ {
2820
+ "epoch": 0.2900280263990598,
2821
+ "grad_norm": 17.25,
2822
+ "learning_rate": 9.977341650059019e-07,
2823
+ "loss": 25.5525,
2824
+ "step": 4010
2825
+ },
2826
+ {
2827
+ "epoch": 0.29075128831027935,
2828
+ "grad_norm": 16.734375,
2829
+ "learning_rate": 9.9772851454457e-07,
2830
+ "loss": 24.7127,
2831
+ "step": 4020
2832
+ },
2833
+ {
2834
+ "epoch": 0.291474550221499,
2835
+ "grad_norm": 15.8125,
2836
+ "learning_rate": 9.97722864083238e-07,
2837
+ "loss": 24.8364,
2838
+ "step": 4030
2839
+ },
2840
+ {
2841
+ "epoch": 0.29219781213271856,
2842
+ "grad_norm": 17.75,
2843
+ "learning_rate": 9.97717213621906e-07,
2844
+ "loss": 25.1328,
2845
+ "step": 4040
2846
+ },
2847
+ {
2848
+ "epoch": 0.29292107404393813,
2849
+ "grad_norm": 15.7890625,
2850
+ "learning_rate": 9.977115631605743e-07,
2851
+ "loss": 24.932,
2852
+ "step": 4050
2853
+ },
2854
+ {
2855
+ "epoch": 0.29364433595515776,
2856
+ "grad_norm": 16.71875,
2857
+ "learning_rate": 9.977059126992423e-07,
2858
+ "loss": 25.0942,
2859
+ "step": 4060
2860
+ },
2861
+ {
2862
+ "epoch": 0.29436759786637734,
2863
+ "grad_norm": 17.0625,
2864
+ "learning_rate": 9.977002622379105e-07,
2865
+ "loss": 25.0096,
2866
+ "step": 4070
2867
+ },
2868
+ {
2869
+ "epoch": 0.29509085977759697,
2870
+ "grad_norm": 15.59375,
2871
+ "learning_rate": 9.976946117765785e-07,
2872
+ "loss": 24.9936,
2873
+ "step": 4080
2874
+ },
2875
+ {
2876
+ "epoch": 0.29581412168881654,
2877
+ "grad_norm": 16.890625,
2878
+ "learning_rate": 9.976889613152465e-07,
2879
+ "loss": 25.1561,
2880
+ "step": 4090
2881
+ },
2882
+ {
2883
+ "epoch": 0.2965373836000362,
2884
+ "grad_norm": 16.5625,
2885
+ "learning_rate": 9.976833108539147e-07,
2886
+ "loss": 25.2981,
2887
+ "step": 4100
2888
+ },
2889
+ {
2890
+ "epoch": 0.29726064551125575,
2891
+ "grad_norm": 16.03125,
2892
+ "learning_rate": 9.976776603925827e-07,
2893
+ "loss": 24.9605,
2894
+ "step": 4110
2895
+ },
2896
+ {
2897
+ "epoch": 0.2979839074224754,
2898
+ "grad_norm": 16.09375,
2899
+ "learning_rate": 9.976720099312507e-07,
2900
+ "loss": 25.254,
2901
+ "step": 4120
2902
+ },
2903
+ {
2904
+ "epoch": 0.29870716933369496,
2905
+ "grad_norm": 15.390625,
2906
+ "learning_rate": 9.976663594699189e-07,
2907
+ "loss": 25.2773,
2908
+ "step": 4130
2909
+ },
2910
+ {
2911
+ "epoch": 0.2994304312449146,
2912
+ "grad_norm": 16.96875,
2913
+ "learning_rate": 9.97660709008587e-07,
2914
+ "loss": 25.3654,
2915
+ "step": 4140
2916
+ },
2917
+ {
2918
+ "epoch": 0.30015369315613416,
2919
+ "grad_norm": 16.328125,
2920
+ "learning_rate": 9.97655058547255e-07,
2921
+ "loss": 24.5768,
2922
+ "step": 4150
2923
+ },
2924
+ {
2925
+ "epoch": 0.3008769550673538,
2926
+ "grad_norm": 16.640625,
2927
+ "learning_rate": 9.97649408085923e-07,
2928
+ "loss": 25.0535,
2929
+ "step": 4160
2930
+ },
2931
+ {
2932
+ "epoch": 0.30160021697857337,
2933
+ "grad_norm": 15.484375,
2934
+ "learning_rate": 9.97643757624591e-07,
2935
+ "loss": 25.2813,
2936
+ "step": 4170
2937
+ },
2938
+ {
2939
+ "epoch": 0.30232347888979294,
2940
+ "grad_norm": 16.734375,
2941
+ "learning_rate": 9.976381071632593e-07,
2942
+ "loss": 25.172,
2943
+ "step": 4180
2944
+ },
2945
+ {
2946
+ "epoch": 0.30304674080101257,
2947
+ "grad_norm": 16.140625,
2948
+ "learning_rate": 9.976324567019275e-07,
2949
+ "loss": 25.2808,
2950
+ "step": 4190
2951
+ },
2952
+ {
2953
+ "epoch": 0.30377000271223215,
2954
+ "grad_norm": 14.953125,
2955
+ "learning_rate": 9.976268062405955e-07,
2956
+ "loss": 25.1555,
2957
+ "step": 4200
2958
+ },
2959
+ {
2960
+ "epoch": 0.3044932646234518,
2961
+ "grad_norm": 16.0,
2962
+ "learning_rate": 9.976211557792635e-07,
2963
+ "loss": 25.4662,
2964
+ "step": 4210
2965
+ },
2966
+ {
2967
+ "epoch": 0.30521652653467135,
2968
+ "grad_norm": 16.640625,
2969
+ "learning_rate": 9.976155053179315e-07,
2970
+ "loss": 25.3153,
2971
+ "step": 4220
2972
+ },
2973
+ {
2974
+ "epoch": 0.305939788445891,
2975
+ "grad_norm": 15.9609375,
2976
+ "learning_rate": 9.976098548565997e-07,
2977
+ "loss": 25.1885,
2978
+ "step": 4230
2979
+ },
2980
+ {
2981
+ "epoch": 0.30666305035711056,
2982
+ "grad_norm": 15.90625,
2983
+ "learning_rate": 9.97604204395268e-07,
2984
+ "loss": 24.9472,
2985
+ "step": 4240
2986
+ },
2987
+ {
2988
+ "epoch": 0.3073863122683302,
2989
+ "grad_norm": 15.515625,
2990
+ "learning_rate": 9.97598553933936e-07,
2991
+ "loss": 25.0862,
2992
+ "step": 4250
2993
+ },
2994
+ {
2995
+ "epoch": 0.30810957417954976,
2996
+ "grad_norm": 16.421875,
2997
+ "learning_rate": 9.97592903472604e-07,
2998
+ "loss": 24.8722,
2999
+ "step": 4260
3000
+ },
3001
+ {
3002
+ "epoch": 0.3088328360907694,
3003
+ "grad_norm": 16.15625,
3004
+ "learning_rate": 9.975872530112721e-07,
3005
+ "loss": 25.3074,
3006
+ "step": 4270
3007
+ },
3008
+ {
3009
+ "epoch": 0.30955609800198897,
3010
+ "grad_norm": 15.7109375,
3011
+ "learning_rate": 9.975816025499401e-07,
3012
+ "loss": 25.0129,
3013
+ "step": 4280
3014
+ },
3015
+ {
3016
+ "epoch": 0.3102793599132086,
3017
+ "grad_norm": 15.859375,
3018
+ "learning_rate": 9.975759520886083e-07,
3019
+ "loss": 25.239,
3020
+ "step": 4290
3021
+ },
3022
+ {
3023
+ "epoch": 0.31100262182442817,
3024
+ "grad_norm": 15.859375,
3025
+ "learning_rate": 9.975703016272763e-07,
3026
+ "loss": 24.9693,
3027
+ "step": 4300
3028
+ },
3029
+ {
3030
+ "epoch": 0.31172588373564775,
3031
+ "grad_norm": 16.0625,
3032
+ "learning_rate": 9.975646511659443e-07,
3033
+ "loss": 25.1788,
3034
+ "step": 4310
3035
+ },
3036
+ {
3037
+ "epoch": 0.3124491456468674,
3038
+ "grad_norm": 16.1875,
3039
+ "learning_rate": 9.975590007046125e-07,
3040
+ "loss": 25.3888,
3041
+ "step": 4320
3042
+ },
3043
+ {
3044
+ "epoch": 0.31317240755808695,
3045
+ "grad_norm": 16.359375,
3046
+ "learning_rate": 9.975533502432805e-07,
3047
+ "loss": 25.269,
3048
+ "step": 4330
3049
+ },
3050
+ {
3051
+ "epoch": 0.3138956694693066,
3052
+ "grad_norm": 16.796875,
3053
+ "learning_rate": 9.975476997819487e-07,
3054
+ "loss": 25.0877,
3055
+ "step": 4340
3056
+ },
3057
+ {
3058
+ "epoch": 0.31461893138052616,
3059
+ "grad_norm": 16.453125,
3060
+ "learning_rate": 9.975420493206167e-07,
3061
+ "loss": 24.8608,
3062
+ "step": 4350
3063
+ },
3064
+ {
3065
+ "epoch": 0.3153421932917458,
3066
+ "grad_norm": 16.71875,
3067
+ "learning_rate": 9.97536398859285e-07,
3068
+ "loss": 25.3031,
3069
+ "step": 4360
3070
+ },
3071
+ {
3072
+ "epoch": 0.31606545520296536,
3073
+ "grad_norm": 16.03125,
3074
+ "learning_rate": 9.97530748397953e-07,
3075
+ "loss": 25.1261,
3076
+ "step": 4370
3077
+ },
3078
+ {
3079
+ "epoch": 0.316788717114185,
3080
+ "grad_norm": 16.25,
3081
+ "learning_rate": 9.97525097936621e-07,
3082
+ "loss": 25.1505,
3083
+ "step": 4380
3084
+ },
3085
+ {
3086
+ "epoch": 0.31751197902540457,
3087
+ "grad_norm": 15.640625,
3088
+ "learning_rate": 9.975194474752891e-07,
3089
+ "loss": 25.0906,
3090
+ "step": 4390
3091
+ },
3092
+ {
3093
+ "epoch": 0.3182352409366242,
3094
+ "grad_norm": 16.109375,
3095
+ "learning_rate": 9.975137970139571e-07,
3096
+ "loss": 25.1493,
3097
+ "step": 4400
3098
+ },
3099
+ {
3100
+ "epoch": 0.3189585028478438,
3101
+ "grad_norm": 16.25,
3102
+ "learning_rate": 9.975081465526254e-07,
3103
+ "loss": 25.2401,
3104
+ "step": 4410
3105
+ },
3106
+ {
3107
+ "epoch": 0.3196817647590634,
3108
+ "grad_norm": 16.921875,
3109
+ "learning_rate": 9.975024960912934e-07,
3110
+ "loss": 25.1871,
3111
+ "step": 4420
3112
+ },
3113
+ {
3114
+ "epoch": 0.320405026670283,
3115
+ "grad_norm": 16.328125,
3116
+ "learning_rate": 9.974968456299613e-07,
3117
+ "loss": 25.03,
3118
+ "step": 4430
3119
+ },
3120
+ {
3121
+ "epoch": 0.32112828858150255,
3122
+ "grad_norm": 16.546875,
3123
+ "learning_rate": 9.974911951686296e-07,
3124
+ "loss": 25.1426,
3125
+ "step": 4440
3126
+ },
3127
+ {
3128
+ "epoch": 0.3218515504927222,
3129
+ "grad_norm": 16.328125,
3130
+ "learning_rate": 9.974855447072976e-07,
3131
+ "loss": 25.0376,
3132
+ "step": 4450
3133
+ },
3134
+ {
3135
+ "epoch": 0.32257481240394176,
3136
+ "grad_norm": 15.9375,
3137
+ "learning_rate": 9.974798942459658e-07,
3138
+ "loss": 24.6728,
3139
+ "step": 4460
3140
+ },
3141
+ {
3142
+ "epoch": 0.3232980743151614,
3143
+ "grad_norm": 16.546875,
3144
+ "learning_rate": 9.974742437846338e-07,
3145
+ "loss": 25.2818,
3146
+ "step": 4470
3147
+ },
3148
+ {
3149
+ "epoch": 0.32402133622638096,
3150
+ "grad_norm": 15.46875,
3151
+ "learning_rate": 9.974685933233018e-07,
3152
+ "loss": 25.5436,
3153
+ "step": 4480
3154
+ },
3155
+ {
3156
+ "epoch": 0.3247445981376006,
3157
+ "grad_norm": 16.703125,
3158
+ "learning_rate": 9.9746294286197e-07,
3159
+ "loss": 25.0338,
3160
+ "step": 4490
3161
+ },
3162
+ {
3163
+ "epoch": 0.32546786004882017,
3164
+ "grad_norm": 16.546875,
3165
+ "learning_rate": 9.97457292400638e-07,
3166
+ "loss": 24.9645,
3167
+ "step": 4500
3168
+ },
3169
+ {
3170
+ "epoch": 0.3261911219600398,
3171
+ "grad_norm": 15.9296875,
3172
+ "learning_rate": 9.974516419393062e-07,
3173
+ "loss": 25.2643,
3174
+ "step": 4510
3175
+ },
3176
+ {
3177
+ "epoch": 0.3269143838712594,
3178
+ "grad_norm": 15.984375,
3179
+ "learning_rate": 9.974459914779742e-07,
3180
+ "loss": 25.1678,
3181
+ "step": 4520
3182
+ },
3183
+ {
3184
+ "epoch": 0.327637645782479,
3185
+ "grad_norm": 16.046875,
3186
+ "learning_rate": 9.974403410166422e-07,
3187
+ "loss": 25.2445,
3188
+ "step": 4530
3189
+ },
3190
+ {
3191
+ "epoch": 0.3283609076936986,
3192
+ "grad_norm": 16.96875,
3193
+ "learning_rate": 9.974346905553104e-07,
3194
+ "loss": 24.8622,
3195
+ "step": 4540
3196
+ },
3197
+ {
3198
+ "epoch": 0.3290841696049182,
3199
+ "grad_norm": 16.84375,
3200
+ "learning_rate": 9.974290400939784e-07,
3201
+ "loss": 25.0903,
3202
+ "step": 4550
3203
+ },
3204
+ {
3205
+ "epoch": 0.3298074315161378,
3206
+ "grad_norm": 16.71875,
3207
+ "learning_rate": 9.974233896326466e-07,
3208
+ "loss": 24.9463,
3209
+ "step": 4560
3210
+ },
3211
+ {
3212
+ "epoch": 0.33053069342735736,
3213
+ "grad_norm": 16.765625,
3214
+ "learning_rate": 9.974177391713146e-07,
3215
+ "loss": 25.1533,
3216
+ "step": 4570
3217
+ },
3218
+ {
3219
+ "epoch": 0.331253955338577,
3220
+ "grad_norm": 16.828125,
3221
+ "learning_rate": 9.974120887099828e-07,
3222
+ "loss": 25.1094,
3223
+ "step": 4580
3224
+ },
3225
+ {
3226
+ "epoch": 0.33197721724979656,
3227
+ "grad_norm": 16.8125,
3228
+ "learning_rate": 9.974064382486508e-07,
3229
+ "loss": 25.1725,
3230
+ "step": 4590
3231
+ },
3232
+ {
3233
+ "epoch": 0.3327004791610162,
3234
+ "grad_norm": 16.65625,
3235
+ "learning_rate": 9.974007877873188e-07,
3236
+ "loss": 24.9989,
3237
+ "step": 4600
3238
+ },
3239
+ {
3240
+ "epoch": 0.33342374107223577,
3241
+ "grad_norm": 15.7109375,
3242
+ "learning_rate": 9.97395137325987e-07,
3243
+ "loss": 24.916,
3244
+ "step": 4610
3245
+ },
3246
+ {
3247
+ "epoch": 0.3341470029834554,
3248
+ "grad_norm": 16.140625,
3249
+ "learning_rate": 9.97389486864655e-07,
3250
+ "loss": 24.9188,
3251
+ "step": 4620
3252
+ },
3253
+ {
3254
+ "epoch": 0.334870264894675,
3255
+ "grad_norm": 17.515625,
3256
+ "learning_rate": 9.973838364033232e-07,
3257
+ "loss": 24.9345,
3258
+ "step": 4630
3259
+ },
3260
+ {
3261
+ "epoch": 0.3355935268058946,
3262
+ "grad_norm": 16.4375,
3263
+ "learning_rate": 9.973781859419912e-07,
3264
+ "loss": 25.1931,
3265
+ "step": 4640
3266
+ },
3267
+ {
3268
+ "epoch": 0.3363167887171142,
3269
+ "grad_norm": 17.125,
3270
+ "learning_rate": 9.973725354806592e-07,
3271
+ "loss": 25.1182,
3272
+ "step": 4650
3273
+ },
3274
+ {
3275
+ "epoch": 0.3370400506283338,
3276
+ "grad_norm": 16.515625,
3277
+ "learning_rate": 9.973668850193274e-07,
3278
+ "loss": 25.5517,
3279
+ "step": 4660
3280
+ },
3281
+ {
3282
+ "epoch": 0.3377633125395534,
3283
+ "grad_norm": 16.8125,
3284
+ "learning_rate": 9.973612345579954e-07,
3285
+ "loss": 24.8154,
3286
+ "step": 4670
3287
+ },
3288
+ {
3289
+ "epoch": 0.33848657445077296,
3290
+ "grad_norm": 16.40625,
3291
+ "learning_rate": 9.973555840966636e-07,
3292
+ "loss": 25.2684,
3293
+ "step": 4680
3294
+ },
3295
+ {
3296
+ "epoch": 0.3392098363619926,
3297
+ "grad_norm": 16.109375,
3298
+ "learning_rate": 9.973499336353316e-07,
3299
+ "loss": 25.024,
3300
+ "step": 4690
3301
+ },
3302
+ {
3303
+ "epoch": 0.33993309827321216,
3304
+ "grad_norm": 16.296875,
3305
+ "learning_rate": 9.973442831739996e-07,
3306
+ "loss": 25.1598,
3307
+ "step": 4700
3308
+ },
3309
+ {
3310
+ "epoch": 0.3406563601844318,
3311
+ "grad_norm": 16.46875,
3312
+ "learning_rate": 9.973386327126678e-07,
3313
+ "loss": 25.0369,
3314
+ "step": 4710
3315
+ },
3316
+ {
3317
+ "epoch": 0.34137962209565137,
3318
+ "grad_norm": 15.53125,
3319
+ "learning_rate": 9.973329822513358e-07,
3320
+ "loss": 25.4247,
3321
+ "step": 4720
3322
+ },
3323
+ {
3324
+ "epoch": 0.342102884006871,
3325
+ "grad_norm": 16.75,
3326
+ "learning_rate": 9.97327331790004e-07,
3327
+ "loss": 25.0935,
3328
+ "step": 4730
3329
+ },
3330
+ {
3331
+ "epoch": 0.3428261459180906,
3332
+ "grad_norm": 15.84375,
3333
+ "learning_rate": 9.97321681328672e-07,
3334
+ "loss": 24.8433,
3335
+ "step": 4740
3336
+ },
3337
+ {
3338
+ "epoch": 0.3435494078293102,
3339
+ "grad_norm": 16.953125,
3340
+ "learning_rate": 9.9731603086734e-07,
3341
+ "loss": 25.3383,
3342
+ "step": 4750
3343
+ },
3344
+ {
3345
+ "epoch": 0.3442726697405298,
3346
+ "grad_norm": 16.15625,
3347
+ "learning_rate": 9.973103804060082e-07,
3348
+ "loss": 24.9644,
3349
+ "step": 4760
3350
+ },
3351
+ {
3352
+ "epoch": 0.3449959316517494,
3353
+ "grad_norm": 17.34375,
3354
+ "learning_rate": 9.973047299446762e-07,
3355
+ "loss": 25.2677,
3356
+ "step": 4770
3357
+ },
3358
+ {
3359
+ "epoch": 0.345719193562969,
3360
+ "grad_norm": 17.125,
3361
+ "learning_rate": 9.972990794833444e-07,
3362
+ "loss": 25.1205,
3363
+ "step": 4780
3364
+ },
3365
+ {
3366
+ "epoch": 0.3464424554741886,
3367
+ "grad_norm": 16.015625,
3368
+ "learning_rate": 9.972934290220124e-07,
3369
+ "loss": 24.9001,
3370
+ "step": 4790
3371
+ },
3372
+ {
3373
+ "epoch": 0.3471657173854082,
3374
+ "grad_norm": 16.984375,
3375
+ "learning_rate": 9.972877785606806e-07,
3376
+ "loss": 24.6669,
3377
+ "step": 4800
3378
+ },
3379
+ {
3380
+ "epoch": 0.34788897929662776,
3381
+ "grad_norm": 15.9453125,
3382
+ "learning_rate": 9.972821280993486e-07,
3383
+ "loss": 24.8667,
3384
+ "step": 4810
3385
+ },
3386
+ {
3387
+ "epoch": 0.3486122412078474,
3388
+ "grad_norm": 15.609375,
3389
+ "learning_rate": 9.972764776380166e-07,
3390
+ "loss": 24.8402,
3391
+ "step": 4820
3392
+ },
3393
+ {
3394
+ "epoch": 0.34933550311906697,
3395
+ "grad_norm": 15.3203125,
3396
+ "learning_rate": 9.972708271766848e-07,
3397
+ "loss": 25.0204,
3398
+ "step": 4830
3399
+ },
3400
+ {
3401
+ "epoch": 0.3500587650302866,
3402
+ "grad_norm": 16.0625,
3403
+ "learning_rate": 9.972651767153528e-07,
3404
+ "loss": 25.1096,
3405
+ "step": 4840
3406
+ },
3407
+ {
3408
+ "epoch": 0.3507820269415062,
3409
+ "grad_norm": 15.2421875,
3410
+ "learning_rate": 9.97259526254021e-07,
3411
+ "loss": 25.4209,
3412
+ "step": 4850
3413
+ },
3414
+ {
3415
+ "epoch": 0.3515052888527258,
3416
+ "grad_norm": 18.0,
3417
+ "learning_rate": 9.97253875792689e-07,
3418
+ "loss": 25.3704,
3419
+ "step": 4860
3420
+ },
3421
+ {
3422
+ "epoch": 0.3522285507639454,
3423
+ "grad_norm": 15.8984375,
3424
+ "learning_rate": 9.97248225331357e-07,
3425
+ "loss": 25.0614,
3426
+ "step": 4870
3427
+ },
3428
+ {
3429
+ "epoch": 0.352951812675165,
3430
+ "grad_norm": 16.5,
3431
+ "learning_rate": 9.972425748700253e-07,
3432
+ "loss": 24.972,
3433
+ "step": 4880
3434
+ },
3435
+ {
3436
+ "epoch": 0.3536750745863846,
3437
+ "grad_norm": 16.765625,
3438
+ "learning_rate": 9.972369244086933e-07,
3439
+ "loss": 25.0881,
3440
+ "step": 4890
3441
+ },
3442
+ {
3443
+ "epoch": 0.3543983364976042,
3444
+ "grad_norm": 16.546875,
3445
+ "learning_rate": 9.972312739473615e-07,
3446
+ "loss": 25.3568,
3447
+ "step": 4900
3448
+ },
3449
+ {
3450
+ "epoch": 0.3551215984088238,
3451
+ "grad_norm": 17.03125,
3452
+ "learning_rate": 9.972256234860295e-07,
3453
+ "loss": 24.9578,
3454
+ "step": 4910
3455
+ },
3456
+ {
3457
+ "epoch": 0.3558448603200434,
3458
+ "grad_norm": 17.015625,
3459
+ "learning_rate": 9.972199730246975e-07,
3460
+ "loss": 25.1697,
3461
+ "step": 4920
3462
+ },
3463
+ {
3464
+ "epoch": 0.356568122231263,
3465
+ "grad_norm": 17.28125,
3466
+ "learning_rate": 9.972143225633657e-07,
3467
+ "loss": 25.3285,
3468
+ "step": 4930
3469
+ },
3470
+ {
3471
+ "epoch": 0.35729138414248257,
3472
+ "grad_norm": 16.671875,
3473
+ "learning_rate": 9.972086721020337e-07,
3474
+ "loss": 25.0862,
3475
+ "step": 4940
3476
+ },
3477
+ {
3478
+ "epoch": 0.3580146460537022,
3479
+ "grad_norm": 15.0703125,
3480
+ "learning_rate": 9.972030216407019e-07,
3481
+ "loss": 25.1828,
3482
+ "step": 4950
3483
+ },
3484
+ {
3485
+ "epoch": 0.3587379079649218,
3486
+ "grad_norm": 15.6484375,
3487
+ "learning_rate": 9.971973711793699e-07,
3488
+ "loss": 25.1832,
3489
+ "step": 4960
3490
+ },
3491
+ {
3492
+ "epoch": 0.3594611698761414,
3493
+ "grad_norm": 16.46875,
3494
+ "learning_rate": 9.97191720718038e-07,
3495
+ "loss": 25.3505,
3496
+ "step": 4970
3497
+ },
3498
+ {
3499
+ "epoch": 0.360184431787361,
3500
+ "grad_norm": 16.421875,
3501
+ "learning_rate": 9.97186070256706e-07,
3502
+ "loss": 24.9362,
3503
+ "step": 4980
3504
+ },
3505
+ {
3506
+ "epoch": 0.3609076936985806,
3507
+ "grad_norm": 16.6875,
3508
+ "learning_rate": 9.97180419795374e-07,
3509
+ "loss": 25.5243,
3510
+ "step": 4990
3511
+ },
3512
+ {
3513
+ "epoch": 0.3616309556098002,
3514
+ "grad_norm": 16.234375,
3515
+ "learning_rate": 9.971747693340423e-07,
3516
+ "loss": 25.2643,
3517
+ "step": 5000
3518
+ },
3519
+ {
3520
+ "epoch": 0.3616309556098002,
3521
+ "eval_loss": 1.5911972522735596,
3522
+ "eval_runtime": 376.056,
3523
+ "eval_samples_per_second": 1930.72,
3524
+ "eval_steps_per_second": 30.168,
3525
+ "step": 5000
 }
 ],
 "logging_steps": 10,

 "attributes": {}
 }
 },
+ "total_flos": 6.984642783608832e+18,
 "train_batch_size": 16,
 "trial_name": null,
 "trial_params": null