Training in progress, step 200, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c47670b132dd2edcb461863c6036215ff9426791f6d2edad95787f2bf9c4b0c0
 size 42487072
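The ~42 MB adapter_model.safetensors is a PEFT (LoRA-style) adapter file; only its three-line LFS pointer changes in the diff above. A minimal loading sketch follows — the base model id is a placeholder, since this commit does not name the base checkpoint:

    from peft import PeftModel
    from transformers import AutoModelForCausalLM

    # "BASE_MODEL_ID" is hypothetical: the commit never states which base
    # model this adapter was trained against.
    base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")

    # PeftModel picks up adapter_model.safetensors (plus adapter_config.json)
    # from the checkpoint directory.
    model = PeftModel.from_pretrained(base, "last-checkpoint")
    model.eval()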
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1bd0e3116f3e6ed58e866a2497073987ad79e1519a7ec0fde83c88a80e218921
 size 21735354
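optimizer.pt stores the serialized optimizer state (about 21 MB here), which is what lets a resumed run keep its momentum/variance buffers instead of starting cold. A quick inspection sketch, assuming the payload has been downloaded locally; the exact keys depend on the optimizer, but a state_dict usually exposes "state" and "param_groups":

    import torch

    # Load onto CPU for inspection; this is assumed to be the dict saved via
    # torch.save(optimizer.state_dict(), ...) at checkpoint time.
    opt_state = torch.load("last-checkpoint/optimizer.pt", map_location="cpu")

    print(opt_state.keys())                        # typically: state, param_groups
    print(opt_state["param_groups"][0].get("lr"))  # learning rate at save time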
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0b9b9e7a306be6412315e098350a79bbe4983f3eeaaad687e5922807f860c487
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:530505d607699f384741067a5f9139d72f043713adb680898a3f1b5714170c97
 size 1064
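All four binaries above are tracked with Git LFS: the repository stores only the spec version, a sha256 object id, and the payload size, so each diff is three lines no matter how large the file is. A small sketch for verifying a downloaded payload against its pointer, using the scheduler.pt values from the diff just above:

    import hashlib

    def matches_pointer(path: str, oid: str, size: int) -> bool:
        # Stream-hash an LFS payload and compare it to its pointer's oid/size.
        h = hashlib.sha256()
        n = 0
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
                h.update(chunk)
                n += len(chunk)
        return h.hexdigest() == oid and n == size

    # oid and size copied from the scheduler.pt pointer above.
    print(matches_pointer(
        "last-checkpoint/scheduler.pt",
        "530505d607699f384741067a5f9139d72f043713adb680898a3f1b5714170c97",
        1064,
    ))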
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 3.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 3.208616256713867,
+  "best_model_checkpoint": "miner_id_24/checkpoint-200",
+  "epoch": 0.04092699647004656,
   "eval_steps": 100,
-  "global_step":
+  "global_step": 200,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -723,6 +723,714 @@
       "eval_samples_per_second": 174.318,
       "eval_steps_per_second": 43.59,
       "step": 100
+    },
+    {
+      "epoch": 0.02066813321737351,
+      "grad_norm": 11.214513778686523,
+      "learning_rate": 9.31367192988896e-05,
+      "loss": 14.0912,
+      "step": 101
+    },
+    {
+      "epoch": 0.020872768199723744,
+      "grad_norm": 10.00228214263916,
+      "learning_rate": 9.297032057507264e-05,
+      "loss": 13.9889,
+      "step": 102
+    },
+    {
+      "epoch": 0.021077403182073975,
+      "grad_norm": 10.31716251373291,
+      "learning_rate": 9.280208114573859e-05,
+      "loss": 14.1193,
+      "step": 103
+    },
+    {
+      "epoch": 0.02128203816442421,
+      "grad_norm": 9.314844131469727,
+      "learning_rate": 9.263200821770461e-05,
+      "loss": 13.7396,
+      "step": 104
+    },
+    {
+      "epoch": 0.02148667314677444,
+      "grad_norm": 7.567698001861572,
+      "learning_rate": 9.246010907632895e-05,
+      "loss": 13.7879,
+      "step": 105
+    },
+    {
+      "epoch": 0.021691308129124672,
+      "grad_norm": 6.531108856201172,
+      "learning_rate": 9.228639108519868e-05,
+      "loss": 13.5185,
+      "step": 106
+    },
+    {
+      "epoch": 0.021895943111474907,
+      "grad_norm": 5.202017307281494,
+      "learning_rate": 9.211086168581433e-05,
+      "loss": 13.3491,
+      "step": 107
+    },
+    {
+      "epoch": 0.022100578093825138,
+      "grad_norm": 4.530038356781006,
+      "learning_rate": 9.193352839727121e-05,
+      "loss": 13.2004,
+      "step": 108
+    },
+    {
+      "epoch": 0.022305213076175372,
+      "grad_norm": 4.831387996673584,
+      "learning_rate": 9.175439881593716e-05,
+      "loss": 13.4205,
+      "step": 109
+    },
+    {
+      "epoch": 0.022509848058525603,
+      "grad_norm": 4.692884922027588,
+      "learning_rate": 9.157348061512727e-05,
+      "loss": 13.4912,
+      "step": 110
+    },
+    {
+      "epoch": 0.022714483040875838,
+      "grad_norm": 5.204988479614258,
+      "learning_rate": 9.139078154477512e-05,
+      "loss": 13.1214,
+      "step": 111
+    },
+    {
+      "epoch": 0.02291911802322607,
+      "grad_norm": 4.781569004058838,
+      "learning_rate": 9.120630943110077e-05,
+      "loss": 12.6118,
+      "step": 112
+    },
+    {
+      "epoch": 0.023123753005576304,
+      "grad_norm": 4.754026412963867,
+      "learning_rate": 9.102007217627568e-05,
+      "loss": 13.186,
+      "step": 113
+    },
+    {
+      "epoch": 0.023328387987926535,
+      "grad_norm": 5.035665035247803,
+      "learning_rate": 9.083207775808396e-05,
+      "loss": 12.7322,
+      "step": 114
+    },
+    {
+      "epoch": 0.02353302297027677,
+      "grad_norm": 5.12575626373291,
+      "learning_rate": 9.064233422958077e-05,
+      "loss": 13.0182,
+      "step": 115
+    },
+    {
+      "epoch": 0.023737657952627,
+      "grad_norm": 5.39860200881958,
+      "learning_rate": 9.045084971874738e-05,
+      "loss": 13.4676,
+      "step": 116
+    },
+    {
+      "epoch": 0.023942292934977235,
+      "grad_norm": 5.005839824676514,
+      "learning_rate": 9.025763242814291e-05,
+      "loss": 13.0532,
+      "step": 117
+    },
+    {
+      "epoch": 0.024146927917327466,
+      "grad_norm": 5.046457290649414,
+      "learning_rate": 9.006269063455304e-05,
+      "loss": 13.336,
+      "step": 118
+    },
+    {
+      "epoch": 0.0243515628996777,
+      "grad_norm": 4.951815128326416,
+      "learning_rate": 8.986603268863536e-05,
+      "loss": 13.1308,
+      "step": 119
+    },
+    {
+      "epoch": 0.02455619788202793,
+      "grad_norm": 5.16800594329834,
+      "learning_rate": 8.966766701456177e-05,
+      "loss": 12.7553,
+      "step": 120
+    },
+    {
+      "epoch": 0.024760832864378166,
+      "grad_norm": 5.190509796142578,
+      "learning_rate": 8.94676021096575e-05,
+      "loss": 13.2239,
+      "step": 121
+    },
+    {
+      "epoch": 0.024965467846728397,
+      "grad_norm": 5.662418365478516,
+      "learning_rate": 8.926584654403724e-05,
+      "loss": 13.2593,
+      "step": 122
+    },
+    {
+      "epoch": 0.025170102829078632,
+      "grad_norm": 5.604646682739258,
+      "learning_rate": 8.906240896023794e-05,
+      "loss": 13.2693,
+      "step": 123
+    },
+    {
+      "epoch": 0.025374737811428863,
+      "grad_norm": 5.807793140411377,
+      "learning_rate": 8.885729807284856e-05,
+      "loss": 13.465,
+      "step": 124
+    },
+    {
+      "epoch": 0.025579372793779098,
+      "grad_norm": 6.032169818878174,
+      "learning_rate": 8.865052266813685e-05,
+      "loss": 13.0197,
+      "step": 125
+    },
+    {
+      "epoch": 0.02578400777612933,
+      "grad_norm": 5.969254970550537,
+      "learning_rate": 8.844209160367299e-05,
+      "loss": 12.91,
+      "step": 126
+    },
+    {
+      "epoch": 0.025988642758479563,
+      "grad_norm": 5.627323627471924,
+      "learning_rate": 8.823201380795001e-05,
+      "loss": 12.9693,
+      "step": 127
+    },
+    {
+      "epoch": 0.026193277740829794,
+      "grad_norm": 5.775904655456543,
+      "learning_rate": 8.802029828000156e-05,
+      "loss": 13.3716,
+      "step": 128
+    },
+    {
+      "epoch": 0.02639791272318003,
+      "grad_norm": 6.050631999969482,
+      "learning_rate": 8.780695408901613e-05,
+      "loss": 12.9946,
+      "step": 129
+    },
+    {
+      "epoch": 0.02660254770553026,
+      "grad_norm": 6.608086109161377,
+      "learning_rate": 8.759199037394887e-05,
+      "loss": 12.7268,
+      "step": 130
+    },
+    {
+      "epoch": 0.026807182687880494,
+      "grad_norm": 6.4099202156066895,
+      "learning_rate": 8.737541634312985e-05,
+      "loss": 13.3797,
+      "step": 131
+    },
+    {
+      "epoch": 0.027011817670230726,
+      "grad_norm": 6.958422660827637,
+      "learning_rate": 8.715724127386972e-05,
+      "loss": 13.2627,
+      "step": 132
+    },
+    {
+      "epoch": 0.02721645265258096,
+      "grad_norm": 6.657001495361328,
+      "learning_rate": 8.693747451206232e-05,
+      "loss": 13.1662,
+      "step": 133
+    },
+    {
+      "epoch": 0.02742108763493119,
+      "grad_norm": 6.775047302246094,
+      "learning_rate": 8.671612547178428e-05,
+      "loss": 12.8757,
+      "step": 134
+    },
+    {
+      "epoch": 0.027625722617281426,
+      "grad_norm": 6.7623419761657715,
+      "learning_rate": 8.649320363489179e-05,
+      "loss": 12.5799,
+      "step": 135
+    },
+    {
+      "epoch": 0.027830357599631657,
+      "grad_norm": 7.408362865447998,
+      "learning_rate": 8.626871855061438e-05,
+      "loss": 13.8727,
+      "step": 136
+    },
+    {
+      "epoch": 0.02803499258198189,
+      "grad_norm": 6.984137535095215,
+      "learning_rate": 8.604267983514594e-05,
+      "loss": 12.6957,
+      "step": 137
+    },
+    {
+      "epoch": 0.028239627564332122,
+      "grad_norm": 7.494143486022949,
+      "learning_rate": 8.581509717123273e-05,
+      "loss": 13.5292,
+      "step": 138
+    },
+    {
+      "epoch": 0.028444262546682357,
+      "grad_norm": 7.043254375457764,
+      "learning_rate": 8.558598030775857e-05,
+      "loss": 12.5103,
+      "step": 139
+    },
+    {
+      "epoch": 0.028648897529032588,
+      "grad_norm": 7.2675957679748535,
+      "learning_rate": 8.535533905932738e-05,
+      "loss": 12.8951,
+      "step": 140
+    },
+    {
+      "epoch": 0.02885353251138282,
+      "grad_norm": 7.874957084655762,
+      "learning_rate": 8.51231833058426e-05,
+      "loss": 12.9737,
+      "step": 141
+    },
+    {
+      "epoch": 0.029058167493733054,
+      "grad_norm": 8.002019882202148,
+      "learning_rate": 8.488952299208401e-05,
+      "loss": 12.8148,
+      "step": 142
+    },
+    {
+      "epoch": 0.029262802476083285,
+      "grad_norm": 8.36933422088623,
+      "learning_rate": 8.46543681272818e-05,
+      "loss": 12.4946,
+      "step": 143
+    },
+    {
+      "epoch": 0.02946743745843352,
+      "grad_norm": 9.498835563659668,
+      "learning_rate": 8.44177287846877e-05,
+      "loss": 13.271,
+      "step": 144
+    },
+    {
+      "epoch": 0.02967207244078375,
+      "grad_norm": 8.976995468139648,
+      "learning_rate": 8.417961510114356e-05,
+      "loss": 12.5241,
+      "step": 145
+    },
+    {
+      "epoch": 0.029876707423133985,
+      "grad_norm": 9.178775787353516,
+      "learning_rate": 8.39400372766471e-05,
+      "loss": 12.4166,
+      "step": 146
+    },
+    {
+      "epoch": 0.030081342405484216,
+      "grad_norm": 10.875651359558105,
+      "learning_rate": 8.36990055739149e-05,
+      "loss": 12.7323,
+      "step": 147
+    },
+    {
+      "epoch": 0.03028597738783445,
+      "grad_norm": 11.843050003051758,
+      "learning_rate": 8.345653031794292e-05,
+      "loss": 12.6294,
+      "step": 148
+    },
+    {
+      "epoch": 0.030490612370184682,
+      "grad_norm": 12.797874450683594,
+      "learning_rate": 8.321262189556409e-05,
+      "loss": 11.9468,
+      "step": 149
+    },
+    {
+      "epoch": 0.030695247352534916,
+      "grad_norm": 21.556180953979492,
+      "learning_rate": 8.296729075500344e-05,
+      "loss": 14.375,
+      "step": 150
+    },
+    {
+      "epoch": 0.030899882334885147,
+      "grad_norm": 5.878223419189453,
+      "learning_rate": 8.272054740543052e-05,
+      "loss": 13.2625,
+      "step": 151
+    },
+    {
+      "epoch": 0.031104517317235382,
+      "grad_norm": 6.683862209320068,
+      "learning_rate": 8.247240241650918e-05,
+      "loss": 13.4469,
+      "step": 152
+    },
+    {
+      "epoch": 0.03130915229958561,
+      "grad_norm": 6.695138931274414,
+      "learning_rate": 8.222286641794488e-05,
+      "loss": 13.8935,
+      "step": 153
+    },
+    {
+      "epoch": 0.031513787281935844,
+      "grad_norm": 6.529450416564941,
+      "learning_rate": 8.197195009902924e-05,
+      "loss": 13.2827,
+      "step": 154
+    },
+    {
+      "epoch": 0.03171842226428608,
+      "grad_norm": 5.889492034912109,
+      "learning_rate": 8.171966420818228e-05,
+      "loss": 13.2815,
+      "step": 155
+    },
+    {
+      "epoch": 0.03192305724663631,
+      "grad_norm": 5.005529403686523,
+      "learning_rate": 8.146601955249188e-05,
+      "loss": 13.1348,
+      "step": 156
+    },
+    {
+      "epoch": 0.032127692228986544,
+      "grad_norm": 4.527781009674072,
+      "learning_rate": 8.121102699725089e-05,
+      "loss": 12.9616,
+      "step": 157
+    },
+    {
+      "epoch": 0.032332327211336775,
+      "grad_norm": 3.992450714111328,
+      "learning_rate": 8.095469746549172e-05,
+      "loss": 13.2171,
+      "step": 158
+    },
+    {
+      "epoch": 0.032536962193687013,
+      "grad_norm": 3.9536304473876953,
+      "learning_rate": 8.069704193751832e-05,
+      "loss": 13.5083,
+      "step": 159
+    },
+    {
+      "epoch": 0.032741597176037245,
+      "grad_norm": 4.0044264793396,
+      "learning_rate": 8.043807145043604e-05,
+      "loss": 13.5044,
+      "step": 160
+    },
+    {
+      "epoch": 0.032946232158387476,
+      "grad_norm": 4.166686058044434,
+      "learning_rate": 8.017779709767858e-05,
+      "loss": 12.9416,
+      "step": 161
+    },
+    {
+      "epoch": 0.03315086714073771,
+      "grad_norm": 4.292598724365234,
+      "learning_rate": 7.991623002853296e-05,
+      "loss": 12.932,
+      "step": 162
+    },
+    {
+      "epoch": 0.033355502123087945,
+      "grad_norm": 4.622048377990723,
+      "learning_rate": 7.965338144766186e-05,
+      "loss": 13.1667,
+      "step": 163
+    },
+    {
+      "epoch": 0.033560137105438176,
+      "grad_norm": 4.218106746673584,
+      "learning_rate": 7.938926261462366e-05,
+      "loss": 12.9429,
+      "step": 164
+    },
+    {
+      "epoch": 0.03376477208778841,
+      "grad_norm": 4.615002155303955,
+      "learning_rate": 7.912388484339012e-05,
+      "loss": 13.4383,
+      "step": 165
+    },
+    {
+      "epoch": 0.03396940707013864,
+      "grad_norm": 4.371853828430176,
+      "learning_rate": 7.88572595018617e-05,
+      "loss": 12.6596,
+      "step": 166
+    },
+    {
+      "epoch": 0.034174042052488876,
+      "grad_norm": 4.507296562194824,
+      "learning_rate": 7.858939801138061e-05,
+      "loss": 13.0297,
+      "step": 167
+    },
+    {
+      "epoch": 0.03437867703483911,
+      "grad_norm": 4.6610941886901855,
+      "learning_rate": 7.832031184624164e-05,
+      "loss": 12.6801,
+      "step": 168
+    },
+    {
+      "epoch": 0.03458331201718934,
+      "grad_norm": 4.3974714279174805,
+      "learning_rate": 7.80500125332005e-05,
+      "loss": 12.6394,
+      "step": 169
+    },
+    {
+      "epoch": 0.03478794699953957,
+      "grad_norm": 4.65360689163208,
+      "learning_rate": 7.777851165098012e-05,
+      "loss": 13.2642,
+      "step": 170
+    },
+    {
+      "epoch": 0.03499258198188981,
+      "grad_norm": 4.651695251464844,
+      "learning_rate": 7.750582082977467e-05,
+      "loss": 13.3055,
+      "step": 171
+    },
+    {
+      "epoch": 0.03519721696424004,
+      "grad_norm": 5.114010810852051,
+      "learning_rate": 7.723195175075136e-05,
+      "loss": 13.0045,
+      "step": 172
+    },
+    {
+      "epoch": 0.03540185194659027,
+      "grad_norm": 5.113755702972412,
+      "learning_rate": 7.695691614555003e-05,
+      "loss": 12.9366,
+      "step": 173
+    },
+    {
+      "epoch": 0.0356064869289405,
+      "grad_norm": 5.089533805847168,
+      "learning_rate": 7.668072579578058e-05,
+      "loss": 12.959,
+      "step": 174
+    },
+    {
+      "epoch": 0.03581112191129073,
+      "grad_norm": 5.559483051300049,
+      "learning_rate": 7.64033925325184e-05,
+      "loss": 13.2842,
+      "step": 175
+    },
+    {
+      "epoch": 0.03601575689364097,
+      "grad_norm": 5.3359761238098145,
+      "learning_rate": 7.612492823579745e-05,
+      "loss": 13.0262,
+      "step": 176
+    },
+    {
+      "epoch": 0.0362203918759912,
+      "grad_norm": 5.409842014312744,
+      "learning_rate": 7.584534483410137e-05,
+      "loss": 13.0076,
+      "step": 177
+    },
+    {
+      "epoch": 0.03642502685834143,
+      "grad_norm": 5.253081321716309,
+      "learning_rate": 7.55646543038526e-05,
+      "loss": 11.9703,
+      "step": 178
+    },
+    {
+      "epoch": 0.03662966184069166,
+      "grad_norm": 5.482647895812988,
+      "learning_rate": 7.528286866889924e-05,
+      "loss": 12.6692,
+      "step": 179
+    },
+    {
+      "epoch": 0.0368342968230419,
+      "grad_norm": 5.659306049346924,
+      "learning_rate": 7.500000000000001e-05,
+      "loss": 13.0874,
+      "step": 180
+    },
+    {
+      "epoch": 0.03703893180539213,
+      "grad_norm": 5.71022891998291,
+      "learning_rate": 7.471606041430723e-05,
+      "loss": 12.9602,
+      "step": 181
+    },
+    {
+      "epoch": 0.03724356678774236,
+      "grad_norm": 6.031240940093994,
+      "learning_rate": 7.443106207484776e-05,
+      "loss": 12.8276,
+      "step": 182
+    },
+    {
+      "epoch": 0.037448201770092594,
+      "grad_norm": 5.916280746459961,
+      "learning_rate": 7.414501719000187e-05,
+      "loss": 12.7036,
+      "step": 183
+    },
+    {
+      "epoch": 0.03765283675244283,
+      "grad_norm": 6.090421676635742,
+      "learning_rate": 7.385793801298042e-05,
+      "loss": 12.5362,
+      "step": 184
+    },
+    {
+      "epoch": 0.03785747173479306,
+      "grad_norm": 6.97968053817749,
+      "learning_rate": 7.35698368412999e-05,
+      "loss": 13.1994,
+      "step": 185
+    },
+    {
+      "epoch": 0.038062106717143294,
+      "grad_norm": 6.6946587562561035,
+      "learning_rate": 7.328072601625557e-05,
+      "loss": 12.9428,
+      "step": 186
+    },
+    {
+      "epoch": 0.038266741699493526,
+      "grad_norm": 6.86458158493042,
+      "learning_rate": 7.2990617922393e-05,
+      "loss": 13.5336,
+      "step": 187
+    },
+    {
+      "epoch": 0.038471376681843764,
+      "grad_norm": 7.41053581237793,
+      "learning_rate": 7.269952498697734e-05,
+      "loss": 13.1752,
+      "step": 188
+    },
+    {
+      "epoch": 0.038676011664193995,
+      "grad_norm": 6.769413948059082,
+      "learning_rate": 7.240745967946113e-05,
+      "loss": 12.3618,
+      "step": 189
+    },
+    {
+      "epoch": 0.038880646646544226,
+      "grad_norm": 8.171807289123535,
+      "learning_rate": 7.211443451095007e-05,
+      "loss": 13.4612,
+      "step": 190
+    },
+    {
+      "epoch": 0.03908528162889446,
+      "grad_norm": 7.6870598793029785,
+      "learning_rate": 7.18204620336671e-05,
+      "loss": 12.8721,
+      "step": 191
+    },
+    {
+      "epoch": 0.039289916611244695,
+      "grad_norm": 7.984126567840576,
+      "learning_rate": 7.152555484041476e-05,
+      "loss": 12.5025,
+      "step": 192
+    },
+    {
+      "epoch": 0.039494551593594926,
+      "grad_norm": 8.748424530029297,
+      "learning_rate": 7.122972556403567e-05,
+      "loss": 12.5803,
+      "step": 193
+    },
+    {
+      "epoch": 0.03969918657594516,
+      "grad_norm": 8.19789981842041,
+      "learning_rate": 7.09329868768714e-05,
+      "loss": 13.0793,
+      "step": 194
+    },
+    {
+      "epoch": 0.03990382155829539,
+      "grad_norm": 8.25755786895752,
+      "learning_rate": 7.063535149021973e-05,
+      "loss": 13.2436,
+      "step": 195
+    },
+    {
+      "epoch": 0.040108456540645626,
+      "grad_norm": 10.084080696105957,
+      "learning_rate": 7.033683215379002e-05,
+      "loss": 12.4769,
+      "step": 196
+    },
+    {
+      "epoch": 0.04031309152299586,
+      "grad_norm": 11.04244327545166,
+      "learning_rate": 7.003744165515705e-05,
+      "loss": 13.3229,
+      "step": 197
+    },
+    {
+      "epoch": 0.04051772650534609,
+      "grad_norm": 10.718149185180664,
+      "learning_rate": 6.973719281921335e-05,
+      "loss": 13.0458,
+      "step": 198
+    },
+    {
+      "epoch": 0.04072236148769632,
+      "grad_norm": 12.596996307373047,
+      "learning_rate": 6.943609850761979e-05,
+      "loss": 13.2156,
+      "step": 199
+    },
+    {
+      "epoch": 0.04092699647004656,
+      "grad_norm": 16.626497268676758,
+      "learning_rate": 6.91341716182545e-05,
+      "loss": 12.1366,
+      "step": 200
+    },
+    {
+      "epoch": 0.04092699647004656,
+      "eval_loss": 3.208616256713867,
+      "eval_runtime": 47.3335,
+      "eval_samples_per_second": 173.872,
+      "eval_steps_per_second": 43.479,
+      "step": 200
     }
   ],
   "logging_steps": 1,
@@ -751,7 +1459,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 3771413852848128.0,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
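Since trainer_state.json is plain JSON, the metrics added in this commit are easy to pull back out. A short sketch recovering the loss curve and the step-200 evaluation, assuming the standard Trainer layout where these entries sit in a "log_history" list:

    import json

    with open("last-checkpoint/trainer_state.json") as f:
        state = json.load(f)

    # Training entries carry "loss"; evaluation entries carry "eval_loss".
    train_curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
    evals = [e for e in state["log_history"] if "eval_loss" in e]

    print(state["best_metric"])    # 3.208616256713867
    print(train_curve[-1])         # (200, 12.1366)
    print(evals[-1]["eval_loss"])  # 3.208616256713867 at step 200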