Training in progress, step 280, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c9d19d032b2720552ed5a8c04c8453d710ed0eed172ae313734cb428d3f003fc
 size 80013120
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:08e5683a29463e32746f14f186f042dd447b12cafcad678bbbddb34b9249098a
+size 41120084
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3369e2942ff752b68da734b9eaf1a12b8c42e1d8b80214950313c71f22a426be
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9fe9c01b8c53647998de80cbc88fe3102f7ee94466c3d3ba6db0d6d4b3bdc06d
 size 1064
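Each file above is stored through Git LFS, so the diff only touches the three-line pointer (spec version, object sha256, byte size), not the binary itself. As a minimal sketch (not part of this repo), a locally downloaded file can be checked against the oid recorded in its pointer; the path and hash below are taken from the adapter diff above:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream in 1 MiB chunks so large checkpoint files need not fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid from the adapter_model.safetensors pointer in this commit
expected = "c9d19d032b2720552ed5a8c04c8453d710ed0eed172ae313734cb428d3f003fc"
print(sha256_of("last-checkpoint/adapter_model.safetensors") == expected)
```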
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.5,
   "eval_steps": 140,
-  "global_step":
+  "global_step": 280,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -995,6 +995,994 @@
       "eval_samples_per_second": 17.654,
       "eval_steps_per_second": 8.827,
       "step": 140
+    },
+    {
+      "epoch": 0.2517857142857143,
+      "grad_norm": 0.36249810457229614,
+      "learning_rate": 0.00017328668381631318,
+      "loss": 1.1208,
+      "step": 141
+    },
+    {
+      "epoch": 0.25357142857142856,
+      "grad_norm": 0.3750612735748291,
+      "learning_rate": 0.00017289686274214118,
+      "loss": 1.2502,
+      "step": 142
+    },
+    {
+      "epoch": 0.25535714285714284,
+      "grad_norm": 0.4201869070529938,
+      "learning_rate": 0.0001725046632837007,
+      "loss": 1.1947,
+      "step": 143
+    },
+    {
+      "epoch": 0.2571428571428571,
+      "grad_norm": 0.4865645468235016,
+      "learning_rate": 0.00017211009823716694,
+      "loss": 0.8749,
+      "step": 144
+    },
+    {
+      "epoch": 0.25892857142857145,
+      "grad_norm": 0.38693225383758545,
+      "learning_rate": 0.00017171318047589637,
+      "loss": 1.2495,
+      "step": 145
+    },
+    {
+      "epoch": 0.26071428571428573,
+      "grad_norm": 0.40707525610923767,
+      "learning_rate": 0.00017131392295000674,
+      "loss": 1.2321,
+      "step": 146
+    },
+    {
+      "epoch": 0.2625,
+      "grad_norm": 0.39570894837379456,
+      "learning_rate": 0.00017091233868595467,
+      "loss": 1.301,
+      "step": 147
+    },
+    {
+      "epoch": 0.2642857142857143,
+      "grad_norm": 0.4085226058959961,
+      "learning_rate": 0.00017050844078611056,
+      "loss": 1.5369,
+      "step": 148
+    },
+    {
+      "epoch": 0.26607142857142857,
+      "grad_norm": 0.47094810009002686,
+      "learning_rate": 0.0001701022424283311,
+      "loss": 1.9374,
+      "step": 149
+    },
+    {
+      "epoch": 0.26785714285714285,
+      "grad_norm": 0.8517308831214905,
+      "learning_rate": 0.00016969375686552937,
+      "loss": 1.808,
+      "step": 150
+    },
+    {
+      "epoch": 0.26964285714285713,
+      "grad_norm": 0.1922745406627655,
+      "learning_rate": 0.00016928299742524234,
+      "loss": 1.6608,
+      "step": 151
+    },
+    {
+      "epoch": 0.2714285714285714,
+      "grad_norm": 0.2090916484594345,
+      "learning_rate": 0.00016886997750919619,
+      "loss": 1.8009,
+      "step": 152
+    },
+    {
+      "epoch": 0.2732142857142857,
+      "grad_norm": 0.21698515117168427,
+      "learning_rate": 0.00016845471059286887,
+      "loss": 1.7821,
+      "step": 153
+    },
+    {
+      "epoch": 0.275,
+      "grad_norm": 0.21791532635688782,
+      "learning_rate": 0.00016803721022505067,
+      "loss": 1.5901,
+      "step": 154
+    },
+    {
+      "epoch": 0.2767857142857143,
+      "grad_norm": 0.22199980914592743,
+      "learning_rate": 0.00016761749002740193,
+      "loss": 1.7047,
+      "step": 155
+    },
+    {
+      "epoch": 0.2785714285714286,
+      "grad_norm": 0.2096625566482544,
+      "learning_rate": 0.0001671955636940088,
+      "loss": 1.6898,
+      "step": 156
+    },
+    {
+      "epoch": 0.28035714285714286,
+      "grad_norm": 0.22975414991378784,
+      "learning_rate": 0.00016677144499093626,
+      "loss": 1.7631,
+      "step": 157
+    },
+    {
+      "epoch": 0.28214285714285714,
+      "grad_norm": 0.2187148928642273,
+      "learning_rate": 0.0001663451477557792,
+      "loss": 1.7872,
+      "step": 158
+    },
+    {
+      "epoch": 0.2839285714285714,
+      "grad_norm": 0.2257414609193802,
+      "learning_rate": 0.0001659166858972107,
+      "loss": 1.7732,
+      "step": 159
+    },
+    {
+      "epoch": 0.2857142857142857,
+      "grad_norm": 0.22986693680286407,
+      "learning_rate": 0.00016548607339452853,
+      "loss": 1.7031,
+      "step": 160
+    },
+    {
+      "epoch": 0.2875,
+      "grad_norm": 0.21585014462471008,
+      "learning_rate": 0.0001650533242971987,
+      "loss": 1.8421,
+      "step": 161
+    },
+    {
+      "epoch": 0.2892857142857143,
+      "grad_norm": 0.22519604861736298,
+      "learning_rate": 0.00016461845272439741,
+      "loss": 1.6529,
+      "step": 162
+    },
+    {
+      "epoch": 0.2910714285714286,
+      "grad_norm": 0.22279705107212067,
+      "learning_rate": 0.0001641814728645502,
+      "loss": 1.9288,
+      "step": 163
+    },
+    {
+      "epoch": 0.29285714285714287,
+      "grad_norm": 0.22392615675926208,
+      "learning_rate": 0.000163742398974869,
+      "loss": 1.693,
+      "step": 164
+    },
+    {
+      "epoch": 0.29464285714285715,
+      "grad_norm": 0.22729454934597015,
+      "learning_rate": 0.00016330124538088705,
+      "loss": 1.7027,
+      "step": 165
+    },
+    {
+      "epoch": 0.29642857142857143,
+      "grad_norm": 0.2229882776737213,
+      "learning_rate": 0.00016285802647599156,
+      "loss": 1.8262,
+      "step": 166
+    },
+    {
+      "epoch": 0.2982142857142857,
+      "grad_norm": 0.25520074367523193,
+      "learning_rate": 0.00016241275672095395,
+      "loss": 1.6009,
+      "step": 167
+    },
+    {
+      "epoch": 0.3,
+      "grad_norm": 0.24272315204143524,
+      "learning_rate": 0.00016196545064345812,
+      "loss": 1.9227,
+      "step": 168
+    },
+    {
+      "epoch": 0.30178571428571427,
+      "grad_norm": 0.24380216002464294,
+      "learning_rate": 0.00016151612283762652,
+      "loss": 1.5198,
+      "step": 169
+    },
+    {
+      "epoch": 0.30357142857142855,
+      "grad_norm": 0.3242342472076416,
+      "learning_rate": 0.00016106478796354382,
+      "loss": 1.6981,
+      "step": 170
+    },
+    {
+      "epoch": 0.3053571428571429,
+      "grad_norm": 0.277855783700943,
+      "learning_rate": 0.00016061146074677885,
+      "loss": 1.7011,
+      "step": 171
+    },
+    {
+      "epoch": 0.30714285714285716,
+      "grad_norm": 0.2710039019584656,
+      "learning_rate": 0.00016015615597790388,
+      "loss": 1.7522,
+      "step": 172
+    },
+    {
+      "epoch": 0.30892857142857144,
+      "grad_norm": 0.26541268825531006,
+      "learning_rate": 0.00015969888851201226,
+      "loss": 1.3804,
+      "step": 173
+    },
+    {
+      "epoch": 0.3107142857142857,
+      "grad_norm": 0.28985923528671265,
+      "learning_rate": 0.00015923967326823368,
+      "loss": 1.6453,
+      "step": 174
+    },
+    {
+      "epoch": 0.3125,
+      "grad_norm": 0.33939245343208313,
+      "learning_rate": 0.00015877852522924732,
+      "loss": 1.1725,
+      "step": 175
+    },
+    {
+      "epoch": 0.3142857142857143,
+      "grad_norm": 0.29770731925964355,
+      "learning_rate": 0.0001583154594407932,
+      "loss": 1.6746,
+      "step": 176
+    },
+    {
+      "epoch": 0.31607142857142856,
+      "grad_norm": 0.3280562460422516,
+      "learning_rate": 0.0001578504910111811,
+      "loss": 1.1357,
+      "step": 177
+    },
+    {
+      "epoch": 0.31785714285714284,
+      "grad_norm": 0.2856597304344177,
+      "learning_rate": 0.00015738363511079776,
+      "loss": 1.1127,
+      "step": 178
+    },
+    {
+      "epoch": 0.3196428571428571,
+      "grad_norm": 0.316491961479187,
+      "learning_rate": 0.00015691490697161182,
+      "loss": 1.4281,
+      "step": 179
+    },
+    {
+      "epoch": 0.32142857142857145,
+      "grad_norm": 0.3632654845714569,
+      "learning_rate": 0.00015644432188667695,
+      "loss": 1.3413,
+      "step": 180
+    },
+    {
+      "epoch": 0.32321428571428573,
+      "grad_norm": 0.34329405426979065,
+      "learning_rate": 0.00015597189520963277,
+      "loss": 1.0579,
+      "step": 181
+    },
+    {
+      "epoch": 0.325,
+      "grad_norm": 0.32447105646133423,
+      "learning_rate": 0.00015549764235420405,
+      "loss": 1.243,
+      "step": 182
+    },
+    {
+      "epoch": 0.3267857142857143,
+      "grad_norm": 0.3558500409126282,
+      "learning_rate": 0.0001550215787936977,
+      "loss": 1.1376,
+      "step": 183
+    },
+    {
+      "epoch": 0.32857142857142857,
+      "grad_norm": 0.3373570740222931,
+      "learning_rate": 0.00015454372006049803,
+      "loss": 1.1251,
+      "step": 184
+    },
+    {
+      "epoch": 0.33035714285714285,
+      "grad_norm": 0.36412546038627625,
+      "learning_rate": 0.00015406408174555976,
+      "loss": 1.3238,
+      "step": 185
+    },
+    {
+      "epoch": 0.33214285714285713,
+      "grad_norm": 0.364442378282547,
+      "learning_rate": 0.00015358267949789966,
+      "loss": 0.9448,
+      "step": 186
+    },
+    {
+      "epoch": 0.3339285714285714,
+      "grad_norm": 0.3172107934951782,
+      "learning_rate": 0.00015309952902408576,
+      "loss": 1.2744,
+      "step": 187
+    },
+    {
+      "epoch": 0.3357142857142857,
+      "grad_norm": 0.34173399209976196,
+      "learning_rate": 0.00015261464608772488,
+      "loss": 1.0923,
+      "step": 188
+    },
+    {
+      "epoch": 0.3375,
+      "grad_norm": 0.33419185876846313,
+      "learning_rate": 0.0001521280465089484,
+      "loss": 1.2762,
+      "step": 189
+    },
+    {
+      "epoch": 0.3392857142857143,
+      "grad_norm": 0.3866868317127228,
+      "learning_rate": 0.0001516397461638962,
+      "loss": 0.9595,
+      "step": 190
+    },
+    {
+      "epoch": 0.3410714285714286,
+      "grad_norm": 0.3978990614414215,
+      "learning_rate": 0.00015114976098419842,
+      "loss": 0.9993,
+      "step": 191
+    },
+    {
+      "epoch": 0.34285714285714286,
+      "grad_norm": 0.3546142876148224,
+      "learning_rate": 0.00015065810695645584,
+      "loss": 1.3421,
+      "step": 192
+    },
+    {
+      "epoch": 0.34464285714285714,
+      "grad_norm": 0.39728498458862305,
+      "learning_rate": 0.00015016480012171828,
+      "loss": 1.1209,
+      "step": 193
+    },
+    {
+      "epoch": 0.3464285714285714,
+      "grad_norm": 0.4170741140842438,
+      "learning_rate": 0.00014966985657496114,
+      "loss": 1.0024,
+      "step": 194
+    },
+    {
+      "epoch": 0.3482142857142857,
+      "grad_norm": 0.4226652681827545,
+      "learning_rate": 0.0001491732924645604,
+      "loss": 1.3139,
+      "step": 195
+    },
+    {
+      "epoch": 0.35,
+      "grad_norm": 0.3712114691734314,
+      "learning_rate": 0.00014867512399176563,
+      "loss": 1.1574,
+      "step": 196
+    },
+    {
+      "epoch": 0.3517857142857143,
+      "grad_norm": 0.3655322790145874,
+      "learning_rate": 0.00014817536741017152,
+      "loss": 1.6149,
+      "step": 197
+    },
+    {
+      "epoch": 0.3535714285714286,
+      "grad_norm": 0.4362059533596039,
+      "learning_rate": 0.0001476740390251875,
+      "loss": 1.7657,
+      "step": 198
+    },
+    {
+      "epoch": 0.35535714285714287,
+      "grad_norm": 0.43134769797325134,
+      "learning_rate": 0.00014717115519350567,
+      "loss": 1.7167,
+      "step": 199
+    },
+    {
+      "epoch": 0.35714285714285715,
+      "grad_norm": 0.7784890532493591,
+      "learning_rate": 0.00014666673232256738,
+      "loss": 2.036,
+      "step": 200
+    },
+    {
+      "epoch": 0.35892857142857143,
+      "grad_norm": 0.17376984655857086,
+      "learning_rate": 0.0001461607868700276,
+      "loss": 1.4856,
+      "step": 201
+    },
+    {
+      "epoch": 0.3607142857142857,
+      "grad_norm": 0.2141953408718109,
+      "learning_rate": 0.00014565333534321826,
+      "loss": 1.7491,
+      "step": 202
+    },
+    {
+      "epoch": 0.3625,
+      "grad_norm": 0.22548137605190277,
+      "learning_rate": 0.00014514439429860943,
+      "loss": 1.8457,
+      "step": 203
+    },
+    {
+      "epoch": 0.36428571428571427,
+      "grad_norm": 0.20618294179439545,
+      "learning_rate": 0.0001446339803412692,
+      "loss": 1.4987,
+      "step": 204
+    },
+    {
+      "epoch": 0.36607142857142855,
+      "grad_norm": 0.21025151014328003,
+      "learning_rate": 0.00014412211012432212,
+      "loss": 1.5568,
+      "step": 205
+    },
+    {
+      "epoch": 0.3678571428571429,
+      "grad_norm": 0.21678180992603302,
+      "learning_rate": 0.00014360880034840554,
+      "loss": 1.7841,
+      "step": 206
+    },
+    {
+      "epoch": 0.36964285714285716,
+      "grad_norm": 0.20914790034294128,
+      "learning_rate": 0.0001430940677611249,
+      "loss": 1.6693,
+      "step": 207
+    },
+    {
+      "epoch": 0.37142857142857144,
+      "grad_norm": 0.21597585082054138,
+      "learning_rate": 0.00014257792915650728,
+      "loss": 1.648,
+      "step": 208
+    },
+    {
+      "epoch": 0.3732142857142857,
+      "grad_norm": 0.23697789013385773,
+      "learning_rate": 0.00014206040137445348,
+      "loss": 1.7616,
+      "step": 209
+    },
+    {
+      "epoch": 0.375,
+      "grad_norm": 0.2535800635814667,
+      "learning_rate": 0.00014154150130018866,
+      "loss": 2.0279,
+      "step": 210
+    },
+    {
+      "epoch": 0.3767857142857143,
+      "grad_norm": 0.21204812824726105,
+      "learning_rate": 0.0001410212458637112,
+      "loss": 1.8472,
+      "step": 211
+    },
+    {
+      "epoch": 0.37857142857142856,
+      "grad_norm": 0.36059629917144775,
+      "learning_rate": 0.00014049965203924054,
+      "loss": 1.8042,
+      "step": 212
+    },
+    {
+      "epoch": 0.38035714285714284,
+      "grad_norm": 0.21400661766529083,
+      "learning_rate": 0.0001399767368446634,
+      "loss": 1.698,
+      "step": 213
+    },
+    {
+      "epoch": 0.3821428571428571,
+      "grad_norm": 0.24055758118629456,
+      "learning_rate": 0.00013945251734097828,
+      "loss": 1.8758,
+      "step": 214
+    },
+    {
+      "epoch": 0.38392857142857145,
+      "grad_norm": 0.23605166375637054,
+      "learning_rate": 0.00013892701063173918,
+      "loss": 1.7425,
+      "step": 215
+    },
+    {
+      "epoch": 0.38571428571428573,
+      "grad_norm": 0.23343758285045624,
+      "learning_rate": 0.00013840023386249713,
+      "loss": 1.8683,
+      "step": 216
+    },
+    {
+      "epoch": 0.3875,
+      "grad_norm": 0.2475200593471527,
+      "learning_rate": 0.00013787220422024134,
+      "loss": 1.9091,
+      "step": 217
+    },
+    {
+      "epoch": 0.3892857142857143,
+      "grad_norm": 0.2618944048881531,
+      "learning_rate": 0.00013734293893283783,
+      "loss": 1.5086,
+      "step": 218
+    },
+    {
+      "epoch": 0.39107142857142857,
+      "grad_norm": 0.2627498209476471,
+      "learning_rate": 0.00013681245526846783,
+      "loss": 1.3878,
+      "step": 219
+    },
+    {
+      "epoch": 0.39285714285714285,
+      "grad_norm": 0.24390314519405365,
+      "learning_rate": 0.0001362807705350641,
+      "loss": 1.7332,
+      "step": 220
+    },
+    {
+      "epoch": 0.39464285714285713,
+      "grad_norm": 0.2768295705318451,
+      "learning_rate": 0.00013574790207974646,
+      "loss": 1.3123,
+      "step": 221
+    },
+    {
+      "epoch": 0.3964285714285714,
+      "grad_norm": 0.2606358230113983,
+      "learning_rate": 0.0001352138672882555,
+      "loss": 1.4506,
+      "step": 222
+    },
+    {
+      "epoch": 0.3982142857142857,
+      "grad_norm": 0.24806426465511322,
+      "learning_rate": 0.00013467868358438563,
+      "loss": 1.7087,
+      "step": 223
+    },
+    {
+      "epoch": 0.4,
+      "grad_norm": 0.2664608061313629,
+      "learning_rate": 0.00013414236842941644,
+      "loss": 1.3124,
+      "step": 224
+    },
+    {
+      "epoch": 0.4017857142857143,
+      "grad_norm": 0.2661263346672058,
+      "learning_rate": 0.00013360493932154302,
+      "loss": 1.2174,
+      "step": 225
+    },
+    {
+      "epoch": 0.4035714285714286,
+      "grad_norm": 0.3460038900375366,
+      "learning_rate": 0.00013306641379530514,
+      "loss": 0.6889,
+      "step": 226
+    },
+    {
+      "epoch": 0.40535714285714286,
+      "grad_norm": 0.2929069995880127,
+      "learning_rate": 0.000132526809421015,
+      "loss": 0.9457,
+      "step": 227
+    },
+    {
+      "epoch": 0.40714285714285714,
+      "grad_norm": 0.3459819257259369,
+      "learning_rate": 0.00013198614380418412,
+      "loss": 1.2547,
+      "step": 228
+    },
+    {
+      "epoch": 0.4089285714285714,
+      "grad_norm": 0.30105313658714294,
+      "learning_rate": 0.00013144443458494882,
+      "loss": 0.957,
+      "step": 229
+    },
+    {
+      "epoch": 0.4107142857142857,
+      "grad_norm": 0.3461960256099701,
+      "learning_rate": 0.00013090169943749476,
+      "loss": 1.3146,
+      "step": 230
+    },
+    {
+      "epoch": 0.4125,
+      "grad_norm": 0.34542855620384216,
+      "learning_rate": 0.00013035795606948023,
+      "loss": 1.1128,
+      "step": 231
+    },
+    {
+      "epoch": 0.4142857142857143,
+      "grad_norm": 0.37605586647987366,
+      "learning_rate": 0.00012981322222145846,
+      "loss": 1.5095,
+      "step": 232
+    },
+    {
+      "epoch": 0.4160714285714286,
+      "grad_norm": 0.37267056107521057,
+      "learning_rate": 0.00012926751566629875,
+      "loss": 1.071,
+      "step": 233
+    },
+    {
+      "epoch": 0.41785714285714287,
+      "grad_norm": 0.3052172064781189,
+      "learning_rate": 0.00012872085420860665,
+      "loss": 1.3136,
+      "step": 234
+    },
+    {
+      "epoch": 0.41964285714285715,
+      "grad_norm": 0.36694592237472534,
+      "learning_rate": 0.00012817325568414297,
+      "loss": 1.2439,
+      "step": 235
+    },
+    {
+      "epoch": 0.42142857142857143,
+      "grad_norm": 0.36055245995521545,
+      "learning_rate": 0.00012762473795924204,
+      "loss": 1.1165,
+      "step": 236
+    },
+    {
+      "epoch": 0.4232142857142857,
+      "grad_norm": 0.3014545738697052,
+      "learning_rate": 0.00012707531893022854,
+      "loss": 1.5423,
+      "step": 237
+    },
+    {
+      "epoch": 0.425,
+      "grad_norm": 0.3208891749382019,
+      "learning_rate": 0.00012652501652283377,
+      "loss": 1.1813,
+      "step": 238
+    },
+    {
+      "epoch": 0.42678571428571427,
+      "grad_norm": 0.38703230023384094,
+      "learning_rate": 0.00012597384869161084,
+      "loss": 0.7706,
+      "step": 239
+    },
+    {
+      "epoch": 0.42857142857142855,
+      "grad_norm": 0.38256821036338806,
+      "learning_rate": 0.00012542183341934872,
+      "loss": 1.0565,
+      "step": 240
+    },
+    {
+      "epoch": 0.4303571428571429,
+      "grad_norm": 0.3555380702018738,
+      "learning_rate": 0.0001248689887164855,
+      "loss": 0.849,
+      "step": 241
+    },
+    {
+      "epoch": 0.43214285714285716,
+      "grad_norm": 0.3472703993320465,
+      "learning_rate": 0.00012431533262052098,
+      "loss": 1.3984,
+      "step": 242
+    },
+    {
+      "epoch": 0.43392857142857144,
+      "grad_norm": 0.3631349503993988,
+      "learning_rate": 0.000123760883195428,
+      "loss": 0.8955,
+      "step": 243
+    },
+    {
+      "epoch": 0.4357142857142857,
+      "grad_norm": 0.349295973777771,
+      "learning_rate": 0.00012320565853106316,
+      "loss": 0.8866,
+      "step": 244
+    },
+    {
+      "epoch": 0.4375,
+      "grad_norm": 0.33635953068733215,
+      "learning_rate": 0.00012264967674257646,
+      "loss": 1.2419,
+      "step": 245
+    },
+    {
+      "epoch": 0.4392857142857143,
+      "grad_norm": 0.3833181858062744,
+      "learning_rate": 0.00012209295596982042,
+      "loss": 1.5507,
+      "step": 246
+    },
+    {
+      "epoch": 0.44107142857142856,
+      "grad_norm": 0.3737214505672455,
+      "learning_rate": 0.00012153551437675821,
+      "loss": 1.4881,
+      "step": 247
+    },
+    {
+      "epoch": 0.44285714285714284,
+      "grad_norm": 0.4705282747745514,
+      "learning_rate": 0.00012097737015087094,
+      "loss": 1.4864,
+      "step": 248
+    },
+    {
+      "epoch": 0.4446428571428571,
+      "grad_norm": 0.39539188146591187,
+      "learning_rate": 0.00012041854150256433,
+      "loss": 1.7855,
+      "step": 249
+    },
+    {
+      "epoch": 0.44642857142857145,
+      "grad_norm": 0.7369075417518616,
+      "learning_rate": 0.00011985904666457455,
+      "loss": 2.01,
+      "step": 250
+    },
+    {
+      "epoch": 0.44821428571428573,
+      "grad_norm": 0.18146094679832458,
+      "learning_rate": 0.00011929890389137337,
+      "loss": 1.5898,
+      "step": 251
+    },
+    {
+      "epoch": 0.45,
+      "grad_norm": 0.21558880805969238,
+      "learning_rate": 0.00011873813145857249,
+      "loss": 1.5816,
+      "step": 252
+    },
+    {
+      "epoch": 0.4517857142857143,
+      "grad_norm": 0.19599275290966034,
+      "learning_rate": 0.00011817674766232734,
+      "loss": 1.6433,
+      "step": 253
+    },
+    {
+      "epoch": 0.45357142857142857,
+      "grad_norm": 0.22075910866260529,
+      "learning_rate": 0.00011761477081874015,
+      "loss": 1.6005,
+      "step": 254
+    },
+    {
+      "epoch": 0.45535714285714285,
+      "grad_norm": 0.19471955299377441,
+      "learning_rate": 0.0001170522192632624,
+      "loss": 1.7133,
+      "step": 255
+    },
+    {
+      "epoch": 0.45714285714285713,
+      "grad_norm": 0.19876879453659058,
+      "learning_rate": 0.00011648911135009634,
+      "loss": 1.5085,
+      "step": 256
+    },
+    {
+      "epoch": 0.4589285714285714,
+      "grad_norm": 0.20565317571163177,
+      "learning_rate": 0.00011592546545159645,
+      "loss": 1.7386,
+      "step": 257
+    },
+    {
+      "epoch": 0.4607142857142857,
+      "grad_norm": 0.24483506381511688,
+      "learning_rate": 0.00011536129995766996,
+      "loss": 1.7162,
+      "step": 258
+    },
+    {
+      "epoch": 0.4625,
+      "grad_norm": 0.21543823182582855,
+      "learning_rate": 0.00011479663327517667,
+      "loss": 1.6966,
+      "step": 259
+    },
+    {
+      "epoch": 0.4642857142857143,
+      "grad_norm": 0.2661048471927643,
+      "learning_rate": 0.00011423148382732853,
+      "loss": 1.8821,
+      "step": 260
+    },
+    {
+      "epoch": 0.4660714285714286,
+      "grad_norm": 0.24292460083961487,
+      "learning_rate": 0.00011366587005308858,
+      "loss": 1.7085,
+      "step": 261
+    },
+    {
+      "epoch": 0.46785714285714286,
+      "grad_norm": 0.216167613863945,
+      "learning_rate": 0.0001130998104065693,
+      "loss": 1.7298,
+      "step": 262
+    },
+    {
+      "epoch": 0.46964285714285714,
+      "grad_norm": 0.2111697793006897,
+      "learning_rate": 0.00011253332335643043,
+      "loss": 1.8098,
+      "step": 263
+    },
+    {
+      "epoch": 0.4714285714285714,
+      "grad_norm": 0.23981061577796936,
+      "learning_rate": 0.00011196642738527659,
+      "loss": 1.7026,
+      "step": 264
+    },
+    {
+      "epoch": 0.4732142857142857,
+      "grad_norm": 0.2623251676559448,
+      "learning_rate": 0.00011139914098905406,
+      "loss": 1.7894,
+      "step": 265
+    },
+    {
+      "epoch": 0.475,
+      "grad_norm": 0.2482486367225647,
+      "learning_rate": 0.00011083148267644747,
+      "loss": 1.9019,
+      "step": 266
+    },
+    {
+      "epoch": 0.4767857142857143,
+      "grad_norm": 0.238911435008049,
+      "learning_rate": 0.00011026347096827578,
+      "loss": 1.6809,
+      "step": 267
+    },
+    {
+      "epoch": 0.4785714285714286,
+      "grad_norm": 0.24704696238040924,
+      "learning_rate": 0.00010969512439688816,
+      "loss": 1.6607,
+      "step": 268
+    },
+    {
+      "epoch": 0.48035714285714287,
+      "grad_norm": 0.25105100870132446,
+      "learning_rate": 0.00010912646150555919,
+      "loss": 1.5895,
+      "step": 269
+    },
+    {
+      "epoch": 0.48214285714285715,
+      "grad_norm": 0.2849842607975006,
+      "learning_rate": 0.00010855750084788398,
+      "loss": 2.0812,
+      "step": 270
+    },
+    {
+      "epoch": 0.48392857142857143,
+      "grad_norm": 0.2599342167377472,
+      "learning_rate": 0.00010798826098717276,
+      "loss": 1.1569,
+      "step": 271
+    },
+    {
+      "epoch": 0.4857142857142857,
+      "grad_norm": 0.28037258982658386,
+      "learning_rate": 0.00010741876049584523,
+      "loss": 1.1928,
+      "step": 272
+    },
+    {
+      "epoch": 0.4875,
+      "grad_norm": 0.29920563101768494,
+      "learning_rate": 0.00010684901795482456,
+      "loss": 1.2244,
+      "step": 273
+    },
+    {
+      "epoch": 0.48928571428571427,
+      "grad_norm": 0.2799164354801178,
+      "learning_rate": 0.00010627905195293135,
+      "loss": 1.4455,
+      "step": 274
+    },
+    {
+      "epoch": 0.49107142857142855,
+      "grad_norm": 0.23873603343963623,
+      "learning_rate": 0.00010570888108627681,
+      "loss": 0.852,
+      "step": 275
+    },
+    {
+      "epoch": 0.4928571428571429,
+      "grad_norm": 0.2817741632461548,
+      "learning_rate": 0.00010513852395765631,
+      "loss": 1.3203,
+      "step": 276
+    },
+    {
+      "epoch": 0.49464285714285716,
+      "grad_norm": 0.27295514941215515,
+      "learning_rate": 0.00010456799917594233,
+      "loss": 0.749,
+      "step": 277
+    },
+    {
+      "epoch": 0.49642857142857144,
+      "grad_norm": 0.30728739500045776,
+      "learning_rate": 0.00010399732535547734,
+      "loss": 1.0083,
+      "step": 278
+    },
+    {
+      "epoch": 0.4982142857142857,
+      "grad_norm": 0.32001444697380066,
+      "learning_rate": 0.00010342652111546635,
+      "loss": 1.573,
+      "step": 279
+    },
+    {
+      "epoch": 0.5,
+      "grad_norm": 0.3400101065635681,
+      "learning_rate": 0.00010285560507936961,
+      "loss": 1.0757,
+      "step": 280
+    },
+    {
+      "epoch": 0.5,
+      "eval_loss": 1.4439584016799927,
+      "eval_runtime": 13.9741,
+      "eval_samples_per_second": 16.888,
+      "eval_steps_per_second": 8.444,
+      "step": 280
     }
   ],
   "logging_steps": 1,
@@ -1014,7 +2002,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 9.129139501635994e+16,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
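The updated trainer_state.json records the run at global_step 280 (epoch 0.5, eval_loss ~1.444), and the optimizer, scheduler, and RNG files are what Trainer needs to resume deterministically. A minimal sketch, assuming this checkpoint came from a transformers Trainer run saving a PEFT adapter (the ~80 MB adapter_model.safetensors suggests a LoRA-style adapter); the base model id is a placeholder, not recorded in this commit:

```python
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder id: the base model is not recorded in this diff.
base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")

# Load the adapter weights saved in this checkpoint for inference.
model = PeftModel.from_pretrained(base, "last-checkpoint")

# To continue training instead, pass the checkpoint directory to an
# already-configured Trainer; it restores optimizer.pt, scheduler.pt,
# rng_state.pth, and trainer_state.json, picking up at step 281:
#   trainer.train(resume_from_checkpoint="last-checkpoint")
```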