Training in progress, step 1361, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b78fd15c46dcf15f75b09cb2357aabcf936b417d6555e3754361301c30894032
 size 83945296
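adapter_model.safetensors holds the adapter weights written at this step. A minimal sketch, not the repository's own code, of loading them back with the peft library; the base-model identifier is a placeholder assumption, since the real one is recorded in last-checkpoint/adapter_config.json rather than in this diff:

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder base model id (assumption): the actual base model is named in
# last-checkpoint/adapter_config.json, which is not part of this commit's diff.
base = AutoModelForCausalLM.from_pretrained("your-base-model")
# Reads adapter_model.safetensors (and adapter_config.json) from the checkpoint dir.
model = PeftModel.from_pretrained(base, "last-checkpoint")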
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c163b335b27da9c129b6cd686ac9c804785ea95d5db01258d121079bafee8594
 size 43123028
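Each of these entries is a Git LFS pointer: the actual file is addressed by its sha256 oid and byte size. A minimal sketch, assuming the file has already been downloaded locally, of checking a file against the oid recorded above:

import hashlib

def lfs_oid(path: str) -> str:
    # Compute the sha256 digest the same way git-lfs records it in the pointer file.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "c163b335b27da9c129b6cd686ac9c804785ea95d5db01258d121079bafee8594"
print(lfs_oid("last-checkpoint/optimizer.pt") == expected)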
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a3d513bea553ffa3e4019a8e86247aebaa3073b0f0dde4ba1ed11bb3682dd0de
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0320ca488c6f32eb5a39b619567c930e9bcec3c5e695b8a0ed3efa21e00d99b0
 size 1064
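Taken together, optimizer.pt, scheduler.pt, rng_state.pth and the trainer_state.json updated below are what the transformers Trainer uses to resume this run where it left off. A minimal sketch under stated assumptions, not the run's actual training script; only the values visible in the diff below (per-device batch size 2, logging_steps 1) are taken from this commit, everything else is illustrative:

from transformers import Trainer, TrainingArguments

args = TrainingArguments(
    output_dir="outputs",            # placeholder output directory
    per_device_train_batch_size=2,   # "train_batch_size": 2 in trainer_state.json
    logging_steps=1,                 # "logging_steps": 1 in trainer_state.json
)
# model and train_dataset are assumed to be set up as in the original run.
trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
# Restores optimizer.pt, scheduler.pt, rng_state.pth and trainer_state.json.
trainer.train(resume_from_checkpoint="last-checkpoint")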
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.2668889106775174,
   "eval_steps": 341,
-  "global_step":
+  "global_step": 1361,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7200,6 +7200,2372 @@
       "eval_samples_per_second": 27.605,
       "eval_steps_per_second": 13.802,
       "step": 1023
+    },
+    {
+      "epoch": 0.20080400039219531,
+      "grad_norm": 7.308141231536865,
+      "learning_rate": 2.9166107417709888e-05,
+      "loss": 1.0916,
+      "step": 1024
+    },
+    {
+      "epoch": 0.20100009804882832,
+      "grad_norm": 3.7046291828155518,
+      "learning_rate": 2.9002156680009906e-05,
+      "loss": 1.7084,
+      "step": 1025
+    },
+    {
+      "epoch": 0.20119619570546132,
+      "grad_norm": 3.8992371559143066,
+      "learning_rate": 2.883858985636165e-05,
+      "loss": 1.0185,
+      "step": 1026
+    },
+    ... [per-step entries for steps 1027 through 1249 continue here with the same fields: epoch, grad_norm, learning_rate, loss, step] ...
+    {
+      "epoch": 0.24512207079125403,
+      "grad_norm": 7.934302806854248,
+      "learning_rate": 3.3127788306035134e-06,
+      "loss": 1.5826,
+      "step": 1250
+    },
+    {
+      "epoch": 0.24531816844788704,
+      "grad_norm": 3.8543338775634766,
     }
   ],
   "logging_steps": 1,
@@ -7214,12 +9580,12 @@
       "should_evaluate": false,
       "should_log": false,
       "should_save": true,
-      "should_training_stop":
     },
     "attributes": {}
   }
 },
-  "total_flos": 2.
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
|
8796 |
+
"learning_rate": 3.253682358013244e-06,
|
8797 |
+
"loss": 0.5287,
|
8798 |
+
"step": 1251
|
8799 |
+
},
|
8800 |
+
{
|
8801 |
+
"epoch": 0.24551426610452004,
|
8802 |
+
"grad_norm": 5.766990661621094,
|
8803 |
+
"learning_rate": 3.1951090318885434e-06,
|
8804 |
+
"loss": 2.0189,
|
8805 |
+
"step": 1252
|
8806 |
+
},
|
8807 |
+
{
|
8808 |
+
"epoch": 0.24571036376115304,
|
8809 |
+
"grad_norm": 5.633528232574463,
|
8810 |
+
"learning_rate": 3.1370591689590777e-06,
|
8811 |
+
"loss": 1.1827,
|
8812 |
+
"step": 1253
|
8813 |
+
},
|
8814 |
+
{
|
8815 |
+
"epoch": 0.24590646141778605,
|
8816 |
+
"grad_norm": 4.339439868927002,
|
8817 |
+
"learning_rate": 3.0795330831239313e-06,
|
8818 |
+
"loss": 1.5764,
|
8819 |
+
"step": 1254
|
8820 |
+
},
|
8821 |
+
{
|
8822 |
+
"epoch": 0.24610255907441905,
|
8823 |
+
"grad_norm": 7.025827407836914,
|
8824 |
+
"learning_rate": 3.022531085449931e-06,
|
8825 |
+
"loss": 1.94,
|
8826 |
+
"step": 1255
|
8827 |
+
},
|
8828 |
+
{
|
8829 |
+
"epoch": 0.24629865673105206,
|
8830 |
+
"grad_norm": 7.727778434753418,
|
8831 |
+
"learning_rate": 2.966053484169917e-06,
|
8832 |
+
"loss": 1.6985,
|
8833 |
+
"step": 1256
|
8834 |
+
},
|
8835 |
+
{
|
8836 |
+
"epoch": 0.24649475438768506,
|
8837 |
+
"grad_norm": 4.980061054229736,
|
8838 |
+
"learning_rate": 2.9101005846811304e-06,
|
8839 |
+
"loss": 0.6095,
|
8840 |
+
"step": 1257
|
8841 |
+
},
|
8842 |
+
{
|
8843 |
+
"epoch": 0.24669085204431807,
|
8844 |
+
"grad_norm": 9.262031555175781,
|
8845 |
+
"learning_rate": 2.854672689543514e-06,
|
8846 |
+
"loss": 1.9517,
|
8847 |
+
"step": 1258
|
8848 |
+
},
|
8849 |
+
{
|
8850 |
+
"epoch": 0.24688694970095107,
|
8851 |
+
"grad_norm": 10.269953727722168,
|
8852 |
+
"learning_rate": 2.7997700984781272e-06,
|
8853 |
+
"loss": 1.8526,
|
8854 |
+
"step": 1259
|
8855 |
+
},
|
8856 |
+
{
|
8857 |
+
"epoch": 0.24708304735758407,
|
8858 |
+
"grad_norm": 5.118125915527344,
|
8859 |
+
"learning_rate": 2.745393108365457e-06,
|
8860 |
+
"loss": 1.2967,
|
8861 |
+
"step": 1260
|
8862 |
+
},
|
8863 |
+
{
|
8864 |
+
"epoch": 0.24727914501421708,
|
8865 |
+
"grad_norm": 7.1361403465271,
|
8866 |
+
"learning_rate": 2.6915420132439085e-06,
|
8867 |
+
"loss": 1.9573,
|
8868 |
+
"step": 1261
|
8869 |
+
},
|
8870 |
+
{
|
8871 |
+
"epoch": 0.24747524267085008,
|
8872 |
+
"grad_norm": 5.416757583618164,
|
8873 |
+
"learning_rate": 2.638217104308127e-06,
|
8874 |
+
"loss": 1.2684,
|
8875 |
+
"step": 1262
|
8876 |
+
},
|
8877 |
+
{
|
8878 |
+
"epoch": 0.24767134032748309,
|
8879 |
+
"grad_norm": 5.381096363067627,
|
8880 |
+
"learning_rate": 2.585418669907458e-06,
|
8881 |
+
"loss": 0.922,
|
8882 |
+
"step": 1263
|
8883 |
+
},
|
8884 |
+
{
|
8885 |
+
"epoch": 0.2478674379841161,
|
8886 |
+
"grad_norm": 4.663347244262695,
|
8887 |
+
"learning_rate": 2.5331469955443778e-06,
|
8888 |
+
"loss": 0.8759,
|
8889 |
+
"step": 1264
|
8890 |
+
},
|
8891 |
+
{
|
8892 |
+
"epoch": 0.2480635356407491,
|
8893 |
+
"grad_norm": 12.182421684265137,
|
8894 |
+
"learning_rate": 2.4814023638729757e-06,
|
8895 |
+
"loss": 0.8895,
|
8896 |
+
"step": 1265
|
8897 |
+
},
|
8898 |
+
{
|
8899 |
+
"epoch": 0.2482596332973821,
|
8900 |
+
"grad_norm": 6.225351333618164,
|
8901 |
+
"learning_rate": 2.430185054697409e-06,
|
8902 |
+
"loss": 1.8041,
|
8903 |
+
"step": 1266
|
8904 |
+
},
|
8905 |
+
{
|
8906 |
+
"epoch": 0.2484557309540151,
|
8907 |
+
"grad_norm": 9.036142349243164,
|
8908 |
+
"learning_rate": 2.3794953449703837e-06,
|
8909 |
+
"loss": 2.0539,
|
8910 |
+
"step": 1267
|
8911 |
+
},
|
8912 |
+
{
|
8913 |
+
"epoch": 0.2486518286106481,
|
8914 |
+
"grad_norm": 6.467651844024658,
|
8915 |
+
"learning_rate": 2.3293335087916314e-06,
|
8916 |
+
"loss": 1.5472,
|
8917 |
+
"step": 1268
|
8918 |
+
},
|
8919 |
+
{
|
8920 |
+
"epoch": 0.2488479262672811,
|
8921 |
+
"grad_norm": 7.765258312225342,
|
8922 |
+
"learning_rate": 2.279699817406533e-06,
|
8923 |
+
"loss": 1.8689,
|
8924 |
+
"step": 1269
|
8925 |
+
},
|
8926 |
+
{
|
8927 |
+
"epoch": 0.24904402392391412,
|
8928 |
+
"grad_norm": 9.24152946472168,
|
8929 |
+
"learning_rate": 2.230594539204489e-06,
|
8930 |
+
"loss": 1.9637,
|
8931 |
+
"step": 1270
|
8932 |
+
},
|
8933 |
+
{
|
8934 |
+
"epoch": 0.24924012158054712,
|
8935 |
+
"grad_norm": 4.302461624145508,
|
8936 |
+
"learning_rate": 2.1820179397176287e-06,
|
8937 |
+
"loss": 0.8332,
|
8938 |
+
"step": 1271
|
8939 |
+
},
|
8940 |
+
{
|
8941 |
+
"epoch": 0.24943621923718012,
|
8942 |
+
"grad_norm": 5.98826789855957,
|
8943 |
+
"learning_rate": 2.133970281619246e-06,
|
8944 |
+
"loss": 3.038,
|
8945 |
+
"step": 1272
|
8946 |
+
},
|
8947 |
+
{
|
8948 |
+
"epoch": 0.24963231689381313,
|
8949 |
+
"grad_norm": 5.860217094421387,
|
8950 |
+
"learning_rate": 2.0864518247224797e-06,
|
8951 |
+
"loss": 1.7476,
|
8952 |
+
"step": 1273
|
8953 |
+
},
|
8954 |
+
{
|
8955 |
+
"epoch": 0.24982841455044613,
|
8956 |
+
"grad_norm": 7.621738910675049,
|
8957 |
+
"learning_rate": 2.039462825978822e-06,
|
8958 |
+
"loss": 1.7148,
|
8959 |
+
"step": 1274
|
8960 |
+
},
|
8961 |
+
{
|
8962 |
+
"epoch": 0.2500245122070791,
|
8963 |
+
"grad_norm": 6.9399614334106445,
|
8964 |
+
"learning_rate": 1.9930035394768233e-06,
|
8965 |
+
"loss": 1.3601,
|
8966 |
+
"step": 1275
|
8967 |
+
},
|
8968 |
+
{
|
8969 |
+
"epoch": 0.25022060986371214,
|
8970 |
+
"grad_norm": 17.67597007751465,
|
8971 |
+
"learning_rate": 1.947074216440592e-06,
|
8972 |
+
"loss": 3.0125,
|
8973 |
+
"step": 1276
|
8974 |
+
},
|
8975 |
+
{
|
8976 |
+
"epoch": 0.2504167075203451,
|
8977 |
+
"grad_norm": 4.356191158294678,
|
8978 |
+
"learning_rate": 1.9016751052285953e-06,
|
8979 |
+
"loss": 1.0786,
|
8980 |
+
"step": 1277
|
8981 |
+
},
|
8982 |
+
{
|
8983 |
+
"epoch": 0.25061280517697815,
|
8984 |
+
"grad_norm": 5.882324695587158,
|
8985 |
+
"learning_rate": 1.8568064513321715e-06,
|
8986 |
+
"loss": 1.2133,
|
8987 |
+
"step": 1278
|
8988 |
+
},
|
8989 |
+
{
|
8990 |
+
"epoch": 0.2508089028336111,
|
8991 |
+
"grad_norm": 4.599434852600098,
|
8992 |
+
"learning_rate": 1.8124684973742534e-06,
|
8993 |
+
"loss": 1.3988,
|
8994 |
+
"step": 1279
|
8995 |
+
},
|
8996 |
+
{
|
8997 |
+
"epoch": 0.25100500049024416,
|
8998 |
+
"grad_norm": 10.744729995727539,
|
8999 |
+
"learning_rate": 1.768661483108136e-06,
|
9000 |
+
"loss": 2.4072,
|
9001 |
+
"step": 1280
|
9002 |
+
},
|
9003 |
+
{
|
9004 |
+
"epoch": 0.25120109814687713,
|
9005 |
+
"grad_norm": 4.716323375701904,
|
9006 |
+
"learning_rate": 1.7253856454160333e-06,
|
9007 |
+
"loss": 2.1812,
|
9008 |
+
"step": 1281
|
9009 |
+
},
|
9010 |
+
{
|
9011 |
+
"epoch": 0.25139719580351017,
|
9012 |
+
"grad_norm": 3.4088003635406494,
|
9013 |
+
"learning_rate": 1.682641218307901e-06,
|
9014 |
+
"loss": 0.8226,
|
9015 |
+
"step": 1282
|
9016 |
+
},
|
9017 |
+
{
|
9018 |
+
"epoch": 0.25159329346014314,
|
9019 |
+
"grad_norm": 5.354026794433594,
|
9020 |
+
"learning_rate": 1.640428432920138e-06,
|
9021 |
+
"loss": 1.3017,
|
9022 |
+
"step": 1283
|
9023 |
+
},
|
9024 |
+
{
|
9025 |
+
"epoch": 0.2517893911167762,
|
9026 |
+
"grad_norm": 4.884855270385742,
|
9027 |
+
"learning_rate": 1.5987475175143651e-06,
|
9028 |
+
"loss": 0.6766,
|
9029 |
+
"step": 1284
|
9030 |
+
},
|
9031 |
+
{
|
9032 |
+
"epoch": 0.25198548877340915,
|
9033 |
+
"grad_norm": 8.052160263061523,
|
9034 |
+
"learning_rate": 1.557598697476148e-06,
|
9035 |
+
"loss": 2.2202,
|
9036 |
+
"step": 1285
|
9037 |
+
},
|
9038 |
+
{
|
9039 |
+
"epoch": 0.2521815864300422,
|
9040 |
+
"grad_norm": 5.809813976287842,
|
9041 |
+
"learning_rate": 1.5169821953137875e-06,
|
9042 |
+
"loss": 1.1507,
|
9043 |
+
"step": 1286
|
9044 |
+
},
|
9045 |
+
{
|
9046 |
+
"epoch": 0.25237768408667516,
|
9047 |
+
"grad_norm": 5.726134300231934,
|
9048 |
+
"learning_rate": 1.47689823065712e-06,
|
9049 |
+
"loss": 1.3755,
|
9050 |
+
"step": 1287
|
9051 |
+
},
|
9052 |
+
{
|
9053 |
+
"epoch": 0.2525737817433082,
|
9054 |
+
"grad_norm": 6.9269022941589355,
|
9055 |
+
"learning_rate": 1.4373470202563855e-06,
|
9056 |
+
"loss": 1.9541,
|
9057 |
+
"step": 1288
|
9058 |
+
},
|
9059 |
+
{
|
9060 |
+
"epoch": 0.25276987939994117,
|
9061 |
+
"grad_norm": 6.82175874710083,
|
9062 |
+
"learning_rate": 1.398328777980973e-06,
|
9063 |
+
"loss": 0.9483,
|
9064 |
+
"step": 1289
|
9065 |
+
},
|
9066 |
+
{
|
9067 |
+
"epoch": 0.2529659770565742,
|
9068 |
+
"grad_norm": 5.6992058753967285,
|
9069 |
+
"learning_rate": 1.3598437148182652e-06,
|
9070 |
+
"loss": 1.8013,
|
9071 |
+
"step": 1290
|
9072 |
+
},
|
9073 |
+
{
|
9074 |
+
"epoch": 0.2531620747132072,
|
9075 |
+
"grad_norm": 6.446459770202637,
|
9076 |
+
"learning_rate": 1.3218920388725853e-06,
|
9077 |
+
"loss": 1.059,
|
9078 |
+
"step": 1291
|
9079 |
+
},
|
9080 |
+
{
|
9081 |
+
"epoch": 0.2533581723698402,
|
9082 |
+
"grad_norm": 5.6021223068237305,
|
9083 |
+
"learning_rate": 1.2844739553640073e-06,
|
9084 |
+
"loss": 2.3983,
|
9085 |
+
"step": 1292
|
9086 |
+
},
|
9087 |
+
{
|
9088 |
+
"epoch": 0.2535542700264732,
|
9089 |
+
"grad_norm": 5.945677757263184,
|
9090 |
+
"learning_rate": 1.2475896666272136e-06,
|
9091 |
+
"loss": 1.3391,
|
9092 |
+
"step": 1293
|
9093 |
+
},
|
9094 |
+
{
|
9095 |
+
"epoch": 0.25375036768310616,
|
9096 |
+
"grad_norm": 8.367269515991211,
|
9097 |
+
"learning_rate": 1.2112393721104843e-06,
|
9098 |
+
"loss": 0.9355,
|
9099 |
+
"step": 1294
|
9100 |
+
},
|
9101 |
+
{
|
9102 |
+
"epoch": 0.2539464653397392,
|
9103 |
+
"grad_norm": 4.452106952667236,
|
9104 |
+
"learning_rate": 1.1754232683745537e-06,
|
9105 |
+
"loss": 1.3174,
|
9106 |
+
"step": 1295
|
9107 |
+
},
|
9108 |
+
{
|
9109 |
+
"epoch": 0.25414256299637217,
|
9110 |
+
"grad_norm": 5.392490863800049,
|
9111 |
+
"learning_rate": 1.1401415490916e-06,
|
9112 |
+
"loss": 0.7378,
|
9113 |
+
"step": 1296
|
9114 |
+
},
|
9115 |
+
{
|
9116 |
+
"epoch": 0.2543386606530052,
|
9117 |
+
"grad_norm": 7.21303129196167,
|
9118 |
+
"learning_rate": 1.1053944050441245e-06,
|
9119 |
+
"loss": 2.9174,
|
9120 |
+
"step": 1297
|
9121 |
+
},
|
9122 |
+
{
|
9123 |
+
"epoch": 0.2545347583096382,
|
9124 |
+
"grad_norm": 8.784597396850586,
|
9125 |
+
"learning_rate": 1.0711820241240067e-06,
|
9126 |
+
"loss": 3.5646,
|
9127 |
+
"step": 1298
|
9128 |
+
},
|
9129 |
+
{
|
9130 |
+
"epoch": 0.2547308559662712,
|
9131 |
+
"grad_norm": 6.628826141357422,
|
9132 |
+
"learning_rate": 1.0375045913314063e-06,
|
9133 |
+
"loss": 1.2581,
|
9134 |
+
"step": 1299
|
9135 |
+
},
|
9136 |
+
{
|
9137 |
+
"epoch": 0.2549269536229042,
|
9138 |
+
"grad_norm": 11.671515464782715,
|
9139 |
+
"learning_rate": 1.0043622887738413e-06,
|
9140 |
+
"loss": 2.1951,
|
9141 |
+
"step": 1300
|
9142 |
+
},
|
9143 |
+
{
|
9144 |
+
"epoch": 0.2551230512795372,
|
9145 |
+
"grad_norm": 4.008133888244629,
|
9146 |
+
"learning_rate": 9.717552956651331e-07,
|
9147 |
+
"loss": 2.3149,
|
9148 |
+
"step": 1301
|
9149 |
+
},
|
9150 |
+
{
|
9151 |
+
"epoch": 0.2553191489361702,
|
9152 |
+
"grad_norm": 8.170031547546387,
|
9153 |
+
"learning_rate": 9.396837883244746e-07,
|
9154 |
+
"loss": 1.576,
|
9155 |
+
"step": 1302
|
9156 |
+
},
|
9157 |
+
{
|
9158 |
+
"epoch": 0.2555152465928032,
|
9159 |
+
"grad_norm": 7.534716606140137,
|
9160 |
+
"learning_rate": 9.081479401754966e-07,
|
9161 |
+
"loss": 2.1834,
|
9162 |
+
"step": 1303
|
9163 |
+
},
|
9164 |
+
{
|
9165 |
+
"epoch": 0.2557113442494362,
|
9166 |
+
"grad_norm": 4.619508266448975,
|
9167 |
+
"learning_rate": 8.771479217452694e-07,
|
9168 |
+
"loss": 1.6227,
|
9169 |
+
"step": 1304
|
9170 |
+
},
|
9171 |
+
{
|
9172 |
+
"epoch": 0.25590744190606923,
|
9173 |
+
"grad_norm": 4.464870452880859,
|
9174 |
+
"learning_rate": 8.466839006634364e-07,
|
9175 |
+
"loss": 0.6817,
|
9176 |
+
"step": 1305
|
9177 |
+
},
|
9178 |
+
{
|
9179 |
+
"epoch": 0.2561035395627022,
|
9180 |
+
"grad_norm": 7.0456743240356445,
|
9181 |
+
"learning_rate": 8.167560416612596e-07,
|
9182 |
+
"loss": 2.6702,
|
9183 |
+
"step": 1306
|
9184 |
+
},
|
9185 |
+
{
|
9186 |
+
"epoch": 0.25629963721933524,
|
9187 |
+
"grad_norm": 3.647512674331665,
|
9188 |
+
"learning_rate": 7.873645065708091e-07,
|
9189 |
+
"loss": 2.1032,
|
9190 |
+
"step": 1307
|
9191 |
+
},
|
9192 |
+
{
|
9193 |
+
"epoch": 0.2564957348759682,
|
9194 |
+
"grad_norm": 10.428240776062012,
|
9195 |
+
"learning_rate": 7.585094543239857e-07,
|
9196 |
+
"loss": 1.6973,
|
9197 |
+
"step": 1308
|
9198 |
+
},
|
9199 |
+
{
|
9200 |
+
"epoch": 0.25669183253260125,
|
9201 |
+
"grad_norm": 4.8686299324035645,
|
9202 |
+
"learning_rate": 7.301910409517221e-07,
|
9203 |
+
"loss": 1.8198,
|
9204 |
+
"step": 1309
|
9205 |
+
},
|
9206 |
+
{
|
9207 |
+
"epoch": 0.2568879301892342,
|
9208 |
+
"grad_norm": 13.215803146362305,
|
9209 |
+
"learning_rate": 7.024094195831277e-07,
|
9210 |
+
"loss": 1.791,
|
9211 |
+
"step": 1310
|
9212 |
+
},
|
9213 |
+
{
|
9214 |
+
"epoch": 0.25708402784586726,
|
9215 |
+
"grad_norm": 7.418087959289551,
|
9216 |
+
"learning_rate": 6.751647404446781e-07,
|
9217 |
+
"loss": 1.0254,
|
9218 |
+
"step": 1311
|
9219 |
+
},
|
9220 |
+
{
|
9221 |
+
"epoch": 0.25728012550250023,
|
9222 |
+
"grad_norm": 3.0393893718719482,
|
9223 |
+
"learning_rate": 6.484571508593718e-07,
|
9224 |
+
"loss": 0.429,
|
9225 |
+
"step": 1312
|
9226 |
+
},
|
9227 |
+
{
|
9228 |
+
"epoch": 0.25747622315913327,
|
9229 |
+
"grad_norm": 8.804031372070312,
|
9230 |
+
"learning_rate": 6.222867952459299e-07,
|
9231 |
+
"loss": 1.6825,
|
9232 |
+
"step": 1313
|
9233 |
+
},
|
9234 |
+
{
|
9235 |
+
"epoch": 0.25767232081576624,
|
9236 |
+
"grad_norm": 13.433954238891602,
|
9237 |
+
"learning_rate": 5.966538151180645e-07,
|
9238 |
+
"loss": 1.9529,
|
9239 |
+
"step": 1314
|
9240 |
+
},
|
9241 |
+
{
|
9242 |
+
"epoch": 0.2578684184723993,
|
9243 |
+
"grad_norm": 5.364668369293213,
|
9244 |
+
"learning_rate": 5.715583490836673e-07,
|
9245 |
+
"loss": 1.2584,
|
9246 |
+
"step": 1315
|
9247 |
+
},
|
9248 |
+
{
|
9249 |
+
"epoch": 0.25806451612903225,
|
9250 |
+
"grad_norm": 4.066149711608887,
|
9251 |
+
"learning_rate": 5.470005328440664e-07,
|
9252 |
+
"loss": 1.6191,
|
9253 |
+
"step": 1316
|
9254 |
+
},
|
9255 |
+
{
|
9256 |
+
"epoch": 0.2582606137856653,
|
9257 |
+
"grad_norm": 8.135047912597656,
|
9258 |
+
"learning_rate": 5.22980499193304e-07,
|
9259 |
+
"loss": 2.0681,
|
9260 |
+
"step": 1317
|
9261 |
+
},
|
9262 |
+
{
|
9263 |
+
"epoch": 0.25845671144229826,
|
9264 |
+
"grad_norm": 3.4767017364501953,
|
9265 |
+
"learning_rate": 4.994983780174156e-07,
|
9266 |
+
"loss": 1.0348,
|
9267 |
+
"step": 1318
|
9268 |
+
},
|
9269 |
+
{
|
9270 |
+
"epoch": 0.2586528090989313,
|
9271 |
+
"grad_norm": 6.77761173248291,
|
9272 |
+
"learning_rate": 4.7655429629372973e-07,
|
9273 |
+
"loss": 0.7133,
|
9274 |
+
"step": 1319
|
9275 |
+
},
|
9276 |
+
{
|
9277 |
+
"epoch": 0.25884890675556427,
|
9278 |
+
"grad_norm": 4.188867092132568,
|
9279 |
+
"learning_rate": 4.5414837809018007e-07,
|
9280 |
+
"loss": 1.6745,
|
9281 |
+
"step": 1320
|
9282 |
+
},
|
9283 |
+
{
|
9284 |
+
"epoch": 0.2590450044121973,
|
9285 |
+
"grad_norm": 3.1510398387908936,
|
9286 |
+
"learning_rate": 4.322807445646171e-07,
|
9287 |
+
"loss": 0.7392,
|
9288 |
+
"step": 1321
|
9289 |
+
},
|
9290 |
+
{
|
9291 |
+
"epoch": 0.2592411020688303,
|
9292 |
+
"grad_norm": 14.09803581237793,
|
9293 |
+
"learning_rate": 4.1095151396418617e-07,
|
9294 |
+
"loss": 2.5941,
|
9295 |
+
"step": 1322
|
9296 |
+
},
|
9297 |
+
{
|
9298 |
+
"epoch": 0.2594371997254633,
|
9299 |
+
"grad_norm": 7.8199028968811035,
|
9300 |
+
"learning_rate": 3.9016080162466164e-07,
|
9301 |
+
"loss": 1.2837,
|
9302 |
+
"step": 1323
|
9303 |
+
},
|
9304 |
+
{
|
9305 |
+
"epoch": 0.2596332973820963,
|
9306 |
+
"grad_norm": 4.440105438232422,
|
9307 |
+
"learning_rate": 3.699087199698359e-07,
|
9308 |
+
"loss": 1.7956,
|
9309 |
+
"step": 1324
|
9310 |
+
},
|
9311 |
+
{
|
9312 |
+
"epoch": 0.25982939503872926,
|
9313 |
+
"grad_norm": 7.9481048583984375,
|
9314 |
+
"learning_rate": 3.50195378510898e-07,
|
9315 |
+
"loss": 0.7979,
|
9316 |
+
"step": 1325
|
9317 |
+
},
|
9318 |
+
{
|
9319 |
+
"epoch": 0.2600254926953623,
|
9320 |
+
"grad_norm": 6.204482555389404,
|
9321 |
+
"learning_rate": 3.310208838458562e-07,
|
9322 |
+
"loss": 2.1294,
|
9323 |
+
"step": 1326
|
9324 |
+
},
|
9325 |
+
{
|
9326 |
+
"epoch": 0.26022159035199527,
|
9327 |
+
"grad_norm": 6.299440860748291,
|
9328 |
+
"learning_rate": 3.1238533965897156e-07,
|
9329 |
+
"loss": 1.4474,
|
9330 |
+
"step": 1327
|
9331 |
+
},
|
9332 |
+
{
|
9333 |
+
"epoch": 0.2604176880086283,
|
9334 |
+
"grad_norm": 5.203184604644775,
|
9335 |
+
"learning_rate": 2.9428884672015876e-07,
|
9336 |
+
"loss": 1.7164,
|
9337 |
+
"step": 1328
|
9338 |
+
},
|
9339 |
+
{
|
9340 |
+
"epoch": 0.2606137856652613,
|
9341 |
+
"grad_norm": 5.018675804138184,
|
9342 |
+
"learning_rate": 2.7673150288447526e-07,
|
9343 |
+
"loss": 1.0184,
|
9344 |
+
"step": 1329
|
9345 |
+
},
|
9346 |
+
{
|
9347 |
+
"epoch": 0.2608098833218943,
|
9348 |
+
"grad_norm": 5.618327617645264,
|
9349 |
+
"learning_rate": 2.597134030915771e-07,
|
9350 |
+
"loss": 1.4616,
|
9351 |
+
"step": 1330
|
9352 |
+
},
|
9353 |
+
{
|
9354 |
+
"epoch": 0.2610059809785273,
|
9355 |
+
"grad_norm": 10.888134002685547,
|
9356 |
+
"learning_rate": 2.432346393652196e-07,
|
9357 |
+
"loss": 2.6353,
|
9358 |
+
"step": 1331
|
9359 |
+
},
|
9360 |
+
{
|
9361 |
+
"epoch": 0.2612020786351603,
|
9362 |
+
"grad_norm": 4.126128196716309,
|
9363 |
+
"learning_rate": 2.2729530081273542e-07,
|
9364 |
+
"loss": 1.0003,
|
9365 |
+
"step": 1332
|
9366 |
+
},
|
9367 |
+
{
|
9368 |
+
"epoch": 0.2613981762917933,
|
9369 |
+
"grad_norm": 7.105251789093018,
|
9370 |
+
"learning_rate": 2.118954736245682e-07,
|
9371 |
+
"loss": 1.4611,
|
9372 |
+
"step": 1333
|
9373 |
+
},
|
9374 |
+
{
|
9375 |
+
"epoch": 0.2615942739484263,
|
9376 |
+
"grad_norm": 4.438936233520508,
|
9377 |
+
"learning_rate": 1.9703524107382855e-07,
|
9378 |
+
"loss": 1.0069,
|
9379 |
+
"step": 1334
|
9380 |
+
},
|
9381 |
+
{
|
9382 |
+
"epoch": 0.2617903716050593,
|
9383 |
+
"grad_norm": 8.27482795715332,
|
9384 |
+
"learning_rate": 1.8271468351579446e-07,
|
9385 |
+
"loss": 2.8264,
|
9386 |
+
"step": 1335
|
9387 |
+
},
|
9388 |
+
{
|
9389 |
+
"epoch": 0.26198646926169233,
|
9390 |
+
"grad_norm": 7.972108840942383,
|
9391 |
+
"learning_rate": 1.6893387838750053e-07,
|
9392 |
+
"loss": 2.4485,
|
9393 |
+
"step": 1336
|
9394 |
+
},
|
9395 |
+
{
|
9396 |
+
"epoch": 0.2621825669183253,
|
9397 |
+
"grad_norm": 5.920525074005127,
|
9398 |
+
"learning_rate": 1.5569290020736039e-07,
|
9399 |
+
"loss": 1.4498,
|
9400 |
+
"step": 1337
|
9401 |
+
},
|
9402 |
+
{
|
9403 |
+
"epoch": 0.26237866457495834,
|
9404 |
+
"grad_norm": 5.699307918548584,
|
9405 |
+
"learning_rate": 1.429918205746672e-07,
|
9406 |
+
"loss": 2.0144,
|
9407 |
+
"step": 1338
|
9408 |
+
},
|
9409 |
+
{
|
9410 |
+
"epoch": 0.2625747622315913,
|
9411 |
+
"grad_norm": 2.4948651790618896,
|
9412 |
+
"learning_rate": 1.308307081693272e-07,
|
9413 |
+
"loss": 0.7282,
|
9414 |
+
"step": 1339
|
9415 |
+
},
|
9416 |
+
{
|
9417 |
+
"epoch": 0.26277085988822435,
|
9418 |
+
"grad_norm": 9.36095142364502,
|
9419 |
+
"learning_rate": 1.192096287513711e-07,
|
9420 |
+
"loss": 2.026,
|
9421 |
+
"step": 1340
|
9422 |
+
},
|
9423 |
+
{
|
9424 |
+
"epoch": 0.2629669575448573,
|
9425 |
+
"grad_norm": 11.627699851989746,
|
9426 |
+
"learning_rate": 1.0812864516067667e-07,
|
9427 |
+
"loss": 1.6524,
|
9428 |
+
"step": 1341
|
9429 |
+
},
|
9430 |
+
{
|
9431 |
+
"epoch": 0.26316305520149036,
|
9432 |
+
"grad_norm": 4.287498474121094,
|
9433 |
+
"learning_rate": 9.758781731661337e-08,
|
9434 |
+
"loss": 1.1361,
|
9435 |
+
"step": 1342
|
9436 |
+
},
|
9437 |
+
{
|
9438 |
+
"epoch": 0.26335915285812334,
|
9439 |
+
"grad_norm": 3.8631551265716553,
|
9440 |
+
"learning_rate": 8.758720221768713e-08,
|
9441 |
+
"loss": 1.0284,
|
9442 |
+
"step": 1343
|
9443 |
+
},
|
9444 |
+
{
|
9445 |
+
"epoch": 0.26355525051475637,
|
9446 |
+
"grad_norm": 7.8086323738098145,
|
9447 |
+
"learning_rate": 7.812685394127384e-08,
|
9448 |
+
"loss": 1.3352,
|
9449 |
+
"step": 1344
|
9450 |
+
},
|
9451 |
+
{
|
9452 |
+
"epoch": 0.26375134817138934,
|
9453 |
+
"grad_norm": 7.071966171264648,
|
9454 |
+
"learning_rate": 6.920682364330855e-08,
|
9455 |
+
"loss": 1.783,
|
9456 |
+
"step": 1345
|
9457 |
+
},
|
9458 |
+
{
|
9459 |
+
"epoch": 0.2639474458280224,
|
9460 |
+
"grad_norm": 6.961006164550781,
|
9461 |
+
"learning_rate": 6.082715955800789e-08,
|
9462 |
+
"loss": 0.666,
|
9463 |
+
"step": 1346
|
9464 |
+
},
|
9465 |
+
{
|
9466 |
+
"epoch": 0.26414354348465535,
|
9467 |
+
"grad_norm": 10.395101547241211,
|
9468 |
+
"learning_rate": 5.298790699758138e-08,
|
9469 |
+
"loss": 1.6436,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.2643396411412884,
|
9474 |
+
"grad_norm": 6.384310722351074,
|
9475 |
+
"learning_rate": 4.5689108352053867e-08,
|
9476 |
+
"loss": 1.5711,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.26453573879792136,
|
9481 |
+
"grad_norm": 5.545608997344971,
|
9482 |
+
"learning_rate": 3.893080308898789e-08,
|
9483 |
+
"loss": 1.2522,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.2647318364545544,
|
9488 |
+
"grad_norm": 17.074024200439453,
|
9489 |
+
"learning_rate": 3.271302775325058e-08,
|
9490 |
+
"loss": 3.1042,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.26492793411118737,
|
9495 |
+
"grad_norm": 7.239038944244385,
|
9496 |
+
"learning_rate": 2.7035815966891532e-08,
|
9497 |
+
"loss": 1.658,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.2651240317678204,
|
9502 |
+
"grad_norm": 6.944170951843262,
|
9503 |
+
"learning_rate": 2.1899198428876333e-08,
|
9504 |
+
"loss": 1.5184,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.2653201294244534,
|
9509 |
+
"grad_norm": 5.548795700073242,
|
9510 |
+
"learning_rate": 1.730320291498666e-08,
|
9511 |
+
"loss": 1.6825,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.26551622708108635,
|
9516 |
+
"grad_norm": 8.901213645935059,
|
9517 |
+
"learning_rate": 1.3247854277609328e-08,
|
9518 |
+
"loss": 1.1894,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.2657123247377194,
|
9523 |
+
"grad_norm": 9.556551933288574,
|
9524 |
+
"learning_rate": 9.73317444566968e-09,
|
9525 |
+
"loss": 2.2414,
|
9526 |
+
"step": 1355
|
9527 |
+
},
|
9528 |
+
{
|
9529 |
+
"epoch": 0.26590842239435236,
|
9530 |
+
"grad_norm": 6.794140338897705,
|
9531 |
+
"learning_rate": 6.759182424453947e-09,
|
9532 |
+
"loss": 1.3999,
|
9533 |
+
"step": 1356
|
9534 |
+
},
|
9535 |
+
{
|
9536 |
+
"epoch": 0.2661045200509854,
|
9537 |
+
"grad_norm": 5.626242637634277,
|
9538 |
+
"learning_rate": 4.325894295553745e-09,
|
9539 |
+
"loss": 1.5497,
|
9540 |
+
"step": 1357
|
9541 |
+
},
|
9542 |
+
{
|
9543 |
+
"epoch": 0.26630061770761837,
|
9544 |
+
"grad_norm": 6.506687164306641,
|
9545 |
+
"learning_rate": 2.433323216721739e-09,
|
9546 |
+
"loss": 2.8993,
|
9547 |
+
"step": 1358
|
9548 |
+
},
|
9549 |
+
{
|
9550 |
+
"epoch": 0.2664967153642514,
|
9551 |
+
"grad_norm": 5.048938274383545,
|
9552 |
+
"learning_rate": 1.081479421871645e-09,
|
9553 |
+
"loss": 1.7226,
|
9554 |
+
"step": 1359
|
9555 |
+
},
|
9556 |
+
{
|
9557 |
+
"epoch": 0.2666928130208844,
|
9558 |
+
"grad_norm": 5.443484306335449,
|
9559 |
+
"learning_rate": 2.7037022096720876e-10,
|
9560 |
+
"loss": 0.9488,
|
9561 |
+
"step": 1360
|
9562 |
+
},
|
9563 |
+
{
|
9564 |
+
"epoch": 0.2668889106775174,
|
9565 |
+
"grad_norm": 17.070817947387695,
|
9566 |
+
"learning_rate": 0.0,
|
9567 |
+
"loss": 2.8164,
|
9568 |
+
"step": 1361
|
9569 |
}
|
9570 |
],
|
  "logging_steps": 1,
  ...
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
+        "should_training_stop": true
      },
      "attributes": {}
    }
  },
+  "total_flos": 2.914693202093015e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null