abaddon182
committed on
Training in progress, step 500, checkpoint

last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f1edf4171baa17b75a74d8a46747e2a9c6512fa30e794236a0a4004827a4bb59
 size 671149168

last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b18afcd945887baea0934ad00fb692f81e95eba2a4531fab2f9c2e290c37b428
 size 341314644

last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fd57bf7a2da04dc345c2640eccb46282472c9cb4b2a58bcedc1d3a00a311cb87
 size 14244

last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fe1d153de177b356f9e3a70d6e4ec979560b0c300994e71ca4cb89afc74c5b3a
 size 1064
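
All four binary files above are stored as Git LFS pointers: each commit rewrites only the pointer's oid sha256: line, and since every recorded size is unchanged, the payloads kept their exact byte counts while their contents changed. Below is a minimal sketch, not part of this repo and with an illustrative helper name, for checking a downloaded file against the hash and size in its pointer:

# Minimal sketch: verify a downloaded checkpoint file against the
# "oid sha256:" and "size" fields of its Git LFS pointer.
import hashlib
import os

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    # Cheap check first: the pointer records the exact byte count.
    if os.path.getsize(path) != expected_size:
        return False
    # Stream the file so large weights (adapter_model.safetensors is ~671 MB)
    # are never held in memory at once.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(1 << 20), b""):
            digest.update(block)
    return digest.hexdigest() == expected_oid

# Values taken from the adapter_model.safetensors pointer in this commit:
print(verify_lfs_object(
    "last-checkpoint/adapter_model.safetensors",
    "f1edf4171baa17b75a74d8a46747e2a9c6512fa30e794236a0a4004827a4bb59",
    671149168,
))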

last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 0.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 0.8778727054595947,
+  "best_model_checkpoint": "miner_id_24/checkpoint-500",
+  "epoch": 0.32663726931242854,
   "eval_steps": 100,
-  "global_step":
+  "global_step": 500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2847,6 +2847,714 @@
       "eval_samples_per_second": 13.322,
       "eval_steps_per_second": 3.333,
       "step": 400
+    },
+    {
+      "epoch": 0.2619630899885677,
+      "grad_norm": 2.0284175872802734,
+      "learning_rate": 1.013396731136465e-05,
+      "loss": 3.2076,
+      "step": 401
+    },
+    {
+      "epoch": 0.26261636452719256,
+      "grad_norm": 2.200660467147827,
+      "learning_rate": 9.937309365446973e-06,
+      "loss": 4.3066,
+      "step": 402
+    },
+    {
+      "epoch": 0.2632696390658174,
+      "grad_norm": 2.2217345237731934,
+      "learning_rate": 9.742367571857091e-06,
+      "loss": 4.3663,
+      "step": 403
+    },
+    {
+      "epoch": 0.26392291360444226,
+      "grad_norm": 2.1234660148620605,
+      "learning_rate": 9.549150281252633e-06,
+      "loss": 4.0273,
+      "step": 404
+    },
+    {
+      "epoch": 0.2645761881430671,
+      "grad_norm": 2.140134572982788,
+      "learning_rate": 9.357665770419244e-06,
+      "loss": 4.0097,
+      "step": 405
+    },
+    {
+      "epoch": 0.265229462681692,
+      "grad_norm": 2.241824150085449,
+      "learning_rate": 9.167922241916055e-06,
+      "loss": 3.8709,
+      "step": 406
+    },
+    {
+      "epoch": 0.26588273722031686,
+      "grad_norm": 2.207245111465454,
+      "learning_rate": 8.97992782372432e-06,
+      "loss": 3.7465,
+      "step": 407
+    },
+    {
+      "epoch": 0.2665360117589417,
+      "grad_norm": 2.2445242404937744,
+      "learning_rate": 8.793690568899216e-06,
+      "loss": 3.9702,
+      "step": 408
+    },
+    {
+      "epoch": 0.26718928629756655,
+      "grad_norm": 2.209073781967163,
+      "learning_rate": 8.609218455224893e-06,
+      "loss": 3.6522,
+      "step": 409
+    },
+    {
+      "epoch": 0.2678425608361914,
+      "grad_norm": 2.3187787532806396,
+      "learning_rate": 8.426519384872733e-06,
+      "loss": 3.9904,
+      "step": 410
+    },
+    {
+      "epoch": 0.26849583537481625,
+      "grad_norm": 2.3060576915740967,
+      "learning_rate": 8.245601184062852e-06,
+      "loss": 3.645,
+      "step": 411
+    },
+    {
+      "epoch": 0.2691491099134411,
+      "grad_norm": 2.4985568523406982,
+      "learning_rate": 8.066471602728803e-06,
+      "loss": 3.94,
+      "step": 412
+    },
+    {
+      "epoch": 0.269802384452066,
+      "grad_norm": 2.4821956157684326,
+      "learning_rate": 7.889138314185678e-06,
+      "loss": 3.8423,
+      "step": 413
+    },
+    {
+      "epoch": 0.27045565899069085,
+      "grad_norm": 2.536126136779785,
+      "learning_rate": 7.71360891480134e-06,
+      "loss": 3.8344,
+      "step": 414
+    },
+    {
+      "epoch": 0.2711089335293157,
+      "grad_norm": 2.5374646186828613,
+      "learning_rate": 7.539890923671062e-06,
+      "loss": 3.9225,
+      "step": 415
+    },
+    {
+      "epoch": 0.27176220806794055,
+      "grad_norm": 2.6912612915039062,
+      "learning_rate": 7.367991782295391e-06,
+      "loss": 3.8863,
+      "step": 416
+    },
+    {
+      "epoch": 0.2724154826065654,
+      "grad_norm": 2.7828660011291504,
+      "learning_rate": 7.197918854261432e-06,
+      "loss": 3.8709,
+      "step": 417
+    },
+    {
+      "epoch": 0.27306875714519024,
+      "grad_norm": 2.831202507019043,
+      "learning_rate": 7.029679424927365e-06,
+      "loss": 3.7255,
+      "step": 418
+    },
+    {
+      "epoch": 0.27372203168381515,
+      "grad_norm": 3.060506582260132,
+      "learning_rate": 6.863280701110408e-06,
+      "loss": 4.0642,
+      "step": 419
+    },
+    {
+      "epoch": 0.27437530622244,
+      "grad_norm": 3.0359199047088623,
+      "learning_rate": 6.698729810778065e-06,
+      "loss": 3.7836,
+      "step": 420
+    },
+    {
+      "epoch": 0.27502858076106484,
+      "grad_norm": 3.1587436199188232,
+      "learning_rate": 6.536033802742813e-06,
+      "loss": 4.0368,
+      "step": 421
+    },
+    {
+      "epoch": 0.2756818552996897,
+      "grad_norm": 3.3620858192443848,
+      "learning_rate": 6.375199646360142e-06,
+      "loss": 3.7547,
+      "step": 422
+    },
+    {
+      "epoch": 0.27633512983831454,
+      "grad_norm": 3.528315782546997,
+      "learning_rate": 6.216234231230012e-06,
+      "loss": 3.7245,
+      "step": 423
+    },
+    {
+      "epoch": 0.2769884043769394,
+      "grad_norm": 3.4212453365325928,
+      "learning_rate": 6.059144366901736e-06,
+      "loss": 3.3729,
+      "step": 424
+    },
+    {
+      "epoch": 0.27764167891556424,
+      "grad_norm": 3.6946189403533936,
+      "learning_rate": 5.903936782582253e-06,
+      "loss": 3.5213,
+      "step": 425
+    },
+    {
+      "epoch": 0.27829495345418914,
+      "grad_norm": 3.8112924098968506,
+      "learning_rate": 5.750618126847912e-06,
+      "loss": 3.5261,
+      "step": 426
+    },
+    {
+      "epoch": 0.278948227992814,
+      "grad_norm": 4.669442176818848,
+      "learning_rate": 5.599194967359639e-06,
+      "loss": 3.9578,
+      "step": 427
+    },
+    {
+      "epoch": 0.27960150253143884,
+      "grad_norm": 4.836587905883789,
+      "learning_rate": 5.449673790581611e-06,
+      "loss": 4.1958,
+      "step": 428
+    },
+    {
+      "epoch": 0.2802547770700637,
+      "grad_norm": 4.8962507247924805,
+      "learning_rate": 5.302061001503394e-06,
+      "loss": 3.6376,
+      "step": 429
+    },
+    {
+      "epoch": 0.28090805160868854,
+      "grad_norm": 5.25094747543335,
+      "learning_rate": 5.156362923365588e-06,
+      "loss": 3.6825,
+      "step": 430
+    },
+    {
+      "epoch": 0.2815613261473134,
+      "grad_norm": 5.470578193664551,
+      "learning_rate": 5.012585797388936e-06,
+      "loss": 3.3548,
+      "step": 431
+    },
+    {
+      "epoch": 0.2822146006859383,
+      "grad_norm": 6.3519415855407715,
+      "learning_rate": 4.87073578250698e-06,
+      "loss": 3.4006,
+      "step": 432
+    },
+    {
+      "epoch": 0.28286787522456314,
+      "grad_norm": 6.419662952423096,
+      "learning_rate": 4.730818955102234e-06,
+      "loss": 3.493,
+      "step": 433
+    },
+    {
+      "epoch": 0.283521149763188,
+      "grad_norm": 6.477165699005127,
+      "learning_rate": 4.592841308745932e-06,
+      "loss": 3.3161,
+      "step": 434
+    },
+    {
+      "epoch": 0.28417442430181283,
+      "grad_norm": 6.963860034942627,
+      "learning_rate": 4.456808753941205e-06,
+      "loss": 3.4243,
+      "step": 435
+    },
+    {
+      "epoch": 0.2848276988404377,
+      "grad_norm": 7.6668548583984375,
+      "learning_rate": 4.322727117869951e-06,
+      "loss": 2.5756,
+      "step": 436
+    },
+    {
+      "epoch": 0.28548097337906253,
+      "grad_norm": 8.136970520019531,
+      "learning_rate": 4.190602144143207e-06,
+      "loss": 3.3489,
+      "step": 437
+    },
+    {
+      "epoch": 0.28613424791768743,
+      "grad_norm": 10.233713150024414,
+      "learning_rate": 4.06043949255509e-06,
+      "loss": 3.6143,
+      "step": 438
+    },
+    {
+      "epoch": 0.2867875224563123,
+      "grad_norm": 10.85151481628418,
+      "learning_rate": 3.932244738840379e-06,
+      "loss": 3.1783,
+      "step": 439
+    },
+    {
+      "epoch": 0.28744079699493713,
+      "grad_norm": 9.790265083312988,
+      "learning_rate": 3.8060233744356633e-06,
+      "loss": 2.58,
+      "step": 440
+    },
+    {
+      "epoch": 0.288094071533562,
+      "grad_norm": 10.893715858459473,
+      "learning_rate": 3.681780806244095e-06,
+      "loss": 3.0611,
+      "step": 441
+    },
+    {
+      "epoch": 0.28874734607218683,
+      "grad_norm": 12.682134628295898,
+      "learning_rate": 3.5595223564037884e-06,
+      "loss": 2.8948,
+      "step": 442
+    },
+    {
+      "epoch": 0.2894006206108117,
+      "grad_norm": 13.385756492614746,
+      "learning_rate": 3.4392532620598216e-06,
+      "loss": 2.6341,
+      "step": 443
+    },
+    {
+      "epoch": 0.2900538951494365,
+      "grad_norm": 16.10133934020996,
+      "learning_rate": 3.3209786751399187e-06,
+      "loss": 3.5305,
+      "step": 444
+    },
+    {
+      "epoch": 0.29070716968806143,
+      "grad_norm": 12.668194770812988,
+      "learning_rate": 3.2047036621337236e-06,
+      "loss": 2.3386,
+      "step": 445
+    },
+    {
+      "epoch": 0.2913604442266863,
+      "grad_norm": 14.395100593566895,
+      "learning_rate": 3.0904332038757977e-06,
+      "loss": 2.0309,
+      "step": 446
+    },
+    {
+      "epoch": 0.2920137187653111,
+      "grad_norm": 17.13104820251465,
+      "learning_rate": 2.978172195332263e-06,
+      "loss": 2.4272,
+      "step": 447
+    },
+    {
+      "epoch": 0.292666993303936,
+      "grad_norm": 17.783287048339844,
+      "learning_rate": 2.8679254453910785e-06,
+      "loss": 2.7686,
+      "step": 448
+    },
+    {
+      "epoch": 0.2933202678425608,
+      "grad_norm": 22.626319885253906,
+      "learning_rate": 2.759697676656098e-06,
+      "loss": 2.9195,
+      "step": 449
+    },
+    {
+      "epoch": 0.29397354238118567,
+      "grad_norm": 35.02473831176758,
+      "learning_rate": 2.653493525244721e-06,
+      "loss": 4.8236,
+      "step": 450
+    },
+    {
+      "epoch": 0.2946268169198106,
+      "grad_norm": 2.0250165462493896,
+      "learning_rate": 2.549317540589308e-06,
+      "loss": 4.0218,
+      "step": 451
+    },
+    {
+      "epoch": 0.2952800914584354,
+      "grad_norm": 1.948515772819519,
+      "learning_rate": 2.4471741852423237e-06,
+      "loss": 3.9039,
+      "step": 452
+    },
+    {
+      "epoch": 0.29593336599706027,
+      "grad_norm": 2.03169322013855,
+      "learning_rate": 2.3470678346851518e-06,
+      "loss": 4.3663,
+      "step": 453
+    },
+    {
+      "epoch": 0.2965866405356851,
+      "grad_norm": 2.1053688526153564,
+      "learning_rate": 2.2490027771406687e-06,
+      "loss": 4.0127,
+      "step": 454
+    },
+    {
+      "epoch": 0.29723991507430997,
+      "grad_norm": 2.2202868461608887,
+      "learning_rate": 2.152983213389559e-06,
+      "loss": 4.0635,
+      "step": 455
+    },
+    {
+      "epoch": 0.2978931896129348,
+      "grad_norm": 2.141794443130493,
+      "learning_rate": 2.0590132565903476e-06,
+      "loss": 3.9512,
+      "step": 456
+    },
+    {
+      "epoch": 0.2985464641515597,
+      "grad_norm": 2.1424460411071777,
+      "learning_rate": 1.9670969321032407e-06,
+      "loss": 3.928,
+      "step": 457
+    },
+    {
+      "epoch": 0.29919973869018457,
+      "grad_norm": 2.2259063720703125,
+      "learning_rate": 1.8772381773176417e-06,
+      "loss": 3.9537,
+      "step": 458
+    },
+    {
+      "epoch": 0.2998530132288094,
+      "grad_norm": 2.1677327156066895,
+      "learning_rate": 1.7894408414835362e-06,
+      "loss": 3.7739,
+      "step": 459
+    },
+    {
+      "epoch": 0.30050628776743427,
+      "grad_norm": 2.260230541229248,
+      "learning_rate": 1.70370868554659e-06,
+      "loss": 4.0705,
+      "step": 460
+    },
+    {
+      "epoch": 0.3011595623060591,
+      "grad_norm": 2.394634246826172,
+      "learning_rate": 1.620045381987012e-06,
+      "loss": 4.1479,
+      "step": 461
+    },
+    {
+      "epoch": 0.30181283684468396,
+      "grad_norm": 2.3297996520996094,
+      "learning_rate": 1.5384545146622852e-06,
+      "loss": 3.6953,
+      "step": 462
+    },
+    {
+      "epoch": 0.3024661113833088,
+      "grad_norm": 2.3964977264404297,
+      "learning_rate": 1.4589395786535953e-06,
+      "loss": 3.9611,
+      "step": 463
+    },
+    {
+      "epoch": 0.3031193859219337,
+      "grad_norm": 2.533637046813965,
+      "learning_rate": 1.3815039801161721e-06,
+      "loss": 3.9468,
+      "step": 464
+    },
+    {
+      "epoch": 0.30377266046055856,
+      "grad_norm": 2.529632329940796,
+      "learning_rate": 1.3061510361333185e-06,
+      "loss": 3.6358,
+      "step": 465
+    },
+    {
+      "epoch": 0.3044259349991834,
+      "grad_norm": 2.569408655166626,
+      "learning_rate": 1.232883974574367e-06,
+      "loss": 3.5356,
+      "step": 466
+    },
+    {
+      "epoch": 0.30507920953780826,
+      "grad_norm": 2.727712869644165,
+      "learning_rate": 1.1617059339563807e-06,
+      "loss": 3.7934,
+      "step": 467
+    },
+    {
+      "epoch": 0.3057324840764331,
+      "grad_norm": 2.8594043254852295,
+      "learning_rate": 1.0926199633097157e-06,
+      "loss": 3.933,
+      "step": 468
+    },
+    {
+      "epoch": 0.30638575861505796,
+      "grad_norm": 2.939509868621826,
+      "learning_rate": 1.0256290220474307e-06,
+      "loss": 3.5852,
+      "step": 469
+    },
+    {
+      "epoch": 0.30703903315368286,
+      "grad_norm": 3.0464086532592773,
+      "learning_rate": 9.607359798384785e-07,
+      "loss": 3.7145,
+      "step": 470
+    },
+    {
+      "epoch": 0.3076923076923077,
+      "grad_norm": 3.3523106575012207,
+      "learning_rate": 8.979436164848088e-07,
+      "loss": 3.8206,
+      "step": 471
+    },
+    {
+      "epoch": 0.30834558223093256,
+      "grad_norm": 3.190808057785034,
+      "learning_rate": 8.372546218022747e-07,
+      "loss": 3.8822,
+      "step": 472
+    },
+    {
+      "epoch": 0.3089988567695574,
+      "grad_norm": 3.408557891845703,
+      "learning_rate": 7.786715955054203e-07,
+      "loss": 3.5316,
+      "step": 473
+    },
+    {
+      "epoch": 0.30965213130818225,
+      "grad_norm": 3.75345778465271,
+      "learning_rate": 7.221970470961125e-07,
+      "loss": 3.8423,
+      "step": 474
+    },
+    {
+      "epoch": 0.3103054058468071,
+      "grad_norm": 4.055991172790527,
+      "learning_rate": 6.678333957560512e-07,
+      "loss": 3.6669,
+      "step": 475
+    },
+    {
+      "epoch": 0.31095868038543195,
+      "grad_norm": 4.102751731872559,
+      "learning_rate": 6.15582970243117e-07,
+      "loss": 3.4249,
+      "step": 476
+    },
+    {
+      "epoch": 0.31161195492405686,
+      "grad_norm": 4.2560014724731445,
+      "learning_rate": 5.654480087916303e-07,
+      "loss": 3.4731,
+      "step": 477
+    },
+    {
+      "epoch": 0.3122652294626817,
+      "grad_norm": 4.73115873336792,
+      "learning_rate": 5.174306590164879e-07,
+      "loss": 3.3286,
+      "step": 478
+    },
+    {
+      "epoch": 0.31291850400130655,
+      "grad_norm": 5.362375259399414,
+      "learning_rate": 4.715329778211375e-07,
+      "loss": 3.2443,
+      "step": 479
+    },
+    {
+      "epoch": 0.3135717785399314,
+      "grad_norm": 5.289713382720947,
+      "learning_rate": 4.277569313094809e-07,
+      "loss": 3.2602,
+      "step": 480
+    },
+    {
+      "epoch": 0.31422505307855625,
+      "grad_norm": 5.72009801864624,
+      "learning_rate": 3.8610439470164737e-07,
+      "loss": 3.1795,
+      "step": 481
+    },
+    {
+      "epoch": 0.3148783276171811,
+      "grad_norm": 6.249419212341309,
+      "learning_rate": 3.465771522536854e-07,
+      "loss": 3.2568,
+      "step": 482
+    },
+    {
+      "epoch": 0.315531602155806,
+      "grad_norm": 6.340628147125244,
+      "learning_rate": 3.09176897181096e-07,
+      "loss": 3.7829,
+      "step": 483
+    },
+    {
+      "epoch": 0.31618487669443085,
+      "grad_norm": 6.803210735321045,
+      "learning_rate": 2.7390523158633554e-07,
+      "loss": 3.2678,
+      "step": 484
+    },
+    {
+      "epoch": 0.3168381512330557,
+      "grad_norm": 7.402984619140625,
+      "learning_rate": 2.407636663901591e-07,
+      "loss": 3.2089,
+      "step": 485
+    },
+    {
+      "epoch": 0.31749142577168055,
+      "grad_norm": 8.722670555114746,
+      "learning_rate": 2.0975362126691712e-07,
+      "loss": 3.4273,
+      "step": 486
+    },
+    {
+      "epoch": 0.3181447003103054,
+      "grad_norm": 8.667076110839844,
+      "learning_rate": 1.8087642458373134e-07,
+      "loss": 2.9022,
+      "step": 487
+    },
+    {
+      "epoch": 0.31879797484893024,
+      "grad_norm": 9.627767562866211,
+      "learning_rate": 1.5413331334360182e-07,
+      "loss": 3.0261,
+      "step": 488
+    },
+    {
+      "epoch": 0.31945124938755515,
+      "grad_norm": 10.799980163574219,
+      "learning_rate": 1.2952543313240472e-07,
+      "loss": 2.731,
+      "step": 489
+    },
+    {
+      "epoch": 0.32010452392618,
+      "grad_norm": 12.3025484085083,
+      "learning_rate": 1.0705383806982606e-07,
+      "loss": 3.1692,
+      "step": 490
+    },
+    {
+      "epoch": 0.32075779846480484,
+      "grad_norm": 11.816473007202148,
+      "learning_rate": 8.671949076420882e-08,
+      "loss": 2.1674,
+      "step": 491
+    },
+    {
+      "epoch": 0.3214110730034297,
+      "grad_norm": 13.557419776916504,
+      "learning_rate": 6.852326227130834e-08,
+      "loss": 2.846,
+      "step": 492
+    },
+    {
+      "epoch": 0.32206434754205454,
+      "grad_norm": 11.210492134094238,
+      "learning_rate": 5.246593205699424e-08,
+      "loss": 2.5836,
+      "step": 493
+    },
+    {
+      "epoch": 0.3227176220806794,
+      "grad_norm": 13.040658950805664,
+      "learning_rate": 3.8548187963854956e-08,
+      "loss": 3.2673,
+      "step": 494
+    },
+    {
+      "epoch": 0.32337089661930424,
+      "grad_norm": 15.785333633422852,
+      "learning_rate": 2.6770626181715773e-08,
+      "loss": 2.518,
+      "step": 495
+    },
+    {
+      "epoch": 0.32402417115792914,
+      "grad_norm": 15.67574405670166,
+      "learning_rate": 1.7133751222137007e-08,
+      "loss": 2.387,
+      "step": 496
+    },
+    {
+      "epoch": 0.324677445696554,
+      "grad_norm": 15.306534767150879,
+      "learning_rate": 9.637975896759077e-09,
+      "loss": 2.3683,
+      "step": 497
+    },
+    {
+      "epoch": 0.32533072023517884,
+      "grad_norm": 20.011672973632812,
+      "learning_rate": 4.2836212996499865e-09,
+      "loss": 3.7281,
+      "step": 498
+    },
+    {
+      "epoch": 0.3259839947738037,
+      "grad_norm": 21.42987823486328,
+      "learning_rate": 1.0709167935385455e-09,
+      "loss": 2.4191,
+      "step": 499
+    },
+    {
+      "epoch": 0.32663726931242854,
+      "grad_norm": 32.15013885498047,
+      "learning_rate": 0.0,
+      "loss": 3.3215,
+      "step": 500
+    },
+    {
+      "epoch": 0.32663726931242854,
+      "eval_loss": 0.8778727054595947,
+      "eval_runtime": 193.3536,
+      "eval_samples_per_second": 13.333,
+      "eval_steps_per_second": 3.336,
+      "step": 500
     }
   ],
   "logging_steps": 1,
@@ -2870,12 +3578,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 7.243979410448056e+17,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
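
Taken together, the trainer_state.json changes say this run finished rather than merely checkpointed: global_step reached 500 with the learning rate decayed to 0.0, should_training_stop flipped to true, and checkpoint-500 became the best checkpoint, since best_metric matches the step-500 eval_loss of 0.8778727054595947. A minimal sketch for confirming this from a local clone follows; it assumes the standard transformers trainer_state.json layout, including the "TrainerControl" callback key, whose name falls outside the hunks shown above:

# Minimal sketch: read the committed trainer_state.json and confirm the
# run completed. Assumes a local clone of this repository.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("global_step:", state["global_step"])                 # 500
print("best checkpoint:", state["best_model_checkpoint"])   # miner_id_24/checkpoint-500
print("best_metric:", state["best_metric"])                 # 0.8778727054595947
# "TrainerControl" is the usual transformers nesting; the callback name
# itself is not visible in the diff hunks above.
control = state["stateful_callbacks"]["TrainerControl"]["args"]
print("should_training_stop:", control["should_training_stop"])  # True
# Last evaluation record in the training log:
final_eval = [e for e in state["log_history"] if "eval_loss" in e][-1]
print("final eval_loss:", final_eval["eval_loss"], "at step", final_eval["step"])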