Uploaded checkpoint-5000
- model.safetensors +1 -1
- optimizer.pt +1 -1
- rng_state.pth +2 -2
- scheduler.pt +1 -1
- trainer_state.json +711 -3
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:656ccab8ed0ab874f0a9fd954550542c1dbd32c5a5c3d3c57430de11b416638d
 size 2836579040
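The model.safetensors entry above is only a Git LFS pointer: the repository itself stores just the SHA-256 and the byte size (about 2.8 GB), while the weight file lives in LFS storage. Once the real file has been pulled (for example with git lfs pull or the huggingface_hub download helpers), it can be opened as an ordinary safetensors archive. A minimal sketch, assuming a locally downloaded checkpoint-5000/ directory (the path is illustrative):

# Sketch: peek inside the downloaded model.safetensors from checkpoint-5000.
# Assumes the LFS file has been materialized locally; the path is illustrative.
from safetensors import safe_open

path = "checkpoint-5000/model.safetensors"

with safe_open(path, framework="pt") as f:
    names = list(f.keys())
    print(f"{len(names)} tensors stored")
    t = f.get_tensor(names[0])
    print(names[0], tuple(t.shape), t.dtype)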
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7f6b0abcbc9438259e62aa33ba8ece2a8c5cff5be14160f56bd888126bd8d4f3
 size 5673376169
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:46e65e950ab7222c5262501cd7b7e711ba567e3a80af84a6ae9728e309c1152a
+size 14308
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:61ba5a6e43d8dda2a6a4df1f188af0f99bfbc9b706fb9f9466ccacaa8ed67271
 size 1064
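Each of the four binary files above is committed as an LFS pointer with the same three lines: the spec version, an oid sha256 line, and a size line. A downloaded copy can therefore be checked against its pointer with the standard library alone. A minimal sketch, using the scheduler.pt values from the diff above (the local path and the helper name are illustrative):

import hashlib
import os

def matches_lfs_pointer(path, expected_sha256, expected_size):
    # Compare the local file's byte size and SHA-256 digest with the LFS pointer fields.
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

print(matches_lfs_pointer(
    "checkpoint-5000/scheduler.pt",
    "61ba5a6e43d8dda2a6a4df1f188af0f99bfbc9b706fb9f9466ccacaa8ed67271",
    1064,
))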
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.25,
   "eval_steps": 1000,
-  "global_step":
+  "global_step": 5000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2839,6 +2839,714 @@
       "eval_samples_per_second": 15.531,
       "eval_steps_per_second": 15.531,
       "step": 4000
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 6.0625,
+      "learning_rate": 2.42e-07,
+      "loss": 0.0556,
+      "step": 4010
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 7.0625,
+      "learning_rate": 2.3955555555555555e-07,
+      "loss": 0.0558,
+      "step": 4020
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 5.75,
+      "learning_rate": 2.3711111111111113e-07,
+      "loss": 0.0572,
+      "step": 4030
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 6.71875,
+      "learning_rate": 2.346666666666667e-07,
+      "loss": 0.055,
+      "step": 4040
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 5.9375,
+      "learning_rate": 2.3222222222222223e-07,
+      "loss": 0.0592,
+      "step": 4050
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 9.125,
+      "learning_rate": 2.297777777777778e-07,
+      "loss": 0.0584,
+      "step": 4060
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 5.8125,
+      "learning_rate": 2.2733333333333333e-07,
+      "loss": 0.0599,
+      "step": 4070
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 8.625,
+      "learning_rate": 2.248888888888889e-07,
+      "loss": 0.0609,
+      "step": 4080
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 5.46875,
+      "learning_rate": 2.2244444444444446e-07,
+      "loss": 0.0512,
+      "step": 4090
+    },
+    {
+      "epoch": 0.2,
+      "grad_norm": 9.25,
+      "learning_rate": 2.2e-07,
+      "loss": 0.0566,
+      "step": 4100
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.75,
+      "learning_rate": 2.1755555555555556e-07,
+      "loss": 0.0525,
+      "step": 4110
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.0625,
+      "learning_rate": 2.1511111111111114e-07,
+      "loss": 0.061,
+      "step": 4120
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.34375,
+      "learning_rate": 2.1266666666666667e-07,
+      "loss": 0.058,
+      "step": 4130
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 7.34375,
+      "learning_rate": 2.1022222222222224e-07,
+      "loss": 0.0636,
+      "step": 4140
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.65625,
+      "learning_rate": 2.077777777777778e-07,
+      "loss": 0.0587,
+      "step": 4150
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.5,
+      "learning_rate": 2.0533333333333335e-07,
+      "loss": 0.0559,
+      "step": 4160
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.09375,
+      "learning_rate": 2.028888888888889e-07,
+      "loss": 0.0551,
+      "step": 4170
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.96875,
+      "learning_rate": 2.0044444444444447e-07,
+      "loss": 0.0541,
+      "step": 4180
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.0625,
+      "learning_rate": 1.98e-07,
+      "loss": 0.058,
+      "step": 4190
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.65625,
+      "learning_rate": 1.9555555555555558e-07,
+      "loss": 0.0593,
+      "step": 4200
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 8.5,
+      "learning_rate": 1.931111111111111e-07,
+      "loss": 0.0623,
+      "step": 4210
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.84375,
+      "learning_rate": 1.9066666666666668e-07,
+      "loss": 0.0584,
+      "step": 4220
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.5,
+      "learning_rate": 1.8822222222222223e-07,
+      "loss": 0.0508,
+      "step": 4230
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.8125,
+      "learning_rate": 1.8577777777777778e-07,
+      "loss": 0.0618,
+      "step": 4240
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 5.34375,
+      "learning_rate": 1.8333333333333333e-07,
+      "loss": 0.053,
+      "step": 4250
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.875,
+      "learning_rate": 1.808888888888889e-07,
+      "loss": 0.0559,
+      "step": 4260
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 7.375,
+      "learning_rate": 1.7844444444444444e-07,
+      "loss": 0.0576,
+      "step": 4270
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 8.3125,
+      "learning_rate": 1.7600000000000001e-07,
+      "loss": 0.0547,
+      "step": 4280
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 6.90625,
+      "learning_rate": 1.7355555555555554e-07,
+      "loss": 0.0532,
+      "step": 4290
+    },
+    {
+      "epoch": 0.21,
+      "grad_norm": 7.46875,
+      "learning_rate": 1.7111111111111112e-07,
+      "loss": 0.0696,
+      "step": 4300
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.3125,
+      "learning_rate": 1.6866666666666667e-07,
+      "loss": 0.0569,
+      "step": 4310
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 7.125,
+      "learning_rate": 1.6622222222222222e-07,
+      "loss": 0.059,
+      "step": 4320
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.03125,
+      "learning_rate": 1.6377777777777777e-07,
+      "loss": 0.0486,
+      "step": 4330
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 7.75,
+      "learning_rate": 1.6133333333333335e-07,
+      "loss": 0.0557,
+      "step": 4340
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.90625,
+      "learning_rate": 1.5888888888888887e-07,
+      "loss": 0.0605,
+      "step": 4350
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 4.96875,
+      "learning_rate": 1.5644444444444445e-07,
+      "loss": 0.0534,
+      "step": 4360
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 4.78125,
+      "learning_rate": 1.5400000000000003e-07,
+      "loss": 0.0513,
+      "step": 4370
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 10.75,
+      "learning_rate": 1.5155555555555555e-07,
+      "loss": 0.057,
+      "step": 4380
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 6.125,
+      "learning_rate": 1.4911111111111113e-07,
+      "loss": 0.0602,
+      "step": 4390
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.5625,
+      "learning_rate": 1.4666666666666668e-07,
+      "loss": 0.0546,
+      "step": 4400
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.40625,
+      "learning_rate": 1.4422222222222223e-07,
+      "loss": 0.0543,
+      "step": 4410
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 6.46875,
+      "learning_rate": 1.4177777777777779e-07,
+      "loss": 0.0627,
+      "step": 4420
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 7.375,
+      "learning_rate": 1.3933333333333336e-07,
+      "loss": 0.0596,
+      "step": 4430
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.5625,
+      "learning_rate": 1.368888888888889e-07,
+      "loss": 0.0593,
+      "step": 4440
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 6.75,
+      "learning_rate": 1.3444444444444444e-07,
+      "loss": 0.0554,
+      "step": 4450
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 6.21875,
+      "learning_rate": 1.32e-07,
+      "loss": 0.0516,
+      "step": 4460
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 6.21875,
+      "learning_rate": 1.2955555555555557e-07,
+      "loss": 0.0529,
+      "step": 4470
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 7.71875,
+      "learning_rate": 1.2711111111111112e-07,
+      "loss": 0.0549,
+      "step": 4480
+    },
+    {
+      "epoch": 0.22,
+      "grad_norm": 5.34375,
+      "learning_rate": 1.2466666666666667e-07,
+      "loss": 0.0564,
+      "step": 4490
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.96875,
+      "learning_rate": 1.2222222222222222e-07,
+      "loss": 0.0576,
+      "step": 4500
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 6.375,
+      "learning_rate": 1.1977777777777777e-07,
+      "loss": 0.0539,
+      "step": 4510
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 6.25,
+      "learning_rate": 1.1733333333333335e-07,
+      "loss": 0.0546,
+      "step": 4520
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.65625,
+      "learning_rate": 1.148888888888889e-07,
+      "loss": 0.0555,
+      "step": 4530
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.84375,
+      "learning_rate": 1.1244444444444445e-07,
+      "loss": 0.0583,
+      "step": 4540
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.75,
+      "learning_rate": 1.1e-07,
+      "loss": 0.0553,
+      "step": 4550
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.875,
+      "learning_rate": 1.0755555555555557e-07,
+      "loss": 0.0533,
+      "step": 4560
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 4.8125,
+      "learning_rate": 1.0511111111111112e-07,
+      "loss": 0.0565,
+      "step": 4570
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 4.9375,
+      "learning_rate": 1.0266666666666667e-07,
+      "loss": 0.0573,
+      "step": 4580
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 6.71875,
+      "learning_rate": 1.0022222222222224e-07,
+      "loss": 0.054,
+      "step": 4590
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 7.21875,
+      "learning_rate": 9.777777777777779e-08,
+      "loss": 0.0592,
+      "step": 4600
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 7.0625,
+      "learning_rate": 9.533333333333334e-08,
+      "loss": 0.0591,
+      "step": 4610
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 6.03125,
+      "learning_rate": 9.288888888888889e-08,
+      "loss": 0.0645,
+      "step": 4620
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 7.0625,
+      "learning_rate": 9.044444444444446e-08,
+      "loss": 0.0549,
+      "step": 4630
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.65625,
+      "learning_rate": 8.800000000000001e-08,
+      "loss": 0.0554,
+      "step": 4640
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 7.8125,
+      "learning_rate": 8.555555555555556e-08,
+      "loss": 0.0536,
+      "step": 4650
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.6875,
+      "learning_rate": 8.311111111111111e-08,
+      "loss": 0.0633,
+      "step": 4660
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 5.96875,
+      "learning_rate": 8.066666666666667e-08,
+      "loss": 0.0529,
+      "step": 4670
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 7.96875,
+      "learning_rate": 7.822222222222223e-08,
+      "loss": 0.0532,
+      "step": 4680
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 9.5625,
+      "learning_rate": 7.577777777777778e-08,
+      "loss": 0.0603,
+      "step": 4690
+    },
+    {
+      "epoch": 0.23,
+      "grad_norm": 6.875,
+      "learning_rate": 7.333333333333334e-08,
+      "loss": 0.0584,
+      "step": 4700
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.0625,
+      "learning_rate": 7.088888888888889e-08,
+      "loss": 0.0577,
+      "step": 4710
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 7.71875,
+      "learning_rate": 6.844444444444444e-08,
+      "loss": 0.0612,
+      "step": 4720
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.28125,
+      "learning_rate": 6.6e-08,
+      "loss": 0.0668,
+      "step": 4730
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.09375,
+      "learning_rate": 6.355555555555556e-08,
+      "loss": 0.0577,
+      "step": 4740
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.59375,
+      "learning_rate": 6.111111111111111e-08,
+      "loss": 0.0578,
+      "step": 4750
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.78125,
+      "learning_rate": 5.8666666666666676e-08,
+      "loss": 0.0567,
+      "step": 4760
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 7.3125,
+      "learning_rate": 5.622222222222223e-08,
+      "loss": 0.0609,
+      "step": 4770
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.34375,
+      "learning_rate": 5.3777777777777785e-08,
+      "loss": 0.0561,
+      "step": 4780
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.125,
+      "learning_rate": 5.1333333333333336e-08,
+      "loss": 0.0531,
+      "step": 4790
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.625,
+      "learning_rate": 4.8888888888888894e-08,
+      "loss": 0.0546,
+      "step": 4800
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 7.21875,
+      "learning_rate": 4.6444444444444446e-08,
+      "loss": 0.0525,
+      "step": 4810
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.65625,
+      "learning_rate": 4.4000000000000004e-08,
+      "loss": 0.0589,
+      "step": 4820
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.90625,
+      "learning_rate": 4.1555555555555555e-08,
+      "loss": 0.0595,
+      "step": 4830
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.5625,
+      "learning_rate": 3.911111111111111e-08,
+      "loss": 0.054,
+      "step": 4840
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 7.375,
+      "learning_rate": 3.666666666666667e-08,
+      "loss": 0.0606,
+      "step": 4850
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.375,
+      "learning_rate": 3.422222222222222e-08,
+      "loss": 0.0572,
+      "step": 4860
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.21875,
+      "learning_rate": 3.177777777777778e-08,
+      "loss": 0.0558,
+      "step": 4870
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 6.9375,
+      "learning_rate": 2.9333333333333338e-08,
+      "loss": 0.0577,
+      "step": 4880
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 5.46875,
+      "learning_rate": 2.6888888888888893e-08,
+      "loss": 0.0521,
+      "step": 4890
+    },
+    {
+      "epoch": 0.24,
+      "grad_norm": 4.96875,
+      "learning_rate": 2.4444444444444447e-08,
+      "loss": 0.0548,
+      "step": 4900
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 6.9375,
+      "learning_rate": 2.2000000000000002e-08,
+      "loss": 0.0582,
+      "step": 4910
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 5.0,
+      "learning_rate": 1.9555555555555556e-08,
+      "loss": 0.0527,
+      "step": 4920
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 6.96875,
+      "learning_rate": 1.711111111111111e-08,
+      "loss": 0.0521,
+      "step": 4930
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 5.28125,
+      "learning_rate": 1.4666666666666669e-08,
+      "loss": 0.0581,
+      "step": 4940
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 5.59375,
+      "learning_rate": 1.2222222222222224e-08,
+      "loss": 0.0509,
+      "step": 4950
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 9.1875,
+      "learning_rate": 9.777777777777778e-09,
+      "loss": 0.0546,
+      "step": 4960
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 5.71875,
+      "learning_rate": 7.3333333333333345e-09,
+      "loss": 0.0588,
+      "step": 4970
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 6.15625,
+      "learning_rate": 4.888888888888889e-09,
+      "loss": 0.0601,
+      "step": 4980
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 8.0625,
+      "learning_rate": 2.4444444444444446e-09,
+      "loss": 0.0551,
+      "step": 4990
+    },
+    {
+      "epoch": 0.25,
+      "grad_norm": 5.25,
+      "learning_rate": 0.0,
+      "loss": 0.0534,
+      "step": 5000
+    },
+    {
+      "epoch": 0.25,
+      "eval_loss": 0.05834246426820755,
+      "eval_runtime": 64.5109,
+      "eval_samples_per_second": 15.501,
+      "eval_steps_per_second": 15.501,
+      "step": 5000
     }
   ],
   "logging_steps": 10,
@@ -2846,7 +3554,7 @@
   "num_input_tokens_seen": 0,
   "num_train_epochs": 1,
   "save_steps": 1000,
-  "total_flos":
+  "total_flos": 8.06961020928e+16,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
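The trainer_state.json change appends the log entries for steps 4010 through 5000: training loss stays around 0.05-0.06, the learning rate decays linearly to 0.0 at step 5000 (roughly 2.44e-10 per step), and a new evaluation entry records eval_loss 0.0583 at step 5000. A minimal sketch of reading the state back from a locally downloaded checkpoint (the path is illustrative):

import json

with open("checkpoint-5000/trainer_state.json") as f:
    state = json.load(f)

print("global_step:", state["global_step"], "epoch:", state["epoch"])

train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]
print("last train loss:", train_logs[-1]["loss"], "at step", train_logs[-1]["step"])
print("last eval loss:", eval_logs[-1]["eval_loss"], "at step", eval_logs[-1]["step"])

# The logged learning rates are consistent with a linear decay that reaches 0.0 at step 5000.
prev = train_logs[-2]
print("approx. LR decrement per step:", prev["learning_rate"] / (5000 - prev["step"]))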