akahana committed on
Commit 24f465b
1 Parent(s): 4ae22d8

Training in progress, step 100500

README.md ADDED
@@ -0,0 +1,69 @@
+ ---
+ tags:
+ - generated_from_trainer
+ datasets:
+ - akahana/GlotCC-V1-jav-Latn
+ metrics:
+ - accuracy
+ model-index:
+ - name: tinygpt2-javanese
+   results:
+   - task:
+       name: Causal Language Modeling
+       type: text-generation
+     dataset:
+       name: akahana/GlotCC-V1-jav-Latn default
+       type: akahana/GlotCC-V1-jav-Latn
+       args: default
+     metrics:
+     - name: Accuracy
+       type: accuracy
+       value: 0.25270675362470885
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # tinygpt2-javanese
+
+ This model is a fine-tuned version of [](https://huggingface.co/) on the akahana/GlotCC-V1-jav-Latn default dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 5.0050
+ - Accuracy: 0.2527
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-05
+ - train_batch_size: 16
+ - eval_batch_size: 4
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 20.0
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.42.4
+ - Pytorch 2.3.1+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
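
The card above only reports aggregate metrics. For readers who want to try the checkpoint, here is a minimal, hedged usage sketch; it assumes the files in this commit are published under the repo id `akahana/tinygpt2-javanese` (inferred from the model name, adjust if the checkpoint lives elsewhere), and the prompt is an arbitrary example.

```python
# Minimal usage sketch (not part of the commit). The repo id is assumed from the
# model name in the card; the prompt is an arbitrary example string.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "akahana/tinygpt2-javanese"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("Yogyakarta iku", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=30, do_sample=True, top_k=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```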
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "<|endoftext|>": 50257
+ }
all_results.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "epoch": 20.0,
+   "eval_accuracy": 0.25270675362470885,
+   "eval_loss": 5.005028247833252,
+   "eval_runtime": 10.9985,
+   "eval_samples": 4053,
+   "eval_samples_per_second": 368.506,
+   "eval_steps_per_second": 92.195,
+   "perplexity": 149.16129658231105,
+   "total_flos": 488922611712000.0,
+   "train_loss": 5.48259629872295,
+   "train_runtime": 4495.3437,
+   "train_samples": 80219,
+   "train_samples_per_second": 356.898,
+   "train_steps_per_second": 22.308
+ }
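
For context on how these numbers relate: the reported perplexity is simply the exponential of the evaluation loss, and the per-second figure follows from the sample count and runtime in the same file. A small sanity-check sketch, with values copied from the JSON above:

```python
import math

# Values copied from all_results.json above.
eval_loss, eval_samples, eval_runtime = 5.005028247833252, 4053, 10.9985

print(math.exp(eval_loss))          # ~149.16 -> matches the reported "perplexity"
print(eval_samples / eval_runtime)  # ~368.5  -> matches "eval_samples_per_second"
```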
eval_results.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "epoch": 20.0,
+   "eval_accuracy": 0.25270675362470885,
+   "eval_loss": 5.005028247833252,
+   "eval_runtime": 10.9985,
+   "eval_samples": 4053,
+   "eval_samples_per_second": 368.506,
+   "eval_steps_per_second": 92.195,
+   "perplexity": 149.16129658231105
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.42.4"
+ }
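
Token id 50256 is GPT-2's `<|endoftext|>`, so this config makes generation both start and stop on that token. A hedged sketch of reading the file back once the repo is published (repo id assumed as above):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("akahana/tinygpt2-javanese")  # assumed repo id
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 50256 50256
```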
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d27647f456d7c3a5e6689566c2e23ac98df779ff413ee07103fd343deb189d48
+ size 27846288
runs/Jul21_22-13-57_78d944cbbe34/events.out.tfevents.1721600093.78d944cbbe34.1304.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1585b185477b362a98a1d3068070e5d19a00bbbf8197a651cde41eaf3a58f03f
+ size 48066
runs/Jul21_22-13-57_78d944cbbe34/events.out.tfevents.1721604604.78d944cbbe34.1304.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c498c9186ea79fc5eaf4a7480c4f6383300a1a5d1ed7e6a19657935e5aefa583
+ size 417
runs/Jul21_23-32-54_78d944cbbe34/events.out.tfevents.1721604826.78d944cbbe34.21392.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33e6a5b0b5e3254ea959dc96b78dc7ada344d6948943e26ed10c06cfba93fddb
+ size 5052
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
tokenizer.json CHANGED
@@ -2,7 +2,17 @@
   "version": "1.0",
   "truncation": null,
   "padding": null,
-  "added_tokens": [],
+  "added_tokens": [
+    {
+      "id": 50257,
+      "content": "<|endoftext|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    }
+  ],
   "normalizer": null,
   "pre_tokenizer": {
     "type": "ByteLevel",
tokenizer_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50257": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1000000000000000019884624838656,
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
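
Taken together, special_tokens_map.json and tokenizer_config.json declare a GPT2Tokenizer whose bos, eos, and unk tokens are all `<|endoftext|>`, plus the one added token with id 50257 from the tokenizer.json change above. A hedged loading sketch (repo id assumed, as before):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("akahana/tinygpt2-javanese")  # assumed repo id
print(tok.bos_token, tok.eos_token, tok.unk_token)  # all "<|endoftext|>" per special_tokens_map.json
print(len(tok))  # vocabulary size including the token added in this commit
```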
train_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 20.0,
+   "total_flos": 488922611712000.0,
+   "train_loss": 5.48259629872295,
+   "train_runtime": 4495.3437,
+   "train_samples": 80219,
+   "train_samples_per_second": 356.898,
+   "train_steps_per_second": 22.308
+ }
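
These throughput figures are consistent with the hyperparameters in the README: 80,219 training samples at a batch size of 16 give ceil(80219/16) = 5,014 optimizer steps per epoch, i.e. 100,280 steps over 20 epochs, and dividing the work by the ~4,495 s runtime reproduces the reported rates. A quick check:

```python
import math

# Values copied from train_results.json and the README above.
train_samples, epochs, runtime, batch_size = 80219, 20, 4495.3437, 16

steps = math.ceil(train_samples / batch_size) * epochs
print(steps)                              # 100280 optimizer steps in total
print(train_samples * epochs / runtime)   # ~356.9 -> matches "train_samples_per_second"
print(steps / runtime)                    # ~22.3  -> matches "train_steps_per_second"
```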
trainer_state.json ADDED
@@ -0,0 +1,1442 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 20.0,
5
+ "eval_steps": 500,
6
+ "global_step": 100280,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0997207818109294,
13
+ "grad_norm": 0.9261826276779175,
14
+ "learning_rate": 4.975069804547268e-05,
15
+ "loss": 9.3488,
16
+ "step": 500
17
+ },
18
+ {
19
+ "epoch": 0.1994415636218588,
20
+ "grad_norm": 0.5372676253318787,
21
+ "learning_rate": 4.9501396090945354e-05,
22
+ "loss": 8.0556,
23
+ "step": 1000
24
+ },
25
+ {
26
+ "epoch": 0.2991623454327882,
27
+ "grad_norm": 0.5985580086708069,
28
+ "learning_rate": 4.925209413641803e-05,
29
+ "loss": 7.7905,
30
+ "step": 1500
31
+ },
32
+ {
33
+ "epoch": 0.3988831272437176,
34
+ "grad_norm": 0.7738422751426697,
35
+ "learning_rate": 4.900279218189071e-05,
36
+ "loss": 7.5996,
37
+ "step": 2000
38
+ },
39
+ {
40
+ "epoch": 0.49860390905464697,
41
+ "grad_norm": 0.8470643162727356,
42
+ "learning_rate": 4.875349022736339e-05,
43
+ "loss": 7.4439,
44
+ "step": 2500
45
+ },
46
+ {
47
+ "epoch": 0.5983246908655764,
48
+ "grad_norm": 0.9632411003112793,
49
+ "learning_rate": 4.850418827283606e-05,
50
+ "loss": 7.2826,
51
+ "step": 3000
52
+ },
53
+ {
54
+ "epoch": 0.6980454726765057,
55
+ "grad_norm": 0.9540568590164185,
56
+ "learning_rate": 4.825488631830874e-05,
57
+ "loss": 7.1712,
58
+ "step": 3500
59
+ },
60
+ {
61
+ "epoch": 0.7977662544874352,
62
+ "grad_norm": 0.9605555534362793,
63
+ "learning_rate": 4.8005584363781416e-05,
64
+ "loss": 7.0881,
65
+ "step": 4000
66
+ },
67
+ {
68
+ "epoch": 0.8974870362983646,
69
+ "grad_norm": 1.101860523223877,
70
+ "learning_rate": 4.775678101316314e-05,
71
+ "loss": 7.0052,
72
+ "step": 4500
73
+ },
74
+ {
75
+ "epoch": 0.9972078181092939,
76
+ "grad_norm": 1.8117992877960205,
77
+ "learning_rate": 4.750747905863582e-05,
78
+ "loss": 6.9103,
79
+ "step": 5000
80
+ },
81
+ {
82
+ "epoch": 1.0969285999202234,
83
+ "grad_norm": 1.2837625741958618,
84
+ "learning_rate": 4.72581771041085e-05,
85
+ "loss": 6.8231,
86
+ "step": 5500
87
+ },
88
+ {
89
+ "epoch": 1.1966493817311528,
90
+ "grad_norm": 1.3095475435256958,
91
+ "learning_rate": 4.7008875149581175e-05,
92
+ "loss": 6.7796,
93
+ "step": 6000
94
+ },
95
+ {
96
+ "epoch": 1.2963701635420821,
97
+ "grad_norm": 1.4652228355407715,
98
+ "learning_rate": 4.6760071798962906e-05,
99
+ "loss": 6.707,
100
+ "step": 6500
101
+ },
102
+ {
103
+ "epoch": 1.3960909453530115,
104
+ "grad_norm": 1.357987403869629,
105
+ "learning_rate": 4.651076984443558e-05,
106
+ "loss": 6.6568,
107
+ "step": 7000
108
+ },
109
+ {
110
+ "epoch": 1.4958117271639408,
111
+ "grad_norm": 1.31229829788208,
112
+ "learning_rate": 4.626146788990826e-05,
113
+ "loss": 6.6144,
114
+ "step": 7500
115
+ },
116
+ {
117
+ "epoch": 1.5955325089748702,
118
+ "grad_norm": 1.4246258735656738,
119
+ "learning_rate": 4.6012165935380934e-05,
120
+ "loss": 6.5669,
121
+ "step": 8000
122
+ },
123
+ {
124
+ "epoch": 1.6952532907857998,
125
+ "grad_norm": 1.3963290452957153,
126
+ "learning_rate": 4.576286398085361e-05,
127
+ "loss": 6.508,
128
+ "step": 8500
129
+ },
130
+ {
131
+ "epoch": 1.7949740725967291,
132
+ "grad_norm": 1.5991405248641968,
133
+ "learning_rate": 4.551406063023534e-05,
134
+ "loss": 6.4651,
135
+ "step": 9000
136
+ },
137
+ {
138
+ "epoch": 1.8946948544076585,
139
+ "grad_norm": 1.6727421283721924,
140
+ "learning_rate": 4.526475867570802e-05,
141
+ "loss": 6.4129,
142
+ "step": 9500
143
+ },
144
+ {
145
+ "epoch": 1.994415636218588,
146
+ "grad_norm": 1.5657908916473389,
147
+ "learning_rate": 4.50154567211807e-05,
148
+ "loss": 6.3859,
149
+ "step": 10000
150
+ },
151
+ {
152
+ "epoch": 2.0941364180295174,
153
+ "grad_norm": 1.4079279899597168,
154
+ "learning_rate": 4.4766154766653375e-05,
155
+ "loss": 6.3136,
156
+ "step": 10500
157
+ },
158
+ {
159
+ "epoch": 2.193857199840447,
160
+ "grad_norm": 1.4724199771881104,
161
+ "learning_rate": 4.45173514160351e-05,
162
+ "loss": 6.2935,
163
+ "step": 11000
164
+ },
165
+ {
166
+ "epoch": 2.293577981651376,
167
+ "grad_norm": 1.7075014114379883,
168
+ "learning_rate": 4.426804946150778e-05,
169
+ "loss": 6.2724,
170
+ "step": 11500
171
+ },
172
+ {
173
+ "epoch": 2.3932987634623055,
174
+ "grad_norm": 1.550031304359436,
175
+ "learning_rate": 4.401874750698046e-05,
176
+ "loss": 6.2526,
177
+ "step": 12000
178
+ },
179
+ {
180
+ "epoch": 2.493019545273235,
181
+ "grad_norm": 1.5938421487808228,
182
+ "learning_rate": 4.376944555245313e-05,
183
+ "loss": 6.2218,
184
+ "step": 12500
185
+ },
186
+ {
187
+ "epoch": 2.5927403270841642,
188
+ "grad_norm": 1.853521704673767,
189
+ "learning_rate": 4.3520642201834866e-05,
190
+ "loss": 6.1679,
191
+ "step": 13000
192
+ },
193
+ {
194
+ "epoch": 2.6924611088950936,
195
+ "grad_norm": 1.7296770811080933,
196
+ "learning_rate": 4.327134024730754e-05,
197
+ "loss": 6.1442,
198
+ "step": 13500
199
+ },
200
+ {
201
+ "epoch": 2.792181890706023,
202
+ "grad_norm": 1.752852201461792,
203
+ "learning_rate": 4.302203829278022e-05,
204
+ "loss": 6.1241,
205
+ "step": 14000
206
+ },
207
+ {
208
+ "epoch": 2.8919026725169523,
209
+ "grad_norm": 1.6265596151351929,
210
+ "learning_rate": 4.2772736338252893e-05,
211
+ "loss": 6.0973,
212
+ "step": 14500
213
+ },
214
+ {
215
+ "epoch": 2.9916234543278817,
216
+ "grad_norm": 1.7537871599197388,
217
+ "learning_rate": 4.2523932987634625e-05,
218
+ "loss": 6.0858,
219
+ "step": 15000
220
+ },
221
+ {
222
+ "epoch": 3.0913442361388115,
223
+ "grad_norm": 1.5949361324310303,
224
+ "learning_rate": 4.22746310331073e-05,
225
+ "loss": 6.0188,
226
+ "step": 15500
227
+ },
228
+ {
229
+ "epoch": 3.191065017949741,
230
+ "grad_norm": 1.907575011253357,
231
+ "learning_rate": 4.202532907857998e-05,
232
+ "loss": 6.0118,
233
+ "step": 16000
234
+ },
235
+ {
236
+ "epoch": 3.29078579976067,
237
+ "grad_norm": 1.8919939994812012,
238
+ "learning_rate": 4.177602712405265e-05,
239
+ "loss": 5.9883,
240
+ "step": 16500
241
+ },
242
+ {
243
+ "epoch": 3.3905065815715996,
244
+ "grad_norm": 1.8701094388961792,
245
+ "learning_rate": 4.1527223773434384e-05,
246
+ "loss": 5.9725,
247
+ "step": 17000
248
+ },
249
+ {
250
+ "epoch": 3.490227363382529,
251
+ "grad_norm": 2.043443202972412,
252
+ "learning_rate": 4.127792181890706e-05,
253
+ "loss": 5.9474,
254
+ "step": 17500
255
+ },
256
+ {
257
+ "epoch": 3.5899481451934583,
258
+ "grad_norm": 1.852910041809082,
259
+ "learning_rate": 4.102861986437974e-05,
260
+ "loss": 5.9103,
261
+ "step": 18000
262
+ },
263
+ {
264
+ "epoch": 3.6896689270043876,
265
+ "grad_norm": 1.7579346895217896,
266
+ "learning_rate": 4.077931790985242e-05,
267
+ "loss": 5.9047,
268
+ "step": 18500
269
+ },
270
+ {
271
+ "epoch": 3.789389708815317,
272
+ "grad_norm": 1.8655468225479126,
273
+ "learning_rate": 4.053051455923414e-05,
274
+ "loss": 5.9043,
275
+ "step": 19000
276
+ },
277
+ {
278
+ "epoch": 3.8891104906262464,
279
+ "grad_norm": 2.0585408210754395,
280
+ "learning_rate": 4.0281212604706826e-05,
281
+ "loss": 5.8705,
282
+ "step": 19500
283
+ },
284
+ {
285
+ "epoch": 3.988831272437176,
286
+ "grad_norm": 2.088595151901245,
287
+ "learning_rate": 4.00319106501795e-05,
288
+ "loss": 5.8608,
289
+ "step": 20000
290
+ },
291
+ {
292
+ "epoch": 4.0885520542481055,
293
+ "grad_norm": 1.8989760875701904,
294
+ "learning_rate": 3.978260869565217e-05,
295
+ "loss": 5.8265,
296
+ "step": 20500
297
+ },
298
+ {
299
+ "epoch": 4.188272836059035,
300
+ "grad_norm": 1.8559260368347168,
301
+ "learning_rate": 3.953330674112485e-05,
302
+ "loss": 5.8052,
303
+ "step": 21000
304
+ },
305
+ {
306
+ "epoch": 4.287993617869964,
307
+ "grad_norm": 2.0016090869903564,
308
+ "learning_rate": 3.9284503390506585e-05,
309
+ "loss": 5.7947,
310
+ "step": 21500
311
+ },
312
+ {
313
+ "epoch": 4.387714399680894,
314
+ "grad_norm": 2.0224192142486572,
315
+ "learning_rate": 3.903520143597926e-05,
316
+ "loss": 5.7605,
317
+ "step": 22000
318
+ },
319
+ {
320
+ "epoch": 4.487435181491823,
321
+ "grad_norm": 1.9289922714233398,
322
+ "learning_rate": 3.8785899481451936e-05,
323
+ "loss": 5.7315,
324
+ "step": 22500
325
+ },
326
+ {
327
+ "epoch": 4.587155963302752,
328
+ "grad_norm": 2.1070337295532227,
329
+ "learning_rate": 3.853659752692461e-05,
330
+ "loss": 5.7517,
331
+ "step": 23000
332
+ },
333
+ {
334
+ "epoch": 4.686876745113682,
335
+ "grad_norm": 2.091681718826294,
336
+ "learning_rate": 3.8287794176306343e-05,
337
+ "loss": 5.7206,
338
+ "step": 23500
339
+ },
340
+ {
341
+ "epoch": 4.786597526924611,
342
+ "grad_norm": 2.0619523525238037,
343
+ "learning_rate": 3.803849222177902e-05,
344
+ "loss": 5.6829,
345
+ "step": 24000
346
+ },
347
+ {
348
+ "epoch": 4.88631830873554,
349
+ "grad_norm": 2.1252663135528564,
350
+ "learning_rate": 3.7789190267251695e-05,
351
+ "loss": 5.7028,
352
+ "step": 24500
353
+ },
354
+ {
355
+ "epoch": 4.98603909054647,
356
+ "grad_norm": 2.179452657699585,
357
+ "learning_rate": 3.753988831272438e-05,
358
+ "loss": 5.6975,
359
+ "step": 25000
360
+ },
361
+ {
362
+ "epoch": 5.085759872357399,
363
+ "grad_norm": 2.054488182067871,
364
+ "learning_rate": 3.72910849621061e-05,
365
+ "loss": 5.6527,
366
+ "step": 25500
367
+ },
368
+ {
369
+ "epoch": 5.1854806541683285,
370
+ "grad_norm": 2.3839542865753174,
371
+ "learning_rate": 3.704178300757878e-05,
372
+ "loss": 5.6498,
373
+ "step": 26000
374
+ },
375
+ {
376
+ "epoch": 5.285201435979258,
377
+ "grad_norm": 1.9893797636032104,
378
+ "learning_rate": 3.679248105305146e-05,
379
+ "loss": 5.6103,
380
+ "step": 26500
381
+ },
382
+ {
383
+ "epoch": 5.384922217790187,
384
+ "grad_norm": 2.089535713195801,
385
+ "learning_rate": 3.654317909852413e-05,
386
+ "loss": 5.6011,
387
+ "step": 27000
388
+ },
389
+ {
390
+ "epoch": 5.484642999601117,
391
+ "grad_norm": 1.9748643636703491,
392
+ "learning_rate": 3.629387714399681e-05,
393
+ "loss": 5.6206,
394
+ "step": 27500
395
+ },
396
+ {
397
+ "epoch": 5.584363781412046,
398
+ "grad_norm": 2.1696887016296387,
399
+ "learning_rate": 3.6045073793378544e-05,
400
+ "loss": 5.607,
401
+ "step": 28000
402
+ },
403
+ {
404
+ "epoch": 5.684084563222975,
405
+ "grad_norm": 2.2261533737182617,
406
+ "learning_rate": 3.579577183885121e-05,
407
+ "loss": 5.5895,
408
+ "step": 28500
409
+ },
410
+ {
411
+ "epoch": 5.783805345033905,
412
+ "grad_norm": 1.9739435911178589,
413
+ "learning_rate": 3.5546469884323896e-05,
414
+ "loss": 5.5654,
415
+ "step": 29000
416
+ },
417
+ {
418
+ "epoch": 5.883526126844835,
419
+ "grad_norm": 2.3373613357543945,
420
+ "learning_rate": 3.529716792979657e-05,
421
+ "loss": 5.554,
422
+ "step": 29500
423
+ },
424
+ {
425
+ "epoch": 5.983246908655763,
426
+ "grad_norm": 2.0227203369140625,
427
+ "learning_rate": 3.50483645791783e-05,
428
+ "loss": 5.569,
429
+ "step": 30000
430
+ },
431
+ {
432
+ "epoch": 6.082967690466694,
433
+ "grad_norm": 2.1894445419311523,
434
+ "learning_rate": 3.479906262465098e-05,
435
+ "loss": 5.5268,
436
+ "step": 30500
437
+ },
438
+ {
439
+ "epoch": 6.182688472277623,
440
+ "grad_norm": 2.3545119762420654,
441
+ "learning_rate": 3.4549760670123655e-05,
442
+ "loss": 5.5102,
443
+ "step": 31000
444
+ },
445
+ {
446
+ "epoch": 6.282409254088552,
447
+ "grad_norm": 2.380277156829834,
448
+ "learning_rate": 3.430045871559634e-05,
449
+ "loss": 5.5301,
450
+ "step": 31500
451
+ },
452
+ {
453
+ "epoch": 6.382130035899482,
454
+ "grad_norm": 2.288188934326172,
455
+ "learning_rate": 3.405165536497806e-05,
456
+ "loss": 5.4927,
457
+ "step": 32000
458
+ },
459
+ {
460
+ "epoch": 6.481850817710411,
461
+ "grad_norm": 2.2211456298828125,
462
+ "learning_rate": 3.380235341045074e-05,
463
+ "loss": 5.4786,
464
+ "step": 32500
465
+ },
466
+ {
467
+ "epoch": 6.58157159952134,
468
+ "grad_norm": 2.5629711151123047,
469
+ "learning_rate": 3.355305145592342e-05,
470
+ "loss": 5.4932,
471
+ "step": 33000
472
+ },
473
+ {
474
+ "epoch": 6.68129238133227,
475
+ "grad_norm": 2.385563611984253,
476
+ "learning_rate": 3.330374950139609e-05,
477
+ "loss": 5.4715,
478
+ "step": 33500
479
+ },
480
+ {
481
+ "epoch": 6.781013163143199,
482
+ "grad_norm": 2.284985303878784,
483
+ "learning_rate": 3.305494615077782e-05,
484
+ "loss": 5.4599,
485
+ "step": 34000
486
+ },
487
+ {
488
+ "epoch": 6.8807339449541285,
489
+ "grad_norm": 2.3653366565704346,
490
+ "learning_rate": 3.2805644196250504e-05,
491
+ "loss": 5.4498,
492
+ "step": 34500
493
+ },
494
+ {
495
+ "epoch": 6.980454726765058,
496
+ "grad_norm": 2.311102867126465,
497
+ "learning_rate": 3.255634224172317e-05,
498
+ "loss": 5.4315,
499
+ "step": 35000
500
+ },
501
+ {
502
+ "epoch": 7.080175508575987,
503
+ "grad_norm": 2.4815216064453125,
504
+ "learning_rate": 3.2307040287195855e-05,
505
+ "loss": 5.4282,
506
+ "step": 35500
507
+ },
508
+ {
509
+ "epoch": 7.179896290386917,
510
+ "grad_norm": 2.228046178817749,
511
+ "learning_rate": 3.205823693657759e-05,
512
+ "loss": 5.4321,
513
+ "step": 36000
514
+ },
515
+ {
516
+ "epoch": 7.279617072197846,
517
+ "grad_norm": 2.459022283554077,
518
+ "learning_rate": 3.1808934982050256e-05,
519
+ "loss": 5.3886,
520
+ "step": 36500
521
+ },
522
+ {
523
+ "epoch": 7.379337854008775,
524
+ "grad_norm": 2.217167615890503,
525
+ "learning_rate": 3.155963302752294e-05,
526
+ "loss": 5.3924,
527
+ "step": 37000
528
+ },
529
+ {
530
+ "epoch": 7.479058635819705,
531
+ "grad_norm": 2.3231680393218994,
532
+ "learning_rate": 3.1310331072995614e-05,
533
+ "loss": 5.389,
534
+ "step": 37500
535
+ },
536
+ {
537
+ "epoch": 7.578779417630634,
538
+ "grad_norm": 2.220628261566162,
539
+ "learning_rate": 3.1061527722377346e-05,
540
+ "loss": 5.3856,
541
+ "step": 38000
542
+ },
543
+ {
544
+ "epoch": 7.678500199441563,
545
+ "grad_norm": 2.612741708755493,
546
+ "learning_rate": 3.081222576785002e-05,
547
+ "loss": 5.3851,
548
+ "step": 38500
549
+ },
550
+ {
551
+ "epoch": 7.778220981252493,
552
+ "grad_norm": 2.194031000137329,
553
+ "learning_rate": 3.05629238133227e-05,
554
+ "loss": 5.3744,
555
+ "step": 39000
556
+ },
557
+ {
558
+ "epoch": 7.877941763063422,
559
+ "grad_norm": 2.342750310897827,
560
+ "learning_rate": 3.0313621858795377e-05,
561
+ "loss": 5.3644,
562
+ "step": 39500
563
+ },
564
+ {
565
+ "epoch": 7.9776625448743514,
566
+ "grad_norm": 2.273401975631714,
567
+ "learning_rate": 3.0064818508177105e-05,
568
+ "loss": 5.3828,
569
+ "step": 40000
570
+ },
571
+ {
572
+ "epoch": 8.07738332668528,
573
+ "grad_norm": 2.5998456478118896,
574
+ "learning_rate": 2.981551655364978e-05,
575
+ "loss": 5.3399,
576
+ "step": 40500
577
+ },
578
+ {
579
+ "epoch": 8.177104108496211,
580
+ "grad_norm": 2.4312164783477783,
581
+ "learning_rate": 2.956621459912246e-05,
582
+ "loss": 5.3491,
583
+ "step": 41000
584
+ },
585
+ {
586
+ "epoch": 8.27682489030714,
587
+ "grad_norm": 2.1767194271087646,
588
+ "learning_rate": 2.9316912644595136e-05,
589
+ "loss": 5.318,
590
+ "step": 41500
591
+ },
592
+ {
593
+ "epoch": 8.37654567211807,
594
+ "grad_norm": 2.546261787414551,
595
+ "learning_rate": 2.9068109293976864e-05,
596
+ "loss": 5.3169,
597
+ "step": 42000
598
+ },
599
+ {
600
+ "epoch": 8.476266453928998,
601
+ "grad_norm": 2.5187346935272217,
602
+ "learning_rate": 2.8818807339449543e-05,
603
+ "loss": 5.3051,
604
+ "step": 42500
605
+ },
606
+ {
607
+ "epoch": 8.575987235739928,
608
+ "grad_norm": 2.4358792304992676,
609
+ "learning_rate": 2.856950538492222e-05,
610
+ "loss": 5.3063,
611
+ "step": 43000
612
+ },
613
+ {
614
+ "epoch": 8.675708017550857,
615
+ "grad_norm": 2.22619891166687,
616
+ "learning_rate": 2.8320203430394898e-05,
617
+ "loss": 5.3241,
618
+ "step": 43500
619
+ },
620
+ {
621
+ "epoch": 8.775428799361787,
622
+ "grad_norm": 2.6035451889038086,
623
+ "learning_rate": 2.8071400079776626e-05,
624
+ "loss": 5.2964,
625
+ "step": 44000
626
+ },
627
+ {
628
+ "epoch": 8.875149581172716,
629
+ "grad_norm": 2.5391156673431396,
630
+ "learning_rate": 2.7822098125249302e-05,
631
+ "loss": 5.2914,
632
+ "step": 44500
633
+ },
634
+ {
635
+ "epoch": 8.974870362983646,
636
+ "grad_norm": 2.4130935668945312,
637
+ "learning_rate": 2.757279617072198e-05,
638
+ "loss": 5.2883,
639
+ "step": 45000
640
+ },
641
+ {
642
+ "epoch": 9.074591144794574,
643
+ "grad_norm": 2.411205530166626,
644
+ "learning_rate": 2.7323494216194657e-05,
645
+ "loss": 5.2893,
646
+ "step": 45500
647
+ },
648
+ {
649
+ "epoch": 9.174311926605505,
650
+ "grad_norm": 2.484266757965088,
651
+ "learning_rate": 2.7074690865576385e-05,
652
+ "loss": 5.2649,
653
+ "step": 46000
654
+ },
655
+ {
656
+ "epoch": 9.274032708416435,
657
+ "grad_norm": 2.446840524673462,
658
+ "learning_rate": 2.6825388911049064e-05,
659
+ "loss": 5.2579,
660
+ "step": 46500
661
+ },
662
+ {
663
+ "epoch": 9.373753490227363,
664
+ "grad_norm": 2.2476446628570557,
665
+ "learning_rate": 2.657608695652174e-05,
666
+ "loss": 5.2616,
667
+ "step": 47000
668
+ },
669
+ {
670
+ "epoch": 9.473474272038294,
671
+ "grad_norm": 2.36161732673645,
672
+ "learning_rate": 2.632678500199442e-05,
673
+ "loss": 5.2458,
674
+ "step": 47500
675
+ },
676
+ {
677
+ "epoch": 9.573195053849222,
678
+ "grad_norm": 2.4564807415008545,
679
+ "learning_rate": 2.6077981651376147e-05,
680
+ "loss": 5.2522,
681
+ "step": 48000
682
+ },
683
+ {
684
+ "epoch": 9.672915835660152,
685
+ "grad_norm": 2.477536678314209,
686
+ "learning_rate": 2.5828679696848823e-05,
687
+ "loss": 5.249,
688
+ "step": 48500
689
+ },
690
+ {
691
+ "epoch": 9.77263661747108,
692
+ "grad_norm": 2.8510327339172363,
693
+ "learning_rate": 2.5579377742321503e-05,
694
+ "loss": 5.2397,
695
+ "step": 49000
696
+ },
697
+ {
698
+ "epoch": 9.872357399282011,
699
+ "grad_norm": 2.4770243167877197,
700
+ "learning_rate": 2.533007578779418e-05,
701
+ "loss": 5.2172,
702
+ "step": 49500
703
+ },
704
+ {
705
+ "epoch": 9.97207818109294,
706
+ "grad_norm": 2.492191791534424,
707
+ "learning_rate": 2.5081272437175906e-05,
708
+ "loss": 5.236,
709
+ "step": 50000
710
+ },
711
+ {
712
+ "epoch": 10.07179896290387,
713
+ "grad_norm": 2.5560014247894287,
714
+ "learning_rate": 2.4831970482648582e-05,
715
+ "loss": 5.2159,
716
+ "step": 50500
717
+ },
718
+ {
719
+ "epoch": 10.171519744714798,
720
+ "grad_norm": 2.550168752670288,
721
+ "learning_rate": 2.458266852812126e-05,
722
+ "loss": 5.2026,
723
+ "step": 51000
724
+ },
725
+ {
726
+ "epoch": 10.271240526525728,
727
+ "grad_norm": 2.562626600265503,
728
+ "learning_rate": 2.4333366573593937e-05,
729
+ "loss": 5.2121,
730
+ "step": 51500
731
+ },
732
+ {
733
+ "epoch": 10.370961308336657,
734
+ "grad_norm": 2.389833927154541,
735
+ "learning_rate": 2.408456322297567e-05,
736
+ "loss": 5.1963,
737
+ "step": 52000
738
+ },
739
+ {
740
+ "epoch": 10.470682090147587,
741
+ "grad_norm": 2.617138385772705,
742
+ "learning_rate": 2.3835261268448345e-05,
743
+ "loss": 5.1838,
744
+ "step": 52500
745
+ },
746
+ {
747
+ "epoch": 10.570402871958516,
748
+ "grad_norm": 2.6732029914855957,
749
+ "learning_rate": 2.358595931392102e-05,
750
+ "loss": 5.2135,
751
+ "step": 53000
752
+ },
753
+ {
754
+ "epoch": 10.670123653769446,
755
+ "grad_norm": 2.509752035140991,
756
+ "learning_rate": 2.33366573593937e-05,
757
+ "loss": 5.1937,
758
+ "step": 53500
759
+ },
760
+ {
761
+ "epoch": 10.769844435580374,
762
+ "grad_norm": 2.732623815536499,
763
+ "learning_rate": 2.3087355404866376e-05,
764
+ "loss": 5.2013,
765
+ "step": 54000
766
+ },
767
+ {
768
+ "epoch": 10.869565217391305,
769
+ "grad_norm": 2.7967655658721924,
770
+ "learning_rate": 2.2838552054248104e-05,
771
+ "loss": 5.1751,
772
+ "step": 54500
773
+ },
774
+ {
775
+ "epoch": 10.969285999202233,
776
+ "grad_norm": 2.6768581867218018,
777
+ "learning_rate": 2.2589250099720783e-05,
778
+ "loss": 5.1728,
779
+ "step": 55000
780
+ },
781
+ {
782
+ "epoch": 11.069006781013163,
783
+ "grad_norm": 2.3465123176574707,
784
+ "learning_rate": 2.233994814519346e-05,
785
+ "loss": 5.1869,
786
+ "step": 55500
787
+ },
788
+ {
789
+ "epoch": 11.168727562824092,
790
+ "grad_norm": 2.3460209369659424,
791
+ "learning_rate": 2.2090646190666138e-05,
792
+ "loss": 5.1704,
793
+ "step": 56000
794
+ },
795
+ {
796
+ "epoch": 11.268448344635022,
797
+ "grad_norm": 2.7022573947906494,
798
+ "learning_rate": 2.1841842840047866e-05,
799
+ "loss": 5.1508,
800
+ "step": 56500
801
+ },
802
+ {
803
+ "epoch": 11.36816912644595,
804
+ "grad_norm": 2.5259013175964355,
805
+ "learning_rate": 2.1592540885520542e-05,
806
+ "loss": 5.1441,
807
+ "step": 57000
808
+ },
809
+ {
810
+ "epoch": 11.46788990825688,
811
+ "grad_norm": 2.6938321590423584,
812
+ "learning_rate": 2.134323893099322e-05,
813
+ "loss": 5.1628,
814
+ "step": 57500
815
+ },
816
+ {
817
+ "epoch": 11.56761069006781,
818
+ "grad_norm": 2.874973773956299,
819
+ "learning_rate": 2.1093936976465897e-05,
820
+ "loss": 5.1405,
821
+ "step": 58000
822
+ },
823
+ {
824
+ "epoch": 11.66733147187874,
825
+ "grad_norm": 2.762739896774292,
826
+ "learning_rate": 2.0845133625847625e-05,
827
+ "loss": 5.1264,
828
+ "step": 58500
829
+ },
830
+ {
831
+ "epoch": 11.76705225368967,
832
+ "grad_norm": 2.617100954055786,
833
+ "learning_rate": 2.0595831671320304e-05,
834
+ "loss": 5.1297,
835
+ "step": 59000
836
+ },
837
+ {
838
+ "epoch": 11.866773035500598,
839
+ "grad_norm": 2.754258632659912,
840
+ "learning_rate": 2.034652971679298e-05,
841
+ "loss": 5.1349,
842
+ "step": 59500
843
+ },
844
+ {
845
+ "epoch": 11.966493817311529,
846
+ "grad_norm": 2.57446551322937,
847
+ "learning_rate": 2.009722776226566e-05,
848
+ "loss": 5.1401,
849
+ "step": 60000
850
+ },
851
+ {
852
+ "epoch": 12.066214599122457,
853
+ "grad_norm": 2.7069363594055176,
854
+ "learning_rate": 1.9848424411647387e-05,
855
+ "loss": 5.1093,
856
+ "step": 60500
857
+ },
858
+ {
859
+ "epoch": 12.165935380933387,
860
+ "grad_norm": 2.5617587566375732,
861
+ "learning_rate": 1.9599122457120063e-05,
862
+ "loss": 5.1071,
863
+ "step": 61000
864
+ },
865
+ {
866
+ "epoch": 12.265656162744316,
867
+ "grad_norm": 2.7849984169006348,
868
+ "learning_rate": 1.9349820502592742e-05,
869
+ "loss": 5.1298,
870
+ "step": 61500
871
+ },
872
+ {
873
+ "epoch": 12.365376944555246,
874
+ "grad_norm": 2.6736953258514404,
875
+ "learning_rate": 1.910051854806542e-05,
876
+ "loss": 5.0828,
877
+ "step": 62000
878
+ },
879
+ {
880
+ "epoch": 12.465097726366174,
881
+ "grad_norm": 2.623760938644409,
882
+ "learning_rate": 1.8851715197447146e-05,
883
+ "loss": 5.1166,
884
+ "step": 62500
885
+ },
886
+ {
887
+ "epoch": 12.564818508177105,
888
+ "grad_norm": 3.202988624572754,
889
+ "learning_rate": 1.8602413242919826e-05,
890
+ "loss": 5.1128,
891
+ "step": 63000
892
+ },
893
+ {
894
+ "epoch": 12.664539289988033,
895
+ "grad_norm": 2.4918911457061768,
896
+ "learning_rate": 1.83531112883925e-05,
897
+ "loss": 5.1181,
898
+ "step": 63500
899
+ },
900
+ {
901
+ "epoch": 12.764260071798963,
902
+ "grad_norm": 2.7274303436279297,
903
+ "learning_rate": 1.810380933386518e-05,
904
+ "loss": 5.1089,
905
+ "step": 64000
906
+ },
907
+ {
908
+ "epoch": 12.863980853609892,
909
+ "grad_norm": 2.760390520095825,
910
+ "learning_rate": 1.785500598324691e-05,
911
+ "loss": 5.1175,
912
+ "step": 64500
913
+ },
914
+ {
915
+ "epoch": 12.963701635420822,
916
+ "grad_norm": 2.7950050830841064,
917
+ "learning_rate": 1.7605704028719585e-05,
918
+ "loss": 5.1173,
919
+ "step": 65000
920
+ },
921
+ {
922
+ "epoch": 13.06342241723175,
923
+ "grad_norm": 2.5247349739074707,
924
+ "learning_rate": 1.7356402074192264e-05,
925
+ "loss": 5.0765,
926
+ "step": 65500
927
+ },
928
+ {
929
+ "epoch": 13.16314319904268,
930
+ "grad_norm": 2.5236001014709473,
931
+ "learning_rate": 1.710710011966494e-05,
932
+ "loss": 5.0699,
933
+ "step": 66000
934
+ },
935
+ {
936
+ "epoch": 13.26286398085361,
937
+ "grad_norm": 2.407404661178589,
938
+ "learning_rate": 1.6858296769046668e-05,
939
+ "loss": 5.0999,
940
+ "step": 66500
941
+ },
942
+ {
943
+ "epoch": 13.36258476266454,
944
+ "grad_norm": 2.665024518966675,
945
+ "learning_rate": 1.6608994814519347e-05,
946
+ "loss": 5.0852,
947
+ "step": 67000
948
+ },
949
+ {
950
+ "epoch": 13.462305544475468,
951
+ "grad_norm": 2.7694313526153564,
952
+ "learning_rate": 1.6359692859992023e-05,
953
+ "loss": 5.0785,
954
+ "step": 67500
955
+ },
956
+ {
957
+ "epoch": 13.562026326286398,
958
+ "grad_norm": 2.839297294616699,
959
+ "learning_rate": 1.6110390905464702e-05,
960
+ "loss": 5.0826,
961
+ "step": 68000
962
+ },
963
+ {
964
+ "epoch": 13.661747108097327,
965
+ "grad_norm": 2.831908941268921,
966
+ "learning_rate": 1.586158755484643e-05,
967
+ "loss": 5.0877,
968
+ "step": 68500
969
+ },
970
+ {
971
+ "epoch": 13.761467889908257,
972
+ "grad_norm": 2.9526407718658447,
973
+ "learning_rate": 1.5612285600319106e-05,
974
+ "loss": 5.0692,
975
+ "step": 69000
976
+ },
977
+ {
978
+ "epoch": 13.861188671719185,
979
+ "grad_norm": 2.832224130630493,
980
+ "learning_rate": 1.5362983645791785e-05,
981
+ "loss": 5.0687,
982
+ "step": 69500
983
+ },
984
+ {
985
+ "epoch": 13.960909453530116,
986
+ "grad_norm": 2.622544050216675,
987
+ "learning_rate": 1.5113681691264461e-05,
988
+ "loss": 5.065,
989
+ "step": 70000
990
+ },
991
+ {
992
+ "epoch": 14.060630235341046,
993
+ "grad_norm": 2.9850549697875977,
994
+ "learning_rate": 1.486487834064619e-05,
995
+ "loss": 5.0665,
996
+ "step": 70500
997
+ },
998
+ {
999
+ "epoch": 14.160351017151974,
1000
+ "grad_norm": 2.7051777839660645,
1001
+ "learning_rate": 1.4615576386118868e-05,
1002
+ "loss": 5.0509,
1003
+ "step": 71000
1004
+ },
1005
+ {
1006
+ "epoch": 14.260071798962905,
1007
+ "grad_norm": 2.6535110473632812,
1008
+ "learning_rate": 1.4366274431591544e-05,
1009
+ "loss": 5.0426,
1010
+ "step": 71500
1011
+ },
1012
+ {
1013
+ "epoch": 14.359792580773833,
1014
+ "grad_norm": 2.8298914432525635,
1015
+ "learning_rate": 1.4116972477064222e-05,
1016
+ "loss": 5.0348,
1017
+ "step": 72000
1018
+ },
1019
+ {
1020
+ "epoch": 14.459513362584763,
1021
+ "grad_norm": 2.776466131210327,
1022
+ "learning_rate": 1.3868169126445951e-05,
1023
+ "loss": 5.0486,
1024
+ "step": 72500
1025
+ },
1026
+ {
1027
+ "epoch": 14.559234144395692,
1028
+ "grad_norm": 2.762392044067383,
1029
+ "learning_rate": 1.3618867171918629e-05,
1030
+ "loss": 5.0478,
1031
+ "step": 73000
1032
+ },
1033
+ {
1034
+ "epoch": 14.658954926206622,
1035
+ "grad_norm": 2.7879889011383057,
1036
+ "learning_rate": 1.3369565217391305e-05,
1037
+ "loss": 5.0453,
1038
+ "step": 73500
1039
+ },
1040
+ {
1041
+ "epoch": 14.75867570801755,
1042
+ "grad_norm": 2.7736377716064453,
1043
+ "learning_rate": 1.3120263262863982e-05,
1044
+ "loss": 5.0538,
1045
+ "step": 74000
1046
+ },
1047
+ {
1048
+ "epoch": 14.85839648982848,
1049
+ "grad_norm": 3.0703177452087402,
1050
+ "learning_rate": 1.2871459912245712e-05,
1051
+ "loss": 5.0649,
1052
+ "step": 74500
1053
+ },
1054
+ {
1055
+ "epoch": 14.95811727163941,
1056
+ "grad_norm": 2.8057234287261963,
1057
+ "learning_rate": 1.262215795771839e-05,
1058
+ "loss": 5.0558,
1059
+ "step": 75000
1060
+ },
1061
+ {
1062
+ "epoch": 15.05783805345034,
1063
+ "grad_norm": 2.9843761920928955,
1064
+ "learning_rate": 1.2372856003191066e-05,
1065
+ "loss": 5.0367,
1066
+ "step": 75500
1067
+ },
1068
+ {
1069
+ "epoch": 15.157558835261268,
1070
+ "grad_norm": 2.723043918609619,
1071
+ "learning_rate": 1.2123554048663741e-05,
1072
+ "loss": 5.0407,
1073
+ "step": 76000
1074
+ },
1075
+ {
1076
+ "epoch": 15.257279617072198,
1077
+ "grad_norm": 2.729093551635742,
1078
+ "learning_rate": 1.1874750698045473e-05,
1079
+ "loss": 5.0261,
1080
+ "step": 76500
1081
+ },
1082
+ {
1083
+ "epoch": 15.357000398883127,
1084
+ "grad_norm": 2.7686829566955566,
1085
+ "learning_rate": 1.162544874351815e-05,
1086
+ "loss": 5.0189,
1087
+ "step": 77000
1088
+ },
1089
+ {
1090
+ "epoch": 15.456721180694057,
1091
+ "grad_norm": 2.7151553630828857,
1092
+ "learning_rate": 1.1376146788990826e-05,
1093
+ "loss": 5.0186,
1094
+ "step": 77500
1095
+ },
1096
+ {
1097
+ "epoch": 15.556441962504985,
1098
+ "grad_norm": 2.6453444957733154,
1099
+ "learning_rate": 1.1126844834463502e-05,
1100
+ "loss": 5.0252,
1101
+ "step": 78000
1102
+ },
1103
+ {
1104
+ "epoch": 15.656162744315916,
1105
+ "grad_norm": 2.6896181106567383,
1106
+ "learning_rate": 1.0878041483845234e-05,
1107
+ "loss": 5.0129,
1108
+ "step": 78500
1109
+ },
1110
+ {
1111
+ "epoch": 15.755883526126844,
1112
+ "grad_norm": 2.6944217681884766,
1113
+ "learning_rate": 1.0628739529317911e-05,
1114
+ "loss": 5.0443,
1115
+ "step": 79000
1116
+ },
1117
+ {
1118
+ "epoch": 15.855604307937774,
1119
+ "grad_norm": 2.782818555831909,
1120
+ "learning_rate": 1.0379437574790587e-05,
1121
+ "loss": 5.0325,
1122
+ "step": 79500
1123
+ },
1124
+ {
1125
+ "epoch": 15.955325089748703,
1126
+ "grad_norm": 2.8319053649902344,
1127
+ "learning_rate": 1.0130135620263263e-05,
1128
+ "loss": 5.0277,
1129
+ "step": 80000
1130
+ },
1131
+ {
1132
+ "epoch": 16.05504587155963,
1133
+ "grad_norm": 2.616605758666992,
1134
+ "learning_rate": 9.881332269644994e-06,
1135
+ "loss": 5.0127,
1136
+ "step": 80500
1137
+ },
1138
+ {
1139
+ "epoch": 16.15476665337056,
1140
+ "grad_norm": 2.8563239574432373,
1141
+ "learning_rate": 9.632030315117672e-06,
1142
+ "loss": 5.0094,
1143
+ "step": 81000
1144
+ },
1145
+ {
1146
+ "epoch": 16.254487435181492,
1147
+ "grad_norm": 2.536868095397949,
1148
+ "learning_rate": 9.382728360590348e-06,
1149
+ "loss": 4.9974,
1150
+ "step": 81500
1151
+ },
1152
+ {
1153
+ "epoch": 16.354208216992422,
1154
+ "grad_norm": 2.8117527961730957,
1155
+ "learning_rate": 9.133426406063023e-06,
1156
+ "loss": 5.0044,
1157
+ "step": 82000
1158
+ },
1159
+ {
1160
+ "epoch": 16.453928998803352,
1161
+ "grad_norm": 2.776693344116211,
1162
+ "learning_rate": 8.884623055444755e-06,
1163
+ "loss": 4.994,
1164
+ "step": 82500
1165
+ },
1166
+ {
1167
+ "epoch": 16.55364978061428,
1168
+ "grad_norm": 2.7791900634765625,
1169
+ "learning_rate": 8.635321100917432e-06,
1170
+ "loss": 5.0239,
1171
+ "step": 83000
1172
+ },
1173
+ {
1174
+ "epoch": 16.65337056242521,
1175
+ "grad_norm": 2.8289597034454346,
1176
+ "learning_rate": 8.386019146390108e-06,
1177
+ "loss": 5.0059,
1178
+ "step": 83500
1179
+ },
1180
+ {
1181
+ "epoch": 16.75309134423614,
1182
+ "grad_norm": 2.89103102684021,
1183
+ "learning_rate": 8.136717191862784e-06,
1184
+ "loss": 5.0237,
1185
+ "step": 84000
1186
+ },
1187
+ {
1188
+ "epoch": 16.85281212604707,
1189
+ "grad_norm": 2.7034354209899902,
1190
+ "learning_rate": 7.887913841244516e-06,
1191
+ "loss": 5.0107,
1192
+ "step": 84500
1193
+ },
1194
+ {
1195
+ "epoch": 16.952532907857996,
1196
+ "grad_norm": 3.348228693008423,
1197
+ "learning_rate": 7.638611886717193e-06,
1198
+ "loss": 5.0128,
1199
+ "step": 85000
1200
+ },
1201
+ {
1202
+ "epoch": 17.052253689668927,
1203
+ "grad_norm": 2.9482028484344482,
1204
+ "learning_rate": 7.389309932189868e-06,
1205
+ "loss": 5.0143,
1206
+ "step": 85500
1207
+ },
1208
+ {
1209
+ "epoch": 17.151974471479857,
1210
+ "grad_norm": 2.8442418575286865,
1211
+ "learning_rate": 7.140007977662546e-06,
1212
+ "loss": 4.9986,
1213
+ "step": 86000
1214
+ },
1215
+ {
1216
+ "epoch": 17.251695253290787,
1217
+ "grad_norm": 2.7483792304992676,
1218
+ "learning_rate": 6.891204627044276e-06,
1219
+ "loss": 5.0003,
1220
+ "step": 86500
1221
+ },
1222
+ {
1223
+ "epoch": 17.351416035101714,
1224
+ "grad_norm": 2.8354785442352295,
1225
+ "learning_rate": 6.641902672516953e-06,
1226
+ "loss": 5.0106,
1227
+ "step": 87000
1228
+ },
1229
+ {
1230
+ "epoch": 17.451136816912644,
1231
+ "grad_norm": 3.0782690048217773,
1232
+ "learning_rate": 6.392600717989629e-06,
1233
+ "loss": 4.9894,
1234
+ "step": 87500
1235
+ },
1236
+ {
1237
+ "epoch": 17.550857598723574,
1238
+ "grad_norm": 2.572624444961548,
1239
+ "learning_rate": 6.143298763462306e-06,
1240
+ "loss": 4.9901,
1241
+ "step": 88000
1242
+ },
1243
+ {
1244
+ "epoch": 17.650578380534505,
1245
+ "grad_norm": 2.726686477661133,
1246
+ "learning_rate": 5.894495412844037e-06,
1247
+ "loss": 5.0045,
1248
+ "step": 88500
1249
+ },
1250
+ {
1251
+ "epoch": 17.75029916234543,
1252
+ "grad_norm": 2.79811429977417,
1253
+ "learning_rate": 5.645193458316714e-06,
1254
+ "loss": 4.9927,
1255
+ "step": 89000
1256
+ },
1257
+ {
1258
+ "epoch": 17.85001994415636,
1259
+ "grad_norm": 2.7528791427612305,
1260
+ "learning_rate": 5.39589150378939e-06,
1261
+ "loss": 4.9834,
1262
+ "step": 89500
1263
+ },
1264
+ {
1265
+ "epoch": 17.949740725967292,
1266
+ "grad_norm": 2.85099196434021,
1267
+ "learning_rate": 5.146589549262067e-06,
1268
+ "loss": 4.9967,
1269
+ "step": 90000
1270
+ },
1271
+ {
1272
+ "epoch": 18.049461507778222,
1273
+ "grad_norm": 2.686501979827881,
1274
+ "learning_rate": 4.897786198643798e-06,
1275
+ "loss": 4.9948,
1276
+ "step": 90500
1277
+ },
1278
+ {
1279
+ "epoch": 18.14918228958915,
1280
+ "grad_norm": 3.057145357131958,
1281
+ "learning_rate": 4.648484244116474e-06,
1282
+ "loss": 4.9885,
1283
+ "step": 91000
1284
+ },
1285
+ {
1286
+ "epoch": 18.24890307140008,
1287
+ "grad_norm": 2.915149211883545,
1288
+ "learning_rate": 4.399182289589151e-06,
1289
+ "loss": 4.9876,
1290
+ "step": 91500
1291
+ },
1292
+ {
1293
+ "epoch": 18.34862385321101,
1294
+ "grad_norm": 2.907449960708618,
1295
+ "learning_rate": 4.149880335061828e-06,
1296
+ "loss": 5.0049,
1297
+ "step": 92000
1298
+ },
1299
+ {
1300
+ "epoch": 18.44834463502194,
1301
+ "grad_norm": 2.798488140106201,
1302
+ "learning_rate": 3.901076984443558e-06,
1303
+ "loss": 4.9643,
1304
+ "step": 92500
1305
+ },
1306
+ {
1307
+ "epoch": 18.54806541683287,
1308
+ "grad_norm": 2.8539681434631348,
1309
+ "learning_rate": 3.6517750299162346e-06,
1310
+ "loss": 4.9917,
1311
+ "step": 93000
1312
+ },
1313
+ {
1314
+ "epoch": 18.647786198643796,
1315
+ "grad_norm": 2.7598862648010254,
1316
+ "learning_rate": 3.4024730753889117e-06,
1317
+ "loss": 4.9894,
1318
+ "step": 93500
1319
+ },
1320
+ {
1321
+ "epoch": 18.747506980454727,
1322
+ "grad_norm": 2.577714443206787,
1323
+ "learning_rate": 3.153171120861588e-06,
1324
+ "loss": 4.9607,
1325
+ "step": 94000
1326
+ },
1327
+ {
1328
+ "epoch": 18.847227762265657,
1329
+ "grad_norm": 2.8153915405273438,
1330
+ "learning_rate": 2.904367770243319e-06,
1331
+ "loss": 4.9905,
1332
+ "step": 94500
1333
+ },
1334
+ {
1335
+ "epoch": 18.946948544076587,
1336
+ "grad_norm": 2.7925517559051514,
1337
+ "learning_rate": 2.6550658157159952e-06,
1338
+ "loss": 5.0036,
1339
+ "step": 95000
1340
+ },
1341
+ {
1342
+ "epoch": 19.046669325887514,
1343
+ "grad_norm": 2.60158371925354,
1344
+ "learning_rate": 2.405763861188672e-06,
1345
+ "loss": 4.9877,
1346
+ "step": 95500
1347
+ },
1348
+ {
1349
+ "epoch": 19.146390107698444,
1350
+ "grad_norm": 2.7144620418548584,
1351
+ "learning_rate": 2.156461906661348e-06,
1352
+ "loss": 5.0011,
1353
+ "step": 96000
1354
+ },
1355
+ {
1356
+ "epoch": 19.246110889509374,
1357
+ "grad_norm": 2.779771327972412,
1358
+ "learning_rate": 1.9076585560430792e-06,
1359
+ "loss": 4.9869,
1360
+ "step": 96500
1361
+ },
1362
+ {
1363
+ "epoch": 19.345831671320305,
1364
+ "grad_norm": 2.8586442470550537,
1365
+ "learning_rate": 1.6583566015157561e-06,
1366
+ "loss": 4.9858,
1367
+ "step": 97000
1368
+ },
1369
+ {
1370
+ "epoch": 19.44555245313123,
1371
+ "grad_norm": 2.572789430618286,
1372
+ "learning_rate": 1.4090546469884324e-06,
1373
+ "loss": 4.9934,
1374
+ "step": 97500
1375
+ },
1376
+ {
1377
+ "epoch": 19.54527323494216,
1378
+ "grad_norm": 2.7267684936523438,
1379
+ "learning_rate": 1.159752692461109e-06,
1380
+ "loss": 4.9714,
1381
+ "step": 98000
1382
+ },
1383
+ {
1384
+ "epoch": 19.644994016753092,
1385
+ "grad_norm": 2.7596216201782227,
1386
+ "learning_rate": 9.109493418428401e-07,
1387
+ "loss": 4.9698,
1388
+ "step": 98500
1389
+ },
1390
+ {
1391
+ "epoch": 19.744714798564022,
1392
+ "grad_norm": 2.850459575653076,
1393
+ "learning_rate": 6.616473873155166e-07,
1394
+ "loss": 4.9653,
1395
+ "step": 99000
1396
+ },
1397
+ {
1398
+ "epoch": 19.84443558037495,
1399
+ "grad_norm": 3.2066328525543213,
1400
+ "learning_rate": 4.1234543278819307e-07,
1401
+ "loss": 4.964,
1402
+ "step": 99500
1403
+ },
1404
+ {
1405
+ "epoch": 19.94415636218588,
1406
+ "grad_norm": 3.170915365219116,
1407
+ "learning_rate": 1.6304347826086955e-07,
1408
+ "loss": 4.9803,
1409
+ "step": 100000
1410
+ },
1411
+ {
1412
+ "epoch": 20.0,
1413
+ "step": 100280,
1414
+ "total_flos": 488922611712000.0,
1415
+ "train_loss": 5.48259629872295,
1416
+ "train_runtime": 4495.3437,
1417
+ "train_samples_per_second": 356.898,
1418
+ "train_steps_per_second": 22.308
1419
+ }
1420
+ ],
1421
+ "logging_steps": 500,
1422
+ "max_steps": 100280,
1423
+ "num_input_tokens_seen": 0,
1424
+ "num_train_epochs": 20,
1425
+ "save_steps": 500,
1426
+ "stateful_callbacks": {
1427
+ "TrainerControl": {
1428
+ "args": {
1429
+ "should_epoch_stop": false,
1430
+ "should_evaluate": false,
1431
+ "should_log": false,
1432
+ "should_save": true,
1433
+ "should_training_stop": true
1434
+ },
1435
+ "attributes": {}
1436
+ }
1437
+ },
1438
+ "total_flos": 488922611712000.0,
1439
+ "train_batch_size": 16,
1440
+ "trial_name": null,
1441
+ "trial_params": null
1442
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d70000823e4055e92e6c49683ff6e821a1edb31eb6b4486e7920823eb06eb349
+ size 5176
vocab.json ADDED
The diff for this file is too large to render. See raw diff