jtatman committed
Commit 25ba530
1 Parent(s): ac77227

End of training

Files changed (4):
  1. README.md +64 -63
  2. adapter_model.safetensors +2 -2
  3. config.json +1 -1
  4. pytorch_model.bin +1 -1
README.md CHANGED
@@ -69,7 +69,7 @@ output_dir: ./outputs/lora-alpaca-pythia-160m-dolphin-extended
  gradient_accumulation_steps: 16
  micro_batch_size: 1
  num_epochs: 1
- learning_rate: 0.0006
+ learning_rate: 0.0004
  lr_scheduler: cosine_with_restarts
  #cosine_min_lr_ratio: 0.1
  train_on_inputs: false
@@ -84,7 +84,7 @@ optimizer: paged_adamw_8bit
  gpu_memory_limit: 8GiB
  hub_model_id: jtatman/pythia-160m-dolphin-extended
  early_stopping_patience: 10
- #resume_from_checkpoint: outputs/lora-alpaca-pythia-125m/checkpoint-51040
+ #resume_from_checkpoint: outputs/lora-alpaca-pythia-160m-dolphin-extended/checkpoint-11400
  auto_resume_from_checkpoints: true
  local_rank:
  weight_decay: 0.0
@@ -106,7 +106,7 @@ tokens:

  This model is a fine-tuned version of [EleutherAI/pythia-160m-deduped](https://huggingface.co/EleutherAI/pythia-160m-deduped) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 5.3345
+ - Loss: 6.6729

  ## Model description

@@ -125,7 +125,7 @@ More information needed
  ### Training hyperparameters

  The following hyperparameters were used during training:
- - learning_rate: 0.0006
+ - learning_rate: 0.0004
  - train_batch_size: 1
  - eval_batch_size: 1
  - seed: 42
@@ -140,65 +140,66 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss |
  |:-------------:|:------:|:-----:|:---------------:|
- | 25.9906 | 0.0001 | 1 | 29.5451 |
- | 30.6876 | 0.0167 | 200 | 26.6061 |
- | 15.1401 | 0.0334 | 400 | 13.0583 |
- | 12.521 | 0.0500 | 600 | 10.7947 |
- | 10.212 | 0.0667 | 800 | 10.5847 |
- | 9.619 | 0.0834 | 1000 | 10.7486 |
- | 11.9315 | 0.1001 | 1200 | 10.9554 |
- | 14.3105 | 0.1167 | 1400 | 10.3818 |
- | 10.5925 | 0.1334 | 1600 | 10.6131 |
- | 8.7233 | 0.1501 | 1800 | 10.2776 |
- | 10.2267 | 0.1668 | 2000 | 10.0918 |
- | 12.8447 | 0.1835 | 2200 | 10.3923 |
- | 6.329 | 0.2001 | 2400 | 9.7525 |
- | 11.7827 | 0.2168 | 2600 | 10.3966 |
- | 13.6659 | 0.2335 | 2800 | 10.3891 |
- | 13.903 | 0.2502 | 3000 | 9.6615 |
- | 7.8718 | 0.2668 | 3200 | 9.7266 |
- | 11.3558 | 0.2835 | 3400 | 9.2946 |
- | 7.1755 | 0.3002 | 3600 | 8.7202 |
- | 8.2074 | 0.3169 | 3800 | 8.5147 |
- | 7.0288 | 0.3335 | 4000 | 7.2318 |
- | 9.7612 | 0.3502 | 4200 | 7.5585 |
- | 4.6886 | 0.3669 | 4400 | 7.0378 |
- | 11.0692 | 0.3836 | 4600 | 6.6091 |
- | 4.8223 | 0.4003 | 4800 | 6.7305 |
- | 6.6341 | 0.4169 | 5000 | 6.5858 |
- | 11.4613 | 0.4336 | 5200 | 6.5236 |
- | 12.5182 | 0.4503 | 5400 | 6.4048 |
- | 11.9191 | 0.4670 | 5600 | 6.4032 |
- | 7.9905 | 0.4836 | 5800 | 5.7290 |
- | 10.2991 | 0.5003 | 6000 | 5.7079 |
- | 4.6978 | 0.5170 | 6200 | 6.0383 |
- | 5.5322 | 0.5337 | 6400 | 5.8702 |
- | 8.5077 | 0.5504 | 6600 | 5.6017 |
- | 5.5676 | 0.5670 | 6800 | 5.8460 |
- | 5.0347 | 0.5837 | 7000 | 5.7875 |
- | 5.3157 | 0.6004 | 7200 | 5.4782 |
- | 6.8562 | 0.6171 | 7400 | 5.7030 |
- | 5.2433 | 0.6337 | 7600 | 5.5765 |
- | 4.4054 | 0.6504 | 7800 | 5.6948 |
- | 6.4413 | 0.6671 | 8000 | 5.4767 |
- | 4.5828 | 0.6838 | 8200 | 5.6491 |
- | 4.4912 | 0.7004 | 8400 | 5.7442 |
- | 5.2625 | 0.7171 | 8600 | 5.5131 |
- | 5.0451 | 0.7338 | 8800 | 5.6446 |
- | 4.7825 | 0.7505 | 9000 | 5.5226 |
- | 4.7226 | 0.7672 | 9200 | 5.4118 |
- | 6.0616 | 0.7838 | 9400 | 5.2987 |
- | 5.4928 | 0.8005 | 9600 | 5.2385 |
- | 6.1017 | 0.8172 | 9800 | 5.4942 |
- | 5.1683 | 0.8339 | 10000 | 5.2841 |
- | 4.4583 | 0.8505 | 10200 | 5.4625 |
- | 5.1028 | 0.8672 | 10400 | 5.4928 |
- | 4.4848 | 0.8839 | 10600 | 5.3151 |
- | 4.9981 | 0.9006 | 10800 | 5.3956 |
- | 4.7987 | 0.9173 | 11000 | 5.2824 |
- | 4.5008 | 0.9339 | 11200 | 5.6660 |
- | 4.037 | 0.9506 | 11400 | 5.6325 |
- | 4.5158 | 0.9673 | 11600 | 5.3345 |
+ | 25.9906 | 0.0001 | 1 | 29.5342 |
+ | 21.1303 | 0.0167 | 200 | 20.2350 |
+ | 16.5026 | 0.0334 | 400 | 18.4930 |
+ | 17.2725 | 0.0500 | 600 | 16.3395 |
+ | 11.9697 | 0.0667 | 800 | 12.1401 |
+ | 11.3783 | 0.0834 | 1000 | 11.8383 |
+ | 12.8084 | 0.1001 | 1200 | 12.9667 |
+ | 9.4119 | 0.1167 | 1400 | 9.8787 |
+ | 10.3527 | 0.1334 | 1600 | 10.0560 |
+ | 9.3545 | 0.1501 | 1800 | 9.7355 |
+ | 8.9165 | 0.1668 | 2000 | 9.1513 |
+ | 8.5467 | 0.1835 | 2200 | 8.2025 |
+ | 7.9152 | 0.2001 | 2400 | 7.6616 |
+ | 7.3362 | 0.2168 | 2600 | 7.5699 |
+ | 7.9374 | 0.2335 | 2800 | 7.4818 |
+ | 7.838 | 0.2502 | 3000 | 7.4635 |
+ | 7.5731 | 0.2668 | 3200 | 7.4899 |
+ | 7.8289 | 0.2835 | 3400 | 7.3594 |
+ | 7.8906 | 0.3002 | 3600 | 8.0934 |
+ | 7.7318 | 0.3169 | 3800 | 7.5812 |
+ | 7.2089 | 0.3335 | 4000 | 7.4839 |
+ | 7.202 | 0.3502 | 4200 | 7.4486 |
+ | 6.9493 | 0.3669 | 4400 | 7.3208 |
+ | 7.1492 | 0.3836 | 4600 | 7.2469 |
+ | 7.3443 | 0.4003 | 4800 | 7.1378 |
+ | 7.7056 | 0.4169 | 5000 | 7.1385 |
+ | 55.0553 | 0.4336 | 5200 | 50.0135 |
+ | 7.1868 | 0.4503 | 5400 | 6.9898 |
+ | 6.5803 | 0.4670 | 5600 | 6.9559 |
+ | 8.6171 | 0.4836 | 5800 | 7.9075 |
+ | 7.1373 | 0.5003 | 6000 | 6.9280 |
+ | 6.7077 | 0.5170 | 6200 | 6.8797 |
+ | 7.0026 | 0.5337 | 6400 | 6.8635 |
+ | 6.6797 | 0.5504 | 6600 | 6.8178 |
+ | 6.8067 | 0.5670 | 6800 | 6.7893 |
+ | 6.5979 | 0.5837 | 7000 | 6.8106 |
+ | 6.7283 | 0.6004 | 7200 | 6.7998 |
+ | 7.0015 | 0.6171 | 7400 | 6.7705 |
+ | 6.1182 | 0.6337 | 7600 | 6.7592 |
+ | 6.7919 | 0.6504 | 7800 | 6.7446 |
+ | 6.4523 | 0.6671 | 8000 | 6.7260 |
+ | 6.765 | 0.6838 | 8200 | 6.7135 |
+ | 6.4625 | 0.7004 | 8400 | 6.7099 |
+ | 6.79 | 0.7171 | 8600 | 6.7070 |
+ | 6.6101 | 0.7338 | 8800 | 6.7017 |
+ | 6.7541 | 0.7505 | 9000 | 6.6964 |
+ | 6.7777 | 0.7672 | 9200 | 6.6901 |
+ | 7.2082 | 0.7838 | 9400 | 6.6869 |
+ | 6.4263 | 0.8005 | 9600 | 6.6875 |
+ | 6.1944 | 0.8172 | 9800 | 6.6803 |
+ | 6.7745 | 0.8339 | 10000 | 6.6865 |
+ | 6.6746 | 0.8505 | 10200 | 6.6756 |
+ | 6.6319 | 0.8672 | 10400 | 6.6941 |
+ | 6.6657 | 0.8839 | 10600 | 6.6764 |
+ | 6.8516 | 0.9006 | 10800 | 6.6776 |
+ | 6.6391 | 0.9173 | 11000 | 6.6749 |
+ | 6.5763 | 0.9339 | 11200 | 6.6729 |
+ | 6.585 | 0.9506 | 11400 | 6.6694 |
+ | 6.2999 | 0.9673 | 11600 | 6.6722 |
+ | 6.8343 | 0.9840 | 11800 | 6.6729 |


  ### Framework versions
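The updated config above keeps `lr_scheduler: cosine_with_restarts` while lowering the peak learning rate from 0.0006 to 0.0004. For readers unfamiliar with that scheduler name, transformers exposes a schedule of the same kind via `get_cosine_with_hard_restarts_schedule_with_warmup`; the sketch below is illustrative only, and the warmup steps and cycle count are assumptions, not values from this run:

```python
# Minimal sketch of a cosine-with-restarts LR schedule, matching the
# `lr_scheduler: cosine_with_restarts` setting in the config above.
# The warmup steps and num_cycles are hypothetical; num_training_steps
# roughly matches the final step shown in the results table.
import torch
from transformers import get_cosine_with_hard_restarts_schedule_with_warmup

# Stand-in parameter so the sketch runs without downloading the model.
param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.AdamW([param], lr=4e-4)  # peak LR from the updated config

scheduler = get_cosine_with_hard_restarts_schedule_with_warmup(
    optimizer,
    num_warmup_steps=100,      # assumption, not taken from this run
    num_training_steps=11800,  # last step in the new results table
    num_cycles=2,              # assumption: the cosine decays and resets to peak LR twice
)

lrs = []
for _ in range(11800):
    optimizer.step()
    scheduler.step()
    lrs.append(scheduler.get_last_lr()[0])
# `lrs` now traces the warmup followed by cosine cycles that restart at the peak LR.
```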
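The new run's table descends from ~29.5 to ~6.67 validation loss, with a transient spike at step 5200. To eyeball a curve like this, a small helper can pull the step and validation-loss columns out of the card's markdown table; a sketch, assuming the card is saved locally as `README.md` with the four-column layout above:

```python
# Sketch: parse the "Training results" markdown table from a model card
# and plot validation loss by step. Assumes the four-column layout
# (Training Loss | Epoch | Step | Validation Loss); the file path is an
# assumption.
import re
import matplotlib.pyplot as plt

steps, val_loss = [], []
with open("README.md", encoding="utf-8") as f:
    for line in f:
        cells = [c.strip() for c in line.strip().strip("|").split("|")]
        # Keep only data rows: exactly four cells with an integer step column.
        if len(cells) == 4 and re.fullmatch(r"\d+", cells[2]):
            steps.append(int(cells[2]))
            val_loss.append(float(cells[3]))

plt.plot(steps, val_loss, marker=".")
plt.xlabel("step")
plt.ylabel("validation loss")
plt.title("pythia-160m-dolphin-extended eval loss")
plt.savefig("val_loss.png")
```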
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:309e6076d1fdae624f21dc047777fa3c40fdc141362c9aea9188121255eb986f
- size 155717368
+ oid sha256:e44ce263e6fd885f50d82ca515b9325375b43ee36ededb75acf161ce88bc2e41
+ size 48
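The binary weights are stored via Git LFS, so the diff above compares pointer files rather than the weights themselves: `oid sha256:...` is the hash of the real payload and `size` its byte count (48 bytes for the new adapter). A sketch of checking a downloaded file against those pointer fields; the local path is hypothetical:

```python
# Sketch: verify a Git LFS-tracked file against its pointer metadata.
# A pointer file records the SHA-256 of the real content ("oid") and
# its byte size, as shown in the diff above.
import hashlib
import os

def verify_lfs_object(path: str, oid_hex: str, size: int) -> bool:
    """Return True if `path` matches the pointer's sha256 oid and size."""
    if os.path.getsize(path) != size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == oid_hex

# Values from the new adapter pointer in this commit; the local path
# is an assumed download location.
ok = verify_lfs_object(
    "adapter_model.safetensors",
    "e44ce263e6fd885f50d82ca515b9325375b43ee36ededb75acf161ce88bc2e41",
    48,
)
print("pointer matches content:", ok)
```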
config.json CHANGED
@@ -22,7 +22,7 @@
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "tie_word_embeddings": false,
- "torch_dtype": "float16",
+ "torch_dtype": "bfloat16",
  "transformers_version": "4.41.2",
  "use_cache": false,
  "use_parallel_residual": true,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:918bf4ce4696f96c4eb6666643b83882e350a989ff145826d791c242261dc7e9
+ oid sha256:cbab5c7bca5353a2e8c0c41092cc717c126a4fe034466b9c2e66b7673c602baf
  size 324696090