mtzig committed on
Commit e3f8185 · verified · 1 Parent(s): c3b3275

Model save

Files changed (4)
  1. README.md +80 -80
  2. config.json +5 -5
  3. model.safetensors +2 -2
  4. training_args.bin +1 -1
README.md CHANGED
@@ -16,7 +16,7 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 1.6817
+ - Loss: 0.8346
  - Accuracy: 0.0
 
  ## Model description
@@ -49,85 +49,85 @@ The following hyperparameters were used during training:
 
  | Training Loss | Epoch | Step | Validation Loss | Accuracy |
  |:-------------:|:------:|:----:|:---------------:|:--------:|
- | No log | 0 | 0 | 2.6804 | 0.0 |
- | 2.2675 | 0.0128 | 100 | 2.2266 | 0.0 |
- | 2.1492 | 0.0256 | 200 | 2.1178 | 0.0 |
- | 2.1295 | 0.0384 | 300 | 2.0687 | 0.0 |
- | 2.2024 | 0.0512 | 400 | 2.1664 | 0.0 |
- | 2.0 | 0.0640 | 500 | 1.9627 | 0.0 |
- | 2.0962 | 0.0768 | 600 | 2.1138 | 0.0 |
- | 2.0072 | 0.0896 | 700 | 2.0115 | 0.0 |
- | 2.097 | 0.1024 | 800 | 2.0415 | 0.0 |
- | 2.0797 | 0.1152 | 900 | 1.9546 | 0.0 |
- | 1.9762 | 0.1280 | 1000 | 2.1161 | 0.0 |
- | 1.9884 | 0.1408 | 1100 | 1.9407 | 0.0 |
- | 1.9643 | 0.1536 | 1200 | 1.9384 | 0.0 |
- | 1.9923 | 0.1665 | 1300 | 1.9917 | 0.0 |
- | 1.947 | 0.1793 | 1400 | 1.9375 | 0.0 |
- | 1.9744 | 0.1921 | 1500 | 1.9428 | 0.0 |
- | 1.9553 | 0.2049 | 1600 | 1.9343 | 0.0 |
- | 1.9432 | 0.2177 | 1700 | 1.9203 | 0.0 |
- | 1.9371 | 0.2305 | 1800 | 1.9146 | 0.0 |
- | 1.9352 | 0.2433 | 1900 | 1.9568 | 0.0 |
- | 1.9524 | 0.2561 | 2000 | 1.9041 | 0.0 |
- | 1.9202 | 0.2689 | 2100 | 1.8986 | 0.0 |
- | 1.9001 | 0.2817 | 2200 | 1.8738 | 0.0 |
- | 1.9562 | 0.2945 | 2300 | 1.9206 | 0.0 |
- | 1.8877 | 0.3073 | 2400 | 1.9163 | 0.0 |
- | 1.8626 | 0.3201 | 2500 | 1.8233 | 0.0 |
- | 1.8348 | 0.3329 | 2600 | 1.8809 | 0.0 |
- | 1.8347 | 0.3457 | 2700 | 1.8131 | 0.0 |
- | 1.8213 | 0.3585 | 2800 | 1.7922 | 0.0 |
- | 1.7962 | 0.3713 | 2900 | 1.8129 | 0.0 |
- | 1.8744 | 0.3841 | 3000 | 1.8993 | 0.0 |
- | 1.8734 | 0.3969 | 3100 | 1.8109 | 0.0 |
- | 1.9698 | 0.4097 | 3200 | 1.8633 | 0.0 |
- | 1.9312 | 0.4225 | 3300 | 1.8544 | 0.0 |
- | 1.8277 | 0.4353 | 3400 | 1.7484 | 0.0 |
- | 1.7854 | 0.4481 | 3500 | 1.7561 | 0.0 |
- | 1.7452 | 0.4609 | 3600 | 1.7582 | 0.0 |
- | 1.802 | 0.4738 | 3700 | 1.7538 | 0.0 |
- | 1.7698 | 0.4866 | 3800 | 1.7321 | 0.0 |
- | 1.7697 | 0.4994 | 3900 | 1.7262 | 0.0 |
- | 1.7468 | 0.5122 | 4000 | 1.7293 | 0.0 |
- | 1.7811 | 0.5250 | 4100 | 1.7334 | 0.0 |
- | 1.7113 | 0.5378 | 4200 | 1.7089 | 0.0 |
- | 1.7738 | 0.5506 | 4300 | 1.7482 | 0.0 |
- | 1.7791 | 0.5634 | 4400 | 1.7244 | 0.0 |
- | 1.742 | 0.5762 | 4500 | 1.7046 | 0.0 |
- | 1.7209 | 0.5890 | 4600 | 1.6993 | 0.0 |
- | 1.7457 | 0.6018 | 4700 | 1.7321 | 0.0 |
- | 1.7085 | 0.6146 | 4800 | 1.6991 | 0.0 |
- | 1.718 | 0.6274 | 4900 | 1.6914 | 0.0 |
- | 1.7197 | 0.6402 | 5000 | 1.6944 | 0.0 |
- | 1.6825 | 0.6530 | 5100 | 1.6938 | 0.0 |
- | 1.7476 | 0.6658 | 5200 | 1.6952 | 0.0 |
- | 1.7131 | 0.6786 | 5300 | 1.6917 | 0.0 |
- | 1.8437 | 0.6914 | 5400 | 1.8308 | 0.0 |
- | 1.7133 | 0.7042 | 5500 | 1.6897 | 0.0 |
- | 1.6871 | 0.7170 | 5600 | 1.6904 | 0.0 |
- | 1.7024 | 0.7298 | 5700 | 1.6852 | 0.0 |
- | 1.7467 | 0.7426 | 5800 | 1.6853 | 0.0 |
- | 1.7108 | 0.7554 | 5900 | 1.6861 | 0.0 |
- | 1.6875 | 0.7682 | 6000 | 1.6835 | 0.0 |
- | 1.7171 | 0.7810 | 6100 | 1.6823 | 0.0 |
- | 1.7107 | 0.7939 | 6200 | 1.6817 | 0.0 |
- | 1.7082 | 0.8067 | 6300 | 1.6833 | 0.0 |
- | 1.6806 | 0.8195 | 6400 | 1.6833 | 0.0 |
- | 1.6861 | 0.8323 | 6500 | 1.6829 | 0.0 |
- | 1.6941 | 0.8451 | 6600 | 1.6819 | 0.0 |
- | 1.6907 | 0.8579 | 6700 | 1.6823 | 0.0 |
- | 1.7049 | 0.8707 | 6800 | 1.6810 | 0.0 |
- | 1.6958 | 0.8835 | 6900 | 1.6816 | 0.0 |
- | 1.7268 | 0.8963 | 7000 | 1.6817 | 0.0 |
- | 1.7255 | 0.9091 | 7100 | 1.6822 | 0.0 |
- | 1.7086 | 0.9219 | 7200 | 1.6820 | 0.0 |
- | 1.7244 | 0.9347 | 7300 | 1.6819 | 0.0 |
- | 1.6964 | 0.9475 | 7400 | 1.6822 | 0.0 |
- | 1.7062 | 0.9603 | 7500 | 1.6817 | 0.0 |
- | 1.7114 | 0.9731 | 7600 | 1.6817 | 0.0 |
- | 1.7363 | 0.9859 | 7700 | 1.6817 | 0.0 |
- | 1.7008 | 0.9987 | 7800 | 1.6817 | 0.0 |
+ | No log | 0 | 0 | 2.6609 | 0.0 |
+ | 1.8563 | 0.0128 | 100 | 1.8701 | 0.0 |
+ | 1.8631 | 0.0256 | 200 | 1.8579 | 0.0 |
+ | 1.8648 | 0.0384 | 300 | 1.8517 | 0.0 |
+ | 1.8491 | 0.0512 | 400 | 1.8521 | 0.0 |
+ | 1.8554 | 0.0640 | 500 | 1.8477 | 0.0 |
+ | 1.8473 | 0.0768 | 600 | 1.8471 | 0.0 |
+ | 1.8067 | 0.0896 | 700 | 1.7959 | 0.0 |
+ | 1.6826 | 0.1024 | 800 | 1.6881 | 0.0 |
+ | 1.7295 | 0.1152 | 900 | 1.7043 | 0.0 |
+ | 1.5475 | 0.1280 | 1000 | 1.5749 | 0.0 |
+ | 1.5244 | 0.1408 | 1100 | 1.5619 | 0.0 |
+ | 1.5361 | 0.1536 | 1200 | 1.5511 | 0.0 |
+ | 1.4774 | 0.1665 | 1300 | 1.5394 | 0.0 |
+ | 1.4691 | 0.1793 | 1400 | 1.5045 | 0.0 |
+ | 1.4446 | 0.1921 | 1500 | 1.3720 | 0.0 |
+ | 1.2363 | 0.2049 | 1600 | 1.2546 | 0.0 |
+ | 1.3268 | 0.2177 | 1700 | 1.2886 | 0.0 |
+ | 1.2509 | 0.2305 | 1800 | 1.2707 | 0.0 |
+ | 1.3544 | 0.2433 | 1900 | 1.2846 | 0.0 |
+ | 1.2432 | 0.2561 | 2000 | 1.2700 | 0.0 |
+ | 1.1864 | 0.2689 | 2100 | 1.2224 | 0.0 |
+ | 1.3889 | 0.2817 | 2200 | 1.4080 | 0.0 |
+ | 1.1777 | 0.2945 | 2300 | 1.2035 | 0.0 |
+ | 1.2342 | 0.3073 | 2400 | 1.1984 | 0.0 |
+ | 1.1624 | 0.3201 | 2500 | 1.1610 | 0.0 |
+ | 1.1352 | 0.3329 | 2600 | 1.1979 | 0.0 |
+ | 1.2747 | 0.3457 | 2700 | 1.2184 | 0.0 |
+ | 1.1499 | 0.3585 | 2800 | 1.1461 | 0.0 |
+ | 1.086 | 0.3713 | 2900 | 1.1256 | 0.0 |
+ | 1.0623 | 0.3841 | 3000 | 1.1108 | 0.0 |
+ | 1.14 | 0.3969 | 3100 | 1.1380 | 0.0 |
+ | 1.092 | 0.4097 | 3200 | 1.1087 | 0.0 |
+ | 1.1119 | 0.4225 | 3300 | 1.1128 | 0.0 |
+ | 1.0986 | 0.4353 | 3400 | 1.1065 | 0.0 |
+ | 1.1209 | 0.4481 | 3500 | 1.1146 | 0.0 |
+ | 1.1403 | 0.4609 | 3600 | 1.1177 | 0.0 |
+ | 1.0833 | 0.4738 | 3700 | 1.1079 | 0.0 |
+ | 1.058 | 0.4866 | 3800 | 1.0979 | 0.0 |
+ | 1.0343 | 0.4994 | 3900 | 1.0948 | 0.0 |
+ | 1.114 | 0.5122 | 4000 | 1.0905 | 0.0 |
+ | 1.0575 | 0.5250 | 4100 | 1.0938 | 0.0 |
+ | 1.0455 | 0.5378 | 4200 | 1.0924 | 0.0 |
+ | 1.1491 | 0.5506 | 4300 | 1.0869 | 0.0 |
+ | 1.0754 | 0.5634 | 4400 | 1.0866 | 0.0 |
+ | 1.1082 | 0.5762 | 4500 | 1.0741 | 0.0 |
+ | 1.068 | 0.5890 | 4600 | 1.0692 | 0.0 |
+ | 1.0465 | 0.6018 | 4700 | 1.0755 | 0.0 |
+ | 1.1529 | 0.6146 | 4800 | 1.1614 | 0.0 |
+ | 0.9963 | 0.6274 | 4900 | 1.0647 | 0.01 |
+ | 1.0269 | 0.6402 | 5000 | 1.0322 | 0.0 |
+ | 0.9824 | 0.6530 | 5100 | 1.0241 | 0.0 |
+ | 0.989 | 0.6658 | 5200 | 1.0183 | 0.0 |
+ | 1.0653 | 0.6786 | 5300 | 0.9991 | 0.005 |
+ | 0.9786 | 0.6914 | 5400 | 1.0060 | 0.0 |
+ | 1.1023 | 0.7042 | 5500 | 1.0032 | 0.0 |
+ | 0.9057 | 0.7170 | 5600 | 0.9354 | 0.0 |
+ | 0.9622 | 0.7298 | 5700 | 0.9240 | 0.0 |
+ | 0.8582 | 0.7426 | 5800 | 0.9358 | 0.0 |
+ | 0.8251 | 0.7554 | 5900 | 0.8959 | 0.005 |
+ | 0.7712 | 0.7682 | 6000 | 0.8940 | 0.005 |
+ | 0.8207 | 0.7810 | 6100 | 0.8885 | 0.005 |
+ | 0.8718 | 0.7939 | 6200 | 0.8886 | 0.0 |
+ | 0.8238 | 0.8067 | 6300 | 0.8771 | 0.0 |
+ | 0.9613 | 0.8195 | 6400 | 0.8826 | 0.0 |
+ | 0.821 | 0.8323 | 6500 | 0.8762 | 0.0 |
+ | 0.847 | 0.8451 | 6600 | 0.8555 | 0.01 |
+ | 0.9417 | 0.8579 | 6700 | 0.8519 | 0.0 |
+ | 0.808 | 0.8707 | 6800 | 0.8491 | 0.0 |
+ | 0.9 | 0.8835 | 6900 | 0.8519 | 0.0 |
+ | 0.8663 | 0.8963 | 7000 | 0.8457 | 0.0 |
+ | 0.844 | 0.9091 | 7100 | 0.8448 | 0.005 |
+ | 0.9081 | 0.9219 | 7200 | 0.8374 | 0.0 |
+ | 0.7387 | 0.9347 | 7300 | 0.8376 | 0.0 |
+ | 0.8548 | 0.9475 | 7400 | 0.8345 | 0.0 |
+ | 0.8636 | 0.9603 | 7500 | 0.8345 | 0.0 |
+ | 0.8227 | 0.9731 | 7600 | 0.8345 | 0.0 |
+ | 0.7796 | 0.9859 | 7700 | 0.8347 | 0.0 |
+ | 0.8433 | 0.9987 | 7800 | 0.8346 | 0.0 |
 
 
  ### Framework versions
config.json CHANGED
@@ -3,16 +3,16 @@
  "NanoGPT"
  ],
  "bias": true,
- "block_size": 1024,
+ "block_size": 256,
  "dropout": 0.0,
  "model_type": "nanogpt",
- "n_embd": 256,
+ "n_embd": 384,
- "n_head": 4,
+ "n_head": 6,
  "n_layer": 6,
- "nonlinearity": "RELU",
+ "nonlinearity": "GELU",
  "torch_dtype": "float32",
  "transformers_version": "4.46.0",
- "use_NoPE": true,
+ "use_NoPE": false,
  "use_layernorm": true,
  "vocab_size": 14
  }
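The new config widens the model (n_embd 256 → 384, n_head 4 → 6) and turns NoPE off, which together roughly account for the jump in model.safetensors from ~19 MB to ~43 MB below. Here is a minimal sanity-check sketch, assuming a standard nanoGPT-style block (fused QKV plus output projection, 4x MLP, tied token embedding/LM head) and positional embeddings that are learned per position up to block_size; the biases and LayerNorm weights are ignored, and the config.json path is a placeholder for a local checkout of this commit.

```python
import json

# Placeholder path: wherever this repository is checked out locally.
with open("config.json") as f:
    cfg = json.load(f)

d, layers = cfg["n_embd"], cfg["n_layer"]

# Assumed GPT-style block: fused QKV + output projection (4*d^2)
# and a 4x-wide MLP (8*d^2); biases and LayerNorms are ignored.
per_block = 4 * d**2 + 8 * d**2

embeddings = cfg["vocab_size"] * d          # token embeddings (assumed tied with the LM head)
if not cfg.get("use_NoPE", False):
    embeddings += cfg["block_size"] * d     # learned positional embeddings when NoPE is off

params = layers * per_block + embeddings
print(f"~{params / 1e6:.1f}M params, ~{params * 4 / 1e6:.0f} MB in float32")
# New config (n_embd=384, n_head=6, block_size=256): ~10.7M params, ~43 MB,
# in line with the updated model.safetensors size (43,034,048 bytes).
# Old config (n_embd=256, use_NoPE=true): ~4.7M params, ~19 MB.
```

Treat this as a back-of-the-envelope estimate rather than the exact parameter count; biases and LayerNorm weights add a small remainder on top.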
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6cd20fe4264a048350bfce60a3ffe0e233953a037b9f69d12fbce8e9e1e35fb5
- size 18992440
+ oid sha256:e919f269c2554dab76508be8923357ded937fe98679acb4d89d392b366bb825b
+ size 43034048
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e8035e194bf556bda66bfb9185a91338ab3f8fbfd93089290724b06b5a981518
+ oid sha256:8a091fe5845f9add4f8475b7213c6232ae3e1b903a5698feccec97c1fb6c6548
  size 5240