channudam committed
Commit 62501d3
1 Parent(s): b8ff5ef

End of training

Files changed (7)
  1. README.md +16 -20
  2. config.json +1 -0
  3. model.safetensors +1 -1
  4. tokenizer.json +2 -2
  5. tokenizer_config.json +1 -0
  6. training_args.bin +1 -1
  7. vocab.json +1 -266
README.md CHANGED
@@ -4,20 +4,18 @@ tags:
  model-index:
  - name: khmer-trocr-base-printed
    results: []
- pipeline_tag: image-to-text
- inference: true
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
  should probably proofread and complete it, then remove this comment. -->

- [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/rayranger/huggingface/runs/hddo3082)
+ [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/rayranger/huggingface/runs/9t8e6qv5)
  # khmer-trocr-base-printed

- This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
+ This model was trained from scratch on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.1980
- - Cer: 0.5955
+ - Loss: 0.0783
+ - Cer: 0.5191

  ## Model description

@@ -42,23 +40,21 @@ The following hyperparameters were used during training:
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
- - num_epochs: 5
+ - num_epochs: 4
  - mixed_precision_training: Native AMP

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | Cer |
- |:-------------:|:------:|:-----:|:---------------:|:------:|
- | 1.1417 | 0.4876 | 1000 | 1.0816 | 0.8567 |
- | 1.031 | 0.9751 | 2000 | 0.9824 | 0.8652 |
- | 0.9015 | 1.4627 | 3000 | 0.8875 | 0.8421 |
- | 0.7111 | 1.9503 | 4000 | 0.6645 | 0.7871 |
- | 0.5049 | 2.4378 | 5000 | 0.4831 | 0.7234 |
- | 0.4108 | 2.9254 | 6000 | 0.3594 | 0.6712 |
- | 0.2876 | 3.4130 | 7000 | 0.3076 | 0.6458 |
- | 0.2163 | 3.9005 | 8000 | 0.2418 | 0.6214 |
- | 0.1862 | 4.3881 | 9000 | 0.2119 | 0.5998 |
- | 0.1933 | 4.8757 | 10000 | 0.1980 | 0.5955 |
+ | Training Loss | Epoch | Step | Validation Loss | Cer |
+ |:-------------:|:------:|:----:|:---------------:|:------:|
+ | 0.2278 | 0.4876 | 1000 | 0.2244 | 0.6099 |
+ | 0.1517 | 0.9751 | 2000 | 0.1667 | 0.5711 |
+ | 0.1147 | 1.4627 | 3000 | 0.1412 | 0.5578 |
+ | 0.0927 | 1.9503 | 4000 | 0.1159 | 0.5552 |
+ | 0.0669 | 2.4378 | 5000 | 0.1028 | 0.5458 |
+ | 0.0551 | 2.9254 | 6000 | 0.0901 | 0.5270 |
+ | 0.0304 | 3.4130 | 7000 | 0.0833 | 0.5170 |
+ | 0.0182 | 3.9005 | 8000 | 0.0783 | 0.5191 |


  ### Framework versions
@@ -66,4 +62,4 @@ The following hyperparameters were used during training:
  - Transformers 4.42.3
  - Pytorch 2.1.2
  - Datasets 2.20.0
- - Tokenizers 0.19.1
+ - Tokenizers 0.19.1
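The updated card still has no usage example. A minimal inference sketch for this VisionEncoderDecoderModel checkpoint could look like the following; the Hub id `channudam/khmer-trocr-base-printed` and the image file name are assumptions inferred from the committer and model name, not stated in the commit.

```python
from PIL import Image
from transformers import TrOCRProcessor, VisionEncoderDecoderModel

# Assumed repo id; adjust if the checkpoint is loaded from a local path instead.
repo_id = "channudam/khmer-trocr-base-printed"

processor = TrOCRProcessor.from_pretrained(repo_id)
model = VisionEncoderDecoderModel.from_pretrained(repo_id)

# Hypothetical image containing one line of printed Khmer text.
image = Image.open("khmer_line.png").convert("RGB")
pixel_values = processor(images=image, return_tensors="pt").pixel_values

# 145 matches the label length configured in tokenizer.json below.
generated_ids = model.generate(pixel_values, max_new_tokens=145)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])
```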
config.json CHANGED
@@ -1,4 +1,5 @@
  {
+ "_name_or_path": "/kaggle/input/khmerocr-trocr-base/KhmerOCR_TrOCR",
  "architectures": [
    "VisionEncoderDecoderModel"
  ],
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:91c126cc91712d1c91445786293cf542cbd0ba9ac2d40c950a5d640885803615
+ oid sha256:480b91120705e08bbee9571fe7f7180c7342f31264384a18b789f35fdcbc7b88
  size 608335608
tokenizer.json CHANGED
@@ -2,13 +2,13 @@
  "version": "1.0",
  "truncation": {
    "direction": "Right",
-   "max_length": 100,
+   "max_length": 145,
    "strategy": "LongestFirst",
    "stride": 0
  },
  "padding": {
    "strategy": {
-     "Fixed": 100
+     "Fixed": 145
    },
    "direction": "Right",
    "pad_to_multiple_of": null,
tokenizer_config.json CHANGED
@@ -51,6 +51,7 @@
  "max_len": 150,
  "model_max_length": 150,
  "pad_token": "<pad>",
+ "processor_class": "TrOCRProcessor",
  "sep_token": "</s>",
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:016c3c28dbc67f7a85bbc1e4279553dae73e15da096e6a19b0186beb0a625b8f
+ oid sha256:f6ade5c73b7b57963d278c6d29997401b539d87942c697453f54cd979117942b
  size 5240
vocab.json CHANGED
@@ -1,266 +1 @@
- (pretty-printed vocabulary removed: the same 264 "token": id entries, ids 0–263, listed one per line and sorted by token — identical mapping to the single-line form added below)
+ {"<s>":0,"<pad>":1,"<e>":2,"<unk>":3,"<mask>":4,"!":5,"\"":6,"#":7,"$":8,"%":9,"&":10,"'":11,"(":12,")":13,"*":14,"+":15,",":16,"-":17,".":18,"/":19,"0":20,"1":21,"2":22,"3":23,"4":24,"5":25,"6":26,"7":27,"8":28,"9":29,":":30,";":31,"<":32,"=":33,">":34,"?":35,"@":36,"A":37,"B":38,"C":39,"D":40,"E":41,"F":42,"G":43,"H":44,"I":45,"J":46,"K":47,"L":48,"M":49,"N":50,"O":51,"P":52,"Q":53,"R":54,"S":55,"T":56,"U":57,"V":58,"W":59,"X":60,"Y":61,"Z":62,"[":63,"\\":64,"]":65,"^":66,"_":67,"`":68,"a":69,"b":70,"c":71,"d":72,"e":73,"f":74,"g":75,"h":76,"i":77,"j":78,"k":79,"l":80,"m":81,"n":82,"o":83,"p":84,"q":85,"r":86,"s":87,"t":88,"u":89,"v":90,"w":91,"x":92,"y":93,"z":94,"{":95,"|":96,"}":97,"~":98,"¡":99,"¢":100,"£":101,"¤":102,"¥":103,"¦":104,"§":105,"¨":106,"©":107,"ª":108,"«":109,"¬":110,"®":111,"¯":112,"°":113,"±":114,"²":115,"³":116,"´":117,"µ":118,"¶":119,"·":120,"¸":121,"¹":122,"º":123,"»":124,"¼":125,"½":126,"¾":127,"¿":128,"À":129,"Á":130,"Â":131,"Ã":132,"Ä":133,"Å":134,"Æ":135,"Ç":136,"È":137,"É":138,"Ê":139,"Ë":140,"Ì":141,"Í":142,"Î":143,"Ï":144,"Ð":145,"Ñ":146,"Ò":147,"Ó":148,"Ô":149,"Õ":150,"Ö":151,"×":152,"Ø":153,"Ù":154,"Ú":155,"Û":156,"Ü":157,"Ý":158,"Þ":159,"ß":160,"à":161,"á":162,"â":163,"ã":164,"ä":165,"å":166,"æ":167,"ç":168,"è":169,"é":170,"ê":171,"ë":172,"ì":173,"í":174,"î":175,"ï":176,"ð":177,"ñ":178,"ò":179,"ó":180,"ô":181,"õ":182,"ö":183,"÷":184,"ø":185,"ù":186,"ú":187,"û":188,"ü":189,"ý":190,"þ":191,"ÿ":192,"Ā":193,"ā":194,"Ă":195,"ă":196,"��":197,"ą":198,"Ć":199,"ć":200,"Ĉ":201,"ĉ":202,"Ċ":203,"ċ":204,"Č":205,"č":206,"Ď":207,"ď":208,"Đ":209,"đ":210,"Ē":211,"ē":212,"Ĕ":213,"ĕ":214,"Ė":215,"ė":216,"Ę":217,"ę":218,"Ě":219,"ě":220,"Ĝ":221,"ĝ":222,"Ğ":223,"ğ":224,"Ġ":225,"ġ":226,"Ģ":227,"ģ":228,"Ĥ":229,"ĥ":230,"Ħ":231,"ħ":232,"Ĩ":233,"ĩ":234,"Ī":235,"ī":236,"Ĭ":237,"ĭ":238,"Į":239,"į":240,"İ":241,"ı":242,"IJ":243,"ij":244,"Ĵ":245,"ĵ":246,"Ķ":247,"ķ":248,"ĸ":249,"Ĺ":250,"ĺ":251,"Ļ":252,"ļ":253,"Ľ":254,"ľ":255,"Ŀ":256,"ŀ":257,"Ł":258,"ł":259,"Ń":260,"áŀ":261,"áŁ":262,"âĢ":263}