mehedihasanbijoy committed
Commit 1bd3278
1 Parent(s): 515525a

Upload tokenizer

Files changed (2)
  1. README.md +7 -7
  2. tokenizer_config.json +0 -1
README.md CHANGED
@@ -1,19 +1,19 @@
 ---
-library_name: transformers
-license: mit
 base_model: facebook/w2v-bert-2.0
-tags:
-- generated_from_trainer
 datasets:
 - common_voice_16_0
+library_name: transformers
+license: mit
 metrics:
 - wer
+tags:
+- generated_from_trainer
 model-index:
 - name: w2v-bert-2.0-mongolian-colab-CV16.0
   results:
   - task:
-      name: Automatic Speech Recognition
       type: automatic-speech-recognition
+      name: Automatic Speech Recognition
     dataset:
       name: common_voice_16_0
       type: common_voice_16_0
@@ -21,9 +21,9 @@ model-index:
       split: test
       args: mn
     metrics:
-    - name: Wer
-      type: wer
+    - type: wer
       value: 0.5182727865999565
+      name: Wer
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
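For context, a minimal sketch of loading this checkpoint for Mongolian ASR with transformers follows. The repo id "mehedihasanbijoy/w2v-bert-2.0-mongolian-colab-CV16.0" is pieced together from the commit author and the model-index name above, and the Wav2Vec2Bert* classes are assumed from the facebook/w2v-bert-2.0 base model; neither is confirmed by this commit.

import torch
from transformers import Wav2Vec2BertForCTC, Wav2Vec2BertProcessor

# Assumed repo id, inferred from the commit author and model name above.
repo_id = "mehedihasanbijoy/w2v-bert-2.0-mongolian-colab-CV16.0"
processor = Wav2Vec2BertProcessor.from_pretrained(repo_id)
model = Wav2Vec2BertForCTC.from_pretrained(repo_id)

# 16 kHz mono audio; a second of silence here just to exercise the pipeline.
audio = torch.zeros(16000).numpy()
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # (batch, frames, vocab)
pred_ids = logits.argmax(dim=-1)     # greedy CTC decoding
print(processor.batch_decode(pred_ids))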
tokenizer_config.json CHANGED
@@ -39,7 +39,6 @@
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2BertProcessor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",