umisetokikaze committed on
Commit
0949d81
1 Parent(s): d0590a9

Upload 5 files

Files changed (3):
  1. config.json +5 -5
  2. special_tokens_map.json +3 -28
  3. tokenizer_config.json +2 -4
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "umise/vector_test",
+  "_name_or_path": "umisetokikaze/vectorizen-1",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -10,7 +10,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 32768,
+  "max_position_embeddings": 4096,
   "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
@@ -19,8 +19,8 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.40.0",
-  "use_cache": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.36.2",
+  "use_cache": true,
   "vocab_size": 32000
 }
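
The net effect of this change is easiest to check by loading the config back. A minimal sketch, assuming the files above are published under the umisetokikaze/vectorizen-1 repo id taken from "_name_or_path" (a local directory containing these files works the same):

    from transformers import AutoConfig

    # Load the updated config; the repo id is an assumption lifted from
    # "_name_or_path" above -- a local path to these files also works.
    config = AutoConfig.from_pretrained("umisetokikaze/vectorizen-1")

    print(config.max_position_embeddings)  # 4096 (was 32768)
    print(config.torch_dtype)              # bfloat16 (was float16)
    print(config.use_cache)                # True (was false)

Note that max_position_embeddings now matches sliding_window (4096), and use_cache: true re-enables the KV cache during generation.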
special_tokens_map.json CHANGED
@@ -1,30 +1,5 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "unk_token": "<unk>"
 }
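
Both serializations load equivalently: the verbose form deserializes each entry to an AddedToken, the plain-string form to a str, and the resulting special tokens are the same. The substantive change is that pad_token (previously aliased to <unk>) is gone, so batch padding needs an explicit fallback. A hedged sketch, same assumed repo id as above:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("umisetokikaze/vectorizen-1")
    print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>

    # pad_token was dropped from the map, so reassign it before padding;
    # reusing unk (or eos) is a common convention, not something this
    # commit itself specifies.
    if tok.pad_token is None:
        tok.pad_token = tok.unk_token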
tokenizer_config.json CHANGED
@@ -1,7 +1,6 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
-  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -33,9 +32,8 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
-  "model_max_length": 8192,
-  "pad_token": "<unk>",
-  "padding_side": "right",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": null,
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",