Commit 5d29db4 (parent 3a18199) by wukaixingxp, committed with qubvel-hf (HF staff)

Upload folder using huggingface_hub (#1)

- Upload folder using huggingface_hub (e0c8cb14edb9102d594a3af12cf78e42a161805e)

Co-authored-by: Pavel Iakubovskii <qubvel-hf@users.noreply.huggingface.co>

config.json CHANGED

@@ -11,7 +11,7 @@
   "attention_bias": false,
   "bad_words_ids": null,
   "begin_suppress_tokens": null,
-  "bos_token_id": 1,
+  "bos_token_id": 128000,
   "chunk_size_feed_forward": 0,
   "cross_attention_hidden_size": null,
   "cross_attention_layers": [
@@ -30,7 +30,11 @@
   "dropout": 0,
   "early_stopping": false,
   "encoder_no_repeat_ngram_size": 0,
-  "eos_token_id": 2,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
   "exponential_decay_length_penalty": null,
   "finetuning_task": null,
   "forced_bos_token_id": null,
@@ -64,7 +68,7 @@
   "output_attentions": false,
   "output_hidden_states": false,
   "output_scores": false,
-  "pad_token_id": null,
+  "pad_token_id": 128004,
   "prefix": null,
   "problem_type": null,
   "pruned_heads": {},
@@ -99,7 +103,6 @@
   "use_scaled_rope": true,
   "vocab_size": 128256
   },
-  "torch_dtype": "bfloat16",
   "transformers_version": "4.45.0.dev0",
   "vision_config": {
   "_name_or_path": "",
generation_config.json CHANGED

@@ -1,6 +1,13 @@
 {
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "eos_token_id": 2,
+  "bos_token_id": 128000,
+  "do_sample": true,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "pad_token_id": 128004,
+  "temperature": 0.6,
+  "top_p": 0.9,
   "transformers_version": "4.45.0.dev0"
 }
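
With the new generation_config.json, generate() calls that don't pass their own settings pick up sampling with temperature 0.6 and top_p 0.9, and stop on any of the three eos ids. A minimal sketch (not part of this commit; the repo id is a placeholder) reading those defaults:

# Sketch only: load the new generation defaults and check the values added here.
from transformers import GenerationConfig

repo_id = "<this-repo-id>"  # placeholder
gen_cfg = GenerationConfig.from_pretrained(repo_id)

assert gen_cfg.do_sample is True
assert gen_cfg.temperature == 0.6 and gen_cfg.top_p == 0.9
assert gen_cfg.eos_token_id == [128001, 128008, 128009]
assert gen_cfg.pad_token_id == 128004

These are only defaults; callers can still override them per call, e.g. model.generate(**inputs, do_sample=False).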
special_tokens_map.json CHANGED

@@ -1,5 +1,5 @@
 {
   "bos_token": "<|begin_of_text|>",
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|eot_id|>",
   "pad_token": "<|finetune_right_pad_id|>"
 }
tokenizer_config.json CHANGED

@@ -2060,7 +2060,7 @@
   "bos_token": "<|begin_of_text|>",
   "chat_template": "{% for message in messages %}{% if loop.index0 == 0 %}{{bos_token}}{% endif %}{{'<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n' }}{% for content in message['content'] %}{% if content['type'] == 'image' %}{{ '<|image|>' }}{% elif content['type'] == 'text' %}{{ content['text'] }}{% endif %}{% endfor %}{{ '<|eot_id|>' }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"