LAOS-Y committed
Commit 6adb35d · 1 parent: 6db9caa

update model_type
Files changed (1):
config.json (+4, -11)
config.json CHANGED
@@ -1,10 +1,8 @@
 {
-  "_name_or_path": "/mnt/ceph_rbd/mnt_pvc/zbc/llava-v1.5-7b/",
+  "_name_or_path": "llava-v1.5-7b",
   "architectures": [
     "LlavaLlamaForCausalLM"
   ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "freeze_mm_mlp_adapter": false,
@@ -17,8 +15,6 @@
   "max_length": 4096,
   "max_position_embeddings": 4096,
   "mm_hidden_size": 1024,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
   "mm_projector_type": "mlp2x_gelu",
   "mm_resampler_type": null,
   "mm_use_im_patch_token": false,
@@ -26,7 +22,7 @@
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
-  "model_type": "llava_llama",
+  "model_type": "llava",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
@@ -34,12 +30,9 @@
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
-  "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 4096,
-  "tokenizer_padding_side": "right",
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.2",
+  "torch_dtype": "float16",
+  "transformers_version": "4.31.0",
   "tune_mm_mlp_adapter": false,
   "tune_mm_vision_resampler": false,
   "unfreeze_mm_vision_tower": false,
 