fede97 committed
Commit c47e4c1
1 Parent(s): 0442a20

Update config.json

Files changed (1): config.json (+2, -2)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/leonardo_scratch/large/userexternal/fcocchi0/rag_mlmm/hf_models/llama_3_1/models--meta-llama--Meta-Llama-3.1-8B-Instruct/snapshots/07eb05b21d191a58c577b4a45982fe0c049d0693",
+  "_name_or_path": "meta-llama/Meta-Llama-3.1-8B-Instruct",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -27,7 +27,7 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "/leonardo_scratch/large/userexternal/fcocchi0/rag_mlmm/hf_models/siglip/models--google--siglip-so400m-patch14-384/snapshots/7067f6db2baa594bab7c6d965fe488c7ac62f1c8",
+  "mm_vision_tower": "google/siglip-so400m-patch14-384",
   "model_type": "llava_llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,