{ "_name_or_path": "/ceph/hdd/staff/charpent/.cache/models2riyr4wgy2r4vryo", "architectures": [ "TinyLlavaForConditionalGeneration" ], "auto_map": { "AutoConfig": "configuration.TinyLlavaConfig", "AutoModelForCausalLM": "modeling_tinyllava_phi.TinyLlavaForConditionalGeneration" }, "cache_dir": null, "connector_type": "mlp2x_gelu", "hidden_size": 2560, "ignore_index": -100, "image_aspect_ratio": "square", "image_token_index": -200, "llm_model_name_or_path": "microsoft/phi-2", "model_type": "tinyllava", "num_queries": 128, "num_resampler_layers": 3, "pad_token": "<|endoftext|>", "quantization_config": { "_load_in_4bit": true, "_load_in_8bit": false, "bnb_4bit_compute_dtype": "bfloat16", "bnb_4bit_quant_storage": "uint8", "bnb_4bit_quant_type": "fp4", "bnb_4bit_use_double_quant": false, "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_has_fp16_weight": false, "llm_int8_skip_modules": [ "lm_head" ], "llm_int8_threshold": 6.0, "load_in_4bit": true, "load_in_8bit": false, "quant_method": "bitsandbytes" }, "resampler_hidden_size": 768, "text_config": { "_name_or_path": "microsoft/phi-2", "architectures": [ "PhiForCausalLM" ], "auto_map": { "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig", "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM" }, "bos_token_id": 50256, "embd_pdrop": 0.0, "eos_token_id": 50256, "hidden_act": "gelu_new", "hidden_size": 2560, "intermediate_size": 10240, "layer_norm_eps": 1e-05, "model_type": "phi", "num_hidden_layers": 32, "partial_rotary_factor": 0.4, "qk_layernorm": false, "resid_pdrop": 0.1, "torch_dtype": "float16", "vocab_size": 51200 }, "tokenizer_model_max_length": 3072, "tokenizer_name_or_path": "microsoft/phi-2", "tokenizer_padding_side": "right", "tokenizer_use_fast": false, "torch_dtype": "float16", "transformers_version": "4.42.4", "tune_type_connector": "full", "tune_type_llm": "full", "tune_type_vision_tower": "frozen", "tune_vision_tower_from_layer": 0, "use_cache": true, "vision_config": { "hidden_act": "gelu_pytorch_tanh", "hidden_size": 1152, "image_size": 384, "intermediate_size": 4304, "layer_norm_eps": 1e-06, "model_name_or_path": "google/siglip-so400m-patch14-384", "model_name_or_path2": "", "model_type": "siglip_vision_model", "num_attention_heads": 16, "num_hidden_layers": 27, "patch_size": 14 }, "vision_feature_layer": -2, "vision_feature_select_strategy": "patch", "vision_hidden_size": 1152, "vision_model_name_or_path": "google/siglip-so400m-patch14-384", "vision_model_name_or_path2": "", "vocab_size": 51200 }