config.json (1,056 bytes)
{
  "arch": "mini_gpt4_llama_v2",
  "architectures": [
    "MiniGPT4_Video"
  ],
  "auto_map": {
    "AutoConfig": "mini_gpt4_llama_v2.minigpt4_video_config",
    "AutoModel": "mini_gpt4_llama_v2.MiniGPT4_Video"
  },
  "chat_template": true,
  "ckpt": "checkpoints/video_mistral_all_checkpoint_last.pth",
  "device": "cuda",
  "drop_path_rate": 0,
  "end_sym": "</s>",
  "freeze_qformer": true,
  "freeze_vit": true,
  "image_size": 224,
  "img_size": 224,
  "length": 50,
  "llama_model": "mistralai/Mistral-7B-Instruct-v0.2",
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "lora_r": 64,
  "lora_target_modules": [
    "q_proj",
    "v_proj"
  ],
  "low_resource": true,
  "max_context_len": 7200,
  "max_txt_len": 512,
  "model_type": "minigpt4_video",
  "num_query_token": 32,
  "prompt": "",
  "prompt_path": "",
  "remove_template": false,
  "token_pooling": true,
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "use_grad_checkpoint": true,
  "use_grad_checkpoint_llm": true,
  "vit_model": "eva_clip_g",
  "vit_precision": "fp16"
}
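
Because "model_type" is the custom "minigpt4_video" and "auto_map" resolves to classes shipped in the repository itself (mini_gpt4_llama_v2.py), this config can only be loaded through the transformers Auto classes with trust_remote_code=True. A minimal sketch of loading it, assuming a hypothetical repo id "Vision-CAIR/MiniGPT4-Video" (substitute the repository that actually hosts this file):

    from transformers import AutoConfig, AutoModel

    # Assumed repo id; replace with the repository hosting this config.json.
    repo_id = "Vision-CAIR/MiniGPT4-Video"

    # trust_remote_code=True is required because "auto_map" points at the
    # custom minigpt4_video_config / MiniGPT4_Video classes in the repo.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, config=config, trust_remote_code=True)

    print(config.model_type)   # "minigpt4_video"
    print(config.llama_model)  # "mistralai/Mistral-7B-Instruct-v0.2"

Note that "llama_model" names the Mistral-7B-Instruct-v0.2 backbone to wrap with LoRA (r=64, alpha=16 on q_proj/v_proj), while "ckpt" points to a checkpoint path that must exist locally relative to the working directory.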