Phi-3.5-vision-instruct-q4f16_1-MLC / mlc-chat-config.json
{
"version": "0.1.0",
"model_type": "phi3_v",
"quantization": "q4f16_1",
"model_config": {
"model_type": "phi3_v",
"hidden_size": 3072,
"vocab_size": 32064,
"num_hidden_layers": 32,
"num_attention_heads": 32,
"intermediate_size": 8192,
"rms_norm_eps": 1e-05,
"num_key_value_heads": 32,
"max_position_embeddings": 131072,
"vision_config": {
"hidden_size": 1024,
"image_size": 336,
"intermediate_size": 4096,
"num_attention_heads": 16,
"num_hidden_layers": 24,
"patch_size": 14,
"projection_dim": 768,
"vocab_size": null,
"num_channels": 3,
"layer_norm_eps": 1e-05,
"kwargs": {}
},
"img_processor": {
"image_dim_out": 1024,
"model_name": "openai/clip-vit-large-patch14-336",
"name": "clip_vision_model",
"num_img_tokens": 144
},
"position_embedding_base": 10000.0,
"rope_scaling": {
"long_factor": [
1.0800000429153442,
1.1100000143051147,
1.1399999856948853,
1.340000033378601,
1.5899999141693115,
1.600000023841858,
1.6200000047683716,
2.620000123977661,
3.2300000190734863,
3.2300000190734863,
4.789999961853027,
7.400000095367432,
7.700000286102295,
9.09000015258789,
12.199999809265137,
17.670000076293945,
24.46000099182129,
28.57000160217285,
30.420001983642578,
30.840002059936523,
32.590003967285156,
32.93000411987305,
42.320003509521484,
44.96000289916992,
50.340003967285156,
50.45000457763672,
57.55000305175781,
57.93000411987305,
58.21000289916992,
60.1400032043457,
62.61000442504883,
62.62000274658203,
62.71000289916992,
63.1400032043457,
63.1400032043457,
63.77000427246094,
63.93000411987305,
63.96000289916992,
63.970001220703125,
64.02999877929688,
64.06999969482422,
64.08000183105469,
64.12000274658203,
64.41000366210938,
64.4800033569336,
64.51000213623047,
64.52999877929688,
64.83999633789062
],
"short_factor": [
1.08,
1.1,
1.1300000000000001,
1.2800000000000002,
1.3100000000000003,
1.4500000000000004,
1.4500000000000004,
1.9500000000000008,
2.030000000000001,
2.4299999999999926,
2.5699999999999896,
2.9499999999999815,
3.729999999999965,
3.869999999999962,
4.189999999999955,
4.43999999999995,
4.6399999999999455,
4.979999999999938,
5.159999999999934,
5.279999999999932,
5.759999999999922,
5.889999999999919,
5.889999999999919,
5.969999999999917,
6.089999999999915,
6.2799999999999105,
6.7699999999999,
6.8899999999998975,
7.109999999999893,
7.129999999999892,
7.179999999999891,
7.289999999999889,
7.339999999999888,
7.559999999999883,
7.619999999999882,
7.69999999999988,
7.879999999999876,
7.879999999999876,
7.879999999999876,
7.939999999999875,
7.949999999999875,
7.979999999999874,
8.19999999999987,
8.439999999999864,
8.469999999999864,
8.589999999999861,
8.809999999999857,
8.999999999999853
],
"type": "longrope",
"rope_type": "longrope",
"max_position_embeddings": 131072,
"original_max_position_embeddings": 4096
},
"original_max_position_embeddings": 4096,
"context_window_size": 131072,
"prefill_chunk_size": 8192,
"head_dim": 96,
"tensor_parallel_shards": 1,
"max_batch_size": 128
},
"vocab_size": 32064,
"context_window_size": 131072,
"sliding_window_size": -1,
"prefill_chunk_size": 8192,
"attention_sink_size": -1,
"tensor_parallel_shards": 1,
"pipeline_parallel_stages": 1,
"temperature": 1.0,
"presence_penalty": 0.0,
"frequency_penalty": 0.0,
"repetition_penalty": 1.0,
"top_p": 1.0,
"tokenizer_files": [
"tokenizer.json",
"tokenizer_config.json"
],
"tokenizer_info": {
"token_postproc_method": "byte_fallback",
"prepend_space_in_encode": true,
"strip_space_in_decode": true
},
"conv_template": {
"name": "phi-3-vision",
"system_template": "{system_message}",
"system_message": "",
"system_prefix_token_ids": [
1
],
"add_role_after_system_message": true,
"roles": {
"user": "<|user|>",
"assistant": "<|assistant|>"
},
"role_templates": {
"user": "{user_message}",
"assistant": "{assistant_message}",
"tool": "{tool_message}"
},
"messages": [],
"seps": [
"<|end|>\n"
],
"role_content_sep": "\n",
"role_empty_sep": "\n",
"stop_str": [
"<|endoftext|>"
],
"stop_token_ids": [
2,
32000,
32001,
32007
],
"function_string": "",
"use_function_calling": false
},
"pad_token_id": 32000,
"bos_token_id": 1,
"eos_token_id": 2
}
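
For reference, a minimal sketch of how the fields above relate to one another: head_dim equals hidden_size divided by num_attention_heads (3072 / 32 = 96), and the LongRoPE factor arrays carry one entry per rotary frequency pair (head_dim / 2 = 48). The snippet below is only an illustrative check, assuming the JSON above is saved locally as mlc-chat-config.json; it is not part of the MLC toolchain.

import json

# Load the config shown above (assumed saved locally as mlc-chat-config.json).
with open("mlc-chat-config.json") as f:
    cfg = json.load(f)

model = cfg["model_config"]

# head_dim should equal hidden_size / num_attention_heads: 3072 / 32 = 96.
assert model["head_dim"] == model["hidden_size"] // model["num_attention_heads"]

# LongRoPE supplies one scaling factor per rotary frequency pair (head_dim / 2 = 48).
rope = model["rope_scaling"]
assert len(rope["short_factor"]) == len(rope["long_factor"]) == model["head_dim"] // 2

# The context window is extended from 4096 to 131072 positions via LongRoPE.
print("context extension scale:",
      model["max_position_embeddings"] / rope["original_max_position_embeddings"])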
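The rope_scaling block drives Phi-3.5's LongRoPE context extension: for sequences within the original 4096-token window the short_factor array rescales the rotary inverse frequencies, and beyond it the long_factor array is used instead, together with an attention rescaling term. The NumPy sketch below follows the Hugging Face Phi-3 reference implementation of this idea; MLC's compiled kernels may organize the computation differently.

import math
import numpy as np

def longrope_cos_sin(position_ids, short_factor, long_factor,
                     head_dim=96, base=10000.0,
                     original_max_pos=4096, max_pos=131072):
    """Rough sketch of LongRoPE rotary tables (per the HF Phi-3 reference)."""
    seq_len = int(position_ids.max()) + 1
    # Pick the factor set based on whether we are past the original window.
    factors = np.array(long_factor if seq_len > original_max_pos else short_factor)
    # Rescale each rotary inverse frequency by its per-dimension factor.
    inv_freq = 1.0 / (factors * base ** (np.arange(0, head_dim, 2) / head_dim))
    freqs = np.outer(position_ids, inv_freq)
    emb = np.concatenate([freqs, freqs], axis=-1)
    # Attention scaling keeps magnitudes stable after extending the window.
    scale = max_pos / original_max_pos
    attn_scale = math.sqrt(1 + math.log(scale) / math.log(original_max_pos))
    return np.cos(emb) * attn_scale, np.sin(emb) * attn_scale

# Example: tables for an 8192-token sequence, using the factor arrays from the config.
# cos, sin = longrope_cos_sin(np.arange(8192), rope["short_factor"], rope["long_factor"])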
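The conv_template section determines how chat turns are flattened into the prompt: the system prefix is token id 1 (BOS, applied at the token level), role tags <|user|> and <|assistant|> are separated from their content by a newline, and each turn ends with <|end|>\n; generation stops on the listed stop tokens. A small illustrative sketch of that assembly, using only the fields shown above (the actual formatting is handled inside MLC's runtime):

def build_prompt(messages):
    """Assemble a phi-3-vision style prompt from [{'role': ..., 'content': ...}] turns."""
    roles = {"user": "<|user|>", "assistant": "<|assistant|>"}
    sep = "<|end|>\n"          # "seps" in the template
    role_content_sep = "\n"    # separator between a role tag and its content
    parts = []
    for m in messages:
        parts.append(roles[m["role"]] + role_content_sep + m["content"] + sep)
    # Leave the assistant tag open so the model generates the reply.
    # (BOS, system_prefix_token_ids [1], is prepended at tokenization time.)
    parts.append(roles["assistant"] + role_content_sep)
    return "".join(parts)

print(build_prompt([{"role": "user", "content": "<|image_1|>\nWhat is in this picture?"}]))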