Change model type to internvl so it is compatible with vLLM etc.
- config.json (+2 -2)
- configuration_h2ovl_chat.py (+1 -1)
config.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "
+    "InternVLChatModel"
   ],
   "auto_map": {
     "AutoConfig": "configuration_h2ovl_chat.H2OVLChatConfig",
@@ -69,7 +69,7 @@
   },
   "max_dynamic_patch": 6,
   "min_dynamic_patch": 1,
-  "model_type": "
+  "model_type": "internvl_chat",
   "pad2square": false,
   "ps_version": "v2",
   "select_layer": -1,
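vLLM, like other serving stacks built on the Hugging Face config format, picks its model implementation from the `model_type` and `architectures` fields in `config.json`, so advertising the InternVL names lets it route this checkpoint to its built-in InternVL chat support instead of rejecting an unknown type. A minimal sketch of what that lookup now sees; the local path below is a placeholder, not part of this repo:

```python
from transformers import AutoConfig

# Placeholder path (assumption): point at a local clone of this repo.
config = AutoConfig.from_pretrained("./h2ovl-chat", trust_remote_code=True)

# After this change the checkpoint identifies itself as an InternVL chat model,
# which is the key vLLM and similar stacks use to pick a model class.
print(config.model_type)     # "internvl_chat"
print(config.architectures)  # ["InternVLChatModel"]
```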
configuration_h2ovl_chat.py
CHANGED
@@ -7,7 +7,7 @@ from transformers.models.auto import CONFIG_MAPPING
 logger = logging.get_logger(__name__)
 
 class H2OVLChatConfig(PretrainedConfig):
-    model_type = '
+    model_type = 'internvl_chat'
     is_composition = True
 
     def __init__(
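With both the JSON config and the remote-code config class reporting the InternVL identifiers, the checkpoint can be served through vLLM's InternVL path. A minimal usage sketch, assuming vLLM is installed; the repo id below is a placeholder for the actual model name:

```python
from vllm import LLM, SamplingParams

# Placeholder repo id (assumption): substitute the real Hugging Face model name.
llm = LLM(model="h2oai/h2ovl-chat", trust_remote_code=True)

# A text-only prompt is enough to confirm the model class resolves correctly.
outputs = llm.generate(
    ["Briefly explain what a vision-language model is."],
    SamplingParams(temperature=0.0, max_tokens=64),
)
print(outputs[0].outputs[0].text)
```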