ggcristian committed
Commit 0b39632
1 Parent(s): e9db3c9

Upload config.json

Files changed (1)
  1. config.json +92 -0
config.json ADDED
@@ -0,0 +1,92 @@
+ {
+   "architectures": [
+     "TinyEmoForConditionalGeneration"
+   ],
+   "cache_dir": null,
+   "connector_type": "mlp2x_gelu",
+   "hidden_size": 1536,
+   "ignore_index": -100,
+   "image_aspect_ratio": "square",
+   "image_token_index": -200,
+   "llm_model_name_or_path": "apple/OpenELM-450M-Instruct",
+   "model_type": "tinyemo",
+   "auto_map": {
+     "AutoConfig": "configuration_openelm.OpenELMConfig",
+     "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
+   },
+   "num_queries": 128,
+   "num_resampler_layers": 3,
+   "pad_token": "<|endoftext|>",
+   "pad_token_id": 1,
+   "resampler_hidden_size": 768,
+   "text_config": {
+     "_name_or_path": "apple/OpenELM-450M-Instruct",
+     "architectures": [
+       "OpenELMForCausalLM"
+     ],
+     "auto_map": {
+       "AutoConfig": "configuration_openelm.OpenELMConfig",
+       "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
+     },
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "ffn_dim_divisor": 256,
+     "ffn_multipliers": [
+       0.5, 0.68, 0.87, 1.05, 1.24, 1.42, 1.61, 1.79, 1.97, 2.16, 2.34, 2.53, 2.71, 2.89, 3.08, 3.26, 3.45, 3.63, 3.82, 4.0
+     ],
+     "ffn_with_glu": true,
+     "hidden_act": "swish",
+     "head_dim": 64,
+     "initializer_range": 0.02,
+     "max_context_length": 2048,
+     "model_dim": 1536,
+     "model_type": "openelm",
+     "normalization_layer_name": "rms_norm",
+     "normalize_qk_projections": true,
+     "num_gqa_groups": 4,
+     "num_kv_heads": [
+       3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6
+     ],
+     "num_query_heads": [
+       12, 12, 12, 16, 16, 16, 16, 16, 16, 16, 20, 20, 20, 20, 20, 20, 24, 24, 24, 24
+     ],
+     "num_transformer_layers": 20,
+     "qkv_multipliers": [0.5, 1.0],
+     "rope_freq_constant": 10000,
+     "rope_max_length": 4096,
+     "share_input_output_layers": true,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 32000
+   },
+   "tokenizer_model_max_length": 4096,
+   "tokenizer_padding_side": "right",
+   "tokenizer_use_fast": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.39.3",
+   "tune_type_connector": "full",
+   "tune_type_llm": "full",
+   "tune_type_vision_tower": "frozen",
+   "tune_vision_tower_from_layer": 0,
+   "use_cache": true,
+   "vision_config": {
+     "_name_or_path": "openai/clip-vit-large-patch14",
+     "architectures": [
+       "CLIPModel"
+     ],
+     "hidden_act": "quick_gelu",
+     "hidden_size": 1024,
+     "image_size": 224,
+     "intermediate_size": 4096,
+     "layer_norm_eps": 1e-05,
+     "model_type": "clip_vision_model",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "patch_size": 14,
+     "projection_dim": 768
+   },
+   "vision_feature_layer": -2,
+   "vision_feature_select_strategy": "patch",
+   "vision_hidden_size": 1024,
+   "vision_model_name_or_path": "openai/clip-vit-large-patch14",
+   "vocab_size": 32000
+ }
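
For orientation, the sketch below (plain Python, standard library only) shows how one might inspect the three parts of this config once the file has been downloaded: the TinyEmo wrapper and its mlp2x_gelu connector at the top level, the apple/OpenELM-450M-Instruct settings under text_config, and the openai/clip-vit-large-patch14 tower under vision_config. The local "config.json" path is an assumption about where the file was saved; the repository id itself is not part of this commit.

import json

# A minimal sketch, assuming config.json has been downloaded locally
# from the model repository (path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# Top level: the TinyEmo wrapper, its mlp2x_gelu connector, and the shared
# hidden size the connector projects into.
print(cfg["model_type"], cfg["connector_type"], cfg["hidden_size"])
# -> tinyemo mlp2x_gelu 1536

# Language side: apple/OpenELM-450M-Instruct, 20 transformer layers with
# per-layer FFN multipliers and grouped-query attention head counts.
text = cfg["text_config"]
print(text["_name_or_path"], text["num_transformer_layers"], text["model_dim"])
# -> apple/OpenELM-450M-Instruct 20 1536

# Vision side: the CLIP ViT-L/14 tower; patch features are taken from the
# second-to-last layer ("vision_feature_layer": -2).
vision = cfg["vision_config"]
print(vision["model_type"], vision["image_size"], vision["patch_size"])
# -> clip_vision_model 224 14

Since the auto_map entries point at custom configuration_openelm / modeling_openelm modules, loading the checkpoint through the transformers Auto classes would presumably also require passing trust_remote_code=True.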