farzadab committed on
Commit ec3adb0
1 Parent(s): 8c5cb7c

remove text/audio model_id and lora_configs

Files changed (1)
  1. config.json +2 -25
config.json CHANGED
@@ -14,17 +14,7 @@
     "hidden_dropout_prob": 0.1,
     "model_type": "wav2vec2"
   },
-  "audio_model_id": "facebook/wav2vec2-base-960h",
-  "audio_model_lora_config": {
-    "lora_alpha": 8,
-    "r": 0,
-    "target_modules": [
-      "k_proj",
-      "q_proj",
-      "linear_k",
-      "linear_q"
-    ]
-  },
+  "audio_model_id": null,
   "audio_token_index": 128257,
   "hidden_size": 4096,
   "ignore_index": -100,
@@ -46,20 +36,7 @@
     "torch_dtype": "bfloat16",
     "vocab_size": 128258
   },
-  "text_model_id": "meta-llama/Meta-Llama-3-8B-Instruct",
-  "text_model_lora_config": {
-    "lora_alpha": 8,
-    "r": 64,
-    "target_modules": [
-      "mlp.gate_proj",
-      "mlp.up_proj",
-      "mlp.down_proj",
-      "v_proj",
-      "o_proj",
-      "k_proj",
-      "q_proj"
-    ]
-  },
+  "text_model_id": null,
   "torch_dtype": "float32",
   "transformers_version": "4.40.0",
   "vocab_size": 32