Caslow committed
Commit 268156c
1 Parent(s): 501ac4e

git add lora_model/config.json

Files changed (1)
  1. lora_model/config.json +31 -0
lora_model/config.json ADDED
@@ -0,0 +1,31 @@
+{
+  "architectures": ["LlamaForCausalLM"],
+  "model_type": "llama",
+  "hidden_size": 4096,
+  "intermediate_size": 11008,
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "vocab_size": 128000,
+  "max_position_embeddings": 2048,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 0,
+  "initializer_range": 0.02,
+  "lora_config": {
+    "lora_alpha": 16,
+    "lora_dropout": 0,
+    "r": 16,
+    "target_modules": [
+      "k_proj",
+      "v_proj",
+      "down_proj",
+      "gate_proj",
+      "o_proj",
+      "up_proj",
+      "q_proj"
+    ]
+  },
+  "peft_type": "LORA",
+  "task_type": "CAUSAL_LM"
+}
+
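For reference, the "lora_config" block above mirrors the fields of a PEFT LoraConfig. Below is a minimal sketch of how these values map onto one in Python; it assumes only that the peft package is installed, and every value is read straight from the file above:

from peft import LoraConfig

# Rebuild the adapter configuration from lora_model/config.json.
# Field names follow the "lora_config" block; task_type comes from the
# top-level "task_type" key.
lora_config = LoraConfig(
    r=16,                  # LoRA rank
    lora_alpha=16,         # scaling factor (alpha / r = 1.0 here)
    lora_dropout=0.0,      # no dropout on the adapter layers
    target_modules=[       # every attention and MLP projection is adapted
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
    task_type="CAUSAL_LM",
)

Passing this config to peft.get_peft_model together with a base Llama model wraps each listed projection with a rank-16 LoRA adapter.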