Vijayendra committed
Commit 39f8afe · verified · 1 Parent(s): 22876b4

Update peft_config.json

Files changed (1)
  1. peft_config.json +26 -1
peft_config.json CHANGED
@@ -1 +1,26 @@
- {"peft_type": "LORA", "auto_mapping": null, "base_model_name_or_path": null, "revision": null, "task_type": "CAUSAL_LM", "inference_mode": false, "r": 16, "target_modules": ["up_proj", "gate_proj", "down_proj", "o_proj", "k_proj", "v_proj", "q_proj"], "lora_alpha": 16, "lora_dropout": 0.05, "fan_in_fan_out": false, "bias": "none", "use_rslora": false, "modules_to_save": null, "init_lora_weights": true, "layers_to_transform": null, "layers_pattern": null, "rank_pattern": {}, "alpha_pattern": {}, "megatron_config": null, "megatron_core": "megatron.core", "loftq_config": {}, "use_dora": false, "layer_replication": null}
+ {
+ "peft_type": "LORA",
+ "auto_mapping": null,
+ "base_model_name_or_path": "Vijayendra/llama-3b-lora-cyclic-attention",
+ "revision": null,
+ "task_type": "CAUSAL_LM",
+ "inference_mode": false,
+ "r": 16,
+ "target_modules": ["up_proj", "gate_proj", "down_proj", "o_proj", "k_proj", "v_proj", "q_proj"],
+ "lora_alpha": 16,
+ "lora_dropout": 0.05,
+ "fan_in_fan_out": false,
+ "bias": "none",
+ "use_rslora": false,
+ "modules_to_save": null,
+ "init_lora_weights": true,
+ "layers_to_transform": null,
+ "layers_pattern": null,
+ "rank_pattern": {},
+ "alpha_pattern": {},
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "loftq_config": {},
+ "use_dora": false,
+ "layer_replication": null
+ }
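
For context, the settings above map one-to-one onto peft's LoraConfig. The sketch below is not part of the repo; it shows how an equivalent adapter config could be built in Python and attached to the model named in base_model_name_or_path (substitute your actual base model if it differs). All calls are standard transformers/peft APIs.

from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# LoRA hyperparameters copied from the peft_config.json above.
lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    use_rslora=False,
    target_modules=[
        "up_proj", "gate_proj", "down_proj",
        "o_proj", "k_proj", "v_proj", "q_proj",
    ],
)

# base_model_name_or_path from the config; swap in your own base model if it differs.
base_model = AutoModelForCausalLM.from_pretrained(
    "Vijayendra/llama-3b-lora-cyclic-attention"
)
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()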