PEFT
natnitaract committed
Commit 667de24
1 Parent(s): bc4ff94

Upload 3 files

Files changed (3)
  1. README.md +13 -0
  2. adapter_config.json +3 -3
  3. adapter_model.bin +2 -2
README.md CHANGED
@@ -112,6 +112,18 @@ The following `bitsandbytes` quantization config was used during training:
  - bnb_4bit_use_double_quant: False
  - bnb_4bit_compute_dtype: float32
 
+ The following `bitsandbytes` quantization config was used during training:
+ - quant_method: bitsandbytes
+ - load_in_8bit: False
+ - load_in_4bit: True
+ - llm_int8_threshold: 6.0
+ - llm_int8_skip_modules: None
+ - llm_int8_enable_fp32_cpu_offload: False
+ - llm_int8_has_fp16_weight: False
+ - bnb_4bit_quant_type: nf4
+ - bnb_4bit_use_double_quant: False
+ - bnb_4bit_compute_dtype: float32
+
  The following `bitsandbytes` quantization config was used during training:
  - quant_method: bitsandbytes
  - load_in_8bit: False
@@ -134,5 +146,6 @@ The following `bitsandbytes` quantization config was used during training:
  - PEFT 0.5.0
  - PEFT 0.5.0
  - PEFT 0.5.0
+ - PEFT 0.5.0
 
  - PEFT 0.5.0
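For reference, the quantization settings listed in the README map one-to-one onto `transformers`' `BitsAndBytesConfig`. A minimal sketch of how an equivalent setup would be built; the base model id is a placeholder, since the commit does not name it:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirrors the values documented in the README: 4-bit NF4 quantization,
# no double quantization, float32 compute dtype, int8 options at their defaults.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    load_in_8bit=False,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=False,
    bnb_4bit_compute_dtype=torch.float32,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
)

# "base-model-id" is a placeholder; the commit does not say which
# base model this adapter was trained on.
model = AutoModelForCausalLM.from_pretrained(
    "base-model-id", quantization_config=bnb_config
)
```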
adapter_config.json CHANGED
@@ -11,16 +11,16 @@
    "lora_dropout": 0.1,
    "modules_to_save": null,
    "peft_type": "LORA",
-   "r": 128,
+   "r": 64,
    "revision": null,
    "target_modules": [
-     "up_proj",
      "k_proj",
      "q_proj",
      "v_proj",
      "gate_proj",
+     "down_proj",
      "o_proj",
-     "down_proj"
+     "up_proj"
    ],
    "task_type": "CAUSAL_LM"
  }
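This change halves the LoRA rank from 128 to 64 and reorders `target_modules`; the set itself is unchanged (all seven Llama-style attention and MLP projections). A minimal `peft` sketch matching the updated config, reusing the quantized `model` from the sketch above; `lora_alpha` sits outside the visible hunk, so it is left at PEFT's default here:

```python
from peft import LoraConfig, get_peft_model

# Matches the updated adapter_config.json: rank halved from 128 to 64.
lora_config = LoraConfig(
    r=64,
    lora_dropout=0.1,
    target_modules=[
        "k_proj", "q_proj", "v_proj", "gate_proj",
        "down_proj", "o_proj", "up_proj",
    ],
    task_type="CAUSAL_LM",
)

peft_model = get_peft_model(model, lora_config)  # `model` from the sketch above
peft_model.print_trainable_parameters()
```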
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a0f7ab38d0a8979e6a45ab9c3ce9e5de5a3dc111cb705d765bb153e2d95dcab0
- size 1342338829
+ oid sha256:7c08924efae3ed321fababeb3cb46e017c15f3de6e7a25c291141d49687fbbc6
+ size 671250189
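The new LFS pointer shows the checkpoint shrinking from 1,342,338,829 to 671,250,189 bytes, a ratio of almost exactly 0.50. That is what the `r: 128 -> 64` change predicts: LoRA adds two low-rank factors per adapted weight, A of shape (r, d_in) and B of shape (d_out, r), so the adapter's parameter count, and hence its file size, scales linearly with the rank. A quick sanity check; the 4096/11008 dimensions are illustrative Llama-7B-style values, not stated anywhere in the commit:

```python
def lora_params(d_in: int, d_out: int, r: int) -> int:
    """Parameters added per adapted weight: A is (r, d_in), B is (d_out, r)."""
    return r * (d_in + d_out)

# Illustrative dimensions only; the commit does not name the base model.
attn, mlp = (4096, 4096), (4096, 11008)
for r in (128, 64):
    # 4 attention projections (q, k, v, o) + 3 MLP projections (gate, up, down)
    total = 4 * lora_params(*attn, r) + 3 * lora_params(*mlp, r)
    print(r, total)  # per-layer parameter count halves along with r

print(671_250_189 / 1_342_338_829)  # ~0.50: the checkpoint ratio matches
```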