adamo1139 committed
Commit f9ad118
1 Parent(s): 92da061

Upload 6 files

1-yi-34b-200k-xlctx-rawrr/adapter_config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "yi-34b-200k-2",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 32,
+  "rank_pattern": {},
+  "revision": "unsloth",
+  "target_modules": [
+    "v_proj",
+    "down_proj",
+    "o_proj",
+    "up_proj",
+    "k_proj",
+    "gate_proj",
+    "q_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
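For reference, the settings above map one-to-one onto a peft LoraConfig. A minimal sketch, assuming the peft library is installed; the object is built here only to illustrate the hyperparameters (rank 32, alpha 64, all attention and MLP projections targeted), and is not taken from the actual training code.

```python
# Sketch only: the LoraConfig equivalent of 1-yi-34b-200k-xlctx-rawrr/adapter_config.json.
from peft import LoraConfig

config = LoraConfig(
    r=32,                 # LoRA rank ("r")
    lora_alpha=64,        # effective scale is lora_alpha / r = 2.0
    lora_dropout=0.0,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[      # every attention and MLP projection in the Yi/Llama block
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)
```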
1-yi-34b-200k-xlctx-rawrr/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d9cda17fa15887278bd27fb6c47933064de7f8a030d0bec0df3ae7e2a7b3bd2
+size 983153656
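The safetensors weights themselves are stored through git-lfs; the three lines above are only the pointer (spec version, sha256 oid, byte size). A minimal sketch of verifying a downloaded copy against that pointer; the local file path and variable names are assumptions for illustration.

```python
# Sketch only: check a local download of adapter_model.safetensors against the
# git-lfs pointer above (oid = sha256 of the file contents, size = bytes).
import hashlib
import os

EXPECTED_OID = "1d9cda17fa15887278bd27fb6c47933064de7f8a030d0bec0df3ae7e2a7b3bd2"
EXPECTED_SIZE = 983153656
PATH = "1-yi-34b-200k-xlctx-rawrr/adapter_model.safetensors"  # assumed local path

sha = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
print("pointer verified")
```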
2-yi-34b-200k-xlctx-rawrr-aezakmi/adapter_config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "yi-34b-200k-xlctx-raw-1-1803",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 32,
+  "rank_pattern": {},
+  "revision": "unsloth",
+  "target_modules": [
+    "down_proj",
+    "gate_proj",
+    "q_proj",
+    "k_proj",
+    "up_proj",
+    "v_proj",
+    "o_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
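Note that the configs chain: this adapter's base_model_name_or_path ("yi-34b-200k-xlctx-raw-1-1803") appears to be the model produced by the first stage, and stage 3 in turn points at the stage-2 result. A minimal sketch of applying one of these adapters with transformers and peft, assuming the named checkpoints are available locally or on the Hub; merge_and_unload is optional and simply folds the LoRA into the base weights.

```python
# Sketch only: load the stage-1 result and apply the stage-2 adapter on top.
import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "yi-34b-200k-xlctx-raw-1-1803",          # base recorded in adapter_config.json
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "2-yi-34b-200k-xlctx-rawrr-aezakmi")
model = model.merge_and_unload()             # optional: bake the adapter into the weights
```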
2-yi-34b-200k-xlctx-rawrr-aezakmi/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a9df973f24c8e3fcf29c35e9050e79f62e02e9ae9e1d01eac6817f0a4a44b81
+size 983153656
3-yi-34b-200k-xlctx-rawrr-aezakmi-toxic/adapter_config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "yi-34b-200k-xlctx-aezakmi-raw-1-2003",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 32,
+  "rank_pattern": {},
+  "revision": "unsloth",
+  "target_modules": [
+    "k_proj",
+    "o_proj",
+    "gate_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
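One difference from the first two adapters: this stage uses lora_alpha 32 with r 32, so the effective LoRA scale (lora_alpha / r) drops from 2.0 to 1.0. A minimal sketch of reading that back from the saved configs, assuming local copies of the three adapter folders under the directory names used in this commit.

```python
# Sketch only: print the effective LoRA scale for each adapter in this commit.
from peft import PeftConfig

for path in (
    "1-yi-34b-200k-xlctx-rawrr",
    "2-yi-34b-200k-xlctx-rawrr-aezakmi",
    "3-yi-34b-200k-xlctx-rawrr-aezakmi-toxic",
):
    cfg = PeftConfig.from_pretrained(path)
    print(path, cfg.lora_alpha / cfg.r)      # 2.0, 2.0, 1.0 per the configs above
```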
3-yi-34b-200k-xlctx-rawrr-aezakmi-toxic/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecc474c748bd219f9420a933eed5f43b930de90db17d2c62940618c625ca8e9e
+size 983153656