Commit 6771e28 committed by wraps
1 parent: d792b42

Upload model
adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
     "up_proj",
-    "q_proj",
     "down_proj",
-    "o_proj",
-    "k_proj",
+    "v_proj",
     "gate_proj",
-    "v_proj"
+    "o_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:30cd2a3d8306681dc213f9eae2d8a2a5d4bfd07c01a18c576361ee66ddad53cf
+oid sha256:63b2215bebdb86d970f8436873bc63a27c7dbac4f1207f3585e6b17961dcdcc6
 size 9124880
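
adapter_model.safetensors, like the two files below, is tracked with Git LFS, so the diff only shows its pointer file: a spec version, the sha256 oid of the actual blob, and its size in bytes. A minimal sketch of checking a downloaded copy against the new pointer; the local path in the usage comment is an assumption, not something named in this commit.

```python
import hashlib

# Values from the "+" side of the pointer diff above.
EXPECTED_SHA256 = "63b2215bebdb86d970f8436873bc63a27c7dbac4f1207f3585e6b17961dcdcc6"
EXPECTED_SIZE = 9124880  # bytes

def matches_lfs_pointer(path: str, expected_sha256: str, expected_size: int) -> bool:
    """Return True if the file at `path` matches the LFS pointer's oid and size."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_sha256 and size == expected_size

# Usage (local path is an assumption):
# matches_lfs_pointer("adapter_model.safetensors", EXPECTED_SHA256, EXPECTED_SIZE)
```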
final_checkpoint/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0d22508af7b3af412e36321d21a3d5f40c6a7deaa87a58963f59e39adfa9fbc2
+oid sha256:7741ee595d00c3d77e25f964808c165ed98be701e26e3d3171dcee19da817a39
 size 1934288228
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d929a64c6be778fec765671ac4f1cd64dcc5ce20b1ee7af29e8aa445bb5243f1
+oid sha256:bc80604ac05d763b418f63e377f536748d94ee89df3a8eb425f4b01f1caf24b1
 size 4920
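
Since every weight blob and the training args changed in this commit, downstream code that should see exactly these artifacts can pin the revision when loading. A minimal sketch with placeholder repo and base-model names (neither appears in this diff); revision accepts a branch, tag, or commit hash, and the full 40-character commit id may be needed if the short hash above does not resolve.

```python
# Sketch only: REPO_ID and BASE_MODEL are assumptions, not taken from this diff.
from transformers import AutoModelForCausalLM
from peft import PeftModel

REPO_ID = "some-user/some-lora-adapter"   # placeholder adapter repo
BASE_MODEL = "some-org/some-base-model"   # placeholder base model

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL)
# Pin to the commit shown above so the re-uploaded adapter weights are used.
model = PeftModel.from_pretrained(base, REPO_ID, revision="6771e28")
```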