lyleokoth committed
Commit ad45a16 · 1 Parent(s): 603035d

Training in progress, step 100

adapter_config.json CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "down_proj",
  "o_proj",
  "v_proj",
  "gate_proj",
- "k_proj",
+ "q_proj",
  "up_proj",
- "q_proj"
+ "k_proj",
+ "down_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f10f7ff27b47dabbfc03da33c02549b7fb4080f8b45aac69e426c5d0f5a6ae09
+ oid sha256:72925d564e23d0c734dffec5c66d32543b86cbc423c411c2cd22f1b6327f42d7
  size 45258384
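
adapter_model.safetensors is tracked as a Git LFS pointer (version, oid, size), so only its sha256 changes when step 100 writes new adapter weights of the same size. A minimal sketch of loading the updated adapter onto its base model with PEFT; both model names below are placeholders, since this commit does not state them:

    # Sketch only: "base-model-name" and "lyleokoth/<this-adapter-repo>"
    # are placeholders, not identifiers taken from this commit.
    from transformers import AutoModelForCausalLM
    from peft import PeftModel

    base = AutoModelForCausalLM.from_pretrained("base-model-name")
    model = PeftModel.from_pretrained(base, "lyleokoth/<this-adapter-repo>")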
runs/Jul05_15-12-48_e7ed27cc0a22/events.out.tfevents.1720192369.e7ed27cc0a22.12855.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12c01e0eafb1dbe1823e1028a4f34c549f7f4ea0270be660996ed4e79782a03b
+ size 5654
runs/Jul05_15-12-48_e7ed27cc0a22/events.out.tfevents.1720192404.e7ed27cc0a22.12855.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3128c2ee138e2880919150c1fedc4c882ee05130508341f4fd0618d16ed6bdf
+ size 7724
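
The two newly added events.out.tfevents files under runs/ are TensorBoard event logs written during this run. A minimal sketch of inspecting them locally, assuming the tensorboard package is installed; the "train/loss" tag is an assumption about what the Trainer logged:

    # Sketch: the path points at the downloaded runs/ subdirectory from this commit.
    from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

    acc = EventAccumulator("runs/Jul05_15-12-48_e7ed27cc0a22")
    acc.Reload()
    print(acc.Tags())                      # lists the available scalar tags
    for e in acc.Scalars("train/loss"):    # tag name is an assumption
        print(e.step, e.value)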
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:63066ed4d3df839dee7b055781bfd66617b89c178e9928faec9461fdd1811552
+ oid sha256:b6cc1a2fe3f74f71aa42cf734a966b6314c65880605986a19f1c575c14cc3f50
  size 5112