Liu-Xiang committed on
Commit 646291c · verified · 1 Parent(s): dc04e3a

Training in progress, step 500

adapter_config.json CHANGED
@@ -6,7 +6,6 @@
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
-  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
@@ -20,15 +19,14 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "v_proj",
     "gate_proj",
-    "q_proj",
     "o_proj",
     "k_proj",
+    "up_proj",
+    "v_proj",
+    "q_proj",
     "down_proj"
   ],
   "task_type": "CAUSAL_LM",
-  "use_dora": false,
   "use_rslora": false
 }
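The net change is a reordered target_modules list plus the removal of the "layer_replication" and "use_dora" keys, which usually just means the adapter config was re-saved by a different peft version. A minimal sketch of a LoraConfig that would produce the new adapter_config.json; the base model id and the rank/alpha/dropout values are assumptions not shown in this diff, while target_modules matches the list above.

# Sketch only: LoraConfig matching the new adapter_config.json.
# The base model, r, lora_alpha and lora_dropout are assumptions; the diff does not show them.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")  # assumed base model

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    fan_in_fan_out=False,
    init_lora_weights=True,
    use_rslora=False,
    r=16,                 # assumed
    lora_alpha=32,        # assumed
    lora_dropout=0.05,    # assumed
    target_modules=[
        "gate_proj", "o_proj", "k_proj",
        "up_proj", "v_proj", "q_proj", "down_proj",
    ],
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()

Note that "inference_mode": true in the saved file is set automatically when the adapter is serialized; during training the config is created with inference mode off.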
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a562ea5d606be5dd6b205acbfd2ba71ae02c1850f574d29a1875062c7d1510e1
-size 2684416208
+oid sha256:1176753252ecac9e3eb3d10084b47f0677ccca3934f1065f42235a97e315cf99
+size 1342239008
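The adapter checkpoint roughly halves in size (2,684,416,208 → 1,342,239,008 bytes), which would be consistent with the LoRA weights being saved in a 16-bit dtype instead of fp32, or with a lower rank; the diff alone does not say which. A quick way to check is to open the file with the safetensors API, assuming it has been pulled locally (git lfs pull):

# Sketch only: inspect the LoRA tensors in the updated checkpoint.
# Assumes adapter_model.safetensors is available locally.
from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        t = f.get_tensor(name)
        print(name, tuple(t.shape), t.dtype)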
runs/Jul15_12-12-37_llm-finetuing-workbench-0/events.out.tfevents.1721045625.llm-finetuing-workbench-0.283.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f830206f6a2d700947a0b9c6d971ef1bbd2e88c844e7e9cc15fac9ca45966bd
+size 4756
runs/Jul15_13-20-23_llm-finetuing-workbench-0/events.out.tfevents.1721049690.llm-finetuing-workbench-0.277.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c8bb2d30e2597bdc1530e6efa139df9164cc05f945c1455c52a177901e807d7
+size 6625
runs/Jul15_13-49-38_llm-finetuing-workbench-0/events.out.tfevents.1721051445.llm-finetuing-workbench-0.321.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:773b7b15005083c0c1e963cf28d50ae196f3fb3878ed03a167874ba8cc9a36b3
+size 17381
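The three added runs/ entries are TensorBoard event files, stored here as Git LFS pointers. A minimal sketch for listing what they contain, assuming the runs/ directory has been pulled locally; the "train/loss" tag is an assumption about what the Trainer logged.

# Sketch only: read one of the added TensorBoard event files.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jul15_13-49-38_llm-finetuing-workbench-0")
acc.Reload()

print(acc.Tags()["scalars"])              # available scalar tags
for event in acc.Scalars("train/loss"):   # tag name is an assumption
    print(event.step, event.value)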
tokenizer_config.json CHANGED
@@ -1,7 +1,6 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
-  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0075b9b61e7983c32c312554ca9dcd21647b0ea967ce48f3aaaca988a7c3509b
-size 4411
+oid sha256:2b70d7b8904307a810fb2857ef343adf82097ac93c3e6ca7d37b5bf29d4b46e5
+size 4219
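training_args.bin is the pickled transformers TrainingArguments object saved by the Trainer, also tracked via Git LFS. A minimal sketch for inspecting it locally; because it is a Python pickle, weights_only=False is required on recent PyTorch versions and the file should only be loaded if you trust its source.

# Sketch only: inspect the pickled TrainingArguments saved alongside the adapter.
# Assumes training_args.bin has been pulled locally; pickles can execute code on load.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # expected: TrainingArguments
print(args.per_device_train_batch_size, args.learning_rate, args.num_train_epochs)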