shirzady1934 committed on
Commit 01445ea
1 Parent(s): 06ff101

Training in progress, step 200

adapter_config.json CHANGED
@@ -8,16 +8,19 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
+  "loftq_config": {},
   "lora_alpha": 16,
   "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "lm_head",
-    "wte"
+    "wte",
+    "lm_head"
   ],
   "task_type": "CAUSAL_LM"
 }
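The reordering of target_modules is cosmetic, while loftq_config, megatron_config, and megatron_core are fields that recent peft releases write into adapter_config.json automatically. For reference, a config like the one above would come from peft's LoraConfig; a minimal sketch, assuming a recent peft version and a hypothetical output directory name:

from peft import LoraConfig

# Values taken from the diff above; everything else falls back to peft defaults.
config = LoraConfig(
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    target_modules=["wte", "lm_head"],
    task_type="CAUSAL_LM",
)

# Writes adapter_config.json into the given directory (hypothetical name).
config.save_pretrained("checkpoint-200")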
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5ae72702d86e8595631327b59d103b03b60a3b8e4a34126a9dcdf4e083752f9c
-size 6531688
+oid sha256:4b76a31ba0bbfe8ab3c58a744876eca18e84ec3104d553ac85a9d25cf397fb4c
+size 315310960
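Note that the changed lines are a Git LFS pointer, not the weights themselves; the actual adapter_model.safetensors is resolved through LFS. Once downloaded, the file can be inspected without loading the full tensors; a minimal sketch, assuming the file sits in the current directory:

from safetensors import safe_open

# Enumerate tensor names and shapes from the safetensors header only.
with safe_open("adapter_model.safetensors", framework="pt") as f:
    for name in f.keys():
        print(name, f.get_slice(name).get_shape())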
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b54e7146d4a5dc4a3337fb31a8552c8e24ec22dac2fb8150598aee2a784cb069
-size 4536
+oid sha256:60b018dc4d170d606b335547d5635a623e16b3c4e70985d20aeaeda860bed4d6
+size 4219
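training_args.bin is the pickled transformers TrainingArguments object that the Trainer saves alongside each checkpoint. A minimal sketch of inspecting it, assuming the file comes from a trusted source (pickled objects require weights_only=False on torch >= 2.6):

import torch

# TrainingArguments is stored via pickle, so weights_only must be disabled.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs)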