zbigi committed
Commit 689d2f7 · verified · 1 Parent(s): 8789eed

End of training

README.md CHANGED
@@ -44,14 +44,10 @@ The following hyperparameters were used during training:
 - num_epochs: 10
 - mixed_precision_training: Native AMP
 
-### Training results
-
-
-
 ### Framework versions
 
 - PEFT 0.13.2
 - Transformers 4.45.2
-- Pytorch 2.5.0+cu121
+- Pytorch 2.5.1+cu121
 - Datasets 3.1.0
 - Tokenizers 0.20.3
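
The README hunk above only records two training settings: num_epochs and Native AMP mixed precision. As a minimal sketch, those two values would map onto Hugging Face `Seq2SeqTrainingArguments` roughly as follows; `output_dir` and every argument not shown in the diff are illustrative assumptions, not values taken from this commit.

```python
# Minimal sketch: mapping the README hyperparameters onto Seq2SeqTrainingArguments.
# Only num_train_epochs and fp16 (Native AMP) come from the diff above;
# output_dir and anything else would be placeholder assumptions.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="outputs",   # hypothetical output directory
    num_train_epochs=10,    # "num_epochs: 10" from the README
    fp16=True,              # "mixed_precision_training: Native AMP"
)
```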
adapter_config.json CHANGED
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "v_proj",
+    "q_proj",
     "o_proj",
-    "q_proj"
+    "v_proj",
+    "k_proj"
   ],
   "task_type": "SEQ_2_SEQ_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4a63f0e01e5b6a1a310f45101fb25641e9329329115b73fbed9e7d2dff7dedb2
+oid sha256:09d8f4285e7c69170d3e0fec16d75d38b96743582f8904c530b7226ae37915f7
 size 21248928
runs/Nov16_16-15-30_40601be62e29/events.out.tfevents.1731774733.40601be62e29.6331.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2425fac45b65420c7bcfcac2eee20f359c11f82792b5b6fe46b81ba3b60ac80c
+size 1270
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6b30106af6344a9fafe37dcad4a168308649c4c1427626ee093aff39f9f944eb
+oid sha256:84ad92415f55def63a1da23b7773ef161c43525e01fd37a5342925816579c166
 size 5432