IngeniousArtist committed
Commit 2cc7285
1 Parent(s): 6a71712

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -7,11 +7,11 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
-  "lora_dropout": 0.1,
+  "lora_alpha": 32,
+  "lora_dropout": 0.5,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 64,
+  "r": 8,
   "revision": null,
   "target_modules": [
     "q_proj",
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0a8af0c502276a5de63997416422ab28f5ba179b9f8409d1949d261ad9fdf7f5
-size 134263757
+oid sha256:a7250b974cca171eff26cc020f916e0d793246aab14539fba56f4878bc75630a
+size 16822989
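
The adapter checkpoint shrinks roughly 8x, which lines up with the rank change in adapter_config.json: LoRA parameter count scales linearly with r, and r dropped from 64 to 8. A quick sanity check using the sizes from this diff:

# Sizes taken directly from the git-lfs pointers in this diff.
old_size = 134_263_757  # bytes at r=64
new_size = 16_822_989   # bytes at r=8
print(old_size / new_size)  # ~7.98, close to the expected 64 / 8 = 8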
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 4096,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
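
The new truncation block matches what the tokenizers library serializes after truncation is enabled on a tokenizer. As a sketch (the file path is a placeholder, and this is the equivalent call rather than the commit's actual code):

# Sketch: produce the truncation block seen in the new tokenizer.json.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(
    max_length=4096,
    stride=0,
    strategy="longest_first",  # serialized as "LongestFirst"
    direction="right",         # serialized as "Right"
)
tok.save("tokenizer.json")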
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9daa05828058094cd919682af271f6630a16021abceed57e6f54f6717d3d41b1
+oid sha256:17d211d8c4bed18a6ebfd173740e269f5c9d1ab5c3012a9a9c2daf39fe4a6c59
 size 4027
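
training_args.bin is saved by the transformers Trainer via torch.save, so an unchanged 4027-byte size with a new hash suggests only argument values changed between runs. A hedged way to inspect which values differ (the path is assumed):

# Sketch: training_args.bin is a pickled TrainingArguments object
# saved by transformers' Trainer. Path is an assumption.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args)  # prints the full TrainingArguments for this run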