EddyGiusepe committed
Commit 4596029
1 Parent(s): 2f7e3f3

Training in progress, epoch 0

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6e47e23b15b9d262b38ef2903970669834bc3e0e7f188bd88b5f0993b311dde0
+ oid sha256:7d6263d6e62e276c6689864f39501285f61f0c6f3c5c1ba85ac91d54f1f2e314
  size 4517152
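
Note: the diff above only swaps the Git LFS pointer (oid and size); the adapter weights themselves live in LFS storage. A minimal sketch of how the updated adapter would typically be consumed, assuming this repository is a PEFT adapter (adapter_model.safetensors is the file name PEFT writes on save); the base-model id and repo id below are placeholders, not taken from this commit:

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholders: neither identifier appears in this commit.
base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(base, "EddyGiusepe/adapter-repo-id")  # fetches the ~4.5 MB adapter_model.safetensors
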
runs/Jan27_17-45-16_2701addb9961/events.out.tfevents.1706377545.2701addb9961.1157.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae1aad1b06829798f51b3e9298651d1c0af6b19b3405190a31704fc672239790
+ size 88
runs/Jan27_17-48-00_2701addb9961/events.out.tfevents.1706377708.2701addb9961.4347.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4fd0a557ca477d7a2514c9bf2e35c1adbb73f7b910f5f7dae89c46a2bcc47ae
+ size 9721
special_tokens_map.json CHANGED
@@ -13,13 +13,7 @@
      "rstrip": false,
      "single_word": false
    },
-   "pad_token": {
-     "content": "[PAD]",
-     "lstrip": true,
-     "normalized": false,
-     "rstrip": true,
-     "single_word": false
-   },
+   "pad_token": "</s>",
    "unk_token": {
      "content": "<unk>",
      "lstrip": false,
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
  {
    "version": "1.0",
-   "truncation": null,
+   "truncation": {
+     "direction": "Right",
+     "max_length": 1024,
+     "strategy": "LongestFirst",
+     "stride": 0
+   },
    "padding": null,
    "added_tokens": [
      {
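
Note: the block above enables right-side truncation at 1024 tokens in the serialized fast tokenizer. A minimal sketch of how an equivalent "truncation" entry can be produced with the tokenizers library; the call is illustrative and not taken from the training script, and the path is relative to this repository:

from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(max_length=1024, stride=0,
                      strategy="longest_first", direction="right")
tok.save("tokenizer.json")  # serializes the "truncation": {...} block shown above
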
tokenizer_config.json CHANGED
@@ -55,7 +55,7 @@
    "eos_token": "</s>",
    "legacy": false,
    "model_max_length": 1000000000000000019884624838656,
-   "pad_token": "[PAD]",
+   "pad_token": "</s>",
    "padding_side": "right",
    "sp_model_kwargs": {},
    "tokenizer_class": "LlamaTokenizer",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:88d74082098d51cb67ce002fad503e31772c0c6f81e190e33220517325f56ea8
+ oid sha256:3d902168528e6a814cd7c64e8bf122d2ba6fc8583ab8b1889147cd512229e9f3
  size 4664