Commit 76c4ed5 (parent: 599415c), committed by philgrey

Training in progress, epoch 1
config.json CHANGED
@@ -28,6 +28,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
- "transformers_version": "4.40.1",
+ "transformers_version": "4.40.2",
   "vocab_size": 30522
  }
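The only edit to config.json is the recorded transformers_version (4.40.1 to 4.40.2), metadata written by save_pretrained rather than a modeling hyperparameter; architecture fields such as vocab_size stay unchanged. A minimal sketch for reading that field from a local copy of the checkpoint (the directory path is a placeholder, not part of this commit):

    import json

    # Hypothetical local path to the checkpoint directory containing config.json.
    checkpoint_dir = "./checkpoint"

    with open(f"{checkpoint_dir}/config.json") as f:
        config = json.load(f)

    # Version of transformers that wrote this config; this commit records 4.40.2.
    print(config["transformers_version"])
    print(config["vocab_size"])  # 30522, unchanged by this commit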
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:51c28b913f424ac319e74b802b2526c5a12e8359d5ff0cab1864bf99b2f9c3a7
+ oid sha256:862f17fe72c6ee0c770de3fa59ac0188398226b545569d1b1de58ac48883bc0f
   size 267832560
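model.safetensors is tracked with Git LFS, so the diff only touches the pointer file: the sha256 oid changes to reference the new epoch-1 weights while the payload size stays 267832560 bytes. A sketch for inspecting the tensors locally, assuming the safetensors package is installed and the actual LFS object (not the pointer text) has been downloaded:

    from safetensors import safe_open

    # Hypothetical local path; this must be the real weights file pulled from LFS,
    # not the small pointer file shown in the diff.
    with safe_open("model.safetensors", framework="pt", device="cpu") as f:
        for name in f.keys():
            print(name, f.get_tensor(name).shape)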
tokenizer_config.json CHANGED
@@ -45,7 +45,7 @@
   "cls_token": "[CLS]",
   "do_lower_case": true,
   "mask_token": "[MASK]",
- "model_max_length": 1000000000000000019884624838656,
+ "model_max_length": 512,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
   "strip_accents": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:a6ce43e25a78327d3cb1c36b3912be85b4b2ab66e2f220d4b66b64e1f05306bb
+ oid sha256:c45e637515f2d4be5d10b24805881f27b3e3e3a869b71e822a452ad3d7e5b2fd
   size 4984
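training_args.bin is the TrainingArguments object that the Trainer pickles next to each checkpoint; it is also stored via Git LFS, so only the pointer's oid changes and the size stays 4984 bytes. A sketch for inspecting it, assuming a compatible transformers install is available (torch.load unpickles arbitrary Python objects, so only open files you trust):

    import torch

    # Hypothetical local path; unpickling needs transformers importable so the
    # TrainingArguments class can be reconstructed.
    args = torch.load("training_args.bin", weights_only=False)
    print(args.num_train_epochs)
    print(args.learning_rate)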