asahi417 committed on
Commit
e24a7b8
1 Parent(s): 6ea9776

model update

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "cner_output/model/baseline/t_roberta_base_dec2021/best_model",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
 
1
  {
2
+ "_name_or_path": "cardiffnlp/twitter-roberta-base-dec2021",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fa698dfdfd9ed808d735ec1f3ed842267200a432b46a872ebe2e2df4f185bbd9
3
- size 496349169
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:abe2aed52ddd17853ba3230c6d423ec8c3344726b6e524762b1b67e483c7d39e
3
+ size 496351921
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline/t_roberta_base_dec2021/best_model", "tokenizer_class": "RobertaTokenizer"}
 
1
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cardiffnlp/twitter-roberta-base-dec2021", "tokenizer_class": "RobertaTokenizer"}