Tim Miller committed on
Commit
eabb23e
·
1 Parent(s): cabdb6d

Updated for cnlpt version 0.6.0.

Browse files
Files changed (2) hide show
  1. config.json +14 -10
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -2,6 +2,7 @@
2
  "architectures": [
3
  "CnlpModelForClassification"
4
  ],
 
5
  "encoder_config": {
6
  "_name_or_path": "microsoft/BiomedNLP-PubMedBERT-large-uncased-abstract",
7
  "add_cross_attention": false,
@@ -90,19 +91,22 @@
90
  "hidden_dropout_prob": 0.1,
91
  "hidden_size": 1024,
92
  "hier_head_config": null,
93
- "layer": 11,
 
 
 
 
 
 
94
  "model_type": "cnlpt",
95
- "num_labels_list": [
96
- 2
97
- ],
98
  "num_rel_attention_heads": 12,
99
  "rel_attention_head_dims": 64,
100
- "relations": [
101
- false
102
- ],
103
- "tagger": [
104
- false
105
- ],
106
  "tokens": false,
107
  "torch_dtype": "float32",
108
  "transformers_version": "4.22.2",
 
2
  "architectures": [
3
  "CnlpModelForClassification"
4
  ],
5
+ "cnlpt_version": "0.6.0",
6
  "encoder_config": {
7
  "_name_or_path": "microsoft/BiomedNLP-PubMedBERT-large-uncased-abstract",
8
  "add_cross_attention": false,
 
91
  "hidden_dropout_prob": 0.1,
92
  "hidden_size": 1024,
93
  "hier_head_config": null,
94
+ "label_dictionary": {
95
+ "current": [
96
+ "False",
97
+ "True"
98
+ ]
99
+ },
100
+ "layer": 12,
101
  "model_type": "cnlpt",
 
 
 
102
  "num_rel_attention_heads": 12,
103
  "rel_attention_head_dims": 64,
104
+ "relations": {
105
+ "current": false
106
+ },
107
+ "tagger": {
108
+ "current": false
109
+ },
110
  "tokens": false,
111
  "torch_dtype": "float32",
112
  "transformers_version": "4.22.2",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fbac4dcfe1926512ef4669997b18bf1b28ed5d3d58e9f64354d1ccb97a5e006e
3
  size 1338283453
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3a25cd4a1c2c9900ebd12a58eebf1a216445c1331152cdfa61c3c37e940cb13
3
  size 1338283453