Commit bf6d8e7 committed by Ubuntu
1 parent: d12ff1d

Initial commit

Files changed:
- config.json +33 -0
- pytorch_model.bin +3 -0
- results.csv +47 -0
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
config.json ADDED
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "nreimers/MiniLM-L6-H384-uncased",
+  "architectures": [
+    "BertForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 384,
+  "id2label": {
+    "0": "LABEL_0"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 1536,
+  "label2id": {
+    "LABEL_0": 0
+  },
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 6,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "problem_type": "multi_label_classification",
+  "torch_dtype": "float32",
+  "transformers_version": "4.12.5",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 30522
+}
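The config describes a 6-layer MiniLM encoder (hidden size 384, 12 heads) initialized from nreimers/MiniLM-L6-H384-uncased, with a single-logit BertForSequenceClassification head and problem_type set to multi_label_classification, i.e. the logit is trained with a sigmoid/BCE objective. Below is a minimal loading sketch; the local path "./model" is an assumption (the commit page does not name the hub repo), and the query/passage inputs are hypothetical — the msmarco_mrr column in results.csv suggests a MS MARCO-style cross-encoder.

```python
# Minimal sketch of loading this checkpoint with transformers.
# Assumption: the repo is cloned locally to "./model"; the commit page
# does not give the hub repo id, so substitute the real one if known.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./model")
model = AutoModelForSequenceClassification.from_pretrained("./model")
model.eval()

# Hypothetical query/passage pair; the single LABEL_0 logit is read as a
# relevance score through a sigmoid, matching the BCE objective implied
# by problem_type = "multi_label_classification" in config.json.
inputs = tokenizer("what is python",
                   "Python is a programming language.",
                   return_tensors="pt", truncation=True, max_length=512)
with torch.no_grad():
    score = torch.sigmoid(model(**inputs).logits[0, 0]).item()
print(f"relevance score: {score:.4f}")
```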
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c12d092b63e47fc6529215f97c5287ba07fc68c217313019f84e6c35eb3aa0cb
+size 90899885
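The weights themselves live in Git LFS; the three lines above are the pointer file that is actually committed, recording the SHA-256 of the real payload and its size in bytes (90,899,885 bytes ≈ 22.7M float32 parameters, consistent with the MiniLM-L6-H384 architecture in config.json). A sketch of verifying a downloaded copy against the pointer:

```python
# Sketch: check a downloaded pytorch_model.bin against the LFS pointer.
# The pointer's "oid sha256:..." is the hash of the full file contents
# and "size" is its length in bytes.
import hashlib
import os

EXPECTED_OID = "c12d092b63e47fc6529215f97c5287ba07fc68c217313019f84e6c35eb3aa0cb"
EXPECTED_SIZE = 90899885

assert os.path.getsize("pytorch_model.bin") == EXPECTED_SIZE

h = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID
print("LFS pointer verified")
```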
results.csv ADDED
@@ -0,0 +1,47 @@
+step,epoch,msmarco_mrr
+20000.0,0.0,0.3343
+40000.0,0.0,0.3589
+60000.0,0.0,0.3612
+80000.0,0.0,0.3663
+100000.0,0.0,0.3803
+120000.0,0.0,0.3505
+140000.0,0.0,0.3513
+160000.0,0.0,0.3818
+180000.0,0.0,0.3902
+200000.0,0.0,0.3932
+220000.0,0.0,0.3928
+240000.0,0.0,0.4007
+260000.0,0.0,0.3835
+280000.0,0.0,0.4024
+300000.0,0.0,0.3744
+320000.0,0.0,0.3941
+340000.0,0.0,0.362
+360000.0,0.0,0.4083
+380000.0,0.0,0.4108
+400000.0,0.0,0.4091
+420000.0,0.0,0.406
+440000.0,0.0,0.3607
+460000.0,0.0,0.4147
+480000.0,0.0,0.3961
+500000.0,0.0,0.4119
+520000.0,0.0,0.4065
+540000.0,0.0,0.4009
+560000.0,0.0,0.4148
+580000.0,0.0,0.3864
+600000.0,0.0,0.4176
+620000.0,0.0,0.3552
+640000.0,0.0,0.43
+660000.0,0.0,0.417
+680000.0,0.0,0.4027
+700000.0,0.0,0.4168
+720000.0,0.0,0.4219
+740000.0,0.0,0.4015
+760000.0,0.0,0.43
+780000.0,0.0,0.4025
+800000.0,0.0,0.4162
+820000.0,0.0,0.405
+840000.0,0.0,0.4334
+860000.0,0.0,0.4043
+880000.0,0.0,0.4183
+900000.0,0.0,0.4017
+920000.0,0.0,0.3992
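The CSV logs MS MARCO MRR (presumably on the dev set) every 20,000 training steps, all within epoch 0. The score climbs from 0.3343 at step 20k to a peak of 0.4334 at step 840k, with noticeable dips along the way. A short sketch that picks the best checkpoint out of the log:

```python
# Sketch: parse results.csv and report the best evaluation step.
import csv

with open("results.csv") as f:
    rows = [(int(float(r["step"])), float(r["msmarco_mrr"]))
            for r in csv.DictReader(f)]

best_step, best_mrr = max(rows, key=lambda r: r[1])
print(best_step, best_mrr)  # -> 840000 0.4334 for the log above
```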
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "name_or_path": "nreimers/MiniLM-L6-H384-uncased", "do_basic_tokenize": true, "never_split": null, "model_max_length": 512, "special_tokens_map_file": "/home/azureuser/.cache/huggingface/transformers/4bab06d0845c01b218fc031866e62ccf55a15d3f649549f76c89e002ba24d5d9.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d", "tokenizer_class": "BertTokenizer"}
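Together with special_tokens_map.json above, this is a stock uncased BertTokenizer setup: WordPiece over vocab.txt, lowercasing enabled, the standard [UNK]/[SEP]/[PAD]/[CLS]/[MASK] specials, and a 512-token model_max_length matching max_position_embeddings in config.json. A quick sketch of what that tokenization looks like, again assuming a local clone at "./model":

```python
# Sketch: the files above configure a standard uncased BertTokenizer.
# Assumption: repo cloned locally to "./model".
from transformers import BertTokenizer

tok = BertTokenizer.from_pretrained("./model")
ids = tok("Hello World!")["input_ids"]
print(tok.convert_ids_to_tokens(ids))
# ['[CLS]', 'hello', 'world', '!', '[SEP]']  (lowercased: do_lower_case=true)
```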
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8c9e96fcfef9dd1f2261d5a1173091dcc14362c8d5f61bd4887d59bf545994a8
+size 2927
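training_args.bin (also an LFS pointer, though the payload is only 2,927 bytes) is the pickled transformers.TrainingArguments object that the Trainer saves alongside its checkpoints; it holds hyperparameters, not weights. It can be unpickled to recover the training setup — with the caveat that torch.load runs arbitrary pickle code, so only do this for files you trust:

```python
# Sketch: recover the training hyperparameters from training_args.bin.
# torch.load unpickles arbitrary objects; only use on trusted files.
# (On recent PyTorch, weights_only must be set to False for this to work.)
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.max_steps)
```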
vocab.txt ADDED
The diff for this file is too large to render. See raw diff.
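vocab.txt is the WordPiece vocabulary, one token per line; its line count should equal "vocab_size": 30522 from config.json. A one-line sanity check:

```python
# Sketch: the number of lines in vocab.txt should match config.json's
# "vocab_size": 30522 (one WordPiece token per line).
with open("vocab.txt", encoding="utf-8") as f:
    print(sum(1 for _ in f))  # expected: 30522
```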