pablocosta committed
Commit 2d74c59 · 1 Parent(s): 9a6cc18
Update README.md
README.md
CHANGED
@@ -31,3 +31,16 @@ from transformers import AutoModel # or BertModel, for BERT without pretraining
31      model = AutoModelForPreTraining.from_pretrained('pablocosta/bertabaporu-base-uncased')
32      tokenizer = AutoTokenizer.from_pretrained('pablocosta/bertabaporu-base-uncased')
33      ```
34  +   ## Cite us
35  +
36  +   @inproceedings{bertabaporu,
37  +     author={Pablo Botton da Costa and
38  +             Matheus Camasmie Pavan and
39  +             Wesley Ramos dos Santos and
40  +             Samuel Caetano da Silva and
41  +             Ivandr\'e Paraboni},
42  +     title={{BERTabaporu: assessing a genre-specific language model for Portuguese NLP}},
43  +     booktitle={Recent Advances in Natural Language Processing ({RANLP-2023})},
44  +     year={2023},
45  +     address={Varna, Bulgaria}
46  +   }
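For readers landing on this commit, a minimal usage sketch of the checkpoint the diff refers to. It is not part of the commit: it assumes `transformers` and `torch` are installed and, following the comment in the hunk header ("or BertModel, for BERT without pretraining"), loads the weights with `AutoModel` to extract token embeddings; the example sentence is illustrative only.

```python
# Minimal sketch (assumption, not part of this commit): load the BERTabaporu
# checkpoint referenced in the diff and extract contextual token embeddings.
import torch
from transformers import AutoModel, AutoTokenizer  # AutoModel = BERT without pretraining heads

tokenizer = AutoTokenizer.from_pretrained('pablocosta/bertabaporu-base-uncased')
model = AutoModel.from_pretrained('pablocosta/bertabaporu-base-uncased')

# Illustrative Portuguese example sentence.
inputs = tokenizer('bom dia, tudo bem?', return_tensors='pt')
with torch.no_grad():
    outputs = model(**inputs)

# last_hidden_state has shape (batch_size, sequence_length, hidden_size).
print(outputs.last_hidden_state.shape)
```

Loading with `AutoModelForPreTraining`, as in the README lines above, returns the same encoder plus the masked-LM and next-sentence-prediction heads instead of raw hidden states.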