uer committed
Commit 48080cb
1 Parent(s): a97e8d9

Update README.md

Files changed (1): README.md (+20 -0)
README.md CHANGED
@@ -63,6 +63,26 @@ python3 scripts/convert_bert_text_classification_from_uer_to_huggingface.py --in
  ### BibTeX entry and citation info

  ```
+ @article{devlin2018bert,
+   title={BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding},
+   author={Devlin, Jacob and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
+   journal={arXiv preprint arXiv:1810.04805},
+   year={2018}
+ }
+
+ @article{DBLP:journals/corr/abs-1907-11692,
+   title={RoBERTa: {A} Robustly Optimized {BERT} Pretraining Approach},
+   author={Yinhan Liu and Myle Ott and Naman Goyal and Jingfei Du and Mandar Joshi and Danqi Chen and Omer Levy and Mike Lewis and Luke Zettlemoyer and Veselin Stoyanov},
+   journal={CoRR},
+   year={2019},
+ }
+
+ @misc{zhang2017encoding,
+   title={Which Encoding is the Best for Text Classification in Chinese, English, Japanese and Korean?},
+   author={Xiang Zhang and Yann LeCun},
+   year={2017},
+ }
+
  @article{zhao2019uer,
    title={UER: An Open-Source Toolkit for Pre-training Models},
    author={Zhao, Zhe and Chen, Hui and Zhang, Jinbin and Zhao, Xin and Liu, Tao and Lu, Wei and Chen, Xi and Deng, Haotang and Ju, Qi and Du, Xiaoyong},
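
For context, the lines added in this commit are standard BibTeX records. A minimal LaTeX sketch of how they would be cited, assuming the fenced block from the README is saved to a hypothetical `references.bib`:

```latex
% Minimal sketch: assumes the BibTeX entries from the README diff above
% are saved as references.bib (hypothetical filename, not from the commit).
\documentclass{article}
\begin{document}
This model builds on BERT~\cite{devlin2018bert} and
RoBERTa~\cite{DBLP:journals/corr/abs-1907-11692}, draws on the encoding
study of Zhang and LeCun~\cite{zhang2017encoding}, and was trained with
the UER toolkit~\cite{zhao2019uer}.
% Standard BibTeX resolution of the \cite keys above.
\bibliographystyle{plain}
\bibliography{references}
\end{document}
```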