Saxo committed
Commit b193300
1 Parent(s): 9228d38

Update README.md

Files changed (1)
  1. README.md +1 -92
README.md CHANGED
@@ -37,95 +37,4 @@ Dr. Yunsung Ji (Saxo), a data scientist at Linkbricks, a company specializing in
 It is a Korean language model trained to handle complex Korean logic problems through Korean-Chinese-English-Japanese cross-training data and logical data, and Tokenizer uses the base model without word expansion.


- www.linkbricks.com, www.linkbricks.vc
-
-
-
-
- @inproceedings{park2024open,
- title={Open Ko-LLM Leaderboard: Evaluating Large Language Models in Korean with Ko-H5 Benchmark},
- author={Chanjun Park and Hyeonwoo Kim and Dahyun Kim and Seonghwan Cho and Sanghoon Kim and Sukyung Lee and Yungi Kim and Hwalsuk Lee},
- year={2024},
- booktitle={The 62nd Annual Meeting of the Association for Computational Linguistics (ACL 2024) }
- }
-
-
- @software{eval-harness,
- author = {Gao, Leo and
- Tow, Jonathan and
- Biderman, Stella and
- Black, Sid and
- DiPofi, Anthony and
- Foster, Charles and
- Golding, Laurence and
- Hsu, Jeffrey and
- McDonell, Kyle and
- Muennighoff, Niklas and
- Phang, Jason and
- Reynolds, Laria and
- Tang, Eric and
- Thite, Anish and
- Wang, Ben and
- Wang, Kevin and
- Zou, Andy},
- title = {A framework for few-shot language model evaluation},
- month = sep,
- year = 2021,
- publisher = {Zenodo},
- version = {v0.0.1},
- doi = {10.5281/zenodo.5371628},
- url = {https://doi.org/10.5281/zenodo.5371628},
- }
-
- @misc{rein2023gpqagraduatelevelgoogleproofqa,
- title={GPQA: A Graduate-Level Google-Proof Q&A Benchmark},
- author={David Rein and Betty Li Hou and Asa Cooper Stickland and Jackson Petty and Richard Yuanzhe Pang and Julien Dirani and Julian Michael and Samuel R. Bowman},
- year={2023},
- eprint={2311.12022},
- archivePrefix={arXiv},
- primaryClass={cs.AI},
- url={https://arxiv.org/abs/2311.12022},
- }
-
- @article{sakaguchi2021winogrande,
- title={Winogrande: An adversarial winograd schema challenge at scale},
- author={Sakaguchi, Keisuke and Bras, Ronan Le and Bhagavatula, Chandra and Choi, Yejin},
- journal={Communications of the ACM},
- volume={64},
- number={9},
- pages={99--106},
- year={2021},
- publisher={ACM New York, NY, USA}
- }
-
- @article{cobbe2021training,
- title={Training verifiers to solve math word problems},
- author={Cobbe, Karl and Kosaraju, Vineet and Bavarian, Mohammad and Chen, Mark and Jun, Heewoo and Kaiser, Lukasz and Plappert, Matthias and Tworek, Jerry and Hilton, Jacob and Nakano, Reiichiro and others},
- journal={arXiv preprint arXiv:2110.14168},
- year={2021}
- }
-
- article{paech2023eq,
- title={Eq-bench: An emotional intelligence benchmark for large language models},
- author={Paech, Samuel J},
- journal={arXiv preprint arXiv:2312.06281},
- year={2023}
- }
-
-
- @misc{zhou2023instructionfollowingevaluationlargelanguage,
- title={Instruction-Following Evaluation for Large Language Models},
- author={Jeffrey Zhou and Tianjian Lu and Swaroop Mishra and Siddhartha Brahma and Sujoy Basu and Yi Luan and Denny Zhou and Le Hou},
- year={2023},
- eprint={2311.07911},
- archivePrefix={arXiv},
- primaryClass={cs.CL},
- url={https://arxiv.org/abs/2311.07911},
- }
-
- @article{lee2024kornat,
- title={KorNAT: LLM Alignment Benchmark for Korean Social Values and Common Knowledge},
- author={Lee, Jiyoung and Kim, Minwoo and Kim, Seungho and Kim, Junghwan and Won, Seunghyun and Lee, Hwaran and Choi, Edward},
- journal={arXiv preprint arXiv:2402.13605},
- year={2024}
- }
+ www.linkbricks.com, www.linkbricks.vc