Divyanshusingh01 committed on
Commit 1f06874
1 Parent(s): 0288a79

Update README.md

Files changed (1)
  1. README.md +4 -4
README.md CHANGED
@@ -50,6 +50,7 @@ If you use this dataset in your research or applications, please consider citing
 We acknowledge and cite the CulturaX dataset using the following citation:

 ```
+
 @misc{nguyen2023culturax,
 title={CulturaX: A Cleaned, Enormous, and Multilingual Dataset for Large Language Models in 167 Languages},
 author={Thuat Nguyen and Chien Van Nguyen and Viet Dac Lai and Hieu Man and Nghia Trung Ngo and Franck Dernoncourt and Ryan A. Rossi and Thien Huu Nguyen},
@@ -58,12 +59,12 @@ We acknowledge and cite the CulturaX dataset using the following citation:
 archivePrefix={arXiv},
 primaryClass={cs.CL}.

-``
+```

 Additionally, the dataset includes news article data, and we acknowledge and cite the source of this data using the following citations:


-@inproceedings{see-etal-2017-get,
+@inproceedings{see-etal-2017-get,
 title = "Get To The Point: Summarization with Pointer-Generator Networks",
 author = "See, Abigail and
 Liu, Peter J. and
@@ -76,8 +77,7 @@ Additionally, the dataset includes news article data, and we acknowledge and cit
 url = "https://www.aclweb.org/anthology/P17-1099",
 doi = "10.18653/v1/P17-1099",
 pages = "1073--1083",
-abstract = "Neural sequence-to-sequence models have provided a viable new approach for abstractive text summarization (meaning they are not restricted to simply selecting and rearranging passages from the original text). However, these models have two shortcomings: they are liable to reproduce factual details inaccurately, and they tend to repeat themselves. In this work we propose a novel architecture that augments the standard sequence-to-sequence attentional model in two orthogonal ways. First, we use a hybrid pointer-generator network that can copy words from the source text via pointing, which aids accurate reproduction of information, while retaining the ability to produce novel words through the generator. Second, we use coverage to keep track of what has been summarized, which discourages repetition. We apply our model to the CNN / Daily Mail summarization task, outperforming the current abstractive state-of-the-art by at least 2 ROUGE points.",
-}
+abstract = "Neural sequence-to-sequence models have provided a viable new approach for abstractive text summarization (meaning they are not restricted to simply selecting and rearranging passages from the original text). However, these models have two shortcomings: they are liable to reproduce factual details inaccurately, and they tend to repeat themselves. In this work we propose a novel architecture that augments the standard sequence-to-sequence attentional model in two orthogonal ways. First, we use a hybrid pointer-generator network that can copy words from the source text via pointing, which aids accurate reproduction of information, while retaining the ability to produce novel words through the generator. Second, we use coverage to keep track of what has been summarized, which discourages repetition. We apply our model to the CNN / Daily Mail summarization task, outperforming the current abstractive state-of-the-art by at least 2 ROUGE points.",}

 @inproceedings{DBLP:conf/nips/HermannKGEKSB15,
 author={Karl Moritz Hermann and Tomás Kociský and Edward Grefenstette and Lasse Espeholt and Will Kay and Mustafa Suleyman and Phil Blunsom},