leonadase committed on
Commit
e83e85c
1 Parent(s): d62f733

Update fdner.py

Browse files
Files changed (1) hide show
  1. fdner.py +152 -152
fdner.py CHANGED
@@ -1,153 +1,153 @@
1
- # coding=utf-8
2
- # Copyright 2020 HuggingFace Datasets Authors.
3
- #
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
- #
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
- #
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- # Lint as: python3
17
- """Introduction to the CoNLL-2003 Shared Task: Language-Independent Named Entity Recognition"""
18
-
19
- import os
20
-
21
- import datasets
22
-
23
-
24
- logger = datasets.logging.get_logger(__name__)
25
-
26
-
27
- _CITATION = """\
28
- @inproceedings{tjong-kim-sang-de-meulder-2003-introduction,
29
- title = "Introduction to the Fault_Detection_Ner Task: Language-Independent Named Entity Recognition",
30
- author = "Tian Jie",
31
- year = "2022"
32
- }
33
- """
34
-
35
- _DESCRIPTION = """\
36
- 用于故障诊断领域相关知识的命名实体识别语料
37
- """
38
-
39
- _URL = "https://cdn-lfs.huggingface.co/datasets/leonadase/fdner/89a87eacfebc06862ac4b5a356c35430dfdf8ef2f0f2e0d9ff5e02ce6c117474"
40
- _TRAINING_FILE = "train.txt"
41
- _DEV_FILE = "valid.txt"
42
- _TEST_FILE = "test.txt"
43
-
44
-
45
- class fdnerConfig(datasets.BuilderConfig):
46
- """BuilderConfig for fdNer"""
47
-
48
- def __init__(self, **kwargs):
49
- """BuilderConfig for fdNer.
50
- Args:
51
- **kwargs: keyword arguments forwarded to super.
52
- """
53
- logger.info("Generating examples from 1")
54
- super(fdnerConfig, self).__init__(**kwargs)
55
-
56
-
57
- class fdner(datasets.GeneratorBasedBuilder):
58
- """fdNer dataset."""
59
-
60
- BUILDER_CONFIGS = [
61
- fdnerConfig(name="fdner", version=datasets.Version("1.0.0"), description="fdner dataset"),
62
- ]
63
-
64
- def _info(self):
65
- logger.info("Generating examples from 1")
66
- return datasets.DatasetInfo(
67
- description=_DESCRIPTION,
68
- features=datasets.Features(
69
- {
70
- "id": datasets.Value("string"),
71
- "tokens": datasets.Sequence(datasets.Value("string")),
72
- "ner_tags": datasets.Sequence(
73
- datasets.features.ClassLabel(
74
- names=[
75
- "O",
76
- "B-EN",
77
- "I-EN",
78
- "B-STRUC",
79
- "I-STRUC",
80
- "B-CHA",
81
- "I-CHA",
82
- "B-KIND",
83
- "I-KIND",
84
- "B-ADV",
85
- "I-ADV",
86
- "B-DISA",
87
- "I-DISA",
88
- "B-METH",
89
- "I-METH",
90
- "B-NUM",
91
- "I-NUM",
92
- "B-PRO",
93
- "I-PRO",
94
- "B-THE",
95
- "I-THE",
96
- "B-DEF",
97
- "I-DEF",
98
- "B-FUC",
99
- "I-FUC",
100
- ]
101
- )
102
- ),
103
- }
104
- ),
105
- supervised_keys=None,
106
- # homepage="https://www.aclweb.org/anthology/W03-0419/",
107
- citation=_CITATION,
108
- )
109
-
110
- def _split_generators(self, dl_manager):
111
- logger.info("Generating examples from 2")
112
- """Returns SplitGenerators."""
113
- downloaded_file = dl_manager.download_and_extract(_URL)
114
- data_files = {
115
- "train": os.path.join(downloaded_file, _TRAINING_FILE),
116
- "dev": os.path.join(downloaded_file, _DEV_FILE),
117
- "test": os.path.join(downloaded_file, _TEST_FILE),
118
- }
119
-
120
- return [
121
- datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_files["train"]}),
122
- datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": data_files["dev"]}),
123
- datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": data_files["test"]}),
124
- ]
125
- def _generate_examples(self, filepath):
126
- logger.info("⏳ Generating examples from = %s", filepath)
127
- with open(filepath, encoding="utf-8") as f:
128
- guid = 0
129
- tokens = []
130
- ner_tags = []
131
- for line in f:
132
- if line.startswith("-DOCSTART-") or line == "" or line == "\n":
133
- if tokens:
134
- yield guid, {
135
- "id": str(guid),
136
- "tokens": tokens,
137
- "ner_tags": ner_tags,
138
- }
139
- guid += 1
140
- tokens = []
141
- ner_tags = []
142
- else:
143
- # conll2003 tokens are space separated
144
- splits = line.split(" ")
145
- tokens.append(splits[0])
146
- ner_tags.append(splits[1].rstrip())
147
- # last example
148
- yield guid, {
149
- "id": str(guid),
150
- "tokens": tokens,
151
- "ner_tags": ner_tags,
152
- }
153
 
 
1
+ # coding=utf-8
2
+ # Copyright 2020 HuggingFace Datasets Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ # Lint as: python3
17
+ """Introduction to the CoNLL-2003 Shared Task: Language-Independent Named Entity Recognition"""
18
+
19
+ import os
20
+
21
+ import datasets
22
+
23
+
24
+ logger = datasets.logging.get_logger(__name__)
25
+
26
+
27
+ _CITATION = """\
28
+ @inproceedings{tjong-kim-sang-de-meulder-2003-introduction,
29
+ title = "Introduction to the Fault_Detection_Ner Task: Language-Independent Named Entity Recognition",
30
+ author = "Tian Jie",
31
+ year = "2022"
32
+ }
33
+ """
34
+
35
+ _DESCRIPTION = """\
36
+ 用于故障诊断领域相关知识的命名实体识别语料
37
+ """
38
+
39
+ _URL = "https://huggingface.co/datasets/leonadase/fdner/resolve/main/fdner.zip"
40
+ _TRAINING_FILE = "train.txt"
41
+ _DEV_FILE = "valid.txt"
42
+ _TEST_FILE = "test.txt"
43
+
44
+
45
+ class fdnerConfig(datasets.BuilderConfig):
46
+ """BuilderConfig for fdNer"""
47
+
48
+ def __init__(self, **kwargs):
49
+ """BuilderConfig for fdNer.
50
+ Args:
51
+ **kwargs: keyword arguments forwarded to super.
52
+ """
53
+ logger.info("Generating examples from 1")
54
+ super(fdnerConfig, self).__init__(**kwargs)
55
+
56
+
57
+ class fdner(datasets.GeneratorBasedBuilder):
58
+ """fdNer dataset."""
59
+
60
+ BUILDER_CONFIGS = [
61
+ fdnerConfig(name="fdner", version=datasets.Version("1.0.0"), description="fdner dataset"),
62
+ ]
63
+
64
+ def _info(self):
65
+ logger.info("Generating examples from 1")
66
+ return datasets.DatasetInfo(
67
+ description=_DESCRIPTION,
68
+ features=datasets.Features(
69
+ {
70
+ "id": datasets.Value("string"),
71
+ "tokens": datasets.Sequence(datasets.Value("string")),
72
+ "ner_tags": datasets.Sequence(
73
+ datasets.features.ClassLabel(
74
+ names=[
75
+ "O",
76
+ "B-EN",
77
+ "I-EN",
78
+ "B-STRUC",
79
+ "I-STRUC",
80
+ "B-CHA",
81
+ "I-CHA",
82
+ "B-KIND",
83
+ "I-KIND",
84
+ "B-ADV",
85
+ "I-ADV",
86
+ "B-DISA",
87
+ "I-DISA",
88
+ "B-METH",
89
+ "I-METH",
90
+ "B-NUM",
91
+ "I-NUM",
92
+ "B-PRO",
93
+ "I-PRO",
94
+ "B-THE",
95
+ "I-THE",
96
+ "B-DEF",
97
+ "I-DEF",
98
+ "B-FUC",
99
+ "I-FUC",
100
+ ]
101
+ )
102
+ ),
103
+ }
104
+ ),
105
+ supervised_keys=None,
106
+ # homepage="https://www.aclweb.org/anthology/W03-0419/",
107
+ citation=_CITATION,
108
+ )
109
+
110
+ def _split_generators(self, dl_manager):
111
+ logger.info("Generating examples from 2")
112
+ """Returns SplitGenerators."""
113
+ downloaded_file = dl_manager.download_and_extract(_URL)
114
+ data_files = {
115
+ "train": os.path.join(downloaded_file, _TRAINING_FILE),
116
+ "dev": os.path.join(downloaded_file, _DEV_FILE),
117
+ "test": os.path.join(downloaded_file, _TEST_FILE),
118
+ }
119
+
120
+ return [
121
+ datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_files["train"]}),
122
+ datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": data_files["dev"]}),
123
+ datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": data_files["test"]}),
124
+ ]
125
+ def _generate_examples(self, filepath):
126
+ logger.info("⏳ Generating examples from = %s", filepath)
127
+ with open(filepath, encoding="utf-8") as f:
128
+ guid = 0
129
+ tokens = []
130
+ ner_tags = []
131
+ for line in f:
132
+ if line.startswith("-DOCSTART-") or line == "" or line == "\n":
133
+ if tokens:
134
+ yield guid, {
135
+ "id": str(guid),
136
+ "tokens": tokens,
137
+ "ner_tags": ner_tags,
138
+ }
139
+ guid += 1
140
+ tokens = []
141
+ ner_tags = []
142
+ else:
143
+ # conll2003 tokens are space separated
144
+ splits = line.split(" ")
145
+ tokens.append(splits[0])
146
+ ner_tags.append(splits[1].rstrip())
147
+ # last example
148
+ yield guid, {
149
+ "id": str(guid),
150
+ "tokens": tokens,
151
+ "ner_tags": ner_tags,
152
+ }
153