asahi417 committed on
Commit
89204dc
1 Parent(s): ca22fc0
conll2003.py CHANGED
@@ -5,21 +5,14 @@ import datasets
 
 logger = datasets.logging.get_logger(__name__)
 _DESCRIPTION = """[CoNLL 2003 NER dataset](https://aclanthology.org/W03-0419/)"""
+_NAME = "conll2003"
+_HOME_PAGE = "https://github.com/asahi417/tner"
 _URL = 'https://huggingface.co/datasets/tner/conll2003/raw/main/dataset'
 _URLS = {
-    str(datasets.Split.TEST): [f'{_URL}/test{i:02d}.jsonl' for i in range(8)],
-    str(datasets.Split.TRAIN): [f'{_URL}/train{i:02d}.jsonl' for i in range(52)],
-    str(datasets.Split.VALIDATION): [f'{_URL}/validation{i:02d}.jsonl' for i in range(8)],
+    str(datasets.Split.TEST): [f'{_URL}/test.json'],
+    str(datasets.Split.TRAIN): [f'{_URL}/train.json'],
+    str(datasets.Split.VALIDATION): [f'{_URL}/valid.json'],
 }
-
-import os
-
-import datasets
-
-
-logger = datasets.logging.get_logger(__name__)
-
-
 _CITATION = """\
 @inproceedings{tjong-kim-sang-de-meulder-2003-introduction,
     title = "Introduction to the {C}o{NLL}-2003 Shared Task: Language-Independent Named Entity Recognition",
@@ -32,33 +25,11 @@ _CITATION = """\
 }
 """
 
-_DESCRIPTION = """\
-The shared task of CoNLL-2003 concerns language-independent named entity recognition. We will concentrate on
-four types of named entities: persons, locations, organizations and names of miscellaneous entities that do
-not belong to the previous three groups.
-
-The CoNLL-2003 shared task data files contain four columns separated by a single space. Each word has been put on
-a separate line and there is an empty line after each sentence. The first item on each line is a word, the second
-a part-of-speech (POS) tag, the third a syntactic chunk tag and the fourth the named entity tag. The chunk tags
-and the named entity tags have the format I-TYPE which means that the word is inside a phrase of type TYPE. Only
-if two phrases of the same type immediately follow each other, the first word of the second phrase will have tag
-B-TYPE to show that it starts a new phrase. A word with tag O is not part of a phrase. Note the dataset uses IOB2
-tagging scheme, whereas the original dataset uses IOB1.
-
-For more details see https://www.clips.uantwerpen.be/conll2003/ner/ and https://www.aclweb.org/anthology/W03-0419
-"""
-
-_URL = "https://data.deepai.org/conll2003.zip"
-_TRAINING_FILE = "train.txt"
-_DEV_FILE = "valid.txt"
-_TEST_FILE = "test.txt"
-
-
 class Conll2003Config(datasets.BuilderConfig):
-    """BuilderConfig for Conll2003"""
+    """BuilderConfig"""
 
     def __init__(self, **kwargs):
-        """BuilderConfig forConll2003.
+        """BuilderConfig.
 
         Args:
             **kwargs: keyword arguments forwarded to super.
@@ -67,102 +38,36 @@ class Conll2003Config(datasets.BuilderConfig):
 
 
 class Conll2003(datasets.GeneratorBasedBuilder):
-    """Conll2003 dataset."""
+    """Dataset."""
 
     BUILDER_CONFIGS = [
-        Conll2003Config(name="conll2003", version=datasets.Version("1.0.0"), description="Conll2003 dataset"),
+        Conll2003Config(name=_NAME, version=datasets.Version("1.0.0"), description=_DESCRIPTION),
     ]
 
+    def _split_generators(self, dl_manager):
+        downloaded_file = dl_manager.download_and_extract(_URLS)
+        return [datasets.SplitGenerator(name=i, gen_kwargs={"filepaths": downloaded_file[str(i)]})
+                for i in [datasets.Split.TRAIN, datasets.Split.VALIDATION, datasets.Split.TEST]]
+
+    def _generate_examples(self, filepaths):
+        _key = 0
+        for filepath in filepaths:
+            logger.info(f"generating examples from = {filepath}")
+
+            with open(filepath) as f:
+                data_list = json.load(f)
+                print(data_list)
+            for (tokens, tags) in data_list:
+                yield _key, {'tokens': tokens, 'tags': tags}
+                _key += 1
+
     def _info(self):
         return datasets.DatasetInfo(
             description=_DESCRIPTION,
             features=datasets.Features(
                 {
-                    "id": datasets.Value("string"),
                     "tokens": datasets.Sequence(datasets.Value("string")),
-                    "pos_tags": datasets.Sequence(
-                        datasets.features.ClassLabel(
-                            names=[
-                                '"',
-                                "''",
-                                "#",
-                                "$",
-                                "(",
-                                ")",
-                                ",",
-                                ".",
-                                ":",
-                                "``",
-                                "CC",
-                                "CD",
-                                "DT",
-                                "EX",
-                                "FW",
-                                "IN",
-                                "JJ",
-                                "JJR",
-                                "JJS",
-                                "LS",
-                                "MD",
-                                "NN",
-                                "NNP",
-                                "NNPS",
-                                "NNS",
-                                "NN|SYM",
-                                "PDT",
-                                "POS",
-                                "PRP",
-                                "PRP$",
-                                "RB",
-                                "RBR",
-                                "RBS",
-                                "RP",
-                                "SYM",
-                                "TO",
-                                "UH",
-                                "VB",
-                                "VBD",
-                                "VBG",
-                                "VBN",
-                                "VBP",
-                                "VBZ",
-                                "WDT",
-                                "WP",
-                                "WP$",
-                                "WRB",
-                            ]
-                        )
-                    ),
-                    "chunk_tags": datasets.Sequence(
-                        datasets.features.ClassLabel(
-                            names=[
-                                "O",
-                                "B-ADJP",
-                                "I-ADJP",
-                                "B-ADVP",
-                                "I-ADVP",
-                                "B-CONJP",
-                                "I-CONJP",
-                                "B-INTJ",
-                                "I-INTJ",
-                                "B-LST",
-                                "I-LST",
-                                "B-NP",
-                                "I-NP",
-                                "B-PP",
-                                "I-PP",
-                                "B-PRT",
-                                "I-PRT",
-                                "B-SBAR",
-                                "I-SBAR",
-                                "B-UCP",
-                                "I-UCP",
-                                "B-VP",
-                                "I-VP",
-                            ]
-                        )
-                    ),
-                    "ner_tags": datasets.Sequence(
+                    "tags": datasets.Sequence(
                         datasets.features.ClassLabel(
                             names=[
                                 "O",
@@ -180,61 +85,6 @@ class Conll2003(datasets.GeneratorBasedBuilder):
                 }
             ),
             supervised_keys=None,
-            homepage="https://www.aclweb.org/anthology/W03-0419/",
+            homepage=_HOME_PAGE,
             citation=_CITATION,
         )
-
-    def _split_generators(self, dl_manager):
-        """Returns SplitGenerators."""
-        downloaded_file = dl_manager.download_and_extract(_URL)
-        data_files = {
-            "train": os.path.join(downloaded_file, _TRAINING_FILE),
-            "dev": os.path.join(downloaded_file, _DEV_FILE),
-            "test": os.path.join(downloaded_file, _TEST_FILE),
-        }
-
-        return [
-            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_files["train"]}),
-            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": data_files["dev"]}),
-            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": data_files["test"]}),
-        ]
-
-    def _generate_examples(self, filepath):
-        logger.info("⏳ Generating examples from = %s", filepath)
-        with open(filepath, encoding="utf-8") as f:
-            guid = 0
-            tokens = []
-            pos_tags = []
-            chunk_tags = []
-            ner_tags = []
-            for line in f:
-                if line.startswith("-DOCSTART-") or line == "" or line == "\n":
-                    if tokens:
-                        yield guid, {
-                            "id": str(guid),
-                            "tokens": tokens,
-                            "pos_tags": pos_tags,
-                            "chunk_tags": chunk_tags,
-                            "ner_tags": ner_tags,
-                        }
-                        guid += 1
-                        tokens = []
-                        pos_tags = []
-                        chunk_tags = []
-                        ner_tags = []
-                else:
-                    # conll2003 tokens are space separated
-                    splits = line.split(" ")
-                    tokens.append(splits[0])
-                    pos_tags.append(splits[1])
-                    chunk_tags.append(splits[2])
-                    ner_tags.append(splits[3].rstrip())
-            # last example
-            if tokens:
-                yield guid, {
-                    "id": str(guid),
-                    "tokens": tokens,
-                    "pos_tags": pos_tags,
-                    "chunk_tags": chunk_tags,
-                    "ner_tags": ner_tags,
-                }
 
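Net effect of the change: the duplicated reference loader that parsed the space-separated CoNLL columns from the data.deepai.org zip is gone, the script now reads pre-tokenized JSON from the tner dataset repository, and the feature schema narrows from id/tokens/pos_tags/chunk_tags/ner_tags to just tokens and tags. One review note: the print(data_list) left in the new _generate_examples looks like a stray debug statement and will dump each split to stdout during generation. A minimal usage sketch, assuming the script is served on the Hub as tner/conll2003 (the repository that _URL points into):

import datasets

# Split names follow datasets.Split, i.e. the keys of _URLS.
dataset = datasets.load_dataset("tner/conll2003")

example = dataset["train"][0]
print(example["tokens"])  # list of token strings
print(example["tags"])    # integer ClassLabel indices (formerly "ner_tags")

Here dl_manager.download_and_extract(_URLS) accepts the dict of per-split URL lists and returns local paths with the same nesting, which is why _split_generators can index downloaded_file[str(i)] and hand the list to gen_kwargs as "filepaths".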
dataset/{conll2003.label.json → label.json} RENAMED
File without changes
dataset/{conll2003.data.test.json → test.json} RENAMED
File without changes
dataset/{conll2003.data.train.json → train.json} RENAMED
File without changes
dataset/{conll2003.data.valid.json → valid.json} RENAMED
File without changes
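The renames drop the conll2003. prefix so the file names match the new _URLS entries. As written, _generate_examples json-loads each file and unpacks (tokens, tags) pairs, so every split file is expected to be a JSON array of two-element lists. A sketch of a compatible train.json under that assumption; the sentences and tag ids below are hypothetical (only index 0, "O", is visible in the diff above):

import json

# Hypothetical rows: each entry is a [tokens, tags] pair, the exact shape
# that `for (tokens, tags) in data_list` unpacks. Tag ids must line up with
# the ClassLabel names declared in _info (0 == "O"; the other ids here are
# illustrative only).
data_list = [
    [["EU", "rejects", "German", "call"], [3, 0, 7, 0]],
    [["Peter", "Blackburn"], [1, 2]],
]

with open("train.json", "w") as f:
    json.dump(data_list, f)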