dfki-nlp committed on
Commit
96d77a2
1 Parent(s): c11a6bf

Update tacred.py

Files changed (1)
  1. tacred.py +7 -7
tacred.py CHANGED
@@ -210,15 +210,15 @@ class Tacred(datasets.GeneratorBasedBuilder):
                 {
                     "id": datasets.Value("string"),
                     "docid": datasets.Value("string"),
-                    "tokens": datasets.Sequence(datasets.Value("string")),
+                    "token": datasets.Sequence(datasets.Value("string")),
                     "subj_start": datasets.Value("int32"),
                     "subj_end": datasets.Value("int32"),
                     "subj_type": datasets.ClassLabel(names=_NER_CLASS_LABELS),
                     "obj_start": datasets.Value("int32"),
                     "obj_end": datasets.Value("int32"),
                     "obj_type": datasets.Value("string"),
-                    "pos_tags": datasets.Sequence(datasets.Value("string")),
-                    "ner_tags": datasets.Sequence(datasets.Value("string")),
+                    "stanford_pos": datasets.Sequence(datasets.Value("string")),
+                    "stanford_ner": datasets.Sequence(datasets.Value("string")),
                     "stanford_deprel": datasets.Sequence(datasets.Value("string")),
                     "stanford_head": datasets.Sequence(datasets.Value("int32")),
                     "relation": datasets.ClassLabel(names=_CLASS_LABELS),
@@ -288,7 +288,7 @@ class Tacred(datasets.GeneratorBasedBuilder):
 
     def _generate_examples(self, filepath, patch_filepath):
         """Yields examples."""
-        # TODO: This method will receive as arguments the `gen_kwargs` defined in the previous `_split_generators` method.
+        # This method will receive as arguments the `gen_kwargs` defined in the previous `_split_generators` method.
         # It is in charge of opening the given file and yielding (key, example) tuples from the dataset
         # The key is not important, it's more here for legacy reason (legacy from tfds)
         patch_examples = {}
@@ -307,7 +307,7 @@ class Tacred(datasets.GeneratorBasedBuilder):
             yield id_, {
                 "id": example["id"],
                 "docid": example["docid"],
-                "tokens": [convert_ptb_token(token) for token in example["token"]],
+                "token": [convert_ptb_token(token) for token in example["token"]],
                 "subj_start": example["subj_start"],
                 "subj_end": example["subj_end"] + 1,  # make end offset exclusive
                 "subj_type": example["subj_type"],
@@ -315,8 +315,8 @@ class Tacred(datasets.GeneratorBasedBuilder):
                 "obj_end": example["obj_end"] + 1,  # make end offset exclusive
                 "obj_type": example["obj_type"],
                 "relation": example["relation"],
-                "pos_tags": example["stanford_pos"],
-                "ner_tags": example["stanford_ner"],
+                "stanford_pos": example["stanford_pos"],
+                "stanford_ner": example["stanford_ner"],
                 "stanford_deprel": example["stanford_deprel"],
                 "stanford_head": [
                     head - 1 for head in example["stanford_head"]
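With these renames, downstream code reads the token and Stanford annotation columns under the original TACRED JSON key names. A minimal usage sketch, assuming the script is loaded from a local file and that data_dir points at a manually obtained copy of the LDC-licensed TACRED JSON files (both paths are placeholders):

from datasets import load_dataset

# Paths are placeholders; TACRED is licensed by the LDC and is not downloaded automatically.
ds = load_dataset("./tacred.py", data_dir="/path/to/tacred/data/json", split="train")

ex = ds[0]
tokens = ex["token"]           # list of strings, PTB escape tokens already converted back
pos_tags = ex["stanford_pos"]  # POS tags, one per token
ner_tags = ex["stanford_ner"]  # NER tags, one per token
heads = ex["stanford_head"]    # head indices, shifted to 0-based by the script
assert len(tokens) == len(pos_tags) == len(ner_tags) == len(heads)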