jeanpoll committed
Commit: f4fbe25
Parent(s): 4fbea16

working version of wikiner_fr.py

Files changed: wikiner_fr.py (+10 -15)

wikiner_fr.py CHANGED
@@ -21,7 +21,7 @@ import json
 import os

 import datasets
-
+from datasets import Dataset


 _NER_LABEL_NAMES = [
@@ -65,7 +65,7 @@ _LICENSE = ""
 # The HuggingFace dataset library don't host the datasets but only point to the original files
 # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
 _URLs = {
-    "NER": "https://huggingface.co/datasets/Jean-Baptiste/wikiner_fr/resolve/main/data"
+    "NER": "https://huggingface.co/datasets/Jean-Baptiste/wikiner_fr/resolve/main/data.zip"
 }


@@ -96,7 +96,7 @@ class WikinerFr(datasets.GeneratorBasedBuilder):
         if self.config.name == "NER":  # This is the name of the configuration selected in BUILDER_CONFIGS above
             features = datasets.Features(
                 {
-                    "id": datasets.
+                    "id": datasets.Value("int32"),
                     "ner_tags": datasets.Sequence(
                         feature=datasets.ClassLabel(num_classes=len(_NER_LABEL_NAMES), names=_NER_LABEL_NAMES)
                     ),
@@ -142,7 +142,7 @@ class WikinerFr(datasets.GeneratorBasedBuilder):
                 name=datasets.Split.TRAIN,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(data_dir, "train
+                    "filepath": os.path.join(data_dir, "data","train"),
                     "split": "train",
                 },
             ),
@@ -150,7 +150,7 @@ class WikinerFr(datasets.GeneratorBasedBuilder):
                 name=datasets.Split.TEST,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(data_dir, "test
+                    "filepath": os.path.join(data_dir, "data", "test"),
                     "split": "test"
                 },
             )
@@ -162,13 +162,8 @@ class WikinerFr(datasets.GeneratorBasedBuilder):
         """ Yields examples as (key, example) tuples. """
         # This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
         # The `key` is here for legacy reason (tfds) and is not important in itself.
-
-
-        #
-
-
-        # yield id_, {
-        #     "sentence": data["sentence"],
-        #     "option1": data["option1"],
-        #     "answer": "" if split == "test" else data["answer"],
-        # }
+        dataset = Dataset.load_from_disk(filepath)
+        for i in range(0, len(dataset)):
+            # print(dataset[i])
+            yield i, {"id": str(dataset[i]["id"]),"tokens": dataset[i]["tokens"], "ner_tags": dataset[i]["ner_tags"]}
+
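For context, the gen_kwargs change only makes sense together with the part of _split_generators that this diff does not show. The sketch below is an assumption about that unchanged code, following the usual GeneratorBasedBuilder pattern: dl_manager.download_and_extract fetches the data.zip archive configured in _URLs and returns the extraction directory, which is why the filepaths now include the extra "data" folder.

    def _split_generators(self, dl_manager):
        # Sketch only: this part of the script is not in the diff. It assumes the
        # standard pattern where the configured URL ("NER" -> .../data.zip) is
        # downloaded and extracted, and data_dir is the extraction root that
        # contains a "data/" folder with the saved train and test datasets.
        my_urls = _URLs[self.config.name]
        data_dir = dl_manager.download_and_extract(my_urls)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": os.path.join(data_dir, "data", "train"), "split": "train"},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"filepath": os.path.join(data_dir, "data", "test"), "split": "test"},
            ),
        ]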
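With the script in this state, the dataset could be loaded roughly as follows. This is a sketch, not part of the commit: it assumes the script lives in the Jean-Baptiste/wikiner_fr repository (the repo id taken from the URL in _URLs) and a datasets release that still executes dataset loading scripts.

    from datasets import load_dataset

    # Sketch: repo id inferred from the _URLs entry above; older datasets
    # versions download and run the wikiner_fr.py script found in that repo.
    dataset = load_dataset("Jean-Baptiste/wikiner_fr")

    # Fields yielded by _generate_examples:
    example = dataset["train"][0]
    print(example["id"])
    print(example["tokens"])
    print(example["ner_tags"])  # class indices into _NER_LABEL_NAMES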