Refine code — diff view of wikipedia.py (+3 −3)
wikipedia.py
CHANGED
@@ -981,7 +981,6 @@ class Wikipedia(datasets.GeneratorBasedBuilder):
|
|
981 |
def _extract_content(filepath):
|
982 |
"""Extracts article content from a single WikiMedia XML file."""
|
983 |
logger.info("generating examples from = %s", filepath)
|
984 |
-
content = []
|
985 |
f = bz2.BZ2File(filename=filepath)
|
986 |
# Workaround due to: https://github.com/tensorflow/tensorflow/issues/33563
|
987 |
utf_f = codecs.getreader("utf-8")(f)
|
@@ -1007,8 +1006,7 @@ class Wikipedia(datasets.GeneratorBasedBuilder):
|
|
1007 |
if raw_content is None or red_ is not None:
|
1008 |
continue
|
1009 |
|
1010 |
-
|
1011 |
-
return content
|
1012 |
|
1013 |
def _clean_content(inputs, language):
|
1014 |
"""Cleans raw wikicode to extract text."""
|
@@ -1037,7 +1035,9 @@ class Wikipedia(datasets.GeneratorBasedBuilder):
|
|
1037 |
examples.put(ProcessDone())
|
1038 |
|
1039 |
with multiprocessing.Pool() as pool:
|
|
|
1040 |
result = pool.apply_async(parse_and_clean, filepaths)
|
|
|
1041 |
n = len(filepaths)
|
1042 |
complete = 0
|
1043 |
while complete != n:
|
|
|
981 |
def _extract_content(filepath):
|
982 |
"""Extracts article content from a single WikiMedia XML file."""
|
983 |
logger.info("generating examples from = %s", filepath)
|
|
|
984 |
f = bz2.BZ2File(filename=filepath)
|
985 |
# Workaround due to: https://github.com/tensorflow/tensorflow/issues/33563
|
986 |
utf_f = codecs.getreader("utf-8")(f)
|
|
|
1006 |
if raw_content is None or red_ is not None:
|
1007 |
continue
|
1008 |
|
1009 |
+
yield (id_, title, raw_content)
|
|
|
1010 |
|
1011 |
def _clean_content(inputs, language):
|
1012 |
"""Cleans raw wikicode to extract text."""
|
|
|
1035 |
examples.put(ProcessDone())
|
1036 |
|
1037 |
with multiprocessing.Pool() as pool:
|
1038 |
+
print("start apply_async")
|
1039 |
result = pool.apply_async(parse_and_clean, filepaths)
|
1040 |
+
print("apply_async called")
|
1041 |
n = len(filepaths)
|
1042 |
complete = 0
|
1043 |
while complete != n:
|