|
|
|
|
|
""" |
|
News Source Extractor: |
|
|
|
This script is designed to extract the content of news articles from various French media sources. |
|
The URLs of these articles are retrieved from the `base_news` table, where articles marked with |
|
a `step` value of '0' are pending extraction. |
|
|
|
To install the necessary packages: |
|
pip install aiohttp mysql-connector-python |
|
|
|
Once extracted, the content of each article is saved locally for further processing. This separation |
|
of content fetching and processing is intentional to optimize resource management. |
|
|
|
The script operates in batches, processing a defined number of entries (`NB_BY_STEP`) at a time. |
|
After extraction, the `step` value of the processed articles is updated to '1' to indicate completion. |
|
|
|
Author : Guillaume Eckendoerffer |
|
Date : 29-09-23 |
|
Repository : https://github.com/Eckendoerffer/TorchTrainerFlow/ |
|
https://huggingface.co/datasets/eckendoerffer/news_fr |
|
""" |
|
|
|
import asyncio |
|
import aiohttp |
|
import time |
|
import mysql.connector |
|
import os |
|
|
|
|
|
# MySQL connection settings: replace the bracketed placeholders with real
# credentials before running the script.
db_config = {
    "host": "[host]",
    "user": "[user]",
    "password": "[passwd]",
    "database": "[database]"
}

# Number of pending articles (step='0') fetched and processed per batch.
NB_BY_STEP = 20

# Working directory; downloaded HTML is written under <path>/sources/html_news/.
path = os.getcwd()
|
|
|
def mysqli_return_number(conn, query):
    """Execute a scalar query and return the first column of its first row.

    Args:
        conn: An open MySQL connection.
        query: SQL expected to yield a single numeric value (e.g. COUNT).

    Returns:
        The first column of the first row, or 0 when no row is returned.
    """
    cursor = conn.cursor()
    try:
        # finally guarantees the cursor is released even if execute() raises,
        # so a failing query cannot leak cursors on the shared connection.
        cursor.execute(query)
        result = cursor.fetchone()
    finally:
        cursor.close()
    return result[0] if result else 0
|
|
|
async def fetch_and_save(url, id_source, session=None):
    """Download `url` and save the raw HTML to sources/html_news/<id_source>.txt.

    Args:
        url: Article URL to fetch.
        id_source: `base_news` primary key; used as the output filename.
        session: Optional aiohttp.ClientSession to reuse across many calls
            (avoids the overhead of one session per request). When None, a
            short-lived session is created and closed here, preserving the
            original per-call behavior.

    All errors are logged and swallowed so one bad URL cannot abort a batch.
    """
    time_start_item = time.time()
    own_session = session is None
    if own_session:
        session = aiohttp.ClientSession()
    try:
        async with session.get(url) as response:
            byte_content = await response.read()
        # Most French news sites serve UTF-8; fall back to ISO-8859-1,
        # which accepts any byte sequence, so decoding never fails twice.
        try:
            text_content = byte_content.decode('utf-8')
        except UnicodeDecodeError:
            text_content = byte_content.decode('ISO-8859-1')
        with open(f"{path}/sources/html_news/{id_source}.txt", "w", encoding="utf-8") as file:
            file.write(text_content)
        time_end_item = time.time()
        print(f'{id_source}) {time_end_item-time_start_item:.5f} {url}')
    except aiohttp.client_exceptions.TooManyRedirects:
        print(f"Too many redirects for URL: {url}")
    except aiohttp.client_exceptions.ClientConnectorError:
        print(f"Failed to connect to URL: {url}")
    except Exception as e:
        print(f"Unexpected error for URL {url}: {str(e)}")
    finally:
        # Only close sessions we created ourselves; a caller-supplied
        # session stays open for reuse.
        if own_session:
            await session.close()
|
|
|
|
|
async def main():
    """Drain the `base_news` backlog: fetch pending articles batch by batch.

    Loops until no row with step='0' remains. Each iteration selects a
    random batch, marks it as taken, downloads all URLs concurrently, then
    reports progress.
    """
    conn = mysql.connector.connect(**db_config)
    try:
        while True:
            time_start = time.time()

            # Grab a random batch of unprocessed articles. NB_BY_STEP is a
            # trusted module constant, so interpolating it is safe.
            cursor = conn.cursor()
            cursor.execute(
                "SELECT `id`, `url` FROM `base_news` WHERE `step`='0' "
                f"ORDER BY RAND() LIMIT {NB_BY_STEP}"
            )
            rows = cursor.fetchall()
            cursor.close()
            if not rows:
                break

            # Mark the whole batch as taken up-front with ONE parameterized
            # statement (instead of one f-string UPDATE per row), then commit:
            # Connector/Python disables autocommit by default, so without an
            # explicit commit the step updates would be silently rolled back.
            ids = [id_source for id_source, _ in rows]
            placeholders = ", ".join(["%s"] * len(ids))
            cursor = conn.cursor()
            cursor.execute(
                f"UPDATE `base_news` SET `step`='1' WHERE `id` IN ({placeholders})",
                ids,
            )
            conn.commit()
            cursor.close()

            # Download the whole batch concurrently.
            tasks = [
                fetch_and_save(url.strip(), id_source)
                for id_source, url in rows
            ]
            await asyncio.gather(*tasks)

            nb_base = mysqli_return_number(
                conn, "SELECT COUNT(`id`) FROM `base_news` WHERE `step`='0'"
            )
            time_per_item = (time.time() - time_start) / NB_BY_STEP
            print(f"Remaining: {nb_base} - Time: {time_per_item:.3f}s/item")
    finally:
        # Close the connection even if a batch raises.
        conn.close()
|
|
|
if __name__ == "__main__":
    # Run the batch extractor only when executed as a script, so importing
    # this module (e.g. for tests) does not open DB/network connections.
    asyncio.run(main())