eckendoerffer committed on
Commit 86c1e5d
1 Parent(s): ee5bef8

Delete extract_news/4_extract_news_url.py

Files changed (1)
  1. extract_news/4_extract_news_url.py +0 -132
extract_news/4_extract_news_url.py DELETED
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-Random Line Fetcher for Large Datasets
-
-Extracts and stores relevant links from local French online news articles.
-
-pip install beautifulsoup4 mysql-connector-python colorama
-
-Author : Guillaume Eckendoerffer
-Date : 28-09-23
-Repository : https://github.com/Eckendoerffer/TorchTrainerFlow/
-             https://huggingface.co/datasets/eckendoerffer/news_fr
-"""
-
-import os
-from bs4 import BeautifulSoup
-import mysql.connector
-import hashlib
-from colorama import Fore, init
-
-# Database configuration
-db_config = {
-    "host": "[host]",
-    "user": "[user]",
-    "password": "[passwd]",
-    "database": "[database]"
-}
-
-conn = mysql.connector.connect(**db_config)
-cursor = conn.cursor()
-query = "SELECT `key_media` FROM `base_news` WHERE `key_media` != ''"
-cursor.execute(query)
-keys = cursor.fetchall()
-formatted_keys = "|".join([key[0] for key in keys]) + "|"
-
-init(autoreset=True)
-
-def get_dom_path(url):
-    from urllib.parse import urlparse
-    parsed_url = urlparse(url)
-    return f"{parsed_url.scheme}://{parsed_url.netloc}"
-
-def get_html_content(file_path):
-    with open(file_path, 'r', encoding='utf8', errors='ignore') as file:
-        return file.read()
-
-def mysqli_return_number(conn, query, params=None):
-    cursor = conn.cursor()
-    cursor.execute(query)
-    result = cursor.fetchone()
-    cursor.close()
-    return result[0] if result else 0
-
-def mysqli_return_count(conn, query):
-    cursor = conn.cursor()
-    cursor.execute(query)
-    result = cursor.fetchone()
-    cursor.close()
-    return result[0] if result else 0
-
-def process_news_source():
-    global formatted_keys
-
-    cursor = conn.cursor()
-    query = ("SELECT `id`, `url`, `media` FROM `base_news` WHERE `link`='0' AND `step` > 0 AND `id` > 215000 AND `url` NOT LIKE 'https://avis-vin.%' AND `url` NOT LIKE 'https://www.elle.fr/%' AND `url` NOT LIKE 'www.lamontagne.fr/%' AND `url` NOT LIKE 'https://www.rtbf.be/%' AND `url` NOT LIKE 'https://www.tf1info.fr/%' AND `url` NOT LIKE 'https://www.futura-sciences.com/%' AND `url` NOT LIKE 'https://cdn-elle.ladmedia.fr/%' ORDER BY Rand() LIMIT 1")
-    cursor.execute(query)
-    row = cursor.fetchone()
-
-    if not row:
-        return 'No unprocessed news source found.'
-
-    id_source, url_source, id_media = row
-    dom = get_dom_path(url_source)
-    cursor.execute(f"UPDATE `base_news` SET `link`='1' WHERE `id`='{id_source}' LIMIT 1")
-    conn.commit()
-
-    querys = "SELECT COUNT(`id`) FROM `base_news` WHERE `step`='0'"
-    nb_link = mysqli_return_count(conn, querys)
-
-    file_path = f"sources/html_news/{id_source}.txt"
-    if os.path.exists(file_path):
-        html_content = get_html_content(file_path)
-    else:
-        return
-
-    print(f"{nb_link} {url_source} {id_media} ({len(html_content)})")
-
-    soup = BeautifulSoup(html_content, 'html.parser')
-    nb_add = 0
-    for link in soup.find_all('a'):
-        url = link.get('href')
-        if url is None:
-            continue
-        url = url.split("#")[0]
-        url = url.split("?")[0]
-
-        if not url:
-            continue
-        if not "//" in url:
-            url = f"{dom}/{url}" if url[0] != '/' else f"{dom}{url}"
-        elif "http" not in url:
-            url = 'https:' + url
-        if not url.startswith(("http://", "https://")) or url.count(' ') or url.count('%') or url.count('\''):
-            continue
-
-        key = hashlib.md5(url.encode()).hexdigest()
-        nb_base_news = formatted_keys.count(f'{key}|')
-
-        if url.startswith(dom):
-            if nb_base_news:
-                #print(Fore.YELLOW + url)
-                continue
-            elif (
-                url.count("-") > 6 and
-                not any(substring in url for substring in ['replay', 'video', 'login', '/inloggen', '?redirect', '.jpg', '.png', 'mailto'])
-            ):
-                print(Fore.GREEN + url)
-                insert_query = f"INSERT INTO `base_news` (`id`, `key_media`, `media`, `url`, `step`) VALUES (NULL, '{key}', '{id_media}', '{url}', '0');"
-                cursor.execute(insert_query)
-                conn.commit()
-                formatted_keys = f'{formatted_keys}{key}|'
-                nb_add += 1
-            else:
-                #print(Fore.RED + url)
-                continue
-
-while True:
-    process_news_source()
-
-conn.close()
-
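
For reference, the core of the deleted script is its link-harvesting loop: it parses a saved HTML page with BeautifulSoup, resolves each href against the article's domain, keeps only same-domain, article-like URLs (more than six hyphens and none of the excluded substrings), and deduplicates them through an MD5 key before inserting them into `base_news`. The sketch below condenses that step into a standalone function; the `harvest_links` name, the in-memory `seen_keys` set, and the sample HTML are illustrative assumptions only, since the original read its keys from MySQL and wrote matches straight back to the database.

# Minimal sketch of the deleted script's normalization + MD5 dedup step.
# `seen_keys` stands in for the `key_media` values the script loaded from MySQL.
import hashlib
from urllib.parse import urlparse

from bs4 import BeautifulSoup

EXCLUDED = ('replay', 'video', 'login', '/inloggen', '?redirect', '.jpg', '.png', 'mailto')

def harvest_links(html, page_url, seen_keys):
    parsed = urlparse(page_url)
    dom = f"{parsed.scheme}://{parsed.netloc}"
    found = []
    for a in BeautifulSoup(html, "html.parser").find_all("a"):
        url = (a.get("href") or "").split("#")[0].split("?")[0]
        if not url:
            continue
        if "//" not in url:                  # relative path -> absolute
            url = f"{dom}/{url}" if not url.startswith("/") else f"{dom}{url}"
        elif not url.startswith("http"):     # protocol-relative ("//host/...")
            url = "https:" + url
        if not url.startswith(("http://", "https://")) or any(c in url for c in (" ", "%", "'")):
            continue
        key = hashlib.md5(url.encode()).hexdigest()
        if (url.startswith(dom) and key not in seen_keys
                and url.count("-") > 6
                and not any(s in url for s in EXCLUDED)):
            seen_keys.add(key)
            found.append(url)
    return found

if __name__ == "__main__":
    # Hypothetical page content, for illustration only.
    sample = '<a href="/societe/un-exemple-d-article-avec-beaucoup-de-tirets-2023.html">lire</a>'
    print(harvest_links(sample, "https://example.fr/actu/page.html", set()))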