Update app.py
app.py
CHANGED
@@ -28,9 +28,8 @@ import re
 from collections import Counter
 from wordcloud import WordCloud
 import advertools as adv
-from collections import Counter
 
-#
+# Improve the page appearance
 st.set_page_config(
     layout="wide",
     page_title="محلل المواقع المتقدم | Website Analyzer Pro",
@@ -38,7 +37,7 @@ st.set_page_config(
     initial_sidebar_state="expanded"
 )
 
-#
+# Improve the design with refined CSS
 st.markdown("""
 <style>
 @import url('https://fonts.googleapis.com/css2?family=Tajawal:wght@400;500;700&display=swap');
@@ -156,7 +155,129 @@ class AdvancedWebsiteAnalyzer:
         }
         self.history = self.load_history()
 
-
+    def load_history(self):
+        try:
+            return pd.read_csv('analysis_history.csv')
+        except Exception:
+            return pd.DataFrame(columns=['url', 'timestamp', 'performance_score', 'seo_score', 'security_score'])
+
+    def save_history(self, data):
+        # DataFrame.append was removed in pandas 2.0; concat is the supported equivalent
+        self.history = pd.concat([self.history, pd.DataFrame([data])], ignore_index=True)
+        self.history.to_csv('analysis_history.csv', index=False)
+
+    async def analyze_performance(self, url):
+        try:
+            start_time = time.time()
+            async with httpx.AsyncClient() as client:
+                response = await client.get(url)
+            load_time = time.time() - start_time
+            page_size = len(response.content) / 1024  # KiB
+
+            # Analyze images and other resources
+            soup = BeautifulSoup(response.text, 'html.parser')
+            images = soup.find_all('img')
+            scripts = soup.find_all('script')
+            css_files = soup.find_all('link', {'rel': 'stylesheet'})
+
+            performance_metrics = {
+                "زمن التحميل": round(load_time, 2),
+                "حجم الصفحة": round(page_size, 2),
+                "حالة الاستجابة": response.status_code,
+                "عدد الصور": len(images),
+                "عدد ملفات JavaScript": len(scripts),
+                "عدد ملفات CSS": len(css_files),
+                "تقييم الأداء": self._calculate_performance_score(load_time, page_size, len(images), len(scripts)),
+                "توصيات التحسين": self._get_performance_recommendations(load_time, page_size, len(images), len(scripts))
+            }
+
+            # Merge in the resource analysis
+            resources_analysis = await self._analyze_resources(url)
+            performance_metrics.update(resources_analysis)
+
+            return performance_metrics
+        except Exception as e:
+            return {"error": f"خطأ في تحليل الأداء: {str(e)}"}
+
+    async def _analyze_resources(self, url):
+        try:
+            async with httpx.AsyncClient() as client:
+                response = await client.get(url)
+                soup = BeautifulSoup(response.text, 'html.parser')
+
+                # Fetch only the first 5 images to keep the scan fast;
+                # relative src values fail the request and are skipped below.
+                images = soup.find_all('img')
+                image_sizes = []
+                for img in images[:5]:
+                    if img.get('src'):
+                        try:
+                            img_response = await client.get(img['src'])
+                            image_sizes.append(len(img_response.content) / 1024)
+                        except Exception:
+                            continue
+
+            return {
+                "تحليل الموارد": {
+                    "متوسط حجم الصور": round(np.mean(image_sizes), 2) if image_sizes else 0,
+                    "عدد الموارد الخارجية": len(soup.find_all(['script', 'link', 'img'])),
+                    "توصيات تحسين الموارد": self._get_resource_recommendations(image_sizes)
+                }
+            }
+        except Exception as e:
+            return {"error": f"خطأ في تحليل الموارد: {str(e)}"}
+
+    def _calculate_performance_score(self, load_time, page_size, image_count, script_count):
+        score = 100
+
+        # Load-time penalty
+        if load_time > 2:
+            score -= min(30, (load_time - 2) * 10)
+
+        # Page-size penalty
+        if page_size > 1000:
+            score -= min(20, (page_size - 1000) / 100)
+
+        # Image-count penalty
+        if image_count > 10:
+            score -= min(15, (image_count - 10) * 1.5)
+
+        # Script-count penalty
+        if script_count > 5:
+            score -= min(15, (script_count - 5) * 2)
+
+        return max(0, round(score))
+
+    def _get_performance_recommendations(self, load_time, page_size, image_count, script_count):
+        recommendations = []
+
+        if load_time > 2:
+            recommendations.append({
+                "المشكلة": "بطء زمن التحميل",
+                "الحل": "تحسين سرعة الخادم وتفعيل التخزين المؤقت",
+                "الأولوية": "عالية"
+            })
+
+        if page_size > 1000:
+            recommendations.append({
+                "المشكلة": "حجم الصفحة كبير",
+                "الحل": "ضغط الملفات وتحسين الكود",
+                "الأولوية": "متوسطة"
+            })
+
+        if image_count > 10:
+            recommendations.append({
+                "المشكلة": "عدد كبير من الصور",
+                "الحل": "تحسين حجم الصور واستخدام التحميل الكسول",
+                "الأولوية": "متوسطة"
+            })
+
+        if script_count > 5:
+            recommendations.append({
+                "المشكلة": "عدد كبير من ملفات JavaScript",
+                "الحل": "دمج وضغط ملفات JavaScript",
+                "الأولوية": "عالية"
+            })
+
+        return recommendations if recommendations else [{"المشكلة": "لا توجد مشاكل", "الحل": "الأداء جيد!", "الأولوية": "منخفضة"}]
 
     async def analyze_seo(self, url):
         try:
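Note on the scorer above: with load_time = 3.5 s, page_size = 1500 KiB, 20 images, and 8 scripts, the penalties are min(30, 15) + min(20, 5) + min(15, 15) + min(15, 6) = 41, giving a score of 59. The hunk also calls self._get_resource_recommendations, which no hunk in this diff defines; unless it exists elsewhere in app.py, a minimal placeholder matching the recommendation shape used above could look like the sketch below. The 200 KiB threshold and the wording are assumptions, not part of the commit.

    def _get_resource_recommendations(self, image_sizes):
        # Hypothetical placeholder, not in the commit: flag heavy images.
        # The 200 KiB threshold is an assumed value.
        if image_sizes and max(image_sizes) > 200:
            return [{"المشكلة": "صور كبيرة الحجم",
                     "الحل": "ضغط الصور واستخدام صيغ حديثة مثل WebP",
                     "الأولوية": "متوسطة"}]
        return []

Hypothetical usage of the new async entry point (the URL is illustrative):

    import asyncio
    analyzer = AdvancedWebsiteAnalyzer()
    print(asyncio.run(analyzer.analyze_performance("https://example.com")))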
@@ -164,8 +285,13 @@ class AdvancedWebsiteAnalyzer:
             response = await client.get(url)
             soup = BeautifulSoup(response.text, 'html.parser')
 
+            # Content analysis
             content_analysis = self._analyze_content(soup)
+
+            # Link analysis
             links_analysis = self._analyze_links(soup)
+
+            # Keyword analysis
             keywords_analysis = self._extract_keywords(soup)
 
             seo_analysis = {
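analyze_seo above also depends on self._analyze_links, which no hunk in this diff defines, and on self._extract_keywords, whose stub the next hunk deletes without a visible replacement; unless both are defined elsewhere in app.py, the SEO path would fail at runtime. Minimal placeholder sketches consistent with the call sites follow; the names come from the diff, but the bodies and return shapes are assumptions.

    def _extract_keywords(self, soup):
        # Hypothetical placeholder: top 10 words of 4+ characters.
        words = [w for w in soup.get_text(separator=' ').lower().split() if len(w) > 3]
        return Counter(words).most_common(10)

    def _analyze_links(self, soup):
        # Hypothetical placeholder: total vs. external anchors.
        links = [a.get('href', '') for a in soup.find_all('a')]
        return {"إجمالي الروابط": len(links),
                "روابط خارجية": len([l for l in links if l.startswith('http')])}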
@@ -183,155 +309,213 @@
         except Exception as e:
             return {"error": f"خطأ في تحليل SEO: {str(e)}"}
 
-    def _extract_keywords(self, soup):
-        # Add implementation for keyword extraction
-        pass
-
-    def _calculate_seo_score(self, soup):
-        # Add implementation for SEO scoring
-        pass
-
-    def _get_seo_recommendations(self, soup):
-        # Add implementation for SEO recommendations
-        pass
-
     def _analyze_content(self, soup):
-        …
-        """
-        try:
-            text_content = ' '.join([p.text.strip() for p in soup.find_all(['p', 'div', 'article', 'section'])])
-            headings = {f'h{i}': len(soup.find_all(f'h{i}')) for i in range(1, 7)}
-            words = text_content.split()
-            word_count = len(words)
-            readability_score = self._calculate_readability(text_content)
-            keyword_density = self._calculate_keyword_density(text_content)
-
-            images = soup.find_all('img')
-            images_with_alt = len([img for img in images if img.get('alt')])
-
-            quality_score = self._calculate_content_quality_score(
-                word_count,
-                readability_score,
-                images_with_alt,
-                len(images),
-                headings
-            )
-
-            return {
-                "إحصائيات المحتوى": {
-                    "عدد الكلمات": word_count,
-                    "مستوى القراءة": readability_score,
-                    "نسبة الصور مع نص بديل": f"{(images_with_alt/len(images)*100 if images else 0):.1f}%",
-                    "توزيع العناوين": headings,
-                },
-                "تحليل الكلمات المفتاحية": {
-                    "كثافة الكلمات الرئيسية": keyword_density,
-                    "الكلمات الأكثر تكراراً": self._get_top_words(text_content, 5)
-                },
-                "تقييم جودة المحتوى": {
-                    "الدرجة": quality_score,
-                    "التقييم": self._get_content_rating(quality_score),
-                    "التوصيات": self._get_content_recommendations(
-                        word_count,
-                        readability_score,
-                        images_with_alt,
-                        len(images),
-                        headings
-                    )
-                }
-            }
-        except Exception as e:
-            return {"error": f"خطأ في تحليل المحتوى: {str(e)}"}
-
-    def _calculate_readability(self, text):
-        # Add implementation for readability calculation
-        pass
-
-    def _calculate_keyword_density(self, text):
-        # Add implementation for keyword density calculation
-        pass
-
-    def _calculate_content_quality_score(self, word_count, readability, alt_images, total_images, headings):
-        score = 100
-
-        …
-        elif word_count < 600:
-            score -= 10
-
-        …
-        elif readability < 60:
-            score -= 10
-
-        …
-        if alt_ratio < 0.5:
-            score -= 15
-        elif alt_ratio < 0.8:
-            score -= 10
-
-        …
-        return max(0, score)
-
-    def _get_content_rating(self, score):
-        …
-        elif score >= 60:
-            return "مقبول"
-        else:
-            return "يحتاج تحسين"
-
-    def _get_content_recommendations(self, word_count, readability, alt_images, total_images, headings):
-        recommendations = []
-        …
-        if word_count < 300:
-            recommendations.append({
-                "المشكلة": "محتوى قصير جداً",
-                "الحل": "زيادة المحتوى إلى 300 كلمة على الأقل",
-                "الأولوية": "عالية"
-            })
-
-        if readability < 60:
-            recommendations.append({
-                "المشكلة": "صعوبة قراءة المحتوى",
-                "الحل": "تبسيط الجمل واستخدام لغة أسهل",
-                "الأولوية": "متوسطة"
-            })
-
-        if …
-        …
+        # Extract the text
+        text_content = ' '.join([p.text for p in soup.find_all('p')])
+
+        # Content length
+        word_count = len(text_content.split())
+
+        # Readability
+        readability_score = self._calculate_readability(text_content)
+
+        # Keyword density
+        keyword_density = self._calculate_keyword_density(text_content)
+
+        return {
+            "عدد الكلمات": word_count,
+            "مستوى القراءة": readability_score,
+            "كثافة الكلمات المفتاحية": keyword_density,
+            "التقييم": "ممتاز" if word_count > 300 and readability_score > 60 else "يحتاج تحسين"
+        }
+
+    def _calculate_readability(self, text):
+        # Simple readability index: average words per sentence, scaled by 10
+        # and capped at 100, so longer sentences yield higher values.
+        sentences = len(re.split(r'[.!?]+', text))
+        words = len(text.split())
+        if sentences == 0:
+            return 0
+        return min(100, round((words / sentences) * 10))
+
+    def _calculate_keyword_density(self, text):
+        words = text.lower().split()
+        word_freq = Counter(words)
+        total_words = len(words)
+
+        if total_words == 0:
+            return {}
+
+        return {word: round((count / total_words) * 100, 2)
+                for word, count in word_freq.most_common(5)}
+
+    def analyze_security(self, url):
+        try:
+            domain = urlparse(url).netloc
+            whois_info = self._get_whois_info(domain)
+
+            security_analysis = {
+                "تحليل SSL": self._check_ssl(url),
+                "تحليل DNS": self._check_dns(domain),
+                "تحليل Headers": self._check_security_headers(url),
+                "فحص المخاطر": self._check_security_risks(url),
+                "معلومات Whois": whois_info,
+                "تقييم الأمان": self._calculate_security_score(url),
+                "توصيات الأمان": self._get_security_recommendations(url)
+            }
+            return security_analysis
+        except Exception as e:
+            return {"error": f"خطأ في تحليل الأمان: {str(e)}"}
+
+    def _get_whois_info(self, domain):
+        try:
+            w = whois.whois(domain)
+            return {
+                "اسم النطاق": domain,
+                "تاريخ التسجيل": str(w.creation_date),
+                "تاريخ الانتهاء": str(w.expiration_date),
+                "المسجل": w.registrar,
+                "الحالة": w.status
+            }
+        except Exception:
+            return {"error": "لا يمكن الحصول على معلومات Whois"}
+
+    def _check_ssl(self, url):
+        try:
+            context = ssl.create_default_context()
+            with socket.create_connection((urlparse(url).netloc, 443)) as sock:
+                with context.wrap_socket(sock, server_hostname=urlparse(url).netloc) as ssock:
+                    cert = ssock.getpeercert()
+                    # getpeercert() returns the subject as nested (key, value) tuples
+                    subject = dict(item for rdn in cert.get('subject', ()) for item in rdn)
+                    return {
+                        "الحالة": "آمن ✅",
+                        "نوع الشهادة": subject.get('commonName', 'Unknown'),
+                        "تاريخ الإصدار": cert.get('notBefore', 'Unknown'),
+                        "تاريخ الانتهاء": cert.get('notAfter', 'Unknown'),
+                        "الخوارزمية": ssock.cipher()[0],
+                        "قوة التشفير": f"{ssock.cipher()[2]} bits"
+                    }
+        except Exception:
+            return {
+                "الحالة": "غير آمن ❌",
+                "السبب": "لا يوجد شهادة SSL صالحة"
+            }
+
+    def _check_security_headers(self, url):
+        try:
+            response = requests.get(url)
+            headers = response.headers
+            security_headers = {
+                'Strict-Transport-Security': 'HSTS',
+                'Content-Security-Policy': 'CSP',
+                'X-Frame-Options': 'X-Frame',
+                'X-Content-Type-Options': 'X-Content-Type',
+                'X-XSS-Protection': 'XSS Protection'
+            }
+
+            results = {}
+            for header, name in security_headers.items():
+                results[name] = {
+                    "موجود": header in headers,
+                    "القيمة": headers.get(header, "غير موجود")
+                }
+            return results
+        except Exception:
+            return {"error": "فشل فحص headers الأمان"}
+
+    def _check_security_risks(self, url):
+        risks = []
+
+        # HTTP protocol check
+        if not url.startswith('https'):
+            risks.append({
+                "المستوى": "عالي",
+                "النوع": "بروتوكول غير آمن",
+                "الوصف": "الموقع يستخدم HTTP بدلاً من HTTPS"
+            })
+
+        # SSL certificate check
+        ssl_info = self._check_ssl(url)
+        if ssl_info.get("الحالة") == "غير آمن ❌":
+            risks.append({
+                "المستوى": "عالي",
+                "النوع": "شهادة SSL",
+                "الوصف": "شهادة SSL غير صالحة أو منتهية"
+            })
+
+        # Security headers check
+        headers = self._check_security_headers(url)
+        if isinstance(headers, dict) and not headers.get("HSTS", {}).get("موجود"):
+            risks.append({
+                "المستوى": "متوسط",
+                "النوع": "HSTS غير مفعل",
+                "الوصف": "عدم وجود حماية النقل الآمن الصارم"
+            })
+
+        return {
+            "المخاطر المكتشفة": risks,
+            "عدد المخاطر": len(risks),
+            "مستوى الخطورة": "عالي" if any(r["المستوى"] == "عالي" for r in risks) else "متوسط" if risks else "منخفض"
+        }
+
+    def _calculate_security_score(self, url):
+        score = 100
+
+        # HTTPS check
+        if not url.startswith('https'):
+            score -= 30
+
+        # SSL check
+        ssl_info = self._check_ssl(url)
+        if ssl_info.get("الحالة") == "غير آمن ❌":
+            score -= 25
+
+        # Headers check
+        headers = self._check_security_headers(url)
+        if isinstance(headers, dict):
+            for header_info in headers.values():
+                if not header_info.get("موجود"):
+                    score -= 5
+
+        # Risk check
+        risks = self._check_security_risks(url)
+        score -= (risks.get("عدد المخاطر", 0) * 10)
+
+        return max(0, score)
+
+    def _get_security_recommendations(self, url):
+        recommendations = []
+
+        # HTTPS check
+        if not url.startswith('https'):
+            recommendations.append({
+                "المشكلة": "عدم استخدام HTTPS",
+                "الحل": "قم بتفعيل HTTPS وتثبيت شهادة SSL",
+                "الأولوية": "عالية"
+            })
+
+        # SSL check
+        ssl_info = self._check_ssl(url)
+        if ssl_info.get("الحالة") == "غير آمن ❌":
+            recommendations.append({
+                "المشكلة": "شهادة SSL غير صالحة",
+                "الحل": "قم بتجديد أو تثبيت شهادة SSL جديدة",
+                "الأولوية": "عالية"
+            })
+
+        # Headers check
+        headers = self._check_security_headers(url)
+        if isinstance(headers, dict):
+            for name, info in headers.items():
+                if not info.get("موجود"):
+                    recommendations.append({
+                        "المشكلة": f"عدم وجود {name}",
+                        "الحل": f"قم بإضافة header الأمان {name}",
+                        "الأولوية": "متوسطة"
+                    })
+
+        return recommendations if recommendations else [
+            {
+                "المشكلة": "لا توجد مشاكل أمنية واضحة",
+                "الحل": "استمر في مراقبة وتحديث إعدادات الأمان",
+                "الأولوية": "منخفضة"
+            }
+        ]
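For reference, the new density helper on a four-word string: _calculate_keyword_density("fast seo wins fast") returns {'fast': 50.0, 'seo': 25.0, 'wins': 25.0}, since 'fast' accounts for 2 of the 4 words. Finally, analyze_security calls self._check_dns, which no hunk in this diff defines; unless it exists elsewhere in app.py, a minimal standard-library placeholder might look like the sketch below. The returned key names are assumptions.

    def _check_dns(self, domain):
        # Hypothetical placeholder, not in the commit: resolve the A record.
        try:
            return {"عنوان IP": socket.gethostbyname(domain)}
        except socket.gaierror:
            return {"error": "تعذر حل النطاق"}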