Update app.py
app.py
CHANGED
@@ -510,219 +510,167 @@ class WebsiteAnalyzer:
         else:
             return "موثوقية منخفضة ⛔"
     def _analyze_description(self, description):
-        …
-        return {
-            '…
-            '…
-        }
-        …
-            recommendations.append("أضف كلمات استفهامية لجذب المزيد من النقرات")
-        …
-            'recommendations': recommendations
-        }
-
-    def _extract_keywords(self, soup):
-        # Extract text from the important elements
-        text_elements = []
-        for tag in ['h1', 'h2', 'h3', 'p', 'li']:
-            elements = soup.find_all(tag)
-            for element in elements:
-                text_elements.append(element.get_text())
-
-        # Clean up the text
-        text = ' '.join(text_elements)
-        words = re.findall(r'\b\w+\b', text.lower())
-
-        # Drop common stop words
-        stop_words = set(['في', 'من', 'على', 'إلى', 'عن', 'مع', 'هذا', 'هذه', 'تلك', 'ذلك'])
-        words = [word for word in words if word not in stop_words and len(word) > 2]
-
-        # Count word frequencies
-        word_freq = {}
-        for word in words:
-            word_freq[word] = word_freq.get(word, 0) + 1
-
-        # Sort words by frequency
-        sorted_keywords = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)
-        return [word for word, freq in sorted_keywords[:10]]
-
-    def _analyze_links(self, soup, base_url):
-        internal_links = []
-        external_links = []
-        base_domain = urlparse(base_url).netloc
-
-        for link in soup.find_all('a', href=True):
-            href = link['href']
-            if href.startswith('/') or base_domain in href:
-                internal_links.append(href)
-            elif href.startswith('http'):
-                external_links.append(href)
-
-        return internal_links, external_links
-
-    def _analyze_content(self, soup):
-        # Extract the full text
-        text = ' '.join([p.get_text() for p in soup.find_all('p')])
-        words = text.split()
-
-        # Compute keyword density
-        keywords = self._extract_keywords(soup)
-        keyword_count = sum(text.lower().count(keyword) for keyword in keywords)
-        keyword_density = keyword_count / len(words) if words else 0
-
-        # Assess content diversity
-        content_types = {
-            'صور': len(soup.find_all('img')),
-            'فيديوهات': len(soup.find_all(['video', 'iframe'])),
-            'جداول': len(soup.find_all('table')),
-            'قوائم': len(soup.find_all(['ul', 'ol'])),
-            'عناوين': len(soup.find_all(['h1', 'h2', 'h3', 'h4', 'h5', 'h6']))
-        }
-
-        # Estimate readability (simplified metric)
-        sentences = text.split('.')
-        avg_words_per_sentence = len(words) / len(sentences) if sentences else 0
-
-        recommendations = []
-        if len(words) < 300:
-            recommendations.append("المحتوى قصير جداً، أضف المزيد من النصوص الغنية")
-        if keyword_density < 0.01:
-            recommendations.append("كثافة الكلمات المفتاحية منخفضة")
-        elif keyword_density > 0.05:
-            recommendations.append("كثافة الكلمات المفتاحية مرتفعة جداً")
-        if avg_words_per_sentence > 20:
-            recommendations.append("الجمل طويلة جداً، حاول تقصيرها لتحسين القراءة")
-
-        return {
-            'word_count': len(words),
-            'keyword_density': f"{keyword_density:.2%}",
-            'content_diversity': self._evaluate_diversity(content_types),
-            'readability': self._evaluate_readability(avg_words_per_sentence),
-            'recommendations': recommendations
-        }
-
-    def _evaluate_diversity(self, content_types):
-        score = 0
-        total_elements = sum(content_types.values())
-
-        if content_types['صور'] > 0:
-            score += 2
-        if content_types['فيديوهات'] > 0:
-            score += 2
-        if content_types['جداول'] > 0:
-            score += 1
-        if content_types['قوائم'] > 0:
-            score += 1
-        if content_types['عناوين'] >= 3:
-            score += 2
-
-        if total_elements > 10:
-            score += 2
-
-        return f"{score}/10"
-
-    def _evaluate_readability(self, avg_words_per_sentence):
-        if avg_words_per_sentence <= 12:
-            return "ممتاز"
-        elif avg_words_per_sentence <= 15:
-            return "جيد"
-        elif avg_words_per_sentence <= 20:
-            return "متوسط"
-        else:
-            return "صعب"
-
-    def _evaluate_speed(self, total_load_time):
-        if total_load_time < 2:
-            return "ممتاز ⚡"
-        elif total_load_time < 3:
-            return "جيد ✅"
-        elif total_load_time < 5:
-            return "متوسط ⚠️"
-        else:
-            return "بطيء ❌"
-
-    def _generate_performance_recommendations(self, metrics, resources):
-        recommendations = []
-
-        if metrics['ttfb'] > 0.5:
-            recommendations.append("تحسين زمن استجابة الخادم")
-
-        if resources['total_size'] > 1500:  # more than 1.5 MB
-            recommendations.append("تقليل حجم الصفحة الإجمالي")
-
-        if resources['images'] > 10:
-            recommendations.append("ضغط وتحسين الصور")
-
-        if resources['scripts'] > 15:
-            recommendations.append("دمج وضغط ملفات JavaScript")
-
-        if resources['stylesheets'] > 5:
-            recommendations.append("دمج ملفات CSS")
-
-        return recommendations
-
-    async def _get_similarweb_data(self, domain):
-        """
-        Fetch traffic data from SimilarWeb.
-        A real API key is required for this to work.
-        """
-        try:
-            # This is only an example; replace with a real API key
-            api_key = "YOUR_SIMILARWEB_API_KEY"
-            url = f"https://api.similarweb.com/v1/website/{domain}/total-traffic-and-engagement/visits"
-
-            response = await client.get(url, headers={'Authorization': api_key})
-            data = response.json()
-            return data.get('visits', 0)
-        except:
-            return None
-
-    async def _get_alexa_rank(self, domain):
-        """
-        Get the site's Alexa rank.
-        Note: the Alexa service has been discontinued; this is only an example.
-        """
-        try:
-            url = f"http://data.alexa.com/data?cli=10&url={domain}"
-            async with httpx.AsyncClient() as client:
-                response = await client.get(url)
-                soup = BeautifulSoup(response.text, 'xml')
-                rank = soup.find('REACH')['RANK']
-                return int(rank)
-        except:
-            return None
-
-    def _rank_to_traffic(self, rank):
-        """Convert an Alexa rank into a rough traffic estimate."""
-        if not rank:
-            return None
-        # Very rough approximation
-        return int(1000000 / (rank ** 0.6))
-
-    def _calculate_security_score(self, ssl_info, security_headers):
-        score = 0
-
-        # Evaluate SSL
-        if isinstance(ssl_info, dict) and ssl_info.get("الحالة") == "✅ آمن":
-            score += 40
-
-        # Evaluate security headers
-        headers_score = float(security_headers.get("درجة الأمان", "0/100").split('/')[0])
-        score += headers_score * 0.6
-
-        return f"{min(score, 100)}/100"
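A note on the block removed above: `_get_similarweb_data` awaits `client.get(...)` without ever creating a client in that scope, so in practice it could only fall into the bare `except` and return None. A minimal corrected sketch of that call, assuming `httpx` is the intended HTTP library (it is already used by `_get_alexa_rank`) and keeping the placeholder API key from the original, might look like:

    import httpx

    async def _get_similarweb_data(self, domain):
        """Sketch: fetch SimilarWeb visit counts with a properly scoped client."""
        api_key = "YOUR_SIMILARWEB_API_KEY"  # placeholder, as in the original
        url = f"https://api.similarweb.com/v1/website/{domain}/total-traffic-and-engagement/visits"
        try:
            # The removed version referenced `client` without creating it; open one here.
            async with httpx.AsyncClient() as client:
                response = await client.get(url, headers={'Authorization': api_key})
                return response.json().get('visits', 0)
        except Exception:
            return None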
         else:
             return "موثوقية منخفضة ⛔"
     def _analyze_description(self, description):
+        if not description:
+            return {
+                'score': "0/10",
+                'recommendations': ["يجب إضافة وصف للصفحة"]
+            }
+
+        score = 10
+        recommendations = []
+
+        if len(description) < 120:
+            score -= 2
+            recommendations.append("الوصف قصير جداً، يُفضل أن يكون بين 120-155 حرفاً")
+        elif len(description) > 155:
+            score -= 2
+            recommendations.append("الوصف طويل جداً، يجب تقصيره إلى 155 حرفاً كحد أقصى")
+
+        if not any(char in description.lower() for char in ['ما', 'كيف', 'لماذا', 'متى', 'أين']):
+            score -= 1
+            recommendations.append("أضف كلمات استفهامية لجذب المزيد من النقرات")
+
+        return {
+            'score': f"{score}/10",
+            'recommendations': recommendations
+        }
+
+    def _extract_keywords(self, soup):
+        # Extract text from the important elements
+        text_elements = []
+        for tag in ['h1', 'h2', 'h3', 'p', 'li']:
+            elements = soup.find_all(tag)
+            for element in elements:
+                text_elements.append(element.get_text())
+
+        # Clean up the text
+        text = ' '.join(text_elements)
+        words = re.findall(r'\b\w+\b', text.lower())
+
+        # Drop common stop words
+        stop_words = set(['في', 'من', 'على', 'إلى', 'عن', 'مع', 'هذا', 'هذه', 'تلك', 'ذلك'])
+        words = [word for word in words if word not in stop_words and len(word) > 2]
+
+        # Count word frequencies
+        word_freq = {}
+        for word in words:
+            word_freq[word] = word_freq.get(word, 0) + 1
+
+        # Sort words by frequency
+        sorted_keywords = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)
+        return [word for word, freq in sorted_keywords[:10]]
+
+    def _analyze_links(self, soup, base_url):
+        internal_links = []
+        external_links = []
+        base_domain = urlparse(base_url).netloc
+
+        for link in soup.find_all('a', href=True):
+            href = link['href']
+            if href.startswith('/') or base_domain in href:
+                internal_links.append(href)
+            elif href.startswith('http'):
+                external_links.append(href)
+
+        return internal_links, external_links
+
+    def _analyze_content(self, soup):
+        # Extract the full text
+        text = ' '.join([p.get_text() for p in soup.find_all('p')])
+        words = text.split()
+
+        # Compute keyword density
+        keywords = self._extract_keywords(soup)
+        keyword_count = sum(text.lower().count(keyword) for keyword in keywords)
+        keyword_density = keyword_count / len(words) if words else 0
+
+        # Assess content diversity
+        content_types = {
+            'صور': len(soup.find_all('img')),
+            'فيديوهات': len(soup.find_all(['video', 'iframe'])),
+            'جداول': len(soup.find_all('table')),
+            'قوائم': len(soup.find_all(['ul', 'ol'])),
+            'عناوين': len(soup.find_all(['h1', 'h2', 'h3', 'h4', 'h5', 'h6']))
+        }
+
+        # Estimate readability (simplified metric)
+        sentences = text.split('.')
+        avg_words_per_sentence = len(words) / len(sentences) if sentences else 0
+
+        recommendations = []
+        if len(words) < 300:
+            recommendations.append("المحتوى قصير جداً، أضف المزيد من النصوص الغنية")
+        if keyword_density < 0.01:
+            recommendations.append("كثافة الكلمات المفتاحية منخفضة")
+        elif keyword_density > 0.05:
+            recommendations.append("كثافة الكلمات المفتاحية مرتفعة جداً")
+        if avg_words_per_sentence > 20:
+            recommendations.append("الجمل طويلة جداً، حاول تقصيرها لتحسين القراءة")
+
+        return {
+            'word_count': len(words),
+            'keyword_density': f"{keyword_density:.2%}",
+            'content_diversity': self._evaluate_diversity(content_types),
+            'readability': self._evaluate_readability(avg_words_per_sentence),
+            'recommendations': recommendations
+        }
+
+    def _evaluate_diversity(self, content_types):
+        score = 0
+        total_elements = sum(content_types.values())
+
+        if content_types['صور'] > 0:
+            score += 2
+        if content_types['فيديوهات'] > 0:
+            score += 2
+        if content_types['جداول'] > 0:
+            score += 1
+        if content_types['قوائم'] > 0:
+            score += 1
+        if content_types['عناوين'] >= 3:
+            score += 2
+
+        if total_elements > 10:
+            score += 2
+
+        return f"{score}/10"
+
+    def _evaluate_readability(self, avg_words_per_sentence):
+        if avg_words_per_sentence <= 12:
+            return "ممتاز"
+        elif avg_words_per_sentence <= 15:
+            return "جيد"
+        elif avg_words_per_sentence <= 20:
+            return "متوسط"
+        else:
+            return "صعب"
+
+    def _evaluate_speed(self, total_load_time):
+        if total_load_time < 2:
+            return "ممتاز ⚡"
+        elif total_load_time < 3:
+            return "جيد ✅"
+        elif total_load_time < 5:
+            return "متوسط ⚠️"
+        else:
+            return "بطيء ❌"
+
+    def _generate_performance_recommendations(self, metrics, resources):
+        recommendations = []
+
+        if metrics['ttfb'] > 0.5:
+            recommendations.append("تحسين زمن استجابة الخادم")
+
+        if resources['total_size'] > 1500:  # more than 1.5 MB
+            recommendations.append("تقليل حجم الصفحة الإجمالي")
+
+        if resources['images'] > 10:
+            recommendations.append("ضغط وتحسين الصور")
+
+        if resources['scripts'] > 15:
+            recommendations.append("دمج وضغط ملفات JavaScript")
+
+        if resources['stylesheets'] > 5:
+            recommendations.append("دمج ملفات CSS")
+
+        return recommendations
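For a quick smoke test of the helpers above, a small script like the following could be used. It assumes app.py exposes the WebsiteAnalyzer class, that importing the module has no side effects, and that the class can be constructed without arguments; none of that is shown in this diff, so treat it as a sketch.

    from bs4 import BeautifulSoup
    from app import WebsiteAnalyzer  # assumed import path

    analyzer = WebsiteAnalyzer()  # assumed no-argument constructor

    # Description scoring from the updated _analyze_description
    print(analyzer._analyze_description(""))                    # {'score': '0/10', ...}
    print(analyzer._analyze_description("وصف قصير"))            # too short, no question word -> '7/10'
    print(analyzer._analyze_description("كيف " + "x" * 130))    # 120-155 chars with a question word -> '10/10'

    # Keyword extraction and link classification on a tiny HTML sample
    html = """
    <h1>كيف تحسن سرعة موقعك</h1>
    <p>تحسين سرعة الموقع يرفع ترتيب الموقع في نتائج البحث.
       <a href="/faq">الأسئلة الشائعة</a>
       <a href="https://example.org/tools">أدوات خارجية</a></p>
    """
    soup = BeautifulSoup(html, "html.parser")
    print(analyzer._extract_keywords(soup))                           # most frequent non-stop-words
    print(analyzer._analyze_links(soup, "https://example.com/page"))  # (['/faq'], ['https://example.org/tools'])

The expected scores in the comments follow directly from the thresholds in the updated _analyze_description: penalties for descriptions under 120 or over 155 characters, and for the absence of a question word.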