# Keyword & URL Analysis App
# Classifies uploaded keywords with a zero-shot NLI model, and extracts
# query-parameter names, domains, page patterns and file extensions from URLs.
import re

import streamlit as st
from transformers import pipeline


@st.cache_resource
def _load_classifier():
    """Load the zero-shot classification model once per server process.

    Streamlit reruns the whole script on every widget interaction;
    st.cache_resource keeps a single model instance across reruns instead
    of re-initialising the pipeline each time.
    """
    return pipeline("zero-shot-classification", model="cross-encoder/nli-distilroberta-base")


# Shared zero-shot classifier used by classify_keywords() below.
classifier = _load_classifier()
# Application title.
st.title("Keyword & URL Analysis App")

# Let the user choose which analysis to run.
operation = st.radio("Choose an operation:", ["Filter Keywords", "Analyze URLs"])

# The uploaded file is expected to contain one item (keyword or URL) per line.
uploaded_file = st.file_uploader("Upload a text file", type=["txt"])
if uploaded_file is not None:
    # Read the uploaded text file; keep one non-empty, stripped item per line.
    content = uploaded_file.read().decode("utf-8")
    items = [line.strip() for line in content.splitlines() if line.strip()]

    # Candidate labels for zero-shot keyword classification.
    categories = ["shop", "game", "stream"]

    # Buckets for classified keywords.
    shopping_items = []
    gaming_items = []
    streaming_items = []
    unknown_items = []

    # Buckets for URL-analysis results.
    parameters = []
    domains = []
    full_page_types = []
    file_extensions = []

    def classify_keywords(items, categories):
        """Zero-shot classify each item and append it to the matching bucket.

        An item lands in a category bucket only when that category is the
        top-ranked label AND its confidence exceeds 0.5; everything else
        goes to unknown_items.
        """
        results = classifier(items, categories)
        for item, result in zip(items, results):
            best_category = result['labels'][0]
            score = result['scores'][0]
            if best_category == "shop" and score > 0.5:
                shopping_items.append(item)
            elif best_category == "game" and score > 0.5:
                gaming_items.append(item)
            elif best_category == "stream" and score > 0.5:
                streaming_items.append(item)
            else:
                unknown_items.append(item)

    def analyze_urls(urls):
        """Extract parameter names, domains, page patterns and extensions."""
        for url in urls:
            # Query-parameter names (the word before each '=').
            parameters.extend(re.findall(r'(\w+)=', url))

            # Top-level domain (e.g. com, org) at the very end of the URL.
            # NOTE(review): only matches when the URL ends with the TLD
            # (no trailing path/query) — confirm against the input data.
            domain_match = re.search(r'\.([a-zA-Z]{2,})$', url)
            if domain_match:
                domain = domain_match.group(1)
                if domain not in domains:
                    domains.append(domain)

            # Full page pattern before a '?' (e.g. product_detail.php, index.php).
            page_type_match = re.search(r'(\w+\.[a-z]+)\?', url)
            if page_type_match:
                page_type = page_type_match.group(1)
                if page_type not in full_page_types:
                    full_page_types.append(page_type)

            # Bare file extension (php, phtml, asp, ...) before '?' or end of URL.
            extension_match = re.search(r'\.([a-z]+)(\?|$)', url)
            if extension_match:
                extension = extension_match.group(1)
                if extension not in file_extensions:
                    file_extensions.append(extension)

    # Run the selected analysis when the user presses Start.
    if st.button("Start"):
        if operation == "Filter Keywords":
            classify_keywords(items, categories)
        elif operation == "Analyze URLs":
            analyze_urls(items)
        # Deduplicate the URL-analysis buckets (ordering is not significant).
        parameters = list(set(parameters))
        domains = list(set(domains))
        full_page_types = list(set(full_page_types))
        file_extensions = list(set(file_extensions))

    def export_results(key, filename):
        """Write the current contents of the text area `key` to `filename`.

        st.session_state[key] already holds the newline-joined text shown in
        the widget, so it is written verbatim.  (The original wrapped it in
        '\\n'.join(...), which — applied to a string — inserted a newline
        between every single character.)
        """
        # Explicit UTF-8 so non-ASCII keywords survive on any platform.
        with open(filename, "w", encoding="utf-8") as f:
            f.write(st.session_state[key])
        # Bug fix: the success message previously never interpolated the filename.
        st.success(f"Results exported to {filename}")

    def _results_section(header, label, values, key, button_label, filename):
        # One header + copyable text area + export button per result bucket.
        st.header(header)
        st.text_area(label, value="\n".join(values), height=200, key=key)
        st.button(button_label, on_click=export_results, args=(key, filename))

    # Display the results that match the chosen operation.
    if operation == "Filter Keywords":
        _results_section("Shopping Keywords", "Copy the shopping keywords here:",
                         shopping_items, "shopping",
                         "Export Shopping Keywords", "shopping_keywords.txt")
        _results_section("Gaming Keywords", "Copy the gaming keywords here:",
                         gaming_items, "gaming",
                         "Export Gaming Keywords", "gaming_keywords.txt")
        _results_section("Streaming Keywords", "Copy the streaming keywords here:",
                         streaming_items, "streaming",
                         "Export Streaming Keywords", "streaming_keywords.txt")
        _results_section("Unknown Keywords", "Copy the unknown keywords here:",
                         unknown_items, "unknown",
                         "Export Unknown Keywords", "unknown_keywords.txt")
    elif operation == "Analyze URLs":
        _results_section("Parameters", "Copy the parameters here:",
                         parameters, "parameters",
                         "Export Parameters", "parameters.txt")
        _results_section("Domains", "Copy the domains here:",
                         domains, "domains",
                         "Export Domains", "domains.txt")
        _results_section("Full PageType", "Copy the full page types here:",
                         full_page_types, "full_page_types",
                         "Export Full PageTypes", "full_page_types.txt")
        _results_section("File Extensions", "Copy the file extensions here:",
                         file_extensions, "file_extensions",
                         "Export File Extensions", "file_extensions.txt")
else:
    st.warning("Please upload a text file to start analysis.")