# app.py
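#
# Bookmark Manager App: a Gradio interface that imports a browser bookmarks
# HTML export, asynchronously fetches each URL to record its status and meta
# description, assigns a keyword-based category, embeds the summaries with a
# SentenceTransformer model, and indexes them in FAISS so they can be queried
# from a simple chat tab.
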
import gradio as gr
from bs4 import BeautifulSoup
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
import asyncio
import aiohttp
import re
import pandas as pd

# Initialize models and variables
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
faiss_index = None
bookmarks = []
fetch_cache = {}
# Define the categories
CATEGORIES = [
    "Social Media",
    "News and Media",
    "Education and Learning",
    "Entertainment",
    "Shopping and E-commerce",
    "Finance and Banking",
    "Technology",
    "Health and Fitness",
    "Travel and Tourism",
    "Food and Recipes",
    "Sports",
    "Arts and Culture",
    "Government and Politics",
    "Business and Economy",
    "Science and Research",
    "Personal Blogs and Journals",
    "Job Search and Careers",
    "Music and Audio",
    "Videos and Movies",
    "Reference and Knowledge Bases",
    "Dead Link",
    "Uncategorized",
]

def parse_bookmarks(file_content):
    soup = BeautifulSoup(file_content, 'html.parser')
    extracted_bookmarks = []
    for link in soup.find_all('a'):
        url = link.get('href')
        title = link.text.strip()
        if url and title:
            extracted_bookmarks.append({'url': url, 'title': title})
    return extracted_bookmarks

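# parse_bookmarks() only relies on the <A> anchors, so any export in the
# standard Netscape bookmark format is accepted. A minimal, illustrative
# example of the expected input (not taken from a real export):
#
#   <!DOCTYPE NETSCAPE-Bookmark-file-1>
#   <TITLE>Bookmarks</TITLE>
#   <H1>Bookmarks</H1>
#   <DL><p>
#       <DT><A HREF="https://example.com" ADD_DATE="0">Example Site</A>
#   </DL><p>
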
async def fetch_url_info(session, bookmark):
    url = bookmark['url']
    if url in fetch_cache:
        bookmark.update(fetch_cache[url])
        return bookmark
    try:
        async with session.get(url, timeout=5) as response:
            bookmark['etag'] = response.headers.get('ETag', 'N/A')
            bookmark['status_code'] = response.status
            if response.status >= 400:
                bookmark['dead_link'] = True
                bookmark['description'] = ''
            else:
                bookmark['dead_link'] = False
                content = await response.text()
                soup = BeautifulSoup(content, 'html.parser')
                # Extract meta description or Open Graph description
                meta_description = soup.find('meta', attrs={'name': 'description'})
                og_description = soup.find('meta', attrs={'property': 'og:description'})
                if og_description and og_description.get('content'):
                    description = og_description.get('content')
                elif meta_description and meta_description.get('content'):
                    description = meta_description.get('content')
                else:
                    description = ''
                bookmark['description'] = description
    except Exception:
        bookmark['dead_link'] = True
        bookmark['etag'] = 'N/A'
        bookmark['status_code'] = 'N/A'
        bookmark['description'] = ''
    finally:
        fetch_cache[url] = {
            'etag': bookmark.get('etag'),
            'status_code': bookmark.get('status_code'),
            'dead_link': bookmark.get('dead_link'),
            'description': bookmark.get('description'),
        }
    return bookmark

async def process_bookmarks_async(bookmarks):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for bookmark in bookmarks:
            task = asyncio.ensure_future(fetch_url_info(session, bookmark))
            tasks.append(task)
        await asyncio.gather(*tasks)

def generate_summary(bookmark):
    description = bookmark.get('description', '')
    if description:
        bookmark['summary'] = description
    else:
        title = bookmark.get('title', '')
        if title:
            bookmark['summary'] = title
        else:
            bookmark['summary'] = 'No summary available.'
    return bookmark

def assign_category(bookmark):
    if bookmark.get('dead_link'):
        bookmark['category'] = 'Dead Link'
        return bookmark
    summary = bookmark.get('summary', '').lower()
    assigned_category = 'Uncategorized'
    # Keywords associated with each category
    category_keywords = {
        "Social Media": ["social media", "networking", "friends", "connect", "posts", "profile"],
        "News and Media": ["news", "journalism", "media", "headlines", "breaking news"],
        "Education and Learning": ["education", "learning", "courses", "tutorial", "university", "academy", "study"],
        "Entertainment": ["entertainment", "movies", "tv shows", "games", "comics", "fun"],
        "Shopping and E-commerce": ["shopping", "e-commerce", "buy", "sell", "marketplace", "deals", "store"],
        "Finance and Banking": ["finance", "banking", "investment", "money", "economy", "stock", "trading"],
        "Technology": ["technology", "tech", "gadgets", "software", "computers", "innovation"],
        "Health and Fitness": ["health", "fitness", "medical", "wellness", "exercise", "diet"],
        "Travel and Tourism": ["travel", "tourism", "destinations", "hotels", "flights", "vacation"],
        "Food and Recipes": ["food", "recipes", "cooking", "cuisine", "restaurant", "dining"],
        "Sports": ["sports", "scores", "teams", "athletics", "matches", "leagues"],
        "Arts and Culture": ["arts", "culture", "museum", "gallery", "exhibition", "artistic"],
        "Government and Politics": ["government", "politics", "policy", "election", "public service"],
        "Business and Economy": ["business", "corporate", "industry", "economy", "markets"],
        "Science and Research": ["science", "research", "experiment", "laboratory", "study", "scientific"],
        "Personal Blogs and Journals": ["blog", "journal", "personal", "diary", "thoughts", "opinions"],
        "Job Search and Careers": ["jobs", "careers", "recruitment", "resume", "employment", "hiring"],
        "Music and Audio": ["music", "audio", "songs", "albums", "artists", "bands"],
        "Videos and Movies": ["video", "movies", "film", "clips", "trailers", "cinema"],
        "Reference and Knowledge Bases": ["reference", "encyclopedia", "dictionary", "wiki", "knowledge", "information"],
    }
    for category, keywords in category_keywords.items():
        for keyword in keywords:
            if re.search(r'\b' + re.escape(keyword) + r'\b', summary):
                assigned_category = category
                break
        if assigned_category != 'Uncategorized':
            break
    bookmark['category'] = assigned_category
    return bookmark

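# Note: categories are checked in dictionary order and the first whole-word
# keyword match wins, so e.g. a summary containing "breaking news" is filed
# under "News and Media" before any later category is considered.
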
def vectorize_and_index(bookmarks):
    summaries = [bookmark['summary'] for bookmark in bookmarks]
    embeddings = embedding_model.encode(summaries)
    dimension = embeddings.shape[1]
    faiss_idx = faiss.IndexFlatL2(dimension)
    faiss_idx.add(np.array(embeddings))
    return faiss_idx, embeddings

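# Minimal usage sketch for the index built above (assumes the same embedding
# model; the query text is only illustrative):
#
#   query_vec = embedding_model.encode(["machine learning tutorials"])
#   distances, indices = faiss_idx.search(np.array(query_vec), k=5)
#
# IndexFlatL2 does exact (brute-force) L2 search, so no training step is
# needed before add(); chatbot_response() below runs this same search against
# the global faiss_index.
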
def bookmarks_to_dataframe():
    data = []
    for i, bookmark in enumerate(bookmarks):
        index = i + 1
        status = "Dead Link" if bookmark.get('dead_link') else "Active"
        data.append({
            'Index': index,
            'Title': bookmark['title'],
            'URL': bookmark['url'],
            'Category': bookmark.get('category', 'Uncategorized'),
            'Status': status,
            'Summary': bookmark.get('summary', ''),
        })
    df = pd.DataFrame(data)
    return df

def process_uploaded_file(file):
    global bookmarks, faiss_index
    if file is None:
        return "Please upload a bookmarks HTML file.", pd.DataFrame()
    try:
        file_content = file.decode('utf-8')
    except UnicodeDecodeError:
        return "Error decoding the file. Please ensure it's a valid HTML file.", pd.DataFrame()
    bookmarks = parse_bookmarks(file_content)
    if not bookmarks:
        return "No bookmarks found in the uploaded file.", pd.DataFrame()
    # Asynchronously fetch bookmark info
    asyncio.run(process_bookmarks_async(bookmarks))
    # Generate summaries and assign categories
    for bookmark in bookmarks:
        generate_summary(bookmark)
        assign_category(bookmark)
    faiss_index, embeddings = vectorize_and_index(bookmarks)
    message = f"Successfully processed {len(bookmarks)} bookmarks."
    bookmark_df = bookmarks_to_dataframe()
    return message, bookmark_df

def chatbot_response(user_query):
    if faiss_index is None or not bookmarks:
        return "No bookmarks available. Please upload and process your bookmarks first."
    # Vectorize user query
    user_embedding = embedding_model.encode([user_query])
    D, I = faiss_index.search(np.array(user_embedding), k=5)  # Retrieve top 5 matches
    # Generate response
    response = ""
    for idx in I[0]:
        # FAISS pads the result with -1 when fewer than k vectors are indexed
        if 0 <= idx < len(bookmarks):
            bookmark = bookmarks[idx]
            index = idx + 1  # Start index at 1
            response += f"{index}. Title: {bookmark['title']}\nURL: {bookmark['url']}\nCategory: {bookmark.get('category', 'Uncategorized')}\nSummary: {bookmark['summary']}\n\n"
    return response.strip()

def edit_bookmark(dataframe):
    # The Dataframe .change event passes the whole edited table, so apply
    # every row back to the in-memory bookmark list.
    global faiss_index
    try:
        if dataframe is None or len(dataframe) == 0:
            return "No bookmarks to update.", bookmarks_to_dataframe()
        refetch = []
        for _, row in dataframe.iterrows():
            bookmark_idx = int(row['Index']) - 1  # Table indices start at 1
            if bookmark_idx < 0 or bookmark_idx >= len(bookmarks):
                continue
            bookmark = bookmarks[bookmark_idx]
            url_changed = bookmark['url'] != row['URL']
            bookmark['title'] = row['Title']
            bookmark['url'] = row['URL']
            bookmark['category'] = row['Category']
            if url_changed:
                refetch.append(bookmark)
        # Re-fetch info and regenerate summaries only for edited URLs
        if refetch:
            asyncio.run(process_bookmarks_async(refetch))
            for bookmark in refetch:
                generate_summary(bookmark)
        # Rebuild the FAISS index
        if bookmarks:
            faiss_index, embeddings = vectorize_and_index(bookmarks)
        message = "Bookmarks updated successfully."
        updated_df = bookmarks_to_dataframe()
        return message, updated_df
    except Exception as e:
        return f"Error: {str(e)}", bookmarks_to_dataframe()

def delete_bookmarks(selected_indices):
    global faiss_index
    try:
        indices = sorted([int(idx) - 1 for idx in selected_indices], reverse=True)
        for idx in indices:
            if 0 <= idx < len(bookmarks):
                bookmarks.pop(idx)
        # Rebuild the FAISS index
        if bookmarks:
            faiss_index, embeddings = vectorize_and_index(bookmarks)
        else:
            faiss_index = None
        message = "Selected bookmarks deleted successfully."
        updated_df = bookmarks_to_dataframe()
        return message, updated_df
    except Exception as e:
        return f"Error: {str(e)}", bookmarks_to_dataframe()

def export_bookmarks():
    if not bookmarks:
        return None
    # Create an HTML content similar to the imported bookmarks file
    soup = BeautifulSoup("<!DOCTYPE NETSCAPE-Bookmark-file-1><Title>Bookmarks</Title><H1>Bookmarks</H1>", 'html.parser')
    dl = soup.new_tag('DL')
    for bookmark in bookmarks:
        dt = soup.new_tag('DT')
        a = soup.new_tag('A', href=bookmark['url'])
        a.string = bookmark['title']
        dt.append(a)
        dl.append(dt)
    soup.append(dl)
    html_content = str(soup)
    return html_content

def build_app():
    with gr.Blocks(css="app.css") as demo:
        gr.Markdown("<h1>Bookmark Manager App</h1>")

        with gr.Tab("Upload and Process Bookmarks"):
            upload = gr.File(label="Upload Bookmarks HTML File", type='binary')
            process_button = gr.Button("Process Bookmarks")
            output_text = gr.Textbox(label="Output")
            bookmark_table = gr.Dataframe(label="Bookmarks", interactive=False)

            def update_bookmark_table(file):
                message, df = process_uploaded_file(file)
                return message, df

            process_button.click(
                update_bookmark_table,
                inputs=upload,
                outputs=[output_text, bookmark_table]
            )

        with gr.Tab("Chat with Bookmarks"):
            user_input = gr.Textbox(label="Ask about your bookmarks")
            chat_output = gr.Textbox(label="Chatbot Response")
            chat_button = gr.Button("Send")

            chat_button.click(
                chatbot_response,
                inputs=user_input,
                outputs=chat_output
            )

        with gr.Tab("Manage Bookmarks"):
            manage_output = gr.Textbox(label="Manage Output")
            bookmark_table_manage = gr.Dataframe(label="Bookmarks", interactive=True)
            selected_indices = gr.Textbox(label="Selected Indices (comma-separated)")
            delete_button = gr.Button("Delete Selected Bookmarks")
            export_button = gr.Button("Export Bookmarks")
            download_link = gr.File(label="Download Exported Bookmarks", interactive=False)

            def update_manage_table():
                df = bookmarks_to_dataframe()
                return df

            def delete_selected_bookmarks(indices_text):
                # Delete only the indices typed into the textbox; passing the
                # whole table's indices would wipe every bookmark.
                if not indices_text or not indices_text.strip():
                    return "Enter the indices of the bookmarks to delete.", bookmarks_to_dataframe()
                indices = [part.strip() for part in indices_text.split(',') if part.strip()]
                message, updated_df = delete_bookmarks(indices)
                return message, updated_df

            def export_bookmarks_file():
                content = export_bookmarks()
                if content:
                    with open('bookmarks.html', 'w', encoding='utf-8') as f:
                        f.write(content)
                    return 'bookmarks.html'
                else:
                    return None

            bookmark_table_manage.change(
                edit_bookmark,
                inputs=bookmark_table_manage,
                outputs=[manage_output, bookmark_table_manage]
            )
            delete_button.click(
                delete_selected_bookmarks,
                inputs=selected_indices,
                outputs=[manage_output, bookmark_table_manage]
            )
            export_button.click(
                export_bookmarks_file,
                inputs=None,
                outputs=download_link
            )

        # Initial load of the bookmarks table
        bookmark_table_manage.value = update_manage_table()

    demo.launch()

if __name__ == "__main__":
    build_app()
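
# The imports above imply these third-party packages (names assumed from the
# import names, versions unpinned): gradio, beautifulsoup4, aiohttp,
# sentence-transformers, faiss-cpu (or faiss-gpu), numpy, pandas.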