siddhartharya committed on
Commit db87ed3 · verified · 1 Parent(s): f4479b5

Update app.py

Files changed (1): app.py +224 -395
app.py CHANGED
@@ -19,10 +19,6 @@ import threading
  # Import OpenAI library
  import openai
 
- # Suppress only the single warning from urllib3 needed.
- import urllib3
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
  # Set up logging to output to the console
  logger = logging.getLogger(__name__)
  logger.setLevel(logging.INFO)
@@ -38,8 +34,9 @@ console_handler.setFormatter(formatter)
  # Add the handler to the logger
  logger.addHandler(console_handler)
 
- # Initialize variables
- logger.info("Initializing variables")
+ # Initialize models and variables
+ logger.info("Initializing models and variables")
+ embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
  faiss_index = None
  bookmarks = []
  fetch_cache = {}
@@ -80,21 +77,7 @@ if not GROQ_API_KEY:
  logger.error("GROQ_API_KEY environment variable not set.")
 
  openai.api_key = GROQ_API_KEY
- openai.api_base = "https://api.groq.com/openai/v1" # Ensure this is the correct base URL
-
- # Initialize semaphore for rate limiting (allowing 1 concurrent API call)
- api_semaphore = threading.Semaphore(1)
-
- # Global variables for models to enable lazy loading
- embedding_model = None
-
- def get_embedding_model():
- global embedding_model
- if embedding_model is None:
- logger.info("Loading SentenceTransformer model...")
- embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
- logger.info("SentenceTransformer model loaded.")
- return embedding_model
+ openai.api_base = "https://api.groq.com/openai/v1"
 
  def extract_main_content(soup):
  """
@@ -171,20 +154,21 @@ def generate_summary_and_assign_category(bookmark):
  """
  logger.info(f"Generating summary and assigning category for bookmark: {bookmark.get('url')}")
 
- max_retries = 5
+ max_retries = 3
  retry_count = 0
- base_wait = 1 # Initial wait time in seconds
 
  while retry_count < max_retries:
  try:
  html_content = bookmark.get('html_content', '')
 
- # Parse HTML content
+ # Get the HTML soup object from the bookmark
  soup = BeautifulSoup(html_content, 'html.parser')
+
+ # Extract metadata and main content
  metadata = get_page_metadata(soup)
  main_content = extract_main_content(soup)
 
- # Prepare prompt
+ # Prepare content for the prompt
  content_parts = []
  if metadata['title']:
  content_parts.append(f"Title: {metadata['title']}")
@@ -197,17 +181,18 @@ def generate_summary_and_assign_category(bookmark):
 
  content_text = '\n'.join(content_parts)
 
- # Determine prompt type
+ # Detect insufficient or erroneous content
  error_keywords = ['Access Denied', 'Security Check', 'Cloudflare', 'captcha', 'unusual traffic']
  if not content_text or len(content_text.split()) < 50:
  use_prior_knowledge = True
- logger.info(f"Content for {bookmark.get('url')} is insufficient. Using prior knowledge.")
+ logger.info(f"Content for {bookmark.get('url')} is insufficient. Instructing LLM to use prior knowledge.")
  elif any(keyword.lower() in content_text.lower() for keyword in error_keywords):
  use_prior_knowledge = True
- logger.info(f"Content for {bookmark.get('url')} contains error messages. Using prior knowledge.")
+ logger.info(f"Content for {bookmark.get('url')} contains error messages. Instructing LLM to use prior knowledge.")
  else:
  use_prior_knowledge = False
 
+ # Shortened prompts
  if use_prior_knowledge:
  prompt = f"""
  You are a knowledgeable assistant with up-to-date information as of 2023.
@@ -236,34 +221,52 @@ Summary: [Your summary]
  Category: [One category]
  """
 
- # Acquire semaphore before making API call
- api_semaphore.acquire()
- try:
- # Call the LLM via Groq Cloud API with exponential backoff
- response = openai.ChatCompletion.create(
- model='llama-3.1-70b-versatile', # Ensure this is the correct model name
- messages=[
- {"role": "user", "content": prompt}
- ],
- max_tokens=150,
- temperature=0.5,
- )
- finally:
- # Release semaphore after API call
- api_semaphore.release()
-
+ # Estimate tokens
+ def estimate_tokens(text):
+ return len(text) / 4 # Approximate token estimation
+
+ prompt_tokens = estimate_tokens(prompt)
+ max_tokens = 150 # Reduced from 200
+ total_tokens = prompt_tokens + max_tokens
+
+ # Calculate required delay
+ tokens_per_minute = 60000 # Adjust based on your rate limit
+ tokens_per_second = tokens_per_minute / 60
+ required_delay = total_tokens / tokens_per_second
+ sleep_time = max(required_delay, 1)
+
+ # Call the LLM via Groq Cloud API
+ response = openai.ChatCompletion.create(
+ model='llama-3.1-70b-versatile', # Using the specified model
+ messages=[
+ {"role": "user", "content": prompt}
+ ],
+ max_tokens=int(max_tokens),
+ temperature=0.5,
+ )
  content = response['choices'][0]['message']['content'].strip()
  if not content:
  raise ValueError("Empty response received from the model.")
 
- # Parse response
+ # Parse the response
  summary_match = re.search(r"Summary:\s*(.*)", content)
  category_match = re.search(r"Category:\s*(.*)", content)
 
- bookmark['summary'] = summary_match.group(1).strip() if summary_match else 'No summary available.'
- bookmark['category'] = category_match.group(1).strip().strip('"') if category_match else 'Uncategorized'
+ if summary_match:
+ bookmark['summary'] = summary_match.group(1).strip()
+ else:
+ bookmark['summary'] = 'No summary available.'
+
+ if category_match:
+ category = category_match.group(1).strip().strip('"')
+ if category in CATEGORIES:
+ bookmark['category'] = category
+ else:
+ bookmark['category'] = 'Uncategorized'
+ else:
+ bookmark['category'] = 'Uncategorized'
 
- # Additional validation (optional)
+ # Simple keyword-based validation (Optional)
  summary_lower = bookmark['summary'].lower()
  url_lower = bookmark['url'].lower()
  if 'social media' in summary_lower or 'twitter' in summary_lower or 'x.com' in url_lower:
@@ -272,27 +275,19 @@ Category: [One category]
  bookmark['category'] = 'Reference and Knowledge Bases'
 
  logger.info("Successfully generated summary and assigned category")
-
- # **Introduce Fixed Delay After API Call**
- time.sleep(1) # Wait for 1 second before the next API call
-
- break # Exit loop on success
+ time.sleep(sleep_time)
+ break # Exit the retry loop upon success
 
  except openai.error.RateLimitError as e:
  retry_count += 1
- wait_time = base_wait * (2 ** retry_count) # Exponential backoff
- logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying... (Attempt {retry_count}/{max_retries})")
+ wait_time = int(e.headers.get("Retry-After", 5))
+ logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying...")
  time.sleep(wait_time)
  except Exception as e:
  logger.error(f"Error generating summary and assigning category: {e}", exc_info=True)
  bookmark['summary'] = 'No summary available.'
  bookmark['category'] = 'Uncategorized'
- break # Exit loop on non-rate limit errors
-
- if retry_count == max_retries:
- logger.error(f"Failed to generate summary for {bookmark.get('url')} after {max_retries} attempts.")
- bookmark['summary'] = 'No summary available.'
- bookmark['category'] = 'Uncategorized'
+ break # Exit the retry loop on other exceptions
 
  def parse_bookmarks(file_content):
  """
@@ -332,7 +327,7 @@ def fetch_url_info(bookmark):
  'User-Agent': 'Mozilla/5.0',
  'Accept-Language': 'en-US,en;q=0.9',
  }
- response = requests.get(url, headers=headers, timeout=5, verify=True, allow_redirects=True) # Set verify=True
+ response = requests.get(url, headers=headers, timeout=5, verify=False, allow_redirects=True)
  bookmark['etag'] = response.headers.get('ETag', 'N/A')
  bookmark['status_code'] = response.status_code
 
@@ -352,13 +347,6 @@ def fetch_url_info(bookmark):
  bookmark['description'] = ''
  logger.info(f"Fetched information for {url}")
 
- except requests.exceptions.SSLError as e:
- bookmark['dead_link'] = True
- bookmark['etag'] = 'N/A'
- bookmark['status_code'] = 'SSL Error'
- bookmark['description'] = ''
- bookmark['html_content'] = ''
- logger.error(f"SSL error fetching URL info for {url}: {e}", exc_info=True)
  except requests.exceptions.Timeout:
  bookmark['dead_link'] = False # Mark as 'Unknown' instead of 'Dead'
  bookmark['etag'] = 'N/A'
@@ -389,26 +377,17 @@ def vectorize_and_index(bookmarks_list):
  """
  Create vector embeddings for bookmarks and build FAISS index with ID mapping.
  """
- global faiss_index
  logger.info("Vectorizing summaries and building FAISS index")
  try:
- # Use .get('summary', '') to avoid KeyError
- summaries = [bookmark.get('summary', '') for bookmark in bookmarks_list]
-
- # Check for any empty summaries and log them
- for i, summary in enumerate(summaries):
- if not summary:
- logger.warning(f"Bookmark at index {i} is missing a summary.")
- summaries[i] = 'No summary available.'
-
- embeddings = get_embedding_model().encode(summaries).astype('float32')
+ summaries = [bookmark['summary'] for bookmark in bookmarks_list]
+ embeddings = embedding_model.encode(summaries)
  dimension = embeddings.shape[1]
- if faiss_index is None:
- faiss_index = faiss.IndexIDMap(faiss.IndexFlatL2(dimension))
+ index = faiss.IndexIDMap(faiss.IndexFlatL2(dimension))
  # Assign unique IDs to each bookmark
  ids = np.array([bookmark['id'] for bookmark in bookmarks_list], dtype=np.int64)
- faiss_index.add_with_ids(embeddings, ids)
+ index.add_with_ids(np.array(embeddings).astype('float32'), ids)
  logger.info("FAISS index built successfully with IDs")
+ return index
  except Exception as e:
  logger.error(f"Error in vectorizing and indexing: {e}", exc_info=True)
  raise
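As a side note, a small self-contained sketch of the FAISS IndexIDMap pattern used here: explicit 64-bit IDs let search results and deletions be mapped back to bookmark dicts. The toy data and dimension are illustrative only.

import numpy as np
import faiss

dim = 384  # all-MiniLM-L6-v2 produces 384-dimensional embeddings
vectors = np.random.rand(3, dim).astype('float32')  # toy embeddings
ids = np.array([10, 20, 30], dtype=np.int64)  # explicit bookmark IDs

index = faiss.IndexIDMap(faiss.IndexFlatL2(dim))
index.add_with_ids(vectors, ids)

# Search returns the original IDs, so hits can be looked up in an id-to-bookmark dict.
distances, found_ids = index.search(vectors[:1], 2)
print(found_ids)

# Deleting by ID mirrors delete_selected_bookmarks(); an int64 array is accepted here.
index.remove_ids(np.array([20], dtype=np.int64))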
@@ -462,7 +441,7 @@ def display_bookmarks():
  logger.info("HTML display generated")
  return cards
 
- def process_uploaded_file(file, state_bookmarks):
+ def process_uploaded_file(file):
  """
  Process the uploaded bookmarks file.
  """
@@ -471,39 +450,23 @@ def process_uploaded_file(file, state_bookmarks):
 
  if file is None:
  logger.warning("No file uploaded")
- return (
- "⚠️ Please upload a bookmarks HTML file.",
- "",
- state_bookmarks # Return the unchanged state
- )
+ return "Please upload a bookmarks HTML file.", '', gr.update(choices=[]), display_bookmarks()
 
  try:
  file_content = file.decode('utf-8')
  except UnicodeDecodeError as e:
- logger.error(f"Error decoding the file: {e}")
- return (
- "⚠️ Error decoding the file. Please ensure it's a valid HTML file.",
- "",
- state_bookmarks # Return the unchanged state
- )
+ logger.error(f"Error decoding the file: {e}", exc_info=True)
+ return "Error decoding the file. Please ensure it's a valid HTML file.", '', gr.update(choices=[]), display_bookmarks()
 
  try:
  bookmarks = parse_bookmarks(file_content)
  except Exception as e:
- logger.error(f"Error parsing bookmarks: {e}")
- return (
- "⚠️ Error parsing the bookmarks HTML file.",
- "",
- state_bookmarks # Return the unchanged state
- )
+ logger.error(f"Error parsing bookmarks: {e}", exc_info=True)
+ return "Error parsing the bookmarks HTML file.", '', gr.update(choices=[]), display_bookmarks()
 
  if not bookmarks:
  logger.warning("No bookmarks found in the uploaded file")
- return (
- "⚠️ No bookmarks found in the uploaded file.",
- "",
- state_bookmarks # Return the unchanged state
- )
+ return "No bookmarks found in the uploaded file.", '', gr.update(choices=[]), display_bookmarks()
 
  # Assign unique IDs to bookmarks
  for idx, bookmark in enumerate(bookmarks):
@@ -511,135 +474,94 @@ def process_uploaded_file(file, state_bookmarks):
 
  # Fetch bookmark info concurrently
  logger.info("Fetching URL info concurrently")
- with ThreadPoolExecutor(max_workers=3) as executor: # Adjusted max_workers to 3
+ with ThreadPoolExecutor(max_workers=20) as executor:
  executor.map(fetch_url_info, bookmarks)
 
- # Generate summaries and assign categories
- logger.info("Generating summaries and assigning categories")
- with ThreadPoolExecutor(max_workers=1) as executor: # Adjusted max_workers to 1
+ # Process bookmarks concurrently with LLM calls
+ logger.info("Processing bookmarks with LLM concurrently")
+ with ThreadPoolExecutor(max_workers=5) as executor:
  executor.map(generate_summary_and_assign_category, bookmarks)
 
- # Log bookmarks to verify 'summary' and 'category' presence
- for idx, bookmark in enumerate(bookmarks):
- if 'summary' not in bookmark or 'category' not in bookmark:
- logger.error(f"Bookmark at index {idx} is missing 'summary' or 'category': {bookmark}")
- else:
- logger.debug(f"Bookmark {idx} processed with summary and category.")
-
  try:
- vectorize_and_index(bookmarks)
+ faiss_index = vectorize_and_index(bookmarks)
  except Exception as e:
  logger.error(f"Error building FAISS index: {e}", exc_info=True)
- return (
- "⚠️ Error building search index.",
- "",
- state_bookmarks # Return the unchanged state
- )
+ return "Error building search index.", '', gr.update(choices=[]), display_bookmarks()
 
  message = f"✅ Successfully processed {len(bookmarks)} bookmarks."
  logger.info(message)
 
  # Generate displays and updates
  bookmark_html = display_bookmarks()
- choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})" for i, bookmark in enumerate(bookmarks)]
+ choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
+ for i, bookmark in enumerate(bookmarks)]
 
- return (
- message,
- bookmark_html,
- bookmarks.copy() # Return the updated state
- )
+ return message, bookmark_html, gr.update(choices=choices), bookmark_html
 
- def delete_selected_bookmarks(selected_indices, state_bookmarks):
+ def delete_selected_bookmarks(selected_indices):
  """
  Delete selected bookmarks and remove their vectors from the FAISS index.
  """
  global bookmarks, faiss_index
  if not selected_indices:
- return "⚠️ No bookmarks selected.", gr.update(choices=[]), bookmark_display_manage.update(value=display_bookmarks()), state_bookmarks
+ return "⚠️ No bookmarks selected.", gr.update(choices=[]), display_bookmarks()
 
  ids_to_delete = []
  indices_to_delete = []
  for s in selected_indices:
- try:
- idx = int(s.split('.')[0]) - 1
- if 0 <= idx < len(state_bookmarks):
- bookmark_id = state_bookmarks[idx]['id']
- ids_to_delete.append(bookmark_id)
- indices_to_delete.append(idx)
- logger.info(f"Deleting bookmark at index {idx + 1}")
- except ValueError:
- logger.error(f"Invalid selection format: {s}")
+ idx = int(s.split('.')[0]) - 1
+ if 0 <= idx < len(bookmarks):
+ bookmark_id = bookmarks[idx]['id']
+ ids_to_delete.append(bookmark_id)
+ indices_to_delete.append(idx)
+ logger.info(f"Deleting bookmark at index {idx + 1}")
 
  # Remove vectors from FAISS index
  if faiss_index is not None and ids_to_delete:
  faiss_index.remove_ids(np.array(ids_to_delete, dtype=np.int64))
 
  # Remove bookmarks from the list (reverse order to avoid index shifting)
- bookmarks = state_bookmarks.copy()
  for idx in sorted(indices_to_delete, reverse=True):
  bookmarks.pop(idx)
 
  message = "🗑️ Selected bookmarks deleted successfully."
  logger.info(message)
- choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})" for i, bookmark in enumerate(bookmarks)]
+ choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
+ for i, bookmark in enumerate(bookmarks)]
 
- return (
- message,
- gr.update(choices=choices, value=[]),
- bookmark_display_manage.update(value=display_bookmarks()),
- bookmarks.copy() # Return the updated state
- )
+ return message, gr.update(choices=choices), display_bookmarks()
 
- def edit_selected_bookmarks_category(selected_indices, new_category, state_bookmarks):
+ def edit_selected_bookmarks_category(selected_indices, new_category):
  """
  Edit category of selected bookmarks.
  """
  if not selected_indices:
- return (
- "⚠️ No bookmarks selected.",
- gr.update(choices=[]),
- bookmark_display_manage.update(value=display_bookmarks()),
- state_bookmarks
- )
+ return "⚠️ No bookmarks selected.", gr.update(choices=[]), display_bookmarks()
  if not new_category:
- return (
- "⚠️ No new category selected.",
- gr.update(choices=[]),
- bookmark_display_manage.update(value=display_bookmarks()),
- state_bookmarks
- )
+ return "⚠️ No new category selected.", gr.update(choices=[]), display_bookmarks()
 
- bookmarks = state_bookmarks.copy()
- for s in selected_indices:
- try:
- idx = int(s.split('.')[0]) - 1
- if 0 <= idx < len(bookmarks):
- bookmarks[idx]['category'] = new_category
- logger.info(f"Updated category for bookmark {idx + 1} to {new_category}")
- except ValueError:
- logger.error(f"Invalid selection format: {s}")
+ indices = [int(s.split('.')[0])-1 for s in selected_indices]
+ for idx in indices:
+ if 0 <= idx < len(bookmarks):
+ bookmarks[idx]['category'] = new_category
+ logger.info(f"Updated category for bookmark {idx + 1} to {new_category}")
 
  message = "✏️ Category updated for selected bookmarks."
  logger.info(message)
 
  # Update choices and display
- choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})" for i, bookmark in enumerate(bookmarks)]
+ choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
+ for i, bookmark in enumerate(bookmarks)]
 
- return (
- message,
- gr.update(choices=choices, value=[]),
- bookmark_display_manage.update(value=display_bookmarks()),
- bookmarks.copy() # Return the updated state
- )
+ return message, gr.update(choices=choices), display_bookmarks()
 
- def export_bookmarks(state_bookmarks):
+ def export_bookmarks():
  """
  Export bookmarks to an HTML file.
  """
- bookmarks = state_bookmarks
  if not bookmarks:
  logger.warning("No bookmarks to export")
- return "⚠️ No bookmarks to export."
+ return None # Return None instead of a message
 
  try:
  logger.info("Exporting bookmarks to HTML")
@@ -653,36 +575,30 @@ def export_bookmarks(state_bookmarks):
  dl.append(dt)
  soup.append(dl)
  html_content = str(soup)
- # Encode the HTML content to base64 for download
- b64 = base64.b64encode(html_content.encode()).decode()
- href = f'data:text/html;base64,{b64}'
+ # Save to a temporary file
+ output_file = "exported_bookmarks.html"
+ with open(output_file, 'w', encoding='utf-8') as f:
+ f.write(html_content)
  logger.info("Bookmarks exported successfully")
- return f'<a href="{href}" download="exported_bookmarks.html">💾 Download Exported Bookmarks</a>'
+ return output_file # Return the file path
  except Exception as e:
- logger.error(f"Error exporting bookmarks: {e}")
- return "⚠️ Error exporting bookmarks."
+ logger.error(f"Error exporting bookmarks: {e}", exc_info=True)
+ return None # Return None in case of error
 
- def chatbot_response(user_query, chat_history, state_bookmarks):
+ def chatbot_response(user_query, chat_history):
  """
  Generate chatbot response using the FAISS index and embeddings, maintaining chat history.
  """
- if not GROQ_API_KEY:
- logger.warning("GROQ_API_KEY not set.")
- return chat_history + [{"role": "system", "content": "⚠️ API key not set. Please set the GROQ_API_KEY environment variable in the Hugging Face Space settings."}]
-
- bookmarks = state_bookmarks
- if not bookmarks:
+ if not bookmarks or faiss_index is None:
  logger.warning("No bookmarks available for chatbot")
- return chat_history + [{"role": "system", "content": "⚠️ No bookmarks available. Please upload and process your bookmarks first."}]
+ chat_history.append((user_query, "⚠️ No bookmarks available. Please upload and process your bookmarks first."))
+ return chat_history
 
  logger.info(f"Chatbot received query: {user_query}")
 
  try:
- # Ensure embedding model is loaded
- model = get_embedding_model()
-
  # Encode the user query
- query_vector = model.encode([user_query]).astype('float32')
+ query_vector = embedding_model.encode([user_query]).astype('float32')
 
  # Search the FAISS index
  k = 5 # Number of results to return
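A minimal sketch of the export pattern adopted above: write the HTML to disk and return the path from the callback, so a gr.File output offers it as a download instead of a base64 data link. The file name and markup below are placeholders, not the app's real export format.

import gradio as gr

def export_example():
    # Write the HTML to disk and return the path; gr.File serves it as a downloadable file.
    path = "example_export.html"
    with open(path, "w", encoding="utf-8") as f:
        f.write("<DL><DT><A HREF='https://example.com'>Example</A></DT></DL>")
    return path

with gr.Blocks() as demo:
    export_btn = gr.Button("Export")
    download = gr.File(label="Download Exported Bookmarks")
    export_btn.click(export_example, outputs=download)

demo.launch()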
@@ -694,75 +610,65 @@ def chatbot_response(user_query, chat_history, state_bookmarks):
  matching_bookmarks = [id_to_bookmark.get(id) for id in ids if id in id_to_bookmark]
 
  if not matching_bookmarks:
- response_text = "No relevant bookmarks found for your query."
- logger.info(response_text)
- return chat_history + [{"role": "assistant", "content": response_text}]
+ answer = "No relevant bookmarks found for your query."
+ chat_history.append((user_query, answer))
+ return chat_history
 
  # Format the response
- bookmarks_info = "\n\n".join([
- f"**Title:** {bookmark['title']}\n**URL:** {bookmark['url']}\n**Summary:** {bookmark['summary']}"
+ bookmarks_info = "\n".join([
+ f"Title: {bookmark['title']}\nURL: {bookmark['url']}\nSummary: {bookmark['summary']}"
  for bookmark in matching_bookmarks
  ])
 
- # Construct the prompt
+ # Use the LLM via Groq Cloud API to generate a response
  prompt = f"""
  A user asked: "{user_query}"
  Based on the bookmarks below, provide a helpful answer to the user's query, referencing the relevant bookmarks.
-
  Bookmarks:
  {bookmarks_info}
-
  Provide a concise and helpful response.
  """
 
- # Acquire semaphore before making API call
- api_semaphore.acquire()
- try:
- # Call the LLM via Groq Cloud API with exponential backoff
- max_retries = 5
- retry_count = 0
- base_wait = 1 # Initial wait time in seconds
-
- while retry_count < max_retries:
- try:
- response = openai.ChatCompletion.create(
- model='llama-3.1-70b-versatile', # Ensure this is the correct model name
- messages=[
- {"role": "user", "content": prompt}
- ],
- max_tokens=300,
- temperature=0.7,
- )
- answer = response['choices'][0]['message']['content'].strip()
- logger.info("Chatbot response generated")
-
- # **Introduce Fixed Delay After API Call**
- time.sleep(1) # Wait for 1 second before the next API call
-
- return chat_history + [{"role": "user", "content": user_query}, {"role": "assistant", "content": answer}]
- except openai.error.RateLimitError as e:
- retry_count += 1
- wait_time = base_wait * (2 ** retry_count) # Exponential backoff
- logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying... (Attempt {retry_count}/{max_retries})")
- time.sleep(wait_time)
- except Exception as e:
- error_message = f"⚠️ Error processing your query: {str(e)}"
- logger.error(error_message, exc_info=True)
- return chat_history + [{"role": "assistant", "content": error_message}]
-
- # If max retries reached
- error_message = "⚠️ Unable to process your query at the moment. Please try again later."
- logger.error(error_message)
- return chat_history + [{"role": "assistant", "content": error_message}]
-
- finally:
- # Release semaphore after API call
- api_semaphore.release()
-
+ # Estimate tokens
+ def estimate_tokens(text):
+ return len(text) / 4 # Approximate token estimation
+
+ prompt_tokens = estimate_tokens(prompt)
+ max_tokens = 300 # Adjust as needed
+ total_tokens = prompt_tokens + max_tokens
+
+ # Calculate required delay
+ tokens_per_minute = 60000 # Adjust based on your rate limit
+ tokens_per_second = tokens_per_minute / 60
+ required_delay = total_tokens / tokens_per_second
+ sleep_time = max(required_delay, 1)
+
+ response = openai.ChatCompletion.create(
+ model='llama-3.1-70b-versatile', # Using the specified model
+ messages=[
+ {"role": "user", "content": prompt}
+ ],
+ max_tokens=int(max_tokens),
+ temperature=0.7,
+ )
+ answer = response['choices'][0]['message']['content'].strip()
+ logger.info("Chatbot response generated")
+ time.sleep(sleep_time)
+
+ # Append the interaction to chat history
+ chat_history.append((user_query, answer))
+ return chat_history
+
+ except openai.error.RateLimitError as e:
+ wait_time = int(e.headers.get("Retry-After", 5))
+ logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying...")
+ time.sleep(wait_time)
+ return chatbot_response(user_query, chat_history) # Retry after waiting
  except Exception as e:
  error_message = f"⚠️ Error processing your query: {str(e)}"
  logger.error(error_message, exc_info=True)
- return chat_history + [{"role": "assistant", "content": error_message}]
+ chat_history.append((user_query, error_message))
+ return chat_history
 
  def build_app():
  """
@@ -770,186 +676,109 @@ def build_app():
  """
  try:
  logger.info("Building Gradio app")
- with gr.Blocks(css="app.css") as demo: # Load external CSS file
- # Shared states
- state_bookmarks = gr.State([])
- chat_history = gr.State([])
-
+ with gr.Blocks(css="app.css") as demo:
  # General Overview
  gr.Markdown("""
- # 📚 SmartMarks - AI Browser Bookmarks Manager
-
- Welcome to **SmartMarks**, your intelligent assistant for managing browser bookmarks. SmartMarks leverages AI to help you organize, search, and interact with your bookmarks seamlessly. Whether you're looking to categorize your links, retrieve information quickly, or maintain an updated list, SmartMarks has you covered.
-
- ---
-
- ## 🚀 **How to Use SmartMarks**
-
- SmartMarks is divided into three main sections:
-
- 1. **📂 Upload and Process Bookmarks:** Import your existing bookmarks and let SmartMarks analyze and categorize them for you.
- 2. **💬 Chat with Bookmarks:** Interact with your bookmarks using natural language queries to find relevant links effortlessly.
- 3. **🛠️ Manage Bookmarks:** View, edit, delete, and export your bookmarks with ease.
-
- Navigate through the tabs to explore each feature in detail.
- """)
+ # 📚 SmartMarks - AI Browser Bookmarks Manager
+ Welcome to **SmartMarks**, your intelligent assistant for managing browser bookmarks. SmartMarks leverages AI to help you organize, search, and interact with your bookmarks seamlessly.
+ ---
+ ## 🚀 **How to Use SmartMarks**
+ SmartMarks is divided into three main sections:
+ 1. **📂 Upload and Process Bookmarks:** Import your existing bookmarks and let SmartMarks analyze and categorize them for you.
+ 2. **💬 Chat with Bookmarks:** Interact with your bookmarks using natural language queries to find relevant links effortlessly.
+ 3. **🛠️ Manage Bookmarks:** View, edit, delete, and export your bookmarks with ease.
+ """)
 
  # Upload and Process Bookmarks Tab
  with gr.Tab("Upload and Process Bookmarks"):
  gr.Markdown("""
- ## 📂 **Upload and Process Bookmarks**
-
- ### 📝 **Steps to Upload and Process:**
-
- 1. **Upload Bookmarks File:**
- - Click on the **"📁 Upload Bookmarks HTML File"** button.
- - Select your browser's exported bookmarks HTML file from your device.
-
- 2. **Process Bookmarks:**
- - After uploading, click on the **"⚙️ Process Bookmarks"** button.
- - SmartMarks will parse your bookmarks, fetch additional information, generate summaries, and categorize each link based on predefined categories.
-
- 3. **View Processed Bookmarks:**
- - Once processing is complete, your bookmarks will be displayed in an organized and visually appealing format below.
- """)
+ ## 📂 **Upload and Process Bookmarks**
+ ### 📝 **Steps:**
+ 1. Click on the "Upload Bookmarks HTML File" button
+ 2. Select your bookmarks file
+ 3. Click "Process Bookmarks" to analyze and organize your bookmarks
+ """)
 
  upload = gr.File(label="📁 Upload Bookmarks HTML File", type='binary')
  process_button = gr.Button("⚙️ Process Bookmarks")
  output_text = gr.Textbox(label="✅ Output", interactive=False)
  bookmark_display = gr.HTML(label="📄 Processed Bookmarks")
 
- process_button.click(
- process_uploaded_file,
- inputs=[upload, state_bookmarks],
- outputs=[output_text, bookmark_display, state_bookmarks]
- )
-
  # Chat with Bookmarks Tab
  with gr.Tab("Chat with Bookmarks"):
  gr.Markdown("""
- ## 💬 **Chat with Bookmarks**
-
- ### 🤖 **How to Interact:**
-
- 1. **Enter Your Query:**
- - In the **"✍️ Ask about your bookmarks"** textbox, type your question or keyword related to your bookmarks. For example, "Do I have any bookmarks about GenerativeAI?"
-
- 2. **Submit Your Query:**
- - Click the **"📨 Send"** button to submit your query.
-
- 3. **Receive AI-Driven Responses:**
- - SmartMarks will analyze your query and provide relevant bookmarks that match your request, making it easier to find specific links without manual searching.
-
- 4. **View Chat History:**
- - All your queries and the corresponding AI responses are displayed in the chat history for your reference.
- """)
-
- with gr.Row():
- chat_history_display = gr.Chatbot(label="🗨️ Chat History", type="messages")
- with gr.Column(scale=1):
- chat_input = gr.Textbox(
- label="✍️ Ask about your bookmarks",
- placeholder="e.g., Do I have any bookmarks about GenerativeAI?",
- lines=1,
- interactive=True
- )
- chat_button = gr.Button("📨 Send")
-
- # When user presses Enter in chat_input
- chat_input.submit(
- chatbot_response,
- inputs=[chat_input, chat_history_display, state_bookmarks],
- outputs=chat_history_display
- )
-
- # When user clicks Send button
- chat_button.click(
- chatbot_response,
- inputs=[chat_input, chat_history_display, state_bookmarks],
- outputs=chat_history_display
+ ## 💬 **Chat with Bookmarks**
+ Ask questions about your bookmarks and get relevant results.
+ """)
+
+ chatbot = gr.Chatbot(label="💬 Chat with SmartMarks")
+ user_input = gr.Textbox(
+ label="✍️ Ask about your bookmarks",
+ placeholder="e.g., Do I have any bookmarks about AI?"
  )
+ chat_button = gr.Button("📨 Send")
 
  # Manage Bookmarks Tab
  with gr.Tab("Manage Bookmarks"):
  gr.Markdown("""
- ## 🛠️ **Manage Bookmarks**
-
- ### 🗂️ **Features:**
-
- 1. **View Bookmarks:**
- - All your processed bookmarks are displayed here with their respective categories and summaries.
-
- 2. **Select Bookmarks:**
- - Use the checkboxes next to each bookmark to select one, multiple, or all bookmarks you wish to manage.
-
- 3. **Delete Selected Bookmarks:**
- - After selecting the desired bookmarks, click the **"🗑️ Delete Selected"** button to remove them from your list.
-
- 4. **Edit Categories:**
- - Select the bookmarks you want to re-categorize.
- - Choose a new category from the dropdown menu labeled **"🆕 New Category"**.
- - Click the **"✏️ Edit Category"** button to update their categories.
-
- 5. **Export Bookmarks:**
- - Click the **"💾 Export"** button to download your updated bookmarks as an HTML file.
- - This file can be uploaded back to your browser to reflect the changes made within SmartMarks.
-
- 6. **Refresh Bookmarks:**
- - Click the **"🔄 Refresh Bookmarks"** button to ensure the latest state is reflected in the display.
- """)
+ ## 🛠️ **Manage Bookmarks**
+ Select bookmarks to delete or edit their categories.
+ """)
 
  manage_output = gr.Textbox(label="🔄 Status", interactive=False)
  bookmark_selector = gr.CheckboxGroup(
  label="✅ Select Bookmarks",
- choices=[],
- value=[]
+ choices=[]
+ )
+ new_category = gr.Dropdown(
+ label="🆕 New Category",
+ choices=CATEGORIES,
+ value="Uncategorized"
  )
- new_category_input = gr.Dropdown(label="🆕 New Category", choices=CATEGORIES, value="Uncategorized")
  bookmark_display_manage = gr.HTML(label="📄 Bookmarks")
 
  with gr.Row():
  delete_button = gr.Button("🗑️ Delete Selected")
  edit_category_button = gr.Button("✏️ Edit Category")
  export_button = gr.Button("💾 Export")
- refresh_button = gr.Button("🔄 Refresh Bookmarks")
-
- download_link = gr.HTML(label="📥 Download Exported Bookmarks")
-
- # Define button actions
- delete_button.click(
- delete_selected_bookmarks,
- inputs=[bookmark_selector, state_bookmarks],
- outputs=[manage_output, bookmark_selector, bookmark_display_manage, state_bookmarks]
- )
 
- edit_category_button.click(
- edit_selected_bookmarks_category,
- inputs=[bookmark_selector, new_category_input, state_bookmarks],
- outputs=[manage_output, bookmark_selector, bookmark_display_manage, state_bookmarks]
- )
-
- export_button.click(
- export_bookmarks,
- inputs=[state_bookmarks],
- outputs=download_link
- )
-
- refresh_button.click(
- lambda bookmarks: (
- [
- f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})" for i, bookmark in enumerate(bookmarks)
- ],
- display_bookmarks()
- ),
- inputs=[state_bookmarks],
- outputs=[bookmark_selector, bookmark_display_manage]
- )
+ download_link = gr.File(label="📥 Download Exported Bookmarks")
+
+ # Set up event handlers
+ process_button.click(
+ process_uploaded_file,
+ inputs=upload,
+ outputs=[output_text, bookmark_display, bookmark_selector, bookmark_display_manage]
+ )
+
+ chat_button.click(
+ chatbot_response,
+ inputs=[user_input, chatbot],
+ outputs=chatbot
+ )
+
+ delete_button.click(
+ delete_selected_bookmarks,
+ inputs=bookmark_selector,
+ outputs=[manage_output, bookmark_selector, bookmark_display_manage]
+ )
+
+ edit_category_button.click(
+ edit_selected_bookmarks_category,
+ inputs=[bookmark_selector, new_category],
+ outputs=[manage_output, bookmark_selector, bookmark_display_manage]
+ )
+
+ export_button.click(
+ export_bookmarks,
+ outputs=download_link
+ )
 
  logger.info("Launching Gradio app")
  demo.launch(debug=True)
  except Exception as e:
- logger.error(f"Error building Gradio app: {e}", exc_info=True)
- print(f"Error building Gradio app: {e}")
+ logger.error(f"Error building the app: {e}", exc_info=True)
+ print(f"Error building the app: {e}")
 
  if __name__ == "__main__":
  build_app()
 