siddhartharya committed on
Commit 8ba26a5
1 Parent(s): 00cf45f

Update app.py

Files changed (1)
  1. app.py +88 -187
app.py CHANGED
@@ -19,10 +19,6 @@ import threading
  # Import OpenAI library
  import openai

- # Suppress only the single warning from urllib3 needed.
- import urllib3
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
  # Set up logging to output to the console
  logger = logging.getLogger(__name__)
  logger.setLevel(logging.INFO)
@@ -38,8 +34,8 @@ console_handler.setFormatter(formatter)
  # Add the handler to the logger
  logger.addHandler(console_handler)

- # Initialize variables and models
- logger.info("Initializing variables and models")
+ # Initialize models and variables
+ logger.info("Initializing models and variables")
  embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
  faiss_index = None
  bookmarks = []
@@ -81,10 +77,7 @@ if not GROQ_API_KEY:
  logger.error("GROQ_API_KEY environment variable not set.")

  openai.api_key = GROQ_API_KEY
- openai.api_base = "https://api.groq.com/openai/v1" # Ensure this is the correct base URL
-
- # Initialize semaphore for rate limiting (allowing 1 concurrent API call)
- api_semaphore = threading.Semaphore(1)
+ openai.api_base = "https://api.groq.com/openai/v1"

  def extract_main_content(soup):
  """
@@ -233,7 +226,7 @@ Category: [One category]
  return len(text) / 4 # Approximate token estimation

  prompt_tokens = estimate_tokens(prompt)
- max_tokens = 150 # Adjusted from 200
+ max_tokens = 150 # Reduced from 200
  total_tokens = prompt_tokens + max_tokens

  # Calculate required delay
@@ -242,22 +235,15 @@ Category: [One category]
  required_delay = total_tokens / tokens_per_second
  sleep_time = max(required_delay, 1)

- # Acquire semaphore before making API call
- api_semaphore.acquire()
- try:
- # Call the LLM via Groq Cloud API
- response = openai.ChatCompletion.create(
- model='llama-3.1-70b-versatile', # Using the specified model
- messages=[
- {"role": "user", "content": prompt}
- ],
- max_tokens=int(max_tokens),
- temperature=0.5,
- )
- finally:
- # Release semaphore after API call
- api_semaphore.release()
-
+ # Call the LLM via Groq Cloud API
+ response = openai.ChatCompletion.create(
+ model='llama-3.1-70b-versatile', # Using the specified model
+ messages=[
+ {"role": "user", "content": prompt}
+ ],
+ max_tokens=int(max_tokens),
+ temperature=0.5,
+ )
  content = response['choices'][0]['message']['content'].strip()
  if not content:
  raise ValueError("Empty response received from the model.")
@@ -295,7 +281,7 @@ Category: [One category]
  except openai.error.RateLimitError as e:
  retry_count += 1
  wait_time = int(e.headers.get("Retry-After", 5))
- logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying... (Attempt {retry_count}/{max_retries})")
+ logger.warning(f"Rate limit reached. Waiting for {wait_time} seconds before retrying...")
  time.sleep(wait_time)
  except Exception as e:
  logger.error(f"Error generating summary and assigning category: {e}", exc_info=True)
@@ -391,7 +377,6 @@ def vectorize_and_index(bookmarks_list):
  """
  Create vector embeddings for bookmarks and build FAISS index with ID mapping.
  """
- global faiss_index
  logger.info("Vectorizing summaries and building FAISS index")
  try:
  summaries = [bookmark['summary'] for bookmark in bookmarks_list]
@@ -401,7 +386,6 @@ def vectorize_and_index(bookmarks_list):
  # Assign unique IDs to each bookmark
  ids = np.array([bookmark['id'] for bookmark in bookmarks_list], dtype=np.int64)
  index.add_with_ids(np.array(embeddings).astype('float32'), ids)
- faiss_index = index
  logger.info("FAISS index built successfully with IDs")
  return index
  except Exception as e:
@@ -457,7 +441,7 @@ def display_bookmarks():
  logger.info("HTML display generated")
  return cards

- def process_uploaded_file(file, state_bookmarks):
+ def process_uploaded_file(file):
  """
  Process the uploaded bookmarks file.
  """
@@ -466,23 +450,23 @@ def process_uploaded_file(file, state_bookmarks):

  if file is None:
  logger.warning("No file uploaded")
- return "Please upload a bookmarks HTML file.", '', state_bookmarks, display_bookmarks()
+ return "Please upload a bookmarks HTML file.", '', gr.update(choices=[]), display_bookmarks()

  try:
  file_content = file.decode('utf-8')
  except UnicodeDecodeError as e:
  logger.error(f"Error decoding the file: {e}", exc_info=True)
- return "Error decoding the file. Please ensure it's a valid HTML file.", '', state_bookmarks, display_bookmarks()
+ return "Error decoding the file. Please ensure it's a valid HTML file.", '', gr.update(choices=[]), display_bookmarks()

  try:
  bookmarks = parse_bookmarks(file_content)
  except Exception as e:
  logger.error(f"Error parsing bookmarks: {e}", exc_info=True)
- return "Error parsing the bookmarks HTML file.", '', state_bookmarks, display_bookmarks()
+ return "Error parsing the bookmarks HTML file.", '', gr.update(choices=[]), display_bookmarks()

  if not bookmarks:
  logger.warning("No bookmarks found in the uploaded file")
- return "No bookmarks found in the uploaded file.", '', state_bookmarks, display_bookmarks()
+ return "No bookmarks found in the uploaded file.", '', gr.update(choices=[]), display_bookmarks()

  # Assign unique IDs to bookmarks
  for idx, bookmark in enumerate(bookmarks):
@@ -490,19 +474,19 @@ def process_uploaded_file(file, state_bookmarks):

  # Fetch bookmark info concurrently
  logger.info("Fetching URL info concurrently")
- with ThreadPoolExecutor(max_workers=3) as executor: # Adjusted max_workers to 3
+ with ThreadPoolExecutor(max_workers=20) as executor:
  executor.map(fetch_url_info, bookmarks)

  # Process bookmarks concurrently with LLM calls
  logger.info("Processing bookmarks with LLM concurrently")
- with ThreadPoolExecutor(max_workers=1) as executor: # Adjusted max_workers to 1
+ with ThreadPoolExecutor(max_workers=5) as executor:
  executor.map(generate_summary_and_assign_category, bookmarks)

  try:
  faiss_index = vectorize_and_index(bookmarks)
  except Exception as e:
  logger.error(f"Error building FAISS index: {e}", exc_info=True)
- return "Error building search index.", '', state_bookmarks, display_bookmarks()
+ return "Error building search index.", '', gr.update(choices=[]), display_bookmarks()

  message = f"✅ Successfully processed {len(bookmarks)} bookmarks."
  logger.info(message)
@@ -512,12 +496,9 @@ def process_uploaded_file(file, state_bookmarks):
  choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
  for i, bookmark in enumerate(bookmarks)]

- # Update state
- state_bookmarks = bookmarks.copy()
+ return message, bookmark_html, gr.update(choices=choices), bookmark_html

- return message, bookmark_html, state_bookmarks, bookmark_html
-
- def delete_selected_bookmarks(selected_indices, state_bookmarks):
+ def delete_selected_bookmarks(selected_indices):
  """
  Delete selected bookmarks and remove their vectors from the FAISS index.
  """
@@ -548,19 +529,16 @@ def delete_selected_bookmarks(selected_indices, state_bookmarks):
  choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
  for i, bookmark in enumerate(bookmarks)]

- # Update state
- state_bookmarks = bookmarks.copy()
-
  return message, gr.update(choices=choices), display_bookmarks()

- def edit_selected_bookmarks_category(selected_indices, new_category, state_bookmarks):
+ def edit_selected_bookmarks_category(selected_indices, new_category):
  """
  Edit category of selected bookmarks.
  """
  if not selected_indices:
- return "⚠️ No bookmarks selected.", gr.update(choices=[]), display_bookmarks(), state_bookmarks
+ return "⚠️ No bookmarks selected.", gr.update(choices=[]), display_bookmarks()
  if not new_category:
- return "⚠️ No new category selected.", gr.update(choices=[]), display_bookmarks(), state_bookmarks
+ return "⚠️ No new category selected.", gr.update(choices=[]), display_bookmarks()

  indices = [int(s.split('.')[0])-1 for s in selected_indices]
  for idx in indices:
@@ -575,10 +553,7 @@ def edit_selected_bookmarks_category(selected_indices, new_category, state_bookm
  choices = [f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})"
  for i, bookmark in enumerate(bookmarks)]

- # Update state
- state_bookmarks = bookmarks.copy()
-
- return message, gr.update(choices=choices), display_bookmarks(), state_bookmarks
+ return message, gr.update(choices=choices), display_bookmarks()

  def export_bookmarks():
  """
@@ -616,7 +591,7 @@ def chatbot_response(user_query, chat_history):
  """
  if not bookmarks or faiss_index is None:
  logger.warning("No bookmarks available for chatbot")
- chat_history.append({"role": "assistant", "content": "⚠️ No bookmarks available. Please upload and process your bookmarks first."})
+ chat_history.append((user_query, "⚠️ No bookmarks available. Please upload and process your bookmarks first."))
  return chat_history

  logger.info(f"Chatbot received query: {user_query}")
@@ -636,7 +611,7 @@ def chatbot_response(user_query, chat_history):

  if not matching_bookmarks:
  answer = "No relevant bookmarks found for your query."
- chat_history.append({"role": "assistant", "content": answer})
+ chat_history.append((user_query, answer))
  return chat_history

  # Format the response
@@ -668,28 +643,20 @@ Provide a concise and helpful response.
  required_delay = total_tokens / tokens_per_second
  sleep_time = max(required_delay, 1)

- # Acquire semaphore before making API call
- api_semaphore.acquire()
- try:
- # Call the LLM via Groq Cloud API
- response = openai.ChatCompletion.create(
- model='llama-3.1-70b-versatile', # Using the specified model
- messages=[
- {"role": "user", "content": prompt}
- ],
- max_tokens=int(max_tokens),
- temperature=0.7,
- )
- finally:
- # Release semaphore after API call
- api_semaphore.release()
-
+ response = openai.ChatCompletion.create(
+ model='llama-3.1-70b-versatile', # Using the specified model
+ messages=[
+ {"role": "user", "content": prompt}
+ ],
+ max_tokens=int(max_tokens),
+ temperature=0.7,
+ )
  answer = response['choices'][0]['message']['content'].strip()
  logger.info("Chatbot response generated")
  time.sleep(sleep_time)

  # Append the interaction to chat history
- chat_history.append({"role": "assistant", "content": answer})
+ chat_history.append((user_query, answer))
  return chat_history

  except openai.error.RateLimitError as e:
@@ -700,7 +667,7 @@ Provide a concise and helpful response.
  except Exception as e:
  error_message = f"⚠️ Error processing your query: {str(e)}"
  logger.error(error_message, exc_info=True)
- chat_history.append({"role": "assistant", "content": error_message})
+ chat_history.append((user_query, error_message))
  return chat_history

  def build_app():
@@ -710,119 +677,53 @@ def build_app():
  try:
  logger.info("Building Gradio app")
  with gr.Blocks(css="app.css") as demo:
- # Initialize state
- state_bookmarks = gr.State([])
-
  # General Overview
  gr.Markdown("""
- # 📚 SmartMarks - AI Browser Bookmarks Manager
-
- Welcome to **SmartMarks**, your intelligent assistant for managing browser bookmarks. SmartMarks leverages AI to help you organize, search, and interact with your bookmarks seamlessly.
-
- ---
-
- ## 🚀 **How to Use SmartMarks**
-
- SmartMarks is divided into three main sections:
-
- 1. **📂 Upload and Process Bookmarks:** Import your existing bookmarks and let SmartMarks analyze and categorize them for you.
- 2. **💬 Chat with Bookmarks:** Interact with your bookmarks using natural language queries to find relevant links effortlessly.
- 3. **🛠️ Manage Bookmarks:** View, edit, delete, and export your bookmarks with ease.
-
- Navigate through the tabs to explore each feature in detail.
- """)
+ # 📚 SmartMarks - AI Browser Bookmarks Manager
+ Welcome to **SmartMarks**, your intelligent assistant for managing browser bookmarks. SmartMarks leverages AI to help you organize, search, and interact with your bookmarks seamlessly.
+ ---
+ ## 🚀 **How to Use SmartMarks**
+ SmartMarks is divided into three main sections:
+ 1. **📂 Upload and Process Bookmarks:** Import your existing bookmarks and let SmartMarks analyze and categorize them for you.
+ 2. **💬 Chat with Bookmarks:** Interact with your bookmarks using natural language queries to find relevant links effortlessly.
+ 3. **🛠️ Manage Bookmarks:** View, edit, delete, and export your bookmarks with ease.
+ """)

  # Upload and Process Bookmarks Tab
  with gr.Tab("Upload and Process Bookmarks"):
  gr.Markdown("""
- ## 📂 **Upload and Process Bookmarks**
-
- ### 📝 **Steps to Upload and Process:**
-
- 1. **Upload Bookmarks File:**
- - Click on the **"📁 Upload Bookmarks HTML File"** button.
- - Select your browser's exported bookmarks HTML file from your device.
-
- 2. **Process Bookmarks:**
- - After uploading, click on the **"⚙️ Process Bookmarks"** button.
- - SmartMarks will parse your bookmarks, fetch additional information, generate summaries, and categorize each link based on predefined categories.
-
- 3. **View Processed Bookmarks:**
- - Once processing is complete, your bookmarks will be displayed in an organized and visually appealing format below.
- """)
+ ## 📂 **Upload and Process Bookmarks**
+ ### 📝 **Steps:**
+ 1. Click on the "Upload Bookmarks HTML File" button
+ 2. Select your bookmarks file
+ 3. Click "Process Bookmarks" to analyze and organize your bookmarks
+ """)

  upload = gr.File(label="📁 Upload Bookmarks HTML File", type='binary')
  process_button = gr.Button("⚙️ Process Bookmarks")
  output_text = gr.Textbox(label="✅ Output", interactive=False)
  bookmark_display = gr.HTML(label="📄 Processed Bookmarks")

- process_button.click(
- process_uploaded_file,
- inputs=[upload, state_bookmarks],
- outputs=[output_text, bookmark_display, state_bookmarks, bookmark_display]
- )
-
  # Chat with Bookmarks Tab
  with gr.Tab("Chat with Bookmarks"):
  gr.Markdown("""
- ## 💬 **Chat with Bookmarks**
-
- ### 🤖 **How to Interact:**
-
- 1. **Enter Your Query:**
- - In the **"✍️ Ask about your bookmarks"** textbox, type your question or keyword related to your bookmarks. For example, "Do I have any bookmarks about GenerativeAI?"
-
- 2. **Submit Your Query:**
- - Click the **"📨 Send"** button to submit your query.
-
- 3. **Receive AI-Driven Responses:**
- - SmartMarks will analyze your query and provide relevant bookmarks that match your request, making it easier to find specific links without manual searching.
-
- 4. **View Chat History:**
- - All your queries and the corresponding AI responses are displayed in the chat history for your reference.
- """)
-
- chatbot = gr.Chatbot(label="💬 Chat with SmartMarks", type='messages')
+ ## 💬 **Chat with Bookmarks**
+ Ask questions about your bookmarks and get relevant results.
+ """)
+
+ chatbot = gr.Chatbot(label="💬 Chat with SmartMarks")
  user_input = gr.Textbox(
  label="✍️ Ask about your bookmarks",
  placeholder="e.g., Do I have any bookmarks about AI?"
  )
  chat_button = gr.Button("📨 Send")

- chat_button.click(
- chatbot_response,
- inputs=[user_input, chatbot],
- outputs=chatbot
- )
-
  # Manage Bookmarks Tab
  with gr.Tab("Manage Bookmarks"):
  gr.Markdown("""
- ## 🛠️ **Manage Bookmarks**
-
- ### 🗂️ **Features:**
-
- 1. **View Bookmarks:**
- - All your processed bookmarks are displayed here with their respective categories and summaries.
-
- 2. **Select Bookmarks:**
- - Use the checkboxes next to each bookmark to select one, multiple, or all bookmarks you wish to manage.
-
- 3. **Delete Selected Bookmarks:**
- - After selecting the desired bookmarks, click the **"🗑️ Delete Selected"** button to remove them from your list.
-
- 4. **Edit Categories:**
- - Select the bookmarks you want to re-categorize.
- - Choose a new category from the dropdown menu labeled **"🆕 New Category"**.
- - Click the **"✏️ Edit Category"** button to update their categories.
-
- 5. **Export Bookmarks:**
- - Click the **"💾 Export"** button to download your updated bookmarks as an HTML file.
- - This file can be uploaded back to your browser to reflect the changes made within SmartMarks.
-
- 6. **Refresh Bookmarks:**
- - Click the **"🔄 Refresh Bookmarks"** button to ensure the latest state is reflected in the display.
- """)
+ ## 🛠️ **Manage Bookmarks**
+ Select bookmarks to delete or edit their categories.
+ """)

  manage_output = gr.Textbox(label="🔄 Status", interactive=False)
  bookmark_selector = gr.CheckboxGroup(
@@ -840,38 +741,38 @@ Navigate through the tabs to explore each feature in detail.
  delete_button = gr.Button("🗑️ Delete Selected")
  edit_category_button = gr.Button("✏️ Edit Category")
  export_button = gr.Button("💾 Export")
- refresh_button = gr.Button("🔄 Refresh Bookmarks")

  download_link = gr.File(label="📥 Download Exported Bookmarks")

- # Define button actions
- delete_button.click(
- delete_selected_bookmarks,
- inputs=[bookmark_selector, state_bookmarks],
- outputs=[manage_output, bookmark_selector, bookmark_display_manage]
- )
+ # Set up event handlers
+ process_button.click(
+ process_uploaded_file,
+ inputs=upload,
+ outputs=[output_text, bookmark_display, bookmark_selector, bookmark_display_manage]
+ )

- edit_category_button.click(
- edit_selected_bookmarks_category,
- inputs=[bookmark_selector, new_category, state_bookmarks],
- outputs=[manage_output, bookmark_selector, bookmark_display_manage, state_bookmarks]
- )
+ chat_button.click(
+ chatbot_response,
+ inputs=[user_input, chatbot],
+ outputs=chatbot
+ )

- export_button.click(
- export_bookmarks,
- outputs=download_link
- )
+ delete_button.click(
+ delete_selected_bookmarks,
+ inputs=bookmark_selector,
+ outputs=[manage_output, bookmark_selector, bookmark_display_manage]
+ )

- refresh_button.click(
- lambda state_bookmarks: (
- [
- f"{i+1}. {bookmark['title']} (Category: {bookmark['category']})" for i, bookmark in enumerate(state_bookmarks)
- ],
- display_bookmarks()
- ),
- inputs=[state_bookmarks],
- outputs=[bookmark_selector, bookmark_display_manage]
- )
+ edit_category_button.click(
+ edit_selected_bookmarks_category,
+ inputs=[bookmark_selector, new_category],
+ outputs=[manage_output, bookmark_selector, bookmark_display_manage]
+ )
+
+ export_button.click(
+ export_bookmarks,
+ outputs=download_link
+ )

  logger.info("Launching Gradio app")
  demo.launch(debug=True)
 
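Note on the chat-history change in this diff: removing type='messages' from gr.Chatbot switches the component to Gradio's default Chatbot format, where each history entry is one (user, assistant) pair instead of an openai-style role dict, which is why the chat_history.append(...) calls change to tuples. A minimal sketch of the two shapes, with illustrative example strings only:

# Old format (gr.Chatbot(type='messages')): one role dict per message
chat_history = [
    {"role": "user", "content": "Do I have any bookmarks about AI?"},
    {"role": "assistant", "content": "Yes, see the matching links."},
]

# New format (default gr.Chatbot): one (user_message, assistant_message) tuple per turn
chat_history = [
    ("Do I have any bookmarks about AI?", "Yes, see the matching links."),
]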