ans123 committed on
Commit
a48e10f
·
verified ·
1 Parent(s): 46e419c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -12
app.py CHANGED
@@ -636,11 +636,22 @@ with gr.Blocks(theme=gr.themes.Soft(), title="EmotionToAction") as demo:
636
  outputs=[status_output, plan_display, current_plan_state]
637
  )
638
 
639
- # Find & Add Resources Button Click (Async)
640
  async def handle_find_resources(username, current_analysis, faiss_index_s, faiss_metadata_s, progress=gr.Progress(track_tqdm=True)):
641
- if not username: return "Please enter username.", faiss_index_s, faiss_metadata_s, "Idle"
642
- if not current_analysis: return "Please analyze an entry first.", faiss_index_s, faiss_metadata_s, "Idle"
643
- if not CRAWL4AI_AVAILABLE: return "crawl4ai library not installed.", faiss_index_s, faiss_metadata_s, "Error"
 
 
 
 
 
 
 
 
 
 
 
644
 
645
  status_msg = "Starting resource finding..."
646
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg # Initial update
@@ -653,30 +664,44 @@ with gr.Blocks(theme=gr.themes.Soft(), title="EmotionToAction") as demo:
653
  status_msg = "Searching web..."
654
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
655
  urls = get_web_urls(search_term, num_results=3) # Limit URLs
656
- if not urls: yield "No relevant URLs found.", faiss_index_s, faiss_metadata_s, "No URLs found."; return
 
 
 
 
657
 
658
  progress(0.3, desc=f"Crawling {len(urls)} pages...")
659
  status_msg = f"Crawling {len(urls)} pages..."
660
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
661
- crawled_content = await crawl_webpages_simple(urls) # Async call
662
- if not crawled_content: yield "Crawling failed or yielded no content.", faiss_index_s, faiss_metadata_s, "Crawling failed."; return
 
 
 
 
 
663
 
664
  progress(0.7, desc="Adding content to FAISS index...")
665
  status_msg = "Adding content to index..."
666
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
667
- # Note: add_to_faiss modifies the index/metadata objects in place
668
- index_obj = faiss_index_s # Get current index from state
669
- meta_obj = faiss_metadata_s # Get current metadata from state
 
 
670
  _, _, added_count = add_to_faiss(index_obj, meta_obj, crawled_content)
671
 
672
  if added_count > 0:
673
- # IMPORTANT: Save the modified index and metadata back to disk
674
  save_faiss_index(index_obj, meta_obj)
675
  status_msg = f"Successfully added {added_count} content chunks to the index."
676
- yield status_msg, index_obj, meta_obj, status_msg # Return updated state objects
 
677
  else:
678
  status_msg = "Crawled content, but failed to add anything to the index."
 
679
  yield status_msg, index_obj, meta_obj, status_msg
 
680
 
681
  # Use the wrapper for async function
682
  find_resources_button.click(
 
636
  outputs=[status_output, plan_display, current_plan_state]
637
  )
638
 
639
+ # Find & Add Resources Button Click (Async) - CORRECTED
640
  async def handle_find_resources(username, current_analysis, faiss_index_s, faiss_metadata_s, progress=gr.Progress(track_tqdm=True)):
641
+ # --- Check 1 ---
642
+ if not username:
643
+ yield "Please enter username.", faiss_index_s, faiss_metadata_s, "Idle"
644
+ return # Stop execution
645
+
646
+ # --- Check 2 ---
647
+ if not current_analysis:
648
+ yield "Please analyze an entry first.", faiss_index_s, faiss_metadata_s, "Idle"
649
+ return # Stop execution
650
+
651
+ # --- Check 3 ---
652
+ if not CRAWL4AI_AVAILABLE:
653
+ yield "crawl4ai library not installed.", faiss_index_s, faiss_metadata_s, "Error"
654
+ return # Stop execution
655
 
656
  status_msg = "Starting resource finding..."
657
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg # Initial update
 
664
  status_msg = "Searching web..."
665
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
666
  urls = get_web_urls(search_term, num_results=3) # Limit URLs
667
+
668
+ # --- Check 4 ---
669
+ if not urls:
670
+ yield "No relevant URLs found.", faiss_index_s, faiss_metadata_s, "No URLs found."
671
+ return # Stop execution
672
 
673
  progress(0.3, desc=f"Crawling {len(urls)} pages...")
674
  status_msg = f"Crawling {len(urls)} pages..."
675
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
676
+ # Use await for the async crawl function
677
+ crawled_content = await crawl_webpages_simple(urls)
678
+
679
+ # --- Check 5 ---
680
+ if not crawled_content:
681
+ yield "Crawling failed or yielded no content.", faiss_index_s, faiss_metadata_s, "Crawling failed."
682
+ return # Stop execution
683
 
684
  progress(0.7, desc="Adding content to FAISS index...")
685
  status_msg = "Adding content to index..."
686
  yield status_msg, faiss_index_s, faiss_metadata_s, status_msg
687
+
688
+ # Get current index/metadata objects from state to modify them
689
+ index_obj = faiss_index_s
690
+ meta_obj = faiss_metadata_s
691
+ # add_to_faiss modifies the objects in place
692
  _, _, added_count = add_to_faiss(index_obj, meta_obj, crawled_content)
693
 
694
  if added_count > 0:
695
+ # Save the modified index and metadata back to disk
696
  save_faiss_index(index_obj, meta_obj)
697
  status_msg = f"Successfully added {added_count} content chunks to the index."
698
+ # Yield the final update including potentially modified state objects
699
+ yield status_msg, index_obj, meta_obj, status_msg
700
  else:
701
  status_msg = "Crawled content, but failed to add anything to the index."
702
+ # Yield the final update (state objects might not have changed but yield them anyway)
703
  yield status_msg, index_obj, meta_obj, status_msg
704
+ # No explicit return needed here, generator finishes naturally
705
 
706
  # Use the wrapper for async function
707
  find_resources_button.click(