Michaeldavidstein committed on
Commit
a195ff7
·
verified ·
1 Parent(s): d4514c4

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +157 -29
app.py CHANGED
@@ -52,6 +52,12 @@ from mem0 import MemoryClient
52
  import streamlit as st
53
  from datetime import datetime
54
 
 
 
 
 
 
 
55
  #====================================SETUP=====================================#
56
  # Fetch secrets from Hugging Face Spaces
57
  api_key = os.environ['api_key']
@@ -563,44 +569,89 @@ def filter_input_with_llama_guard(user_input, model="llama-guard-3-8b"):
563
 
564
  #============================= Adding Memory to the agent using mem0 ===============================#
565
 
566
- # NutritionBot class
567
  class NutritionBot:
568
- def __init__(self):
569
  """
570
- Initialize the NutritionBot class, setting up memory, the LLM client, tools, and the agent executor.
 
 
 
 
571
  """
 
 
572
 
573
  # Initialize a memory client to store and retrieve customer interactions
574
- self.memory = MemoryClient(api_key="mock_memory_api_key") # Replace with actual API key
 
575
 
576
- # Initialize the OpenAI client using the provided credentials
577
  self.client = ChatOpenAI(
578
- model_name="gpt-4", # Specify the model to use (e.g., GPT-4)
579
- api_key="mock_openai_api_key" # Replace with actual API key
 
 
 
580
  )
 
581
 
582
- # Define tools available to the chatbot, including agentic_rag
583
- tools = [agentic_rag]
 
584
 
585
- # Define the system prompt to set the behavior of the chatbot
586
- system_prompt = """You are a caring and knowledgeable Medical Support Agent, specializing in nutrition disorder-related guidance. Your goal is to provide accurate, empathetic, and tailored nutritional recommendations while ensuring a seamless customer experience."""
 
587
 
588
- # Build the prompt template for the agent
589
- prompt = ChatPromptTemplate.from_messages([
590
- ("system", system_prompt), # System instructions
591
- ("human", "{input}"), # Placeholder for human input
592
- ("placeholder", "{agent_scratchpad}") # Placeholder for intermediate reasoning steps
593
- ])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
594
 
595
- # Create an agent capable of interacting with tools and executing tasks
596
- agent = create_tool_calling_agent(self.client, tools, prompt)
597
 
598
- # Wrap the agent in an executor to manage tool interactions and execution flow
599
- self.agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
600
 
601
  def handle_customer_query(self, user_id: str, query: str) -> str:
602
  """
603
- Process a customer's query and provide a response, taking into account past interactions.
604
 
605
  Args:
606
  user_id (str): Unique identifier for the customer.
@@ -609,15 +660,92 @@ class NutritionBot:
609
  Returns:
610
  str: Chatbot's response.
611
  """
612
- # Use the agentic_rag tool to process the query
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
613
  try:
614
- # Call the agentic_rag tool directly
615
- result = agentic_rag(query)
616
- response = result.get("response", "I'm sorry, I couldn't generate a response.")
617
- return response
 
 
 
618
  except Exception as e:
619
- return f"An error occurred while processing your query: {str(e)}"
620
-
621
 
622
  #=====================User Interface using streamlit ===========================#
623
  def nutrition_disorder_streamlit():
 
52
  import streamlit as st
53
  from datetime import datetime
54
 
55
+ import traceback
56
+ import time
57
+ import random
58
+ from datetime import datetime
59
+ from typing import Dict, List
60
+
61
  #====================================SETUP=====================================#
62
  # Fetch secrets from Hugging Face Spaces
63
  api_key = os.environ['api_key']
 
569
 
570
  #============================= Adding Memory to the agent using mem0 ===============================#
571
 
 
572
class NutritionBot:
    """
    Customer-support chatbot for nutrition-disorder guidance.

    Combines a mem0 ``MemoryClient`` (long-term memory of past customer
    interactions) with a ``ChatOpenAI`` client (gpt-4o-mini behind a custom
    API base) so that each answer can take relevant history into account.
    """

    def __init__(self, api_key: str, api_base: str):
        """
        Initialize the NutritionBot, setting up memory and the LLM client.

        Args:
            api_key (str): OpenAI API key for authenticating requests.
            api_base (str): Custom OpenAI-compatible API base endpoint.
        """
        # Security fix: never print the raw credential; only confirm presence.
        print(f"Initializing NutritionBot (OpenAI API key provided: {bool(api_key)})")
        print(f"Using custom OpenAI API base: {api_base}")

        # Memory client to store and retrieve customer interactions.
        # Bug fix: the original called Colab-only `userdata.get(...)`, which is
        # undefined in this app (NameError at runtime); read the key from the
        # environment instead, matching how `api_key` is fetched at file top.
        self.memory = MemoryClient(api_key=os.environ.get("mem0_api_key"))
        print("Memory client initialized.")

        # LLM client using the provided credentials, custom API base, and model.
        self.client = ChatOpenAI(
            model_name="gpt-4o-mini",
            api_key=api_key,
            openai_api_base=api_base,
            temperature=0.7,  # controls randomness in responses
            verbose=True,     # enable verbose logging for debugging
        )
        print("OpenAI client initialized with custom API base and model gpt-4o-mini.")

    def get_relevant_history(self, user_id: str, query: str) -> List[Dict]:
        """
        Retrieve past interactions relevant to the current query.

        Args:
            user_id (str): Unique identifier for the customer.
            query (str): The customer's current query.

        Returns:
            List[Dict]: Up to 3 relevant past interactions; empty list on error.
        """
        print("Entering get_relevant_history function...")
        try:
            history = self.memory.search(
                query=query,
                user_id=user_id,
                limit=3,  # cap context size; more history bloats the prompt
            )
            print("Relevant history retrieved:", history)
            return history
        except Exception as e:
            # Best-effort: memory failures degrade to "no history" rather than
            # blocking the response.
            print(f"Error retrieving history: {e}")
            traceback.print_exc()
            return []

    def query_model(self, prompt: str) -> str:
        """
        Query the OpenAI model directly with a single prompt.

        Args:
            prompt (str): The input prompt for the model.

        Returns:
            str: The assistant's response content, or a fallback apology on error.
        """
        print("Querying the OpenAI model...")
        try:
            # ChatOpenAI accepts role/content message dicts via `invoke()`.
            messages = [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ]
            response = self.client.invoke(messages)
            print("Raw response from OpenAI API:", response)

            # The response is an AIMessage-like object; its text lives in `.content`.
            content = response.content
            print("Extracted response content:", content)
            return content
        except Exception as e:
            print(f"Error querying the model: {e}")
            traceback.print_exc()
            return "I'm sorry, I couldn't process your request. Please try again later."

    def handle_customer_query(self, user_id: str, query: str) -> str:
        """
        Process a customer's query, incorporating past interactions for context.

        Args:
            user_id (str): Unique identifier for the customer.
            query (str): The customer's current query.

        Returns:
            str: Chatbot's response (fallback apology after exhausted retries).
        """
        print("Entering handle_customer_query function...")

        # Retrieve relevant history for this user.
        relevant_history = self.get_relevant_history(user_id, query)

        # Build context from past interactions.
        context = "Previous relevant interactions:\n"
        for memory in relevant_history:
            # Robustness fix: mem0 search results may not carry both keys;
            # `.get` avoids a KeyError aborting the whole response.
            context += f"Customer: {memory.get('query', '')}\n"
            context += f"Support: {memory.get('response', '')}\n---\n"

        # Create the prompt sent to the model.
        prompt = f"""
        Context:
        {context}

        Current customer query: {query}

        Provide a helpful response that takes into account any relevant past interactions.
        """
        print("Final prompt being sent to the model:")
        print(prompt)

        # Retry with exponential backoff + jitter to ride out transient API errors.
        max_retries = 3
        for attempt in range(max_retries):
            try:
                print(f"Querying model (attempt {attempt + 1})...")
                response_content = self.query_model(prompt)
                if not response_content:
                    raise ValueError("Model returned an empty response.")

                # Persist the exchange so future queries can use it as context.
                self.store_customer_interaction(
                    user_id=user_id,
                    message=query,
                    response=response_content,
                    metadata={"type": "support_query"},
                )
                return response_content

            except Exception as e:
                print(f"Error querying the model (attempt {attempt + 1}): {e}")
                traceback.print_exc()
                if attempt < max_retries - 1:
                    # Exponential backoff with jitter to avoid thundering herd.
                    wait_time = (2 ** attempt) + random.uniform(0, 1)
                    print(f"Retrying in {wait_time:.2f} seconds...")
                    time.sleep(wait_time)
                else:
                    return "I'm sorry, I couldn't process your request. Please try again later."

    def store_customer_interaction(self, user_id: str, message: str, response: str, metadata: Dict = None):
        """
        Store a customer interaction in memory for future reference.

        Args:
            user_id (str): Unique identifier for the customer.
            message (str): Customer's query or message.
            response (str): Chatbot's response.
            metadata (Dict, optional): Additional metadata for the interaction.
        """
        print("Entering store_customer_interaction function...")

        if metadata is None:
            metadata = {}

        # Timestamp lets later retrieval reason about recency.
        metadata["timestamp"] = datetime.now().isoformat()

        # Format the exchange as a two-turn conversation for mem0 storage.
        conversation = [
            {"role": "user", "content": message},
            {"role": "assistant", "content": response},
        ]

        try:
            self.memory.add(
                conversation,
                user_id=user_id,
                output_format="v1.1",
                metadata=metadata,
            )
            print("Interaction stored successfully.")
        except Exception as e:
            # Best-effort: failing to persist memory must not fail the reply.
            print(f"Error storing interaction: {e}")
            traceback.print_exc()
749
 
750
  #=====================User Interface using streamlit ===========================#
751
  def nutrition_disorder_streamlit():