Spaces:
No application file
No application file
File size: 3,613 Bytes
3f4930f |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 |
# --- Dependencies and configuration ---------------------------------------
# NOTE(review): `requests` is imported but never used in this file —
# confirm no other consumer before removing.
import os
import streamlit as st
import requests
from dotenv import load_dotenv

# Load a local .env file if present (harmless no-op when deployed where
# secrets come from st.secrets instead).
load_dotenv()

# Copy API keys from Streamlit secrets into the process environment so the
# LangChain/Tavily integrations (which read env vars) can find them.
# NOTE(review): st.secrets["..."] raises if the key is missing — confirm
# both secrets are configured before deploying.
os.environ["GROQ_API_KEY"] = st.secrets["GROQ_API_KEY"]
os.environ["TAVILY_API_KEY"] = st.secrets["TAVILY_API_KEY"]

# LangChain / LangGraph imports come after the env setup — presumably so
# the API keys are already exported when these modules load; verify before
# reordering.
from langchain_groq import ChatGroq
from langchain_community.tools.tavily_search import TavilySearchResults
from langgraph.prebuilt import create_react_agent
from langchain_core.messages import HumanMessage, SystemMessage, AIMessage
def get_response_from_ai_agent(llm_id, query, allow_search, system_prompt):
    """
    Create a ReAct agent backed by a Groq-hosted LLM and return its reply.

    Parameters
    ----------
    llm_id : str
        Groq model identifier (e.g. "llama-3.3-70b-versatile").
    query : str
        The user's question, sent as a HumanMessage.
    allow_search : bool
        When True, the agent is given the Tavily web-search tool.
    system_prompt : str
        System instructions placed before the user message.

    Returns
    -------
    str
        Content of the last AIMessage produced by the agent, the agent's
        structured response if no AIMessage exists, or a fallback apology
        string.

    Raises
    ------
    Exception
        Wraps any underlying failure; the original exception is chained
        via ``from`` so the full traceback is preserved.
    """
    try:
        # Initialize the Groq chat model for this request.
        llm = ChatGroq(
            api_key=os.environ.get("GROQ_API_KEY"),
            model_name=llm_id,
        )

        # Optional web-search tool. TavilySearchResults also reads the
        # TAVILY_API_KEY env var exported at module import time.
        tools = []
        if allow_search:
            tools.append(TavilySearchResults(
                api_key=os.environ.get("TAVILY_API_KEY"),
                max_results=2,
            ))

        agent = create_react_agent(
            model=llm,
            tools=tools,
        )

        # create_react_agent's input state schema expects only "messages";
        # passing extra keys (e.g. "next_steps") can fail schema validation
        # in newer langgraph releases, so send the messages alone.
        response = agent.invoke({
            "messages": [
                SystemMessage(content=system_prompt),
                HumanMessage(content=query),
            ]
        })

        # Prefer the last AIMessage; otherwise fall back to a non-None
        # structured response (guarding against returning None to the UI).
        if isinstance(response, dict):
            messages = response.get("messages", [])
            ai_messages = [msg for msg in messages if isinstance(msg, AIMessage)]
            if ai_messages:
                return ai_messages[-1].content
            if response.get("structured_response") is not None:
                return response["structured_response"]
        return "I apologize, but I couldn't generate a proper response. Please try again."
    except Exception as e:
        print(f"Debug - Error in get_response_from_ai_agent: {str(e)}")
        # Chain the original exception so callers see the real root cause.
        raise Exception(f"Agent error: {str(e)}") from e
# ---------------------------------------------------------------------------
# Streamlit front end
# ---------------------------------------------------------------------------
st.set_page_config(page_title="AI Chatbot Agents", layout="centered")
st.title("AI Chatbot Agents")
st.write("Create and Interact with the AI Agents!")

# System-prompt editor that defines the agent's persona.
agent_definition = st.text_area(
    "Define your AI Agent: ",
    height=70,
    placeholder="Type your system prompt here...",
    value="You are a helpful AI assistant.",
)

# Model picker restricted to the Groq-hosted models this app supports.
GROQ_MODELS = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"]
chosen_model = st.selectbox("Select Groq Model:", GROQ_MODELS)

# Opt-in toggle that hands the agent the Tavily web-search tool.
enable_web_search = st.checkbox("Allow Web Search")

# Free-form question box.
question = st.text_area(
    "Enter your query: ",
    height=150,
    placeholder="Ask Anything!",
)

if st.button("Ask Agent!"):
    if question.strip():
        try:
            with st.spinner("Getting response..."):
                answer = get_response_from_ai_agent(
                    llm_id=chosen_model,
                    query=question,
                    allow_search=enable_web_search,
                    system_prompt=agent_definition,
                )
            st.subheader("Agent Response")
            st.markdown(answer)
        except Exception as e:
            st.error(f"An error occurred: {str(e)}")
    else:
        st.error("Please enter a query!")