from dotenv import load_dotenv
load_dotenv()

import os
from langchain_groq import ChatGroq
from langchain_community.tools.tavily_search import TavilySearchResults
from langgraph.prebuilt import create_react_agent
from langchain_core.messages import HumanMessage, SystemMessage, AIMessage

def get_response_from_ai_agent(llm_id, query, allow_search, system_prompt):
    """
    Create and invoke an AI agent with optional search capabilities
    """
    try:
        # Initialize LLM with proper configuration
        llm = ChatGroq(
            api_key=os.environ.get("GROQ_API_KEY"),
            model_name=llm_id
        )
        
        # Set up tools based on the allow_search flag. TavilySearchResults
        # reads TAVILY_API_KEY from the environment (loaded via load_dotenv above).
        tools = []
        if allow_search:
            tools.append(TavilySearchResults(max_results=2))
        
        # Create the agent
        agent = create_react_agent(
            model=llm,
            tools=tools
        )
        
        # Prepare the initial messages
        initial_messages = [
            SystemMessage(content=system_prompt),
            HumanMessage(content=query)
        ]
        
        # create_react_agent expects a state dict keyed by "messages"
        state = {"messages": initial_messages}
        
        # Invoke agent with proper state
        response = agent.invoke(state)
        
        # Extract the final AI message from the returned agent state
        messages = response.get("messages", []) if isinstance(response, dict) else []
        ai_messages = [msg for msg in messages if isinstance(msg, AIMessage)]
        if ai_messages:
            return ai_messages[-1].content

        return "I apologize, but I couldn't generate a proper response. Please try again."
        
    except Exception as e:
        print(f"Debug - Error in get_response_from_ai_agent: {e}")
        raise RuntimeError(f"Agent error: {e}") from e
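

# Example usage (illustrative sketch): assumes GROQ_API_KEY (and TAVILY_API_KEY
# when search is enabled) are set in the environment or a local .env file, and
# that the Groq model id below is available on your account; swap in whichever
# model you actually use.
if __name__ == "__main__":
    answer = get_response_from_ai_agent(
        llm_id="llama-3.3-70b-versatile",  # assumed model id, replace as needed
        query="Give me a one-paragraph summary of what LangGraph is.",
        allow_search=True,
        system_prompt="You are a concise, helpful research assistant.",
    )
    print(answer)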