Phoenix07 committed
Commit 944842c · 1 Parent(s): b9d3e18

agents.py modified

Files changed (1)
  1. agents.py +41 -5
agents.py CHANGED
@@ -5,7 +5,9 @@ from langgraph.graph import START, StateGraph, MessagesState
 from langgraph.prebuilt import tools_condition
 from langgraph.prebuilt import ToolNode
 from langchain_google_genai import ChatGoogleGenerativeAI
+# from langchain_openai import ChatOpenAI
 from langchain.agents import initialize_agent, Tool
+from langchain_groq import ChatGroq
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
 from langchain_community.tools.tavily_search import TavilySearchResults
 from langchain_community.document_loaders import WikipediaLoader
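The new ChatGroq import (and the commented-out ChatOpenAI one) implies the matching API key must be available in the environment for whichever provider is selected. The commit does not show that setup; the lines below are only a sketch using the environment variable names these integrations conventionally read.

# Sketch only -- not part of this commit: a quick check that the keys the new
# providers expect are present before building the graph.
import os

assert os.getenv("GROQ_API_KEY"), "ChatGroq reads GROQ_API_KEY from the environment"
assert os.getenv("GOOGLE_API_KEY"), "ChatGoogleGenerativeAI reads GOOGLE_API_KEY from the environment"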
@@ -108,7 +110,26 @@ def arvix_search(query: str) -> str:
 
 # load the system prompt from the file
 with open("system_prompt.txt", "r", encoding="utf-8") as f:
-    system_prompt = f.read()
+    # system_prompt = f.read()
+    system_prompt='''
+    You are a helpful assistant tasked with answering questions using a set of tools.
+
+    Your final answer must strictly follow this format:
+    FINAL ANSWER: [ANSWER]
+
+    Only write the answer in that exact format. Do not explain anything. Do not include any other text.
+
+    If you are provided with a similar question and its final answer, and the current question is **exactly the same**, then simply return the same final answer without using any tools.
+
+    Only use tools if the current question is different from the similar one.
+
+    Examples:
+    - FINAL ANSWER: FunkMonk
+    - FINAL ANSWER: Paris
+    - FINAL ANSWER: 128
+
+    If you do not follow this format exactly, your response will be considered incorrect.
+    '''
 
 # System message
 sys_msg = SystemMessage(content=system_prompt)
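The hard-coded prompt forces every reply into the FINAL ANSWER: [ANSWER] shape, so downstream code can recover the answer with a plain string split. The commit itself contains no such parser; the helper below is a hypothetical sketch of how a caller might do it.

# Illustrative sketch only -- extract_final_answer is not part of this commit.
def extract_final_answer(reply: str) -> str:
    marker = "FINAL ANSWER:"
    if marker not in reply:
        raise ValueError("Reply does not follow the FINAL ANSWER format.")
    # Take everything after the last occurrence of the marker and strip whitespace.
    return reply.rsplit(marker, 1)[1].strip()

# extract_final_answer("FINAL ANSWER: Paris") returns "Paris"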
@@ -146,10 +167,25 @@ tools = [
 # Build graph function
 def build_graph(provider: str = "groq"):
     """Build the graph"""
-
-    # Google Gemini
-    llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
-
+    # Load environment variables from .env file
+    if provider == "google":
+        # Google Gemini
+        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
+    elif provider == "groq":
+        # Groq https://console.groq.com/docs/models
+        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
+    # elif provider == "openai":
+    #     # OpenAI
+    #     llm = ChatOpenAI(model="gpt-4", temperature=0)
+    elif provider == "huggingface":
+        llm = ChatHuggingFace(
+            llm=HuggingFaceEndpoint(
+                url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
+                temperature=0,
+            ),
+        )
+    else:
+        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
     # Bind tools to LLM
     llm_with_tools = llm.bind_tools(tools)
 