hienbm committed on
Commit
a5392e6
·
verified ·
1 Parent(s): 36028bb

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -6
app.py CHANGED
@@ -169,15 +169,33 @@ if user_query is not None and find_youtube_links(user_query) != "":
169
  st.session_state.chat_history.append(AIMessage(content=response))
170
 
171
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
172
 
173
  # Function to get a response from the model
174
- def get_response_1(user_query, chat_history):
175
- chain = template | chat_model | StrOutputParser()
176
  response = chain.invoke({
177
- "context": chat_history,
178
- "question": user_query,
179
  })
180
-
181
  return response
182
 
183
  if user_query is not None and user_query != "" and find_youtube_links(user_query) == "":
@@ -186,7 +204,7 @@ if user_query is not None and user_query != "" and find_youtube_links(user_query
186
  with st.chat_message("Human"):
187
  st.markdown(user_query)
188
 
189
- response = get_response_1(user_query, st.session_state.chat_history)
190
 
191
  # Remove any unwanted prefixes from the response
192
  response = response.replace("AI response:", "").replace("chat response:", "").replace("bot response:", "").strip()
 
169
  st.session_state.chat_history.append(AIMessage(content=response))
170
 
171
 
172
+ template_2 = """
173
+ You are a genius trader with extensive knowledge of the financial and stock markets, capable of providing deep and insightful analysis of financial stocks with remarkable accuracy.
174
+
175
+ **ALWAYS**
176
+ Be as detailed as possible, but don't make up any information that’s not from the context.
177
+ If you don't know an answer, say you don't know.
178
+ Let's think step by step.
179
+
180
+ Please ensure responses are informative, accurate, and tailored to the user's queries and preferences.
181
+ Use natural language to engage users and provide readable content throughout your response.
182
+
183
+ Chat history:
184
+ {chat_history}
185
+
186
+ User question:
187
+ {user_question}
188
+ """
189
+
190
+ prompt_2 = ChatPromptTemplate.from_template(template_2)
191
 
192
  # Function to get a response from the model
193
+ def get_response_2(user_query, chat_history):
194
+ chain = prompt_2 | chat_model | StrOutputParser()
195
  response = chain.invoke({
196
+ "chat_history": chat_history,
197
+ "user_question": user_query,
198
  })
 
199
  return response
200
 
201
  if user_query is not None and user_query != "" and find_youtube_links(user_query) == "":
 
204
  with st.chat_message("Human"):
205
  st.markdown(user_query)
206
 
207
+ response = get_response_2(user_query, st.session_state.chat_history)
208
 
209
  # Remove any unwanted prefixes from the response
210
  response = response.replace("AI response:", "").replace("chat response:", "").replace("bot response:", "").strip()