rajkstats committed
Commit 266ff42 · 1 Parent(s): d0b43f2

added comments

Files changed (1): app.py +9 -1
app.py CHANGED
@@ -151,7 +151,11 @@ async def generate_answer(query):
     """
     Generate an answer to the user's query using a conversational retrieval chain and handle callbacks for related questions and papers.
     """
+
+    # Initialize a message history to track the conversation
     message_history = ChatMessageHistory()
+
+    # Set up memory to hold the conversation context and return answers
     memory = ConversationBufferMemory(
         memory_key="chat_history",
         output_key="answer",
@@ -159,6 +163,7 @@ async def generate_answer(query):
         return_messages=True,
     )
 
+    # Create a retrieval chain combining the LLM and the retriever
     chain = ConversationalRetrievalChain.from_llm(
         ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, streaming=True),
         chain_type="stuff",
@@ -168,9 +173,11 @@ async def generate_answer(query):
     )
 
     try:
+        # Define callback handler for asynchronous operations
         cb = cl.AsyncLangchainCallbackHandler()
-        #evaluator = PharmAssistEvaluator()
         feedback_callback = EvaluatorCallbackHandler(evaluators=[PharmAssistEvaluator(),HarmfulnessEvaluator(),AIDetectionEvaluator()])
+
+        # Process the incoming message using the conversational chain
         res = await chain.acall(query, callbacks=[cb,feedback_callback])
         answer = res["answer"]
         source_documents = res["source_documents"]
@@ -217,6 +224,7 @@ async def on_related_question_selected(action: cl.Action):
     await cl.Message(content=question, author="User").send()
 
     answer, text_elements, related_question_actions, related_papers, query = await generate_answer(question)
+    # Send the processed answer back to the user
     await cl.Message(content=answer, elements=text_elements, author="PharmAssistAI").send()
 
     # Send related questions as a separate message
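
For context, a minimal self-contained sketch of the memory and retrieval-chain wiring that the new comments describe. The FAISS retriever over two dummy texts, the chat_memory=message_history argument, and return_source_documents=True are assumptions for illustration (the hunks elide the middle of both calls); the real retriever is built elsewhere in app.py, and an OpenAI API key is needed to actually run this.

# Sketch only: the stand-in retriever and the elided keyword arguments are assumptions,
# not the project's actual configuration.
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.memory import ChatMessageHistory, ConversationBufferMemory
from langchain.vectorstores import FAISS

# Initialize a message history to track the conversation
message_history = ChatMessageHistory()

# Set up memory to hold the conversation context and return answers
memory = ConversationBufferMemory(
    memory_key="chat_history",
    output_key="answer",
    chat_memory=message_history,
    return_messages=True,
)

# Stand-in retriever for illustration only; app.py builds its own retriever elsewhere
retriever = FAISS.from_texts(
    ["Aspirin is an NSAID.", "Metformin treats type 2 diabetes."],
    OpenAIEmbeddings(),
).as_retriever()

# Create a retrieval chain combining the LLM and the retriever
chain = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, streaming=True),
    chain_type="stuff",
    retriever=retriever,
    memory=memory,
    return_source_documents=True,
)

# Inside an async handler: res = await chain.acall("What is aspirin used for?")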
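
The only removal in this commit is the commented-out "#evaluator = PharmAssistEvaluator()" line; evaluators are instead passed straight to EvaluatorCallbackHandler. Below is a hedged sketch of the shape such an evaluator can take. The class name and scoring rule are placeholders rather than the project's actual PharmAssistEvaluator, and LangSmith credentials (LANGCHAIN_API_KEY) are assumed so the handler can record feedback.

# Placeholder evaluator wired the same way app.py wires its evaluators; not the real thing.
from typing import Optional

from langchain.callbacks.tracers.evaluation import EvaluatorCallbackHandler
from langsmith.evaluation import EvaluationResult, RunEvaluator
from langsmith.schemas import Example, Run


class HasAnswerEvaluator(RunEvaluator):
    """Placeholder evaluator: scores 1.0 when the run produced a non-empty answer."""

    def evaluate_run(self, run: Run, example: Optional[Example] = None) -> EvaluationResult:
        answer = (run.outputs or {}).get("answer", "")
        return EvaluationResult(key="has_answer", score=1.0 if answer else 0.0)


# Same wiring pattern as app.py, with the placeholder evaluator swapped in
feedback_callback = EvaluatorCallbackHandler(evaluators=[HasAnswerEvaluator()])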