Build error

Update app.py

app.py CHANGED
```diff
@@ -357,15 +357,16 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
         response = fetch_google_flights()
         return response, extract_addresses(response)
 
-
-
-
-
+    # Use a simple, direct prompt for Phi-3.5
+    if selected_model == phi_pipe:
+        prompt = f"Here is the information : , {message}"
+    else:
+        # Use the existing prompt templates for GPT-4o
+        prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
         context = retriever.get_relevant_documents(message)
-
-    # Format the prompt
         prompt = prompt_template.format(context=context, question=message)
 
+    if retrieval_mode == "VDB":
         if selected_model == chat_model:
             # Use GPT-4o with Langchain
             qa_chain = RetrievalQA.from_chain_type(
```
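The first hunk replaces the deleted block with per-model prompt routing: Phi-3.5 gets a short, direct prompt built straight from the user message, while GPT-4o keeps the templated RAG prompt filled from the retriever. A minimal sketch of that routing logic follows; the two template strings are placeholders, since `QA_CHAIN_PROMPT_1` and `QA_CHAIN_PROMPT_2` are defined elsewhere in app.py and do not appear in this diff.

```python
# Placeholder templates; the real QA_CHAIN_PROMPT_1/2 live elsewhere in
# app.py. Plain str.format stands in for whatever template type they use.
QA_CHAIN_PROMPT_1 = "Answer in detail.\n\nContext: {context}\n\nQuestion: {question}"
QA_CHAIN_PROMPT_2 = "Answer briefly.\n\nContext: {context}\n\nQuestion: {question}"

def build_prompt(message, choice, selected_model, phi_pipe, retriever):
    if selected_model == phi_pipe:
        # Phi-3.5: short, direct prompt with no retrieval step
        return f"Here is the information : , {message}"
    # GPT-4o: pick a template by the UI choice, fill it with retrieved context
    prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
    context = retriever.get_relevant_documents(message)
    return prompt_template.format(context=context, question=message)
```

Note that `retriever.get_relevant_documents` returns a list of `Document` objects, so the formatted context is the list's string representation unless it is joined explicitly.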
```diff
@@ -380,10 +381,10 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
         elif selected_model == phi_pipe:
             # Use Phi-3.5 directly with the formatted prompt
             response = selected_model(prompt, **{
-                "max_new_tokens": 300,
+                "max_new_tokens": 300,
                 "return_full_text": False,
-                "temperature": 0.5,
-                "do_sample":
+                "temperature": 0.5,
+                "do_sample": False,
             })[0]['generated_text']
             return response, extract_addresses(response)
 
```
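This second hunk carries the actual fix: in the previous revision the dict entry `"do_sample":` had no value, which is a Python syntax error and would account for the Space's Build error status. The commit completes it as `"do_sample": False`, i.e. greedy decoding. Below is a minimal sketch of the fixed call pattern, assuming `phi_pipe` is a `transformers` text-generation pipeline; the model id is a placeholder, not something this diff shows.

```python
from transformers import pipeline

# Assumption: phi_pipe is a text-generation pipeline; model id is a placeholder.
phi_pipe = pipeline("text-generation", model="microsoft/Phi-3.5-mini-instruct")

message = "What are the best restaurants nearby?"  # stand-in user query
prompt = f"Here is the information : , {message}"  # prompt string from the diff

response = phi_pipe(prompt, **{
    "max_new_tokens": 300,      # cap on generated tokens
    "return_full_text": False,  # return only the completion, not the echoed prompt
    "temperature": 0.5,         # no effect while do_sample=False (greedy decoding)
    "do_sample": False,         # the old dangling "do_sample": was invalid syntax
})[0]["generated_text"]
print(response)
```

With `do_sample` set to `False` the `temperature` value has no effect, and recent `transformers` releases emit a warning about that combination.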
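For completeness, the GPT-4o path that both hunks leave in place runs a LangChain `RetrievalQA` chain over the templated prompt. The sketch below is an assumption-laden reconstruction: the template body, the embeddings, the seed text, and the FAISS index are all placeholders, since none of their definitions appear in this diff.

```python
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain_community.vectorstores import FAISS
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# Placeholder template; the real QA_CHAIN_PROMPT_1 lives elsewhere in app.py.
QA_CHAIN_PROMPT_1 = PromptTemplate(
    input_variables=["context", "question"],
    template="Answer in detail using the context.\n\n{context}\n\nQuestion: {question}",
)

# Placeholder index so the sketch is self-contained; the app builds its own.
vectorstore = FAISS.from_texts(["Placeholder document text."], OpenAIEmbeddings())
retriever = vectorstore.as_retriever()

chat_model = ChatOpenAI(model="gpt-4o", temperature=0)

qa_chain = RetrievalQA.from_chain_type(
    chat_model,                 # defaults to the "stuff" chain type
    retriever=retriever,
    chain_type_kwargs={"prompt": QA_CHAIN_PROMPT_1},
)
answer = qa_chain.invoke({"query": "What are the best restaurants nearby?"})["result"]
```

The default "stuff" chain type concatenates the retrieved documents into the prompt's `{context}` slot before calling the model.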