jchen8000 committed · verified
Commit cc53565 · 1 Parent(s): 617ca15

Update app.py

Files changed (1)
  1. app.py +7 -8
app.py CHANGED
@@ -94,14 +94,13 @@ def generate_response(query, history, model, temperature, max_tokens, top_p, see
     custom_rag_prompt = PromptTemplate.from_template(template)
 
 
-    # Step 1: Prepare inputs manually
-    docs = retriever.invoke(query)
-    context = format_docs(docs)
-    inputs = {"context": context, "question": query}
-
-    # Step 2: Get the final prompt string
-    prompt_value = custom_rag_prompt.invoke(inputs)
-    final_prompt = prompt_value.to_string()
+    # Retrieve the final prompt that is sent to the LLM
+    docs = retriever.invoke(query)
+    context = format_docs(docs)
+    inputs = {"context": context, "question": query}
+
+    prompt_value = custom_rag_prompt.invoke(inputs)
+    final_prompt = prompt_value.to_string()
     print("Final Prompt Sent to LLM:\n", final_prompt)
 
     rag_chain = (
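
The edited block steps outside the chain for one pass so the exact prompt text can be inspected: the retriever is invoked directly, its documents are joined into a context string by format_docs, and the PromptTemplate is rendered with invoke(...).to_string() before anything reaches the model. Below is a minimal, self-contained sketch of that pattern using langchain_core; the toy retriever, the template wording, and the example query are illustrative assumptions, not code from app.py.

from langchain_core.documents import Document
from langchain_core.prompts import PromptTemplate

# Template with the same placeholders app.py relies on (the wording here is assumed).
template = """Answer the question using only the context below.

Context:
{context}

Question: {question}
"""
custom_rag_prompt = PromptTemplate.from_template(template)

def format_docs(docs):
    # Join retrieved documents into a single context block.
    return "\n\n".join(doc.page_content for doc in docs)

class ToyRetriever:
    # Stand-in for the real vector-store retriever in app.py (assumption).
    def invoke(self, query):
        return [Document(page_content="LangChain builds prompts from templates and retrieved context.")]

retriever = ToyRetriever()
query = "What does the RAG prompt look like?"

# Same steps as the diff: retrieve, format, fill the template, render to text.
docs = retriever.invoke(query)
context = format_docs(docs)
inputs = {"context": context, "question": query}

prompt_value = custom_rag_prompt.invoke(inputs)  # returns a StringPromptValue
final_prompt = prompt_value.to_string()          # literal text the LLM will receive
print("Final Prompt Sent to LLM:\n", final_prompt)

Rendering the prompt this way is purely for logging; rag_chain can still be composed and invoked afterwards with the same retriever and template.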