Mattral committed on
Commit
8b3d7a3
·
verified ·
1 Parent(s): f0eee84

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -2
app.py CHANGED
@@ -64,9 +64,15 @@ def answer_question(question, documents):
64
  context = "\n\n".join([doc.page_content for doc in documents])
65
  full_context = f"{context}"
66
  prompt = ChatPromptTemplate.from_template(template)
67
- chain = prompt | client # Send the prompt to Hugging Face's model via InferenceClient
 
 
 
 
 
 
 
68
 
69
- return chain.invoke({"question": question, "context": full_context})
70
 
71
  # Streamlit file uploader for PDF
72
  uploaded_file = st.file_uploader(
 
64
  context = "\n\n".join([doc.page_content for doc in documents])
65
  full_context = f"{context}"
66
  prompt = ChatPromptTemplate.from_template(template)
67
+
68
+ # Use the prompt and send it directly to the Hugging Face model
69
+ question_with_context = prompt.format(question=question, context=full_context)
70
+
71
+ # Use the client (InferenceClient) to get a response
72
+ response = client.query(question_with_context)
73
+
74
+ return response["generated_text"] # Assuming the response is in "generated_text"
75
 
 
76
 
77
  # Streamlit file uploader for PDF
78
  uploaded_file = st.file_uploader(