Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -65,36 +65,43 @@ class MathAnswer(dspy.Signature):
|
|
65 |
answer = dspy.OutputField()
|
66 |
|
67 |
# === DSPy Programs ===
|
|
|
|
|
|
|
|
|
68 |
|
69 |
-
# return dspy.Output(answer=answer, retrieved_context=context)
|
70 |
class MathRetrievalQA(dspy.Program):
|
71 |
def forward(self, question):
|
72 |
print("Inside MathRetrievalQA...")
|
73 |
context_items = retrieve_from_qdrant(question)
|
74 |
context = "\n".join([item["solution"] for item in context_items if "solution" in item])
|
75 |
print("Context for generation:", context)
|
|
|
76 |
if not context:
|
77 |
return {"answer": "", "retrieved_context": ""}
|
78 |
|
79 |
-
#
|
80 |
-
prompt = f"
|
81 |
-
raw_answer = qa_pipeline(prompt, max_new_tokens=100)[0]["generated_text"]
|
82 |
-
|
83 |
-
# Step 2: Send raw answer to Gemini for formatting
|
84 |
-
format_prompt = f"""You are a helpful math assistant. Please format the following answer into a clear, step-by-step solution for better readability.
|
85 |
|
86 |
Question: {question}
|
87 |
|
88 |
-
|
89 |
-
{
|
|
|
|
|
90 |
|
91 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
92 |
|
93 |
-
response = gemini_model.generate_content(format_prompt)
|
94 |
-
formatted_answer = response.text
|
95 |
|
96 |
-
|
97 |
-
return {"answer": formatted_answer, "retrieved_context": context}
|
98 |
|
99 |
class WebFallbackQA(dspy.Program):
|
100 |
def forward(self, question):
|
|
|
65 |
answer = dspy.OutputField()
|
66 |
|
67 |
# === DSPy Programs ===
|
68 |
+
import os

import google.generativeai as genai

# Configure the Gemini client once at import time.
# SECURITY: the previous revision hard-coded an API key on this line. That key
# is exposed in the repository history and must be revoked. Read the key from
# the environment instead (set GOOGLE_API_KEY in the Space secrets); an empty
# default keeps import from failing, but generation will error until it is set.
genai.configure(api_key=os.environ.get("GOOGLE_API_KEY", ""))
|
72 |
|
|
|
73 |
class MathRetrievalQA(dspy.Program):
    """Retrieval-augmented math QA program.

    Retrieves candidate solutions from Qdrant, builds a prompt from them,
    and asks Gemini to produce a formatted step-by-step answer.
    """

    def forward(self, question):
        """Answer *question* using retrieved context.

        Returns a dict with keys:
            answer            -- Gemini's formatted answer ("" if no context,
                                 a warning string if Gemini fails)
            retrieved_context -- the joined retrieved solutions ("" if none)
        """
        print("Inside MathRetrievalQA...")
        context_items = retrieve_from_qdrant(question)
        # Keep only items that actually carry a "solution" field.
        context = "\n".join(
            item["solution"] for item in context_items if "solution" in item
        )
        print("Context for generation:", context)

        # No retrieved context: return empty fields so the caller can
        # fall back (e.g. to WebFallbackQA) instead of hallucinating.
        if not context:
            return {"answer": "", "retrieved_context": ""}

        # Prompt Gemini to generate and format a math answer
        prompt = f"""You are a math expert. Given the question and context below, generate a step-by-step solution in a clear, neat, and well-formatted way.

Question: {question}

Context:
{context}

Answer:"""

        try:
            model = genai.GenerativeModel('gemini-2.0-flash')  # or use 'gemini-1.5-flash'
            response = model.generate_content(prompt)
            formatted_answer = response.text
            print("Gemini Answer:", formatted_answer)
            return {"answer": formatted_answer, "retrieved_context": context}
        except Exception as e:
            # Boundary catch: a Gemini/network failure must not crash the app;
            # surface a warning answer while still returning the context.
            print("Gemini generation error:", e)
            return {"answer": "⚠️ Gemini failed to generate an answer.", "retrieved_context": context}
|
|
|
105 |
|
106 |
class WebFallbackQA(dspy.Program):
|
107 |
def forward(self, question):
|