# Streamlit app: Gemini-powered problem-solving tutor (deployed on Hugging Face Spaces).
import json
import os
import re

import streamlit as st
import google.generativeai as genai
from PIL import Image
# Gemini model used for all tutoring responses.
MODEL_ID = "gemini-2.0-flash-exp"  # Keep the model ID as is

try:
    # The API key must be supplied via the GEMINI_API_KEY environment variable.
    api_key = os.getenv("GEMINI_API_KEY")
    if not api_key:
        # Fail fast with a clear message instead of passing None to configure().
        raise ValueError("GEMINI_API_KEY environment variable is not set.")
    genai.configure(api_key=api_key)
except Exception as e:
    st.error(f"Error: {e}")
    st.stop()

# One model + chat session shared across the whole script run.
model = genai.GenerativeModel(MODEL_ID)
chat = model.start_chat()
def get_local_pdf_path():
    """Return the path to the bundled problems PDF.

    Stops the Streamlit app with an error message if the file is missing,
    since nothing useful can happen without the problem set.
    """
    pdf_path = os.path.join("problems", "problems.pdf")
    if not os.path.exists(pdf_path):
        # Same user-visible message as before, without the raise/catch dance.
        st.error(f"Failed to find the local PDF: {pdf_path} does not exist.")
        st.stop()  # Stop if the file is not found
    return pdf_path
# --- Session state initialization ---
# conversation_history: list of {"role", "content", ...} dicts recording the chat.
if "conversation_history" not in st.session_state:
    st.session_state.conversation_history = []
# uploaded_file_part: the genai file *part* returned by upload_file, cached so
# the PDF is uploaded to Gemini only once per session.
if "uploaded_file_part" not in st.session_state:
    st.session_state.uploaded_file_part = None
# uploaded_pdf_path: local filesystem path of the problems PDF.
if "uploaded_pdf_path" not in st.session_state:
    st.session_state.uploaded_pdf_path = get_local_pdf_path()
def multimodal_prompt(pdf_path, text_prompt):
    """Send a multimodal prompt to Gemini, uploading the PDF only once.

    Args:
        pdf_path: The path to the PDF file.
        text_prompt: The text prompt for the model.

    Returns:
        The model's response as a string, or an error message.
    """
    try:
        if st.session_state.uploaded_file_part is None:
            # First turn: upload the PDF and cache the returned file part.
            pdf_part = genai.upload_file(pdf_path, mime_type="application/pdf")
            st.session_state.uploaded_file_part = pdf_part
            prompt = [text_prompt, pdf_part]  # First turn includes the actual file
        else:
            # Subsequent turns reuse the cached file reference.
            prompt = [text_prompt, st.session_state.uploaded_file_part]

        response = chat.send_message(prompt)

        # Record both sides of the exchange for the chat UI.
        st.session_state.conversation_history.append(
            {"role": "user", "content": text_prompt, "has_pdf": True}
        )
        st.session_state.conversation_history.append(
            {"role": "assistant", "content": response.text}
        )
        return response.text
    except Exception as e:
        # Surface the failure to the caller as text rather than crashing the app.
        return f"An error occurred: {e}"
# --- Main Page ---
st.title("📚❓Problem Solving Tutor")

# NOTE(review): this text is never rendered anywhere in the visible file — a
# placeholder presumably intended for an st.expander; confirm before removing.
about = """
**How to use this App**
Replace this placeholder with the actual text.
"""

# Prompt asking Gemini to return the five equations as a bare JSON list.
TEXT_PROMPT = """Use the provided document. Read the list of 5 quadratic equations.
Return your response as a JSON list. Do not include any extra text, explanations, or backslashes.
Example JSON output:
[
"x^2 - 5x + 6 = 0",
"2x^2 + 3x - 1 = 0",
"x^2 - 9 = 0",
"3x^2 - 2x + 4 = 0",
"x^2 + 8x + 15 = 0"
]
"""
# Define a function to extract equations from the AI response
def extract_equations(response):
    """Parse the AI response into a list of equation strings.

    Accepts either an already-parsed list or a JSON-list string, tolerating
    Markdown code fences (``` / ```json) and a leading "json" tag that Gemini
    often wraps around its output.

    Returns:
        A list of equation strings, or [] (with a Streamlit error) on failure.
    """
    try:
        if isinstance(response, list):
            return response
        if not isinstance(response, str):
            st.error("Error: Unexpected response format from AI.")
            return []

        cleaned = response.strip()
        # Strip Markdown code fences first, otherwise the "json" prefix check
        # below can never match through the backticks.
        if cleaned.startswith("```"):
            cleaned = cleaned.strip("`").strip()
        cleaned = cleaned.replace("\n", "").replace("\r", "")
        if cleaned.lower().startswith("json"):
            cleaned = cleaned[4:].strip()

        if cleaned.startswith("[") and cleaned.endswith("]"):
            return json.loads(cleaned)
        st.error("Error: AI response is not in expected JSON list format.")
        return []
    except json.JSONDecodeError:
        st.error("Error: Failed to parse AI response as a list.")
        return []
# Define a function to extract quadratic equations from the problems
def extract_quadratic_equations(problems):
    """Pull the bare equation text out of each problem string.

    Args:
        problems: list of strings, each expected to contain an equation
            such as "x^2 - 5x + 6 = 0", possibly with surrounding prose.

    Returns:
        List of extracted, whitespace-trimmed equation strings. Entries with
        no recognizable equation are skipped with a Streamlit warning.
    """
    # Require an '=' in the match so stray digit runs (e.g. a leading "1.")
    # are not mistaken for the equation itself.
    pattern = re.compile(r'([0-9x\^\+\-\s]+=\s*[0-9x\^\+\-\s]+)')
    equations = []
    for problem in problems:
        match = pattern.search(problem)
        if match:
            equations.append(match.group(1).strip())
        else:
            st.warning(f"Could not extract equation from: '{problem}'")
    return equations
# Main code: ask Gemini for the equations and display them.
with st.spinner("AI is thinking..."):
    # The try is required here — the except at the bottom had lost its
    # opening statement in the mangled source.
    try:
        # Lazily resolve the PDF path if it is missing from session state.
        if st.session_state.get("uploaded_pdf_path") is None:
            st.session_state.uploaded_pdf_path = get_local_pdf_path()
        filepath = st.session_state.uploaded_pdf_path
        response = multimodal_prompt(filepath, TEXT_PROMPT)

        # Debugging: surface the raw model output in the UI.
        st.write("Raw AI Response:", response)

        # Parse the JSON list, then pull the bare equations from each entry.
        problems = extract_equations(response)
        if problems:
            equations = extract_quadratic_equations(problems)
            st.write("Extracted Equations:")
            for equation in equations:
                st.write(equation)
        else:
            st.error("Error: No valid equations extracted.")
    except Exception as e:
        st.error(f"An unexpected error occurred: {e}")
# Footer.
st.markdown("Visit our Hugging Face Space!")
st.markdown("© 2025 WVSU AI Dev Team 🤖 ✨")