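"""Streamlit app: a Gemini-powered quadratic-equation tutor.

The app uploads a local PDF of practice problems (problems/problems.pdf) to the
Gemini API, asks the model to return the five quadratic equations as a JSON
list, and displays the parsed equations.
"""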
import streamlit as st
import os
import re
import json
import google.generativeai as genai
from PIL import Image
MODEL_ID = "gemini-2.0-flash-exp" # Keep the model ID as is
try:
    api_key = os.getenv("GEMINI_API_KEY")
    if not api_key:
        raise ValueError("GEMINI_API_KEY environment variable is not set.")
    genai.configure(api_key=api_key)
except Exception as e:
    st.error(f"Error: {e}")
    st.stop()
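# Create the model and start a chat session. Streamlit re-executes this script
# on every interaction, so the chat session is recreated on each rerun.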
model = genai.GenerativeModel(MODEL_ID)
chat = model.start_chat()
def get_local_pdf_path():
    """
    Returns the path to the local PDF file.
    """
    try:
        pdf_path = os.path.join("problems", "problems.pdf")
        if not os.path.exists(pdf_path):
            raise FileNotFoundError(f"{pdf_path} does not exist.")
        return pdf_path
    except Exception as e:
        st.error(f"Failed to find the local PDF: {e}")
        st.stop()  # Stop if the file is not found
# Initialize conversation history in Streamlit session state
if "conversation_history" not in st.session_state:
    st.session_state.conversation_history = []
if "uploaded_file_part" not in st.session_state:  # Store the file *part*
    st.session_state.uploaded_file_part = None
if "uploaded_pdf_path" not in st.session_state:
    st.session_state.uploaded_pdf_path = get_local_pdf_path()
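# The PDF is uploaded to Gemini once via genai.upload_file(); the returned file
# handle is cached in st.session_state.uploaded_file_part and reused on later
# reruns so the document is not re-uploaded on every turn.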
def multimodal_prompt(pdf_path, text_prompt):
    """
    Sends a multimodal prompt to Gemini, handling file uploads efficiently.

    Args:
        pdf_path: The path to the PDF file.
        text_prompt: The text prompt for the model.

    Returns:
        The model's response as a string, or an error message.
    """
    try:
        if st.session_state.uploaded_file_part is None:  # First time: upload the file
            pdf_part = genai.upload_file(pdf_path, mime_type="application/pdf")
            st.session_state.uploaded_file_part = pdf_part
            prompt = [text_prompt, pdf_part]  # First turn includes the actual file
        else:  # Subsequent turns reference the already-uploaded file
            prompt = [text_prompt, st.session_state.uploaded_file_part]

        response = chat.send_message(prompt)

        # Update conversation history
        st.session_state.conversation_history.append({"role": "user", "content": text_prompt, "has_pdf": True})
        st.session_state.conversation_history.append({"role": "assistant", "content": response.text})
        return response.text
    except Exception as e:
        return f"An error occurred: {e}"
# --- Main Page ---
st.title("📚❓Problem Solving Tutor")
about = """
**How to use this App**
Replace this placeholder with the actual text.
"""
# Define constants
TEXT_PROMPT = """Use the provided document. Read the list of 5 quadratic equations.
Return your response as a JSON list. Do not include any extra text, explanations, or backslashes.
Example JSON output:
[
"x^2 - 5x + 6 = 0",
"2x^2 + 3x - 1 = 0",
"x^2 - 9 = 0",
"3x^2 - 2x + 4 = 0",
"x^2 + 8x + 15 = 0"
]
"""
# Define a function to extract equations from the AI response
def extract_equations(response):
    try:
        if isinstance(response, str):
            response = response.strip().replace("\n", "").replace("\r", "")
            # Remove Markdown code fences and a leading "json" label, if present
            response = response.replace("```", "").strip()
            if response.lower().startswith("json"):
                response = response[4:].strip()
            if response.startswith("[") and response.endswith("]"):
                return json.loads(response)
            else:
                st.error("Error: AI response is not in the expected JSON list format.")
                return []
        elif isinstance(response, list):
            return response
        else:
            st.error("Error: Unexpected response format from AI.")
            return []
    except json.JSONDecodeError:
        st.error("Error: Failed to parse the AI response as a JSON list.")
        return []
# Define a function to extract quadratic equations from the problems
def extract_quadratic_equations(problems):
    equations = []
    for problem in problems:
        # Keep only the characters that can appear in a quadratic equation
        # (digits, x, ^, +, -, =, and whitespace)
        match = re.search(r'([0-9x\^\+\-\=\s]+)', problem)
        if match:
            equations.append(match.group(1).strip())
        else:
            st.warning(f"Could not extract equation from: '{problem}'")
    return equations
# Main code
try:
    with st.spinner("AI is thinking..."):
        if st.session_state.get("uploaded_pdf_path") is None:
            st.session_state.uploaded_pdf_path = get_local_pdf_path()
        filepath = st.session_state.uploaded_pdf_path
        response = multimodal_prompt(filepath, TEXT_PROMPT)

    # Debugging: show the raw model response
    st.write("Raw AI Response:", response)

    # Extract equations
    problems = extract_equations(response)
    if problems:
        equations = extract_quadratic_equations(problems)
        st.write("Extracted Equations:")
        for equation in equations:
            st.write(equation)
    else:
        st.error("Error: No valid equations extracted.")
except Exception as e:
    st.error(f"An unexpected error occurred: {e}")
st.markdown("Visit our Hugging Face Space!")
st.markdown("© 2025 WVSU AI Dev Team 🤖 ✨") |