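"""Flask app that solves math problems from uploaded images.

The uploaded image is sent either to Gemini (with thinking enabled) or to the
Qwen2.5-Math demo Space; any Python plotting code found in the answer is
executed with matplotlib and the resulting graphs are served back to the page.
"""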
from flask import Flask, request, render_template, jsonify, send_from_directory
from PIL import Image
import io
import os
import re
import matplotlib.pyplot as plt
import tempfile
from gradio_client import Client, handle_file
from dataclasses import dataclass
from typing import List, Optional
import logging
from google import genai
from google.genai import types
# Logging configuration
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class MathSolver:
    """Solves math problems from images using Gemini or the Qwen2.5-Math demo."""

    def __init__(self):
        plt.switch_backend('Agg')  # Non-interactive backend for server-side rendering

    def query_gemini(self, image_path: str, prompt: str) -> str:
        """Send the image and prompt to Gemini and return the thoughts and answer as HTML."""
        try:
            # Re-encode the upload as PNG bytes for the inline_data part
            img = Image.open(image_path)
            buffered = io.BytesIO()
            img.save(buffered, format="PNG")
            img_byte_arr = buffered.getvalue()

            response = client.models.generate_content(
                model="gemini-2.0-flash-thinking-exp-01-21",
                config={
                    'thinking_config': {'include_thoughts': True},
                    'temperature': 1,
                    'max_output_tokens': 8192,
                },
                contents=[
                    {'parts': [
                        {'text': prompt},
                        {'inline_data': {'mime_type': 'image/png', 'data': img_byte_arr}},
                    ]}
                ],
            )

            # Separate the model's reasoning ("thought") parts from the final answer
            full_response = ""
            for candidate in response.candidates:
                for part in candidate.content.parts:
                    if part.thought:
                        full_response += f"<br><b>Thought:</b><br> {part.text}<br>"
                    else:
                        full_response += f"<br><b>Answer:</b><br> {part.text}<br>"
            return full_response
        except Exception as e:
            logger.error(f"Gemini Error: {str(e)}")
            raise

    @staticmethod
    def query_qwen2(image_path: str, question: str) -> str:
        """Send the image and question to the public Qwen2.5-Math demo Space."""
        try:
            # Local name avoids shadowing the module-level Gemini client
            qwen_client = Client("Qwen/Qwen2.5-Math-Demo")
            return qwen_client.predict(
                image=handle_file(image_path),
                sketchpad=None,
                question=question,
                api_name="/math_chat_bot"
            )
        except Exception as e:
            logger.error(f"Qwen2 Error: {str(e)}")
            raise

    @staticmethod
    def extract_and_execute_python_code(text: str) -> Optional[List[str]]:
        """Execute each Python code block in the model's answer and save any matplotlib output."""
        code_blocks = re.findall(r'```python\n(.*?)```', text, re.DOTALL)
        if not code_blocks:
            return None

        image_paths = []
        for code in code_blocks:
            try:
                # numpy is commonly assumed by generated plotting code
                code = "import numpy as np\n" + code
                # Escape backslashes so LaTeX strings in the generated code survive exec()
                code = code.replace("\\", "\\\\")
                with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmpfile:
                    plt.figure()
                    exec(code)
                    plt.savefig(tmpfile.name)
                    plt.close()
                    # Only the file name is kept; it is served later via /temp/<filename>
                    relative_path = os.path.basename(tmpfile.name)
                    image_paths.append(relative_path)
            except Exception as e:
                logger.error(f"Error generating graph: {str(e)}")
                continue
        return image_paths if image_paths else None

# Application configuration
app = Flask(__name__)

# The Gemini API key is read from the TOKEN environment variable
GOOGLE_API_KEY = os.environ.get("TOKEN")
client = genai.Client(
    api_key=GOOGLE_API_KEY,
    http_options={'api_version': 'v1alpha'},
)
math_solver = MathSolver()
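
# Routes: '/' serves the page, '/upload' runs the selected model on the uploaded
# image, and '/temp/<filename>' serves graphs generated from the model's answer.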

@app.route('/')
def index():
    return render_template('math.html')

@app.route('/upload', methods=['POST'])
def upload_image():
    if 'image' not in request.files:
        return jsonify({'error': 'No image provided'}), 400

    file = request.files['image']
    if not file.filename:
        return jsonify({'error': 'No file selected'}), 400

    model_choice = request.form.get('model_choice', 'gemini')
    custom_instruction = request.form.get('custom_instruction', '')
    prompt = f"Solve this math problem. Provide a complete solution with LaTeX rendering. {custom_instruction}"

    try:
        # Save the upload to a temporary file so both clients can read it from disk
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            file.save(temp_file.name)

        result = (
            math_solver.query_gemini(temp_file.name, prompt)
            if model_choice == "mariam's"
            else math_solver.query_qwen2(temp_file.name, prompt)
        )

        # Execute any Python code blocks in the answer to produce graphs
        image_paths = math_solver.extract_and_execute_python_code(result)
        os.unlink(temp_file.name)

        return jsonify({
            'result': result,
            'model': model_choice,
            'image_paths': image_paths,
            'temp_dir': tempfile.gettempdir()
        })
    except Exception as e:
        logger.error(f"Error processing: {str(e)}")
        return jsonify({'error': str(e)}), 500

@app.route('/temp/<path:filename>')
def serve_temp_image(filename):
    """Serve matplotlib output saved in the system temp directory."""
    try:
        return send_from_directory(tempfile.gettempdir(), filename)
    except Exception as e:
        logger.error(f"Error sending image: {str(e)}")
        return jsonify({'error': 'Image not found'}), 404

if __name__ == '__main__':
    app.run(debug=True)