# MCQ_Converter / app.py
import base64

import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI

import prompts

# Load OPENAI_API_KEY (and any other configuration) from a local .env file.
load_dotenv()

client = OpenAI()
PROMPT = prompts.SINGLE_QCM_PROMPT
# Encode an image file as a base64 string for the OpenAI vision API.
def encode_image(image_path):
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode("utf-8")
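

# Optional sketch, not used by process() below: the request hard-codes a JPEG data
# URL, while Gradio can also pass PNG files. A hypothetical helper built on the
# standard-library mimetypes module could derive the prefix from the actual file type.
def encode_image_data_url(image_path):
    import mimetypes

    # Guess the MIME type from the file extension; fall back to JPEG if unknown.
    mime_type, _ = mimetypes.guess_type(image_path)
    return f"data:{mime_type or 'image/jpeg'};base64,{encode_image(image_path)}"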
def process(image_path):
    """Send the uploaded image to GPT-4o with the MCQ-extraction prompt and return the model's reply."""
    try:
        response = client.chat.completions.create(
            model="gpt-4o",
            # response_format={"type": "json_object"},  # enable if strict JSON output is needed
            messages=[
                # {"role": "system", "content": "You are a helpful assistant designed to output JSON."},
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": PROMPT},
                        {
                            "type": "image_url",
                            "image_url": {
                                "url": f"data:image/jpeg;base64,{encode_image(image_path)}"
                            },
                        },
                    ],
                }
            ],
            temperature=0.2,
            # max_tokens=256,
            # top_p=1,
            # frequency_penalty=0,
            # presence_penalty=0
        )
        return response.choices[0].message.content
    except Exception as e:
        print(f"An error occurred: {e}")
        return {"error": str(e)}
# Gradio interface: upload an image file, display the model's reply as JSON.
iface = gr.Interface(
    fn=process,
    inputs=gr.Image(type="filepath"),
    outputs=gr.JSON(),
)

iface.launch()