SmartMath_AI / app.py
# Install dependencies at runtime (in a Hugging Face Space, requirements.txt
# is the more conventional place for these).
import subprocess
subprocess.check_call(["pip", "install", "transformers", "torch"])
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the model and tokenizer
model_name = "Qwen/Qwen2.5-Math"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
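# A possible alternative for GPU-backed hardware (a sketch, assuming a CUDA
# device is available and the `accelerate` package is installed; `torch_dtype`
# and `device_map` are standard from_pretrained keyword arguments):
#
#   model = AutoModelForCausalLM.from_pretrained(
#       model_name, torch_dtype="auto", device_map="auto"
#   )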
# Gradio interface for math solutions
def solve_math(input_text):
    # Tokenize the prompt, generate a solution, and decode it back to text.
    inputs = tokenizer(input_text, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=512)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Build the Gradio app
iface = gr.Interface(
    fn=solve_math,
    inputs="text",
    outputs="text",
    title="SmartMath_AI",
    description="An AI-powered tool that performs complex mathematical calculations and provides intuitive explanations.",
)
iface.launch()
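# Example client usage (a sketch, assuming the app is running locally on the
# default port and the `gradio_client` package is installed; "/predict" is the
# default api_name exposed by gr.Interface):
#
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860/")
#   print(client.predict("Solve 12 * 7 + 5", api_name="/predict"))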