import os
os.system("pip install -U bitsandbytes==0.45.3 transformers accelerate peft torch --no-cache-dir")  # peft is required for PeftModel below
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel
from functools import lru_cache
# Define models
BASE_MODEL = "deepseek-ai/deepseek-math-7b-rl"
FINETUNED_MODEL = "LaibaIrfan/emoji_math"
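# FINETUNED_MODEL is a PEFT adapter repo; it is applied on top of BASE_MODEL via PeftModel below.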
# Load tokenizer and model
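# lru_cache() memoizes the returned (tokenizer, model) pair, so the 7B checkpoint is loaded only once per process.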
@lru_cache()
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL, trust_remote_code=True)  # Use base model tokenizer
    base_model = AutoModelForCausalLM.from_pretrained(
        BASE_MODEL,
        torch_dtype=torch.float32,
        device_map="cpu",
    )
    model = PeftModel.from_pretrained(base_model, FINETUNED_MODEL, device_map="cpu")
    return tokenizer, model
# Load the model
tokenizer, model = load_model()
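# Optional sketch, not used by the app above: the pip install pulls in bitsandbytes,
# which the CPU/float32 load never touches. Assuming the Space ran on CUDA hardware
# instead, the same base model + adapter could be loaded in 4-bit along these lines
# (load_model_4bit is a hypothetical alternative, defined here but never called):
from transformers import BitsAndBytesConfig

def load_model_4bit():
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_quant_type="nf4",
        bnb_4bit_compute_dtype=torch.float16,
    )
    tokenizer_4bit = AutoTokenizer.from_pretrained(BASE_MODEL, trust_remote_code=True)
    base_4bit = AutoModelForCausalLM.from_pretrained(
        BASE_MODEL,
        quantization_config=bnb_config,
        device_map="auto",  # place layers on the available GPU(s)
    )
    return tokenizer_4bit, PeftModel.from_pretrained(base_4bit, FINETUNED_MODEL)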
# Function to generate the result
def generate_result(incorrect_math):
input_text = f"Incorrect: {incorrect_math}\nCorrect:"
# Move input to GPU
inputs = tokenizer(input_text, return_tensors="pt").to("cuda")
# Generate output on GPU
output = model.generate(**inputs, max_length=200)
return tokenizer.decode(output[0], skip_special_tokens=True)
# Gradio Interface
iface = gr.Interface(
    fn=generate_result,
    inputs="text",
    outputs="text",
    title="Emoji Math Solver 🧮",
    description="Enter an emoji-based math equation, and the model will generate the correct answer!"
)
iface.launch(debug=True, share=True, inline=True)
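# Note: share=True and inline=True mainly matter for local or notebook runs;
# a hosted Space serves the interface directly and Gradio ignores share=True there.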