philipp-zettl's picture
Update app.py
9e7abab verified
raw
history blame contribute delete
508 Bytes
import gradio as gr
from transformers import AutoTokenizer
from model import MultiHeadClassification
import spaces
# Load the multi-head sequence classifier. The second argument maps head
# names to class counts — two heads here: "GGU" (3 classes) and
# "sentiment" (3 classes). Head semantics are defined by the project's
# MultiHeadClassification class / model repo, not visible from this file.
model = MultiHeadClassification.from_pretrained(
    'philipp-zettl/multi-head-sequence-classification-model',
    {"GGU": 3, "sentiment": 3}
).to('cuda')  # NOTE(review): assumes a CUDA device exists at import time —
              # on ZeroGPU Spaces, CUDA is usually only available inside a
              # @spaces.GPU-decorated function; confirm the Space hardware.
# Tokenizer for the model's text input; presumably the model uses a
# BAAI/bge-m3 backbone so the vocabularies match — TODO confirm.
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-m3')
#@spaces.GPU()
def generate(prompt):
    """Run the multi-head classifier on a single text prompt.

    Parameters
    ----------
    prompt : str
        The input text to classify.

    Returns
    -------
    The raw model output for the tokenized prompt (shape/type defined by
    MultiHeadClassification — presumably one logit set per head; confirm).
    """
    # return_tensors='pt' is required: without it the tokenizer returns
    # plain Python lists, which BatchEncoding.to('cuda') cannot move and
    # the model cannot consume. Padding/truncation keep arbitrary-length
    # prompts within the encoder's limits.
    inputs = tokenizer(
        [prompt],
        return_tensors='pt',
        padding=True,
        truncation=True,
    ).to('cuda')
    return model(**inputs)
# Build and start the web UI. Without .launch() the Interface object is
# created and immediately discarded, so the Space would serve nothing.
gr.Interface(
    generate,
    inputs="text",
    outputs="label",
).launch()