import gradio as gr
import re
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")
model = AutoModelForCausalLM.from_pretrained("openai-community/gpt2")

def text_to_emoji(text):
    # Remove punctuation so it does not clutter the prompt
    text_cleaned = re.sub(r"[.,!?;:]", "", text)
    prompt = f"Convert the following sentence into an emoji sequence that conveys a similar meaning and return only the emojis, no explanation:\n\n\"{text_cleaned}\""

    # Tokenize the prompt
    inputs = tokenizer(prompt, return_tensors="pt")

    # Generate a response (pad_token_id is set explicitly because GPT-2 has no pad token)
    outputs = model.generate(
        **inputs,
        max_new_tokens=25,
        do_sample=True,
        temperature=0.7,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode only the newly generated tokens so the prompt is not echoed back
    generated_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    result = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return result

# Gradio UI
iface = gr.Interface(
    fn=text_to_emoji,
    inputs=gr.Textbox(lines=2, placeholder="Enter a sentence..."),
    outputs="text",
    title="AI-Powered Emoji Translator",
    description="Enter a sentence, and the AI will transform it into an emoji version 🥳"
)
iface.launch()
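
# Note: iface.launch() starts the Gradio app and blocks while it is running.
# For a quick sanity check of text_to_emoji() without the UI, a direct call
# works as well. A minimal sketch with an illustrative sentence (base GPT-2
# is not instruction-tuned, so the output may contain plain text rather than
# a clean emoji sequence):
#
#   print(text_to_emoji("I love pizza and sunshine"))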