import gradio as gr
import re
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
# Load model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")
model = AutoModelForCausalLM.from_pretrained("openai-community/gpt2")
def text_to_emoji(text):
    # Remove punctuation so it does not end up in the prompt
    text_cleaned = re.sub(r"[.,!?;:]", "", text)
    prompt = f"Convert the following sentence into an emoji sequence that conveys a similar meaning, and return only the emojis, no explanation:\n\n\"{text_cleaned}\""

    # Tokenize the prompt
    inputs = tokenizer(prompt, return_tensors="pt")

    # Generate a response
    outputs = model.generate(**inputs, max_new_tokens=25, do_sample=True, temperature=0.7)
    # Decode only the newly generated tokens so the echoed prompt is not returned
    generated_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    result = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return result
# Gradio UI
iface = gr.Interface(
    fn=text_to_emoji,
    inputs=gr.Textbox(lines=2, placeholder="Enter a sentence..."),
    outputs="text",
    title="AI-Powered Emoji Translator",
    description="Enter a sentence, and the AI will transform it into an emoji version 🥳",
)
iface.launch()