import gradio as gr
from huggingface_hub import InferenceClient
import os
import re

# Load API key from environment variables
HF_API_TOKEN = os.getenv("HUG_TOKEN_READ")

# Hugging Face Inference API client
# client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.1", token=HF_API_TOKEN)
client = InferenceClient(model="openGPT-X/Teuken-7B-instruct-commercial-v0.4", token=HF_API_TOKEN)

# Function to translate text into emojis
def text_to_emoji(text):
    # Remove punctuation so only the words are sent to the model
    text_cleaned = re.sub(r"[.,!?;:]", "", text)
    prompt = (
        "Convert this sentence into an emoji-sequence of the same meaning "
        f"and return only the emojis, no explanation:\n\n\"{text_cleaned}\""
    )
    response = client.text_generation(prompt, max_new_tokens=50)
    return response

# Gradio UI
iface = gr.Interface(
    fn=text_to_emoji,
    inputs=gr.Textbox(lines=2, placeholder="Enter a sentence..."),
    outputs="text",
    title="AI-Powered Emoji Translator",
    description="Enter a sentence, and the AI will transform it into an emoji version 🥳"
)

iface.launch()