from huggingface_hub import InferenceClient
import gradio as gr
# Make sure your Hugging Face API token is available in your environment
# (e.g. via `huggingface-cli login`) or passed to the client below.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
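# A minimal sketch, assuming you would rather pass the token explicitly than
# rely on a cached login; the HF_TOKEN variable name is an assumption, not
# something this Space defines:
#
# import os
# client = InferenceClient(
#     "mistralai/Mixtral-8x7B-Instruct-v0.1",
#     token=os.environ.get("HF_TOKEN"),  # assumed environment variable
# )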
def translate_text(input_text, target_language):
    prompt = f"Translate the following text into {target_language}: {input_text}"
    try:
        # text_generation sends the prompt to the hosted model and returns
        # the generated text as a plain string; max_new_tokens caps the
        # length of the translation.
        translated_text = client.text_generation(prompt, max_new_tokens=512)
    except Exception as e:
        translated_text = f"Error: {str(e)}"
    return translated_text
iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Textbox(label="Text to Translate", placeholder="Enter the text you want to translate here..."),
        gr.Textbox(label="Target Language", placeholder="Enter the target language (e.g., French, Spanish)..."),
    ],
    outputs=gr.Textbox(label="Translated Text"),
    title="Simple Translator with Mixtral",
    description="Translate text to your specified language using the Mixtral model from Hugging Face.",
)
iface.launch()
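# A hedged sketch of an alternative call path: Mixtral-8x7B-Instruct is a
# chat-tuned model, so routing the request through chat_completion (available
# in recent huggingface_hub versions) lets the library apply the model's
# instruction template for you. The max_tokens value is an assumption.
#
# def translate_text(input_text, target_language):
#     messages = [{
#         "role": "user",
#         "content": f"Translate the following text into {target_language}: {input_text}",
#     }]
#     result = client.chat_completion(messages, max_tokens=512)
#     return result.choices[0].message.content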