# Hugging Face Space: simple text translator backed by Mixtral-8x7B-Instruct.
# (Scraped Space status lines "Spaces: / Sleeping" removed — they are page
# residue, not part of the program.)
import gradio as gr
from huggingface_hub import InferenceClient

# Ensure your Hugging Face API key is available (HF_TOKEN env var, or pass
# token=... here) — presumably required for this gated/hosted model; verify
# against your account's access settings.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
def translate_text(input_text, target_language):
    """Translate *input_text* into *target_language* via the Mixtral model.

    Args:
        input_text: The text to translate.
        target_language: Target language name (e.g. "French", "Spanish").

    Returns:
        The model's translation as a string, or an ``"Error: ..."`` message
        if the inference call fails (keeps the Gradio UI responsive instead
        of crashing the request).
    """
    prompt = f"Translate the following text into {target_language}: {input_text}"
    try:
        # Bug fix: InferenceClient is not callable (`client(text=prompt)`
        # raises TypeError). The documented entry point for text-generation
        # models is `text_generation()`, which returns the generated string
        # directly — no `response[0]['generated_text']` unpacking needed.
        translated_text = client.text_generation(prompt, max_new_tokens=512)
    except Exception as e:
        # Surface the failure message in the output textbox rather than
        # letting the exception propagate to the UI.
        translated_text = f"Error: {str(e)}"
    return translated_text
# Wire the translator into a two-input / one-output Gradio UI.
iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Textbox(label="Text to Translate", placeholder="Enter the text you want to translate here..."),
        gr.Textbox(label="Target Language", placeholder="Enter the target language (e.g., French, Spanish)..."),
    ],
    outputs=gr.Textbox(label="Translated Text"),
    title="Simple Translator with Mixtral",
    description="Translate text to your specified language using the Mixtral model from Hugging Face.",
)

# Launch only when run as a script, so importing this module (e.g. in tests)
# does not start a web server.
if __name__ == "__main__":
    iface.launch()