Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,12 +1,17 @@
|
|
1 |
from huggingface_hub import InferenceClient
|
2 |
import gradio as gr
|
3 |
|
|
|
4 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
5 |
|
6 |
def translate_text(input_text, target_language):
|
7 |
prompt = f"Translate the following text into {target_language}: {input_text}"
|
8 |
-
|
9 |
-
|
|
|
|
|
|
|
|
|
10 |
return translated_text
|
11 |
|
12 |
iface = gr.Interface(
|
|
|
1 |
from huggingface_hub import InferenceClient
|
2 |
import gradio as gr
|
3 |
|
4 |
+
# Ensure your Hugging Face API key is correctly set up in your environment or passed here
|
5 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
6 |
|
7 |
def translate_text(input_text, target_language):
    """Translate *input_text* into *target_language* via the Mixtral instruct model.

    Parameters
    ----------
    input_text : str
        The text to translate.
    target_language : str
        Human-readable name of the language to translate into.

    Returns
    -------
    str
        The model's translation, or an ``"Error: ..."`` message if the
        inference call fails (network issues, auth, model unavailable, ...).
    """
    prompt = f"Translate the following text into {target_language}: {input_text}"
    try:
        # BUG FIX: InferenceClient instances are not callable — the original
        # `client(text=prompt)` raised TypeError on every call, so the except
        # branch always fired. `text_generation` is the documented entry point
        # for instruct/text models and returns the generated text as a str,
        # so no response[0]['generated_text'] unpacking is needed.
        translated_text = client.text_generation(prompt)
    except Exception as e:
        # Best-effort: surface the failure to the Gradio UI as a string
        # instead of crashing the app (matches original behavior).
        translated_text = f"Error: {str(e)}"
    return translated_text
|
16 |
|
17 |
iface = gr.Interface(
|