Lenylvt committed
Commit a03a333 · verified · Parent: ca6492d

Update app.py

Files changed (1): app.py (+39, -19)
app.py CHANGED
@@ -1,28 +1,48 @@
-from huggingface_hub import InferenceApi
 import gradio as gr
+from transformers import MarianMTModel, MarianTokenizer

-# Note: Replace "your_hugging_face_api_key" with your actual API key.
-client = InferenceApi("mistralai/Mixtral-8x7B-Instruct-v0.1")
+# Specify the model name from the Hugging Face Hub, for example, an English to French model by the University of Helsinki
+model_name = "Helsinki-NLP/opus-mt-en-fr"

-def translate_text(input_text, target_language):
-    prompt = f"Translate the following text into {target_language}: {input_text}"
-    try:
-        # Adjusted to use a hypothetical 'generate' or similar method.
-        # You'll need to replace this with the actual method for sending inference requests.
-        response = client.generate(inputs=prompt)
-        # The response structure depends on the model and the API's current design.
-        # You may need to adjust how you extract the translated text from the response.
-        translated_text = response['generated_text'] if 'generated_text' in response else "Translation error or model response format has changed."
-    except Exception as e:
-        translated_text = f"Error: {str(e)}"
+# Load the tokenizer and model
+tokenizer = MarianTokenizer.from_pretrained(model_name)
+model = MarianMTModel.from_pretrained(model_name)
+
+# Function to handle translation
+def translate_text(text, target_language):
+    # Adjust the model_name based on the target language
+    # Note: You'd need to find the exact model names for each language pair you want to support
+    model_name_map = {
+        "French": "Helsinki-NLP/opus-mt-en-fr",
+        "German": "Helsinki-NLP/opus-mt-en-de",
+        "Spanish": "Helsinki-NLP/opus-mt-en-es",
+    }
+
+    selected_model_name = model_name_map.get(target_language, "Helsinki-NLP/opus-mt-en-fr")
+
+    # Load the selected model and tokenizer
+    tokenizer = MarianTokenizer.from_pretrained(selected_model_name)
+    model = MarianMTModel.from_pretrained(selected_model_name)
+
+    # Prepare the text for translation
+    encoded_text = tokenizer.prepare_seq2seq_batch([text], return_tensors="pt")
+
+    # Perform the translation
+    translated = model.generate(**encoded_text)
+
+    # Decode the translated text
+    translated_text = tokenizer.decode(translated[0], skip_special_tokens=True)
+
     return translated_text

+# Define the interface
 iface = gr.Interface(
     fn=translate_text,
-    inputs=[gr.Textbox(label="Text to Translate"), gr.Textbox(label="Target Language")],
-    outputs=gr.Textbox(label="Translated Text"),
-    title="Simple Translator with Mixtral",
-    description="Translate text to your specified language using the Mixtral model from Hugging Face."
+    inputs=[gr.inputs.Textbox(lines=2, placeholder="Enter text to translate..."), gr.inputs.Dropdown(["French", "German", "Spanish"], label="Select Language")],
+    outputs=[gr.outputs.Textbox()],
+    title="Text Translator with Helsinki NLP Models",
+    description="Select a language to translate English text into using University of Helsinki models."
 )

-iface.launch()
+# Launch the app
+iface.launch()
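
A few details of the added code are worth flagging for follow-up: tokenizer.prepare_seq2seq_batch is deprecated in recent transformers releases in favor of calling the tokenizer directly, the gr.inputs / gr.outputs namespaces were removed in Gradio 4.x, and every call to translate_text re-instantiates a checkpoint with from_pretrained. A minimal sketch of the same app written against current transformers and gradio APIs is shown below; the _model_cache dict and load_model helper are illustrative names introduced here, not part of this commit.

    # Sketch of a possible follow-up to this commit (not part of the diff above).
    # Assumes recent transformers and gradio releases; _model_cache and load_model
    # are illustrative additions, not identifiers from the commit.
    import gradio as gr
    from transformers import MarianMTModel, MarianTokenizer

    MODEL_NAME_MAP = {
        "French": "Helsinki-NLP/opus-mt-en-fr",
        "German": "Helsinki-NLP/opus-mt-en-de",
        "Spanish": "Helsinki-NLP/opus-mt-en-es",
    }

    # Cache loaded checkpoints so each language pair is instantiated once,
    # instead of on every translation request.
    _model_cache = {}

    def load_model(model_name):
        if model_name not in _model_cache:
            tokenizer = MarianTokenizer.from_pretrained(model_name)
            model = MarianMTModel.from_pretrained(model_name)
            _model_cache[model_name] = (tokenizer, model)
        return _model_cache[model_name]

    def translate_text(text, target_language):
        model_name = MODEL_NAME_MAP.get(target_language, "Helsinki-NLP/opus-mt-en-fr")
        tokenizer, model = load_model(model_name)
        # Calling the tokenizer directly replaces the deprecated prepare_seq2seq_batch.
        encoded = tokenizer([text], return_tensors="pt", padding=True)
        generated = model.generate(**encoded)
        return tokenizer.decode(generated[0], skip_special_tokens=True)

    # gr.Textbox / gr.Dropdown replace the removed gr.inputs / gr.outputs namespaces.
    iface = gr.Interface(
        fn=translate_text,
        inputs=[
            gr.Textbox(lines=2, placeholder="Enter text to translate..."),
            gr.Dropdown(["French", "German", "Spanish"], label="Select Language"),
        ],
        outputs=gr.Textbox(label="Translated Text"),
        title="Text Translator with Helsinki NLP Models",
        description="Select a language to translate English text into using University of Helsinki models.",
    )

    iface.launch()

With per-language caching, the first request for each language still pays the model load, but later requests reuse the already-loaded tokenizer and model rather than reloading them on every call.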