himanishprak23 committed
Commit 238ef35 · verified · 1 Parent(s): f71f2b3

Upload app.py

Files changed (1)
  1. app.py +28 -0
app.py ADDED
@@ -0,0 +1,28 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM
+
+ # Define the model repository and tokenizer checkpoint
+ model_checkpoint = "himanishprak23/neural_machine_translation"
+ tokenizer_checkpoint = "Helsinki-NLP/opus-mt-en-hi"
+
+ # Load the tokenizer from Helsinki-NLP and the model from the Hugging Face repository
+ tokenizer = AutoTokenizer.from_pretrained(tokenizer_checkpoint)
+ model = TFAutoModelForSeq2SeqLM.from_pretrained(model_checkpoint)
+
+ def translate_text(input_text):
+     tokenized_input = tokenizer(input_text, return_tensors='tf', max_length=128, truncation=True)
+     generated_tokens = model.generate(**tokenized_input, max_length=128)
+     predicted_text = tokenizer.decode(generated_tokens[0], skip_special_tokens=True)
+     return predicted_text
+
+ # Create the Gradio interface
+ iface = gr.Interface(
+     fn=translate_text,
+     inputs=gr.components.Textbox(lines=2, placeholder="Enter text to translate from English to Hindi..."),
+     outputs=gr.components.Textbox(),
+     title="English to Hindi Translator",
+     description="Enter English text and get the Hindi translation."
+ )
+
+ # Launch the Gradio app
+ iface.launch()
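
For a quick sanity check outside the Gradio UI, the translate_text function defined above can be called directly once the tokenizer and TF model have loaded. A minimal sketch (the example sentence is purely illustrative):

# Minimal local check of the translation function from app.py
# (assumes the tokenizer and TF model above have already been loaded)
sample = "How are you today?"   # illustrative English input, not from the repo
print(translate_text(sample))   # prints the Hindi translation produced by the model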