aditi2222 committed on
Commit 3a49715 · 1 Parent(s): bc6cb8b

Update app.py

Files changed (1)
  1. app.py +81 -2
app.py CHANGED
@@ -1,4 +1,80 @@
- import torch
+ """
+ 🗣️ Translator - Translate text from one language to another.
+ Application file made with Streamlit.
+ Author:
+ - @ChainYo
+ """
+
+ import re
+ import streamlit as st
+
+ from datetime import datetime
+ from transformers import pipeline
+ from available_models import MODELS
+
+
+ st.set_page_config(page_title="Translator", page_icon="🗣️")
+ st.title("🗣️ Translator")
+
+ st.write("To add a new model, hit me up! ⬆️")
+
+ with st.expander(label="❓ How does it work", expanded=True):
+     # Placeholder body: the original expander text is not shown in this diff.
+     st.write("Pick the source and target languages, enter your text, and press Translate.")
+
+ lang1, lang2 = st.columns(2)
+ lang1.selectbox(
+     "Source Language", ["🇫🇷 French"],
+     key="input_lang", index=0,  # only one option is listed, so the default index must be 0
+ )
+ lang2.selectbox(
+     "Target Language", ["🇬🇧 English"],
+     key="output_lang", index=0,
+ )
+
+ # Look up the model registered for the selected language pair.
+ selected_model = MODELS[f"{st.session_state['input_lang']}->{st.session_state['output_lang']}"]
+
+ if selected_model[0] is None:
+     st.write("No model available for this pair.")
+ elif selected_model[0] == 0:
+     st.write("No translation necessary.")
+ else:
+     st.markdown(f"""
+     **Selected model:** [{selected_model[0]}]({selected_model[1]})
+     """)
+
+ input_text = st.text_area("Enter text to translate:", height=400, key="input")
+ translate_text = st.button("Translate")
+
+ if translate_text:
+     with st.spinner(text="⚙️ Model loading..."):
+         task = pipeline(
+             "translation",
+             model=selected_model[0],
+             tokenizer=selected_model[0],
+         )
+
+     progress_bar = st.progress(0)
+     with st.spinner(text="🔄 Translating..."):
+         # Split on sentence boundaries so each chunk stays within the model's input limit.
+         text_to_translate = re.split('(?<=[.!?]) +', input_text)
+         total_progress = len(text_to_translate)
+
+         for i, text in enumerate(text_to_translate):
+             translation = task(text)
+             text_to_translate[i] = translation[0]["translation_text"]
+             progress_bar.progress((i + 1) / total_progress)
+
+     st.success("🗣️ Translated!")
+     st.write(f"**Translation:** {' '.join(text_to_translate)}")
+
+
+ # The previous Gradio summarization app is kept below, disabled inside a string literal.
+ '''
+ import torch
  from transformers import BartTokenizer, BartForConditionalGeneration
  import gradio as gr
  from transformers import AutoTokenizer, AutoModelWithLMHead, TranslationPipeline
@@ -32,4 +108,7 @@ def article_summarization(result):

  iface = gr.Interface(fn=article_summarization,title="Summarization in English",description="facebook/bart-large-cnn for summarization in English", inputs=gr.inputs.Textbox(lines=50, placeholder="Enter newspaper article to be summarized"), outputs=["result"])

- iface.launch()
+ iface.launch()
+
+
+ '''
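
For reference, app.py imports MODELS from available_models.py, which is not part of this commit. From the way it is used above (selected_model[0] as the model name passed to pipeline(), selected_model[1] as a link shown in the page, None for unsupported pairs and 0 when no translation is needed), MODELS is presumably a dict keyed by "source->target" strings. Below is a minimal sketch under those assumptions; the model name and URL are illustrative examples, not taken from this repository.

# available_models.py (hypothetical sketch, inferred from how app.py uses MODELS)
MODELS = {
    # value[0] = model name for transformers.pipeline, value[1] = link shown in the UI
    "🇫🇷 French->🇬🇧 English": (
        "Helsinki-NLP/opus-mt-fr-en",                        # assumed example model
        "https://huggingface.co/Helsinki-NLP/opus-mt-fr-en",
    ),
    "🇫🇷 French->🇫🇷 French": (0, None),      # sentinel: no translation necessary
    "🇬🇧 English->🇫🇷 French": (None, None),  # sentinel: no model available for this pair
}

With such a file alongside app.py, the updated Space can be tried locally with: streamlit run app.py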