bhadresh-savani committed
Commit 78f4796 · 1 Parent(s): 626b394

updated app

Files changed (1): app.py (+33 -33)
app.py CHANGED
@@ -3,45 +3,45 @@ from transformers import AutoTokenizer,AutoModelForSeq2SeqLM
 
 @st.cache(show_spinner=False, persist=True)
 def load_model(input_complex_sentence,model):
-\t
-\tbase_path = "flax-community/"
-\tmodel_path = base_path + model
-\ttokenizer = AutoTokenizer.from_pretrained(model_path)
-\tmodel = AutoModelForSeq2SeqLM.from_pretrained(model_path)
-\t
-\ttokenized_sentence = tokenizer(input_complex_sentence,return_tensors="pt")
-\tresult = model.generate(tokenized_sentence['input_ids'],attention_mask = tokenized_sentence['attention_mask'],max_length=256,num_beams=5)
-\tgenerated_sentence = tokenizer.decode(result[0],skip_special_tokens=True)
-\t
-\treturn generated_sentence
+
+    base_path = "flax-community/"
+    model_path = base_path + model
+    tokenizer = AutoTokenizer.from_pretrained(model_path)
+    model = AutoModelForSeq2SeqLM.from_pretrained(model_path)
+
+    tokenized_sentence = tokenizer(input_complex_sentence,return_tensors="pt")
+    result = model.generate(tokenized_sentence['input_ids'],attention_mask = tokenized_sentence['attention_mask'],max_length=256,num_beams=5)
+    generated_sentence = tokenizer.decode(result[0],skip_special_tokens=True)
+
+    return generated_sentence
 
 def main():
 
-\tst.sidebar.title("🧠 Sentence Simplifier")
-\tst.title("Sentence Split in English using T5 Variants")
-\tst.write("Sentence Split is the task of **dividing a long Complex Sentence into Simple Sentences**")
-\t
-\tmodel = st.sidebar.selectbox(
-\t\t\t\t "Please Choose the Model",
-\t\t\t\t ("t5-base-wikisplit","t5-v1_1-base-wikisplit", "byt5-base-wikisplit","t5-large-wikisplit"))
+    st.sidebar.title("🧠Sentence Simplifier")
+    st.title("Sentence Split in English using T5 Variants")
+    st.write("Sentence Split is the task of **dividing a long Complex Sentence into Simple Sentences**")
+
+    model = st.sidebar.selectbox(
+                 "Please Choose the Model",
+                 ("t5-base-wikisplit","t5-v1_1-base-wikisplit", "byt5-base-wikisplit","t5-large-wikisplit"))
 
-\tst.sidebar.write('''
-\t\t## Applications:
-\t\t* Sentence Simplification
-\t\t* Data Augmentation
-\t\t* Sentence Rephrase
-\t''')
+    st.sidebar.write('''
+        ## Applications:
+        * Sentence Simplification
+        * Data Augmentation
+        * Sentence Rephrase
+    ''')
 
-\tst.sidebar.write("[More Exploration](https://github.com/bhadreshpsavani/t5-sentence-split)")
-\t
-\texample = "Mary likes to play football in her freetime whenever she meets with her friends that are very nice people."
-\tinput_complex_sentence = st.text_area("Please type a Complex Sentence to split",example)
+    st.sidebar.write("[More Exploration](https://github.com/bhadreshpsavani/t5-sentence-split)")
+
+    example = "Mary likes to play football in her freetime whenever she meets with her friends that are very nice people."
+    input_complex_sentence = st.text_area("Please type a Complex Sentence to split",example)
 
-\tif st.button('Split'):
-\t\twith st.spinner("Spliting Sentence...🧠"):
-\t\t\tgenerated_sentence = load_model(input_complex_sentence, model)
-\t\tst.write(generated_sentence)
+    if st.button('Split'):
+        with st.spinner("Spliting Sentence...🧠"):
+            generated_sentence = load_model(input_complex_sentence, model)
+        st.write(generated_sentence)
 
 
 if __name__ == "__main__":
-\tmain()
+    main()
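
For reference, the splitting step that load_model performs can be reproduced outside Streamlit. The sketch below is a minimal example, not part of this commit: it assumes the flax-community/t5-base-wikisplit checkpoint (one of the models offered in the sidebar) is reachable on the Hugging Face Hub and that transformers and torch are installed locally, and it mirrors the generate call in app.py (beam search with 5 beams, max_length=256).

# Minimal standalone sketch of the sentence-splitting step used in app.py.
# Assumes flax-community/t5-base-wikisplit plus local transformers + torch.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_path = "flax-community/t5-base-wikisplit"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSeq2SeqLM.from_pretrained(model_path)

# Same example sentence as the app's default text_area value.
sentence = ("Mary likes to play football in her freetime whenever she meets "
            "with her friends that are very nice people.")

# Tokenize and generate with the same settings as load_model:
# beam search with 5 beams and a 256-token cap.
inputs = tokenizer(sentence, return_tensors="pt")
result = model.generate(inputs["input_ids"],
                        attention_mask=inputs["attention_mask"],
                        max_length=256,
                        num_beams=5)
print(tokenizer.decode(result[0], skip_special_tokens=True))

In the app itself, @st.cache(show_spinner=False, persist=True) memoizes load_model on its (sentence, model) arguments, so repeating a split with the same inputs returns the cached result instead of rerunning tokenization and generation.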