shaneavh committed on
Commit ee23318 · 1 Parent(s): 379acbf

Update app.py

Files changed (1): app.py (+8, -9)
app.py CHANGED
@@ -1,7 +1,7 @@
 import streamlit as st
 from transformers import PegasusForConditionalGeneration, PegasusTokenizer
 
-st.title("Paraphrase sentences")
+st.title("Paraphrase Generator")
 
 model_name = "tuner007/pegasus_paraphrase"
 torch_device = "cpu"
@@ -36,17 +36,16 @@ def get_response(
     return tgt_text
 
 
-num_beams = 10
-num_return_sequences = st.slider("Number of paraphrases", 1, 10, 5, 1)
-context = st.text_area(label="Enter a sentence to paraphrase", max_chars=384)
+context = st.text_input(label="Enter a sentence to paraphrase", value="How do I make a deposit?")
+
+num_return_sequences = st.sidebar.slider("Number of paraphrases", 1, 20, 10, 1)
+num_beams = num_return_sequences
+temperature = st.sidebar.slider("Temperature", 0.1, 5.0, 1.5, 0.1)
+max_length = st.sidebar.slider("Max length", 10, 100, 60, 10)
 
-with st.expander("Advanced"):
-    temperature = st.slider("Temperature", 0.1, 5.0, 1.5, 0.1)
-    max_length = st.slider("Max length", 10, 100, 60, 10)
 if context:
     response = get_response(
         context, num_return_sequences, num_beams, max_length, temperature
     )
 
-for paraphrase in response:
-    st.write(paraphrase)
+st.write(response)
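The hunk header (@@ -36,17 +36,16 @@ def get_response() shows that the call site passes context, num_return_sequences, num_beams, max_length and temperature into get_response, but the helper's body and the model setup (roughly original lines 8-35) fall outside the diff context. The sketch below shows what that elided code typically looks like for tuner007/pegasus_paraphrase, following the model card's standard usage; it is an assumption for readers of this change, not the actual contents of app.py.

# Hypothetical reconstruction of the elided setup and get_response helper
# (outside the diff context); based on the usual tuner007/pegasus_paraphrase
# recipe, not the actual contents of app.py.
import streamlit as st
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

model_name = "tuner007/pegasus_paraphrase"
torch_device = "cpu"

tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)


def get_response(input_text, num_return_sequences, num_beams, max_length, temperature):
    # Tokenize the input sentence and move the tensors to the chosen device
    batch = tokenizer(
        [input_text],
        truncation=True,
        padding="longest",
        max_length=60,
        return_tensors="pt",
    ).to(torch_device)
    # Beam-search generation; temperature is forwarded as in the app's call site
    translated = model.generate(
        **batch,
        max_length=max_length,
        num_beams=num_beams,
        num_return_sequences=num_return_sequences,
        temperature=temperature,
    )
    # Decode every returned beam into a plain string
    tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
    return tgt_text

With a helper of that shape in place, the new sidebar sliders control how many beams are searched and how long each paraphrase may be, num_beams is tied to num_return_sequences so every requested paraphrase gets its own beam, and st.write(response) renders the returned list of strings directly.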