merve (HF Staff) committed on
Commit 744dc4b · 1 Parent(s): 4f34d4f

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -26,7 +26,7 @@ st.write("Try it yourself here 👇🏻")
 generator = pipeline("text2text-generation", model = "mrm8488/t5-small-finetuned-quora-for-paraphrasing")
 
 default_value_gen = "How can I put out grease fire?"
-sent = st.text_area("Input", placeholder = default_value_gen, height = 10)
+sent = st.text_area(label = "Input", value = default_value_gen, height = 10)
 outputs = generator(sent)
 st.write("Paraphrased Example:")
 st.write(outputs[0]["generated_text"])
@@ -41,7 +41,7 @@ st.write("Your English intent classification model will be between these two mod
 model_id = "Helsinki-NLP/opus-mt-en-fr"
 
 default_value_tr = "How are you?"
-tr_input = st.text_input("Input in English", placeholder = default_value_tr, key = "translation")
+tr_input = st.text_area(label = "Input in English", value = default_value_tr, height = 5)
 outputs = query(tr_input, model_id, api_token)
 st.write("Translated Example:")
 st.write(outputs[0]["translation_text"])
@@ -52,8 +52,8 @@ st.subheader("Easy Information Retrieval")
 st.write("If you're making a chatbot that needs to provide information to user, you can take user's query and search for the answer in the documents you have, using question answering models. Look at the example and try it yourself here 👇🏻")
 
 qa_model = pipeline("question-answering")
-question = st.text_area("Question", placeholder = "What does transformers do?", height = 5)
-context = st.text_area("Context", placeholder = "🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio.")
+question = st.text_area(label = "Question", value = "What does transformers do?", height = 5)
+context = st.text_area(label = "Context", value = "🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio.")
 output_answer = qa_model(question = question, context = context)
 st.write("Answer:")
 st.write(output_answer["answer"])
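
For reference, the edited calls differ in how Streamlit handles the default text: `placeholder` only renders grayed-out hint text while the box is empty, so the widget returns an empty string on the first script run, whereas `value` pre-fills the widget and is returned immediately to the pipeline calls that follow. The sketch below is a minimal standalone illustration of that difference, not part of the commit; the labels and default text are placeholders, not taken from app.py.

# Minimal sketch (not part of the commit) contrasting the two keyword
# arguments touched in this diff. Assumes only that streamlit is installed;
# the labels and default text are illustrative.
import streamlit as st

default_text = "How can I put out grease fire?"

# `placeholder` shows hint text only while the field is empty, so the
# returned string is "" on the first script run.
hinted = st.text_area(label="With placeholder", placeholder=default_text)

# `value` pre-fills the widget, so the returned string equals the default
# text right away and downstream calls receive non-empty input.
prefilled = st.text_area(label="With value", value=default_text)

st.write("placeholder returns:", repr(hinted))
st.write("value returns:", repr(prefilled))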