Update app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 import os
 import streamlit as st
 import model as demo_chat
+import request as re
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 st.title("Hi, I am Chatbot Philio :woman:")
@@ -18,7 +19,7 @@ scrollable_div_style = """
 </style>
 """
 
-llm_chain = demo_chat.chain()
+#llm_chain = demo_chat.chain()
 
 def render_chat_history(chat_history):
     #renders chat history
@@ -43,8 +44,10 @@ if input_text := st.chat_input(placeholder="Here you can chat with our hotel boo
     st.session_state.chat_history.append({"role" : "human", "content" : input_text}) #append message to chat history
 
     with st.spinner("Generating response..."):
-        first_answer = llm_chain.predict(input = input_text)
-        answer = first_answer.strip()
+        #first_answer = llm_chain.predict(input = input_text)
+        #answer = first_answer.strip()
+        input = input_text + "AI:"
+        answer = re.generate_response(input)
 
         with st.chat_message("assistant"):
            st.markdown(answer)
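
The new code path calls re.generate_response(input) from the request module imported at the top, but that module is not included in this commit. Below is a minimal sketch of what such a helper might look like, assuming it wraps a hosted text-generation HTTP endpoint; the endpoint URL, environment variables, and response schema are placeholders, not the repository's actual implementation.

# Hypothetical sketch of the local request.py module that app.py imports as "re".
# The real module is not part of this commit; the endpoint, env vars, and response
# schema below are placeholders and would need to match the actual backend.
import os
import requests

API_URL = os.environ.get("INFERENCE_API_URL", "https://example.com/generate")
API_TOKEN = os.environ.get("API_TOKEN", "")

def generate_response(prompt: str) -> str:
    """Send the prompt to a text-generation endpoint and return the reply text."""
    response = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {API_TOKEN}"},
        json={"inputs": prompt},
        timeout=60,
    )
    response.raise_for_status()
    data = response.json()
    # Assumes the endpoint returns [{"generated_text": "..."}]; adjust to the real schema.
    return data[0]["generated_text"].strip()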
|