import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)
# From here down is all the Streamlit UI.
st.set_page_config(page_title="DataStreams Chat Model", page_icon=":robot:")
st.header("Hello! I'm a hypertension consultation AI chatbot built by DataStreams. I remember what you tell me, so feel free to keep asking follow-up questions.")
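# Keep the running conversation in Streamlit's session state so it survives reruns,
# seeded once with the doctor-persona system prompt.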
if "sessionMessages" not in st.session_state:
st.session_state.sessionMessages = [
SystemMessage(content="You are a renowned doctor specializing in hypertension.")
]
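# Append the user's question, send the full history to the chat model, and store the
# assistant's reply so follow-up questions keep their context.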
def load_answer(question):
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    assistant_answer = chat(st.session_state.sessionMessages)
    st.session_state.sessionMessages.append(AIMessage(content=assistant_answer.content))
    return assistant_answer.content
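# Simple text box for the user's question.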
def get_text():
    input_text = st.text_input("You: ", key="input")
    return input_text
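# Chat model client; temperature=0 keeps the answers as deterministic as possible.
# Assumes the OpenAI key is provided via the OPENAI_API_KEY environment variable.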
chat = ChatOpenAI(temperature=0)
user_input = get_text()
submit = st.button('Generate')
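# On click, run the question through the model and render the reply.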
if submit:
    response = load_answer(user_input)
    st.subheader("Answer:")
    st.write(response)