# NOTE(review): the three lines below were Hugging Face Spaces page residue
# ("Spaces: / Sleeping / Sleeping") captured by the scrape — kept as a comment
# so the module parses.
# Third-party dependencies (scrape artifact " | |" removed from every line).
import streamlit as st
from dotenv import load_dotenv
from langchain import hub
from langchain_core.messages.chat import ChatMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langchain_teddynote.prompts import load_prompt

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
# before any LLM client is constructed.
load_dotenv()
# Page title (original UI string preserved byte-for-byte).
st.title("๋๋ง์ ์ฑGPT๐ฌ")

# Initialize the chat history exactly once per browser session.
if "messages" not in st.session_state:
    st.session_state["messages"] = []

# Sidebar: reset button and prompt-mode selector.
with st.sidebar:
    clear_btn = st.button("๋ํ ์ด๊ธฐํ")
    # BUG FIX: the original passed ("๊ธฐ๋ณธ๋ชจ๋") — a parenthesized *string*,
    # not a tuple — so st.selectbox iterated over individual characters.
    # The trailing comma makes it a proper one-element tuple.
    selected_prompt = st.selectbox("ํ๋กฌํํธ๋ฅผ ์ ํํด ์ฃผ์ธ์", ("๊ธฐ๋ณธ๋ชจ๋",), index=0)
# ์ด์ ๋ํ๋ฅผ ์ถ๋ ฅ | |
def print_messages():
    """Re-render every message stored in the session chat history.

    Streamlit reruns the script on each interaction, so this replays the
    conversation so far into the chat area.
    """
    for chat_message in st.session_state["messages"]:
        st.chat_message(chat_message.role).write(chat_message.content)
# ์๋ก์ด ๋ฉ์์ง๋ฅผ ์ถ๊ฐ | |
def add_message(role, message):
    """Append one message to the session chat history.

    Args:
        role: Chat role string (e.g. "user" or "assistant").
        message: The message text to store.
    """
    st.session_state["messages"].append(ChatMessage(role=role, content=message))
# ์ฒด์ธ ์์ฑ | |
def create_chain(prompt_type):
    """Build the LCEL pipeline: prompt | llm | string output parser.

    Args:
        prompt_type: Selected prompt mode from the sidebar. NOTE(review):
            currently unused — only one mode exists, so the same prompt is
            always built; wire this up when more modes are added.

    Returns:
        A runnable chain that accepts {"question": str} and streams/returns
        a plain string answer.
    """
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "๋น์ ์ ์น์ ํ AI ์ด์์คํดํธ์ ๋๋ค. ๋ค์์ ์ง๋ฌธ์ ๊ฐ๊ฒฐํ๊ฒ ๋ต๋ณํด ์ฃผ์ธ์.",
            ),
            ("user", "#Question:\n{question}"),
        ]
    )
    # Deterministic output (temperature=0) from the chat model.
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
    return prompt | llm | StrOutputParser()
# Reset the conversation when the sidebar button was pressed on this rerun.
if clear_btn:
    st.session_state["messages"] = []

# Replay the conversation so far (script reruns on every interaction).
print_messages()

user_input = st.chat_input("๊ถ๊ธํ ๋ด์ฉ์ ๋ฌผ์ด๋ณด์ธ์!")
if user_input:
    # Echo the user's message immediately.
    st.chat_message("user").write(user_input)

    chain = create_chain(selected_prompt)
    response = chain.stream({"question": user_input})

    # Stream tokens into a single placeholder so the answer grows in place.
    with st.chat_message("assistant"):
        container = st.empty()
        ai_answer = ""
        for token in response:
            ai_answer += token
            container.markdown(ai_answer)

    # Persist both sides only after the full answer has been streamed.
    add_message("user", user_input)
    add_message("assistant", ai_answer)