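# Streamlit chat UI backed by a LangChain OpenAI Functions agent with
# DuckDuckGo search and conversation-buffer memory.
# To run locally (assuming this file is saved as app.py): streamlit run app.py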
import os
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.callbacks import StreamlitCallbackHandler
from langchain.memory import ConversationBufferMemory
from langchain.prompts import MessagesPlaceholder
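# Configuration is read from environment variables (on a Hugging Face Space,
# set them as Space variables/secrets): OPENAI_API_MODEL, OPENAI_API_TEMPERATURE
# and an optional "system_prompt" default. ChatOpenAI additionally expects
# OPENAI_API_KEY to be set.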
st.title("Chatbot")
api_model = os.getenv("OPENAI_API_MODEL")
# The environment variable arrives as a string; ChatOpenAI expects a float.
temperature = float(os.getenv("OPENAI_API_TEMPERATURE"))

origin_text = st.sidebar.text_area("Enter a system prompt")
# Fall back to the "system_prompt" environment variable (empty string if unset).
system_prompt = origin_text if origin_text else os.getenv("system_prompt", "")
print(system_prompt)
def create_agent_chain():
    chat = ChatOpenAI(
        model_name=api_model,
        temperature=temperature,
        streaming=True,
    )
    # Expose the conversation memory to the agent's prompt via a placeholder.
    agent_kwargs = {
        "extra_prompt_messages": [MessagesPlaceholder(variable_name="memory")],
    }
    memory = ConversationBufferMemory(memory_key="memory", return_messages=True)
    # DuckDuckGo search is the only tool given to the agent.
    tools = load_tools(["ddg-search"])
    return initialize_agent(
        tools,
        chat,
        agent=AgentType.OPENAI_FUNCTIONS,
        agent_kwargs=agent_kwargs,
        memory=memory,
    )
if "messages" not in st.session_state:
st.session_state.messages = []
if "agent_chain" not in st.session_state:
st.session_state.agent_chain = create_agent_chain()
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
prompt = st.chat_input("What is up?")

if prompt:
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        callback = StreamlitCallbackHandler(st.container())
        # Prepend the system prompt to the user input and stream the agent's steps.
        response = st.session_state.agent_chain.run(system_prompt + prompt, callbacks=[callback])
        st.markdown(response)

    st.session_state.messages.append({"role": "assistant", "content": response})