# chat_bot/app.py
import os
from operator import itemgetter

import streamlit as st
from langchain.memory import ConversationBufferWindowMemory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_google_genai import ChatGoogleGenerativeAI
# The Gemini API key is read from the "gen_key" environment variable
genai_key = os.getenv("gen_key")
model = ChatGoogleGenerativeAI(model="gemini-1.5-pro", temperature=0, max_output_tokens=150,
                               convert_system_message_to_human=True, api_key=genai_key)
# Prompt: system instruction, rolling conversation history, then the new user message
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant."),
    MessagesPlaceholder(variable_name="history"),
    ("human", "{input}"),
])
# Keep memory in st.session_state so it survives Streamlit reruns (window of the last k=3 turns)
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=3, return_messages=True)
memory = st.session_state.memory

chain = (RunnablePassthrough.assign(history=RunnableLambda(memory.load_memory_variables)
                                    | itemgetter("history")) | prompt | model)
# Streamlit interface
st.title("chat bot")
st.write("Enter your input text:")
# User input
user_input = st.text_area("Input Text")
# Generate and display the response
if st.button("Generate Response"):
res = chain.invoke({"input": user_input})
st.write("Generated Response:")
st.write(res.content)
# Save the context
memory.save_context({"input": user_input}, {"output": res.content})
# Display the conversation history
# st.write("Conversation History:")
# for msg in memory.buffer:
#     st.write(f"{msg.type}: {msg.content}")
# Button to reset the conversation history
if st.button("End Conversation"):
    st.write("Conversation ended.")
    memory.clear()
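# To try this locally (assumes the streamlit, langchain, langchain-core, and
# langchain-google-genai packages are installed and gen_key is set in the environment):
#   streamlit run app.py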