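"""Streamlit chatbot backed by Gemini 1.5 Pro through LangChain.

The app keeps a rolling window of the last three exchanges in a
ConversationBufferWindowMemory and injects it into the prompt on each turn.
"""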
import os
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.memory import ConversationBufferWindowMemory
from operator import itemgetter
from langchain_core.runnables import RunnableLambda,RunnablePassthrough
import streamlit as st
# Read the Google Generative AI API key from the environment
genai_key = os.getenv("gen_key")

model = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0,
    max_output_tokens=150,
    convert_system_message_to_human=True,
    google_api_key=genai_key,
)
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant."),
    MessagesPlaceholder(variable_name="history"),
    ("human", "{input}"),
])
# Keep the window memory in session state so it survives Streamlit reruns;
# a fresh module-level instance would lose the history on every interaction.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=3, return_messages=True)
memory = st.session_state.memory

chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
    | model
)
# Streamlit interface
st.title("chat bot")
st.write("Enter your input text:")
def end_conv():
    """Clear the stored conversation when the user ends the chat."""
    st.write("Conversation ended.")
    memory.clear()
    st.session_state.conversation_history = []
# Initialize session state for conversation history if not already done
if "conversation_history" not in st.session_state:
    st.session_state.conversation_history = []
# User input
user_input = st.text_area("Input Text")
# Generate and display the response
if st.button("Generate Response"):
    # The chain pulls the current history from memory on its own,
    # so only the new user input needs to be passed in.
    res = chain.invoke({"input": user_input})
    response_content = res.content
    st.write("Generated Response:")
    st.write(response_content)

    # Save the exchange in memory and in session state
    memory.save_context({"input": user_input}, {"output": response_content})
    st.session_state.conversation_history.extend(
        [
            {"role": "human", "content": user_input},
            {"role": "assistant", "content": response_content},
        ]
    )
# Display the updated conversation history
#st.write("Conversation History:")
#for msg in st.session_state.conversation_history:
# st.write(f"{msg['role']}: {msg['content']}")
# End conversation button
if st.button("End Conversation"):
end_conv() |