"""Streamlit chatbot backed by Gemini (via LangChain) with a 3-turn memory window."""

import os
from operator import itemgetter

import streamlit as st
from langchain.memory import ConversationBufferWindowMemory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

from langchain_google_genai import ChatGoogleGenerativeAI

# API key is read from the environment; if unset, model calls will fail at
# invoke time. NOTE(review): consider surfacing a clear st.error in that case.
genai_key = os.getenv("gen_key")

model = ChatGoogleGenerativeAI(
    temperature=0,
    model="gemini-1.5-pro",
    max_output_tokens=150,
    convert_system_message_to_human=True,
    google_api_key=genai_key,
)

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "you are a good assistant that give information about mentioned topic."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ]
)

# BUG FIX: Streamlit re-executes this entire script on every widget
# interaction, so a plain module-level memory object is recreated — and its
# contents wiped — on every rerun. Persisting it in st.session_state lets the
# conversation window (last k=3 exchanges) actually survive across turns.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=3, return_messages=True)
memory = st.session_state.memory

# Chain: inject the stored history into the prompt, then call the model.
chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
    | model
)

# Streamlit interface
st.title("chat bot")
st.write("Enter your input text:")


def end_conv():
    """Display a closing message (history stays in session state)."""
    st.write("Conversation ended.")


# User input
user_input = st.text_area("Input Text")

# Generate and display the response
if st.button("Generate Response"):
    # The chain pulls history itself via RunnableLambda, so no separate
    # load_memory_variables call is needed here (dead code removed).
    res = chain.invoke({"input": user_input})
    response_content = res.content
    st.write("Generated Response:")
    st.write(response_content)
    # Persist this exchange so the next invocation sees it in {history}.
    memory.save_context({"input": user_input}, {"output": response_content})

# End conversation button
if st.button("End Conversation"):
    end_conv()