File size: 1,660 Bytes
dac5706
 
 
 
 
 
 
7c298c3
dac5706
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import os
from langchain_core.prompts import ChatPromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import MessagesPlaceholder
from langchain.memory import ConversationBufferWindowMemory
from operator import itemgetter
from langchain_core.runnables import RunnableLambda,RunnablePassthrough
import streamlit as st

# API key is read from the environment; None here will surface as an auth
# error from the Google client at call time.
genai_key = os.getenv("gen_key")

# Gemini chat model: temperature=0 for deterministic replies, responses
# capped at 150 tokens. convert_system_message_to_human folds the system
# prompt into the first human turn (Gemini has no native system role in
# older API versions).
model = ChatGoogleGenerativeAI(
    temperature=0,
    model='gemini-1.5-pro',
    max_output_tokens=150,
    convert_system_message_to_human=True,
    api_key=genai_key,
)

# Prompt: fixed system instruction, then the remembered history, then the
# current user message.
prompt = ChatPromptTemplate.from_messages([
    ("system", "you are a good assistant"),
    MessagesPlaceholder(variable_name="history"),
    ("human", "{input}"),
])

# BUG FIX: Streamlit re-runs this entire script on every widget interaction,
# so a plain module-level ConversationBufferWindowMemory was recreated (and
# therefore emptied) on each rerun — the bot never actually remembered
# anything. Persist the memory object in st.session_state so the last k=3
# exchanges survive across reruns; keep the `memory` name bound for the
# rest of the script.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=3, return_messages=True)
memory = st.session_state.memory

# Chain: load the stored history, inject it into the prompt under the
# "history" key, then call the model.
chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
    | model
)

# --- Streamlit interface ---
st.title("chat bot")
st.write("Enter your input text:")

# User input
user_input = st.text_area("Input Text")

# Generate and display the response
if st.button("Generate Response"):
    # BUG FIX: guard against invoking the model with an empty/whitespace
    # prompt — previously a blank text area still triggered an API call.
    if not user_input.strip():
        st.warning("Please enter some text before generating a response.")
    else:
        res = chain.invoke({"input": user_input})
        st.write("Generated Response:")
        st.write(res.content)

        # Persist the exchange so it appears in `history` on the next turn.
        memory.save_context({"input": user_input}, {"output": res.content})

# Reset the conversation history on demand.
if st.button("End Conversation"):
    st.write("Conversation ended.")
    memory.clear()