File size: 3,816 Bytes
e85383d d0ceb72 e872638 d0ceb72 e85383d d0ceb72 421230a e901fae 421230a e901fae 421230a d0ceb72 e901fae d0ceb72 e85383d 421230a e85383d 421230a e85383d d0ceb72 421230a e85383d e901fae e85383d 421230a e85383d 421230a e85383d 421230a e85383d d0ceb72 e85383d 421230a e85383d 421230a e85383d 421230a e85383d 421230a e85383d fe8b598 421230a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 |
# import os
# import streamlit as st
# from langchain.chains import ConversationChain
# from langchain_openai import ChatOpenAI
# from langchain.memory import ConversationBufferMemory
# os.environ["OPENAI_API_KEY"] = ""
# # Intialize the chatbot
# @st.cache_resource
# def init_chatbot():
# memory = ConversationBufferMemory()
# chatbot = ConversationChain(
# llm =ChatOpenAI(model = "gpt-4o-mini"),
# memory = memory,
# verbose = False
# )
# return chatbot
# # Streamlit Application
# st.title("Langchain Chatbot")
# st.write("Hi, I'm a chatbot built with Langchain powered by GPT. How can I assist you today?")
# user_input = st.text_input("You:", placeholder = "Ask me anything....")
# if user_input:
# with st.spinner("Thinking......"):
# resp = chatbot.run(user_input)
# st.write(f"Chatbot: {resp}")
import os
import streamlit as st
from langchain.chains import ConversationChain
from langchain_openai import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from streamlit_extras.add_vertical_space import add_vertical_space
# Set up the OpenAI API key from Streamlit secrets; fail fast (st.stop)
# so the rest of the app never runs without credentials.
if "openai_api_key" in st.secrets:
    os.environ["OPENAI_API_KEY"] = st.secrets["openai_api_key"]
else:
    # NOTE: original emoji was mojibake ("π" = UTF-8 🔑 misdecoded as cp1253)
    st.error("🔑 OpenAI API Key is missing! Please add it to Streamlit secrets.")
    st.stop()
# Initialize chatbot with memory
def init_chatbot(model="gpt-4o-mini"):
    """Build a ConversationChain backed by an in-memory conversation buffer.

    Args:
        model: OpenAI chat-model name forwarded to ChatOpenAI.

    Returns:
        The configured ConversationChain, or None when construction fails —
        callers must handle the None case before using the chatbot.
    """
    try:
        memory = ConversationBufferMemory()
        chatbot = ConversationChain(
            llm=ChatOpenAI(model=model),
            memory=memory,
            verbose=False,
        )
        return chatbot
    except Exception as e:  # UI boundary: surface any init failure instead of crashing
        st.error(f"⚠️ Error initializing chatbot: {e}")
        return None
# Build the chatbot once per session and cache it in session state.
if "chatbot" not in st.session_state:
    st.session_state["chatbot"] = init_chatbot()
# Custom Styling
st.markdown("""
<style>
body {
background-color: #f5f5f5;
}
.stChatMessage {
padding: 10px;
border-radius: 10px;
margin: 5px 0;
}
.user-message {
background-color: #dcf8c6;
text-align: right;
}
.bot-message {
background-color: #ffffff;
}
</style>
""", unsafe_allow_html=True)
# Sidebar - Model Selection
st.sidebar.title("⚙️ Settings")
model_choice = st.sidebar.radio("Select Model", ("gpt-4o-mini", "gpt-4", "gpt-3.5-turbo"))

# Rebuild the chatbot when the selected model changes. Bug fix: the
# original dereferenced st.session_state.chatbot.llm.model_name directly,
# which raises AttributeError whenever init_chatbot() returned None;
# getattr also guards against LangChain versions lacking .model_name.
_current = st.session_state.chatbot
if _current is None or model_choice != getattr(_current.llm, "model_name", None):
    st.session_state.chatbot = init_chatbot(model_choice)
# Title and Description (emoji restored from mojibake: "π¬" was 💬)
st.title("💬 LangChain AI Chatbot")
st.write("### Hi, I'm a chatbot powered by GPT. How can I assist you today?")

# Per-session chat history: list of (role, text) tuples.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
# User Input via Chat Input (better UX than text_input)
user_input = st.chat_input("Type your message here...")

# Process input: run the chain and record both sides of the exchange.
if user_input:
    # Robustness fix: init_chatbot() may have returned None; the original
    # would raise AttributeError on .run() inside the spinner.
    if st.session_state.chatbot is None:
        st.error("⚠️ Chatbot is not initialized. Check your API key and settings.")
    else:
        with st.spinner("Thinking..."):
            try:
                response = st.session_state.chatbot.run(user_input)
                if response:
                    st.session_state.chat_history.append(("user", user_input))
                    st.session_state.chat_history.append(("bot", response))
            except Exception as e:  # UI boundary: show the error, don't crash
                st.error(f"⚠️ Error generating response: {e}")
# Display chat history as chat bubbles (roles: "user" / "bot";
# st.chat_message accepts custom role names).
st.write("### 🗨️ Conversation")
for role, text in st.session_state.chat_history:
    with st.chat_message(role):
        st.markdown(f"**{role.capitalize()}**: {text}")

# Collapsible full transcript (plain text, no bubbles).
# NOTE(review): leading emoji reconstructed from mojibake — original bytes
# suggest 📜; confirm against the intended glyph.
with st.expander("📜 View Full Chat History"):
    for role, text in st.session_state.chat_history:
        st.write(f"**{role.capitalize()}**: {text}")
# Add spacing before the footer, then a rule and a credit line
# (emoji restored from mojibake: "β€οΈ" was ❤️).
add_vertical_space(2)
st.markdown("---")
st.markdown("Developed with ❤️ using Streamlit & LangChain")
|