import streamlit as st
import os
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv
# Load environment variables (e.g. OPENAI_API_KEY_NEW, read below) from a local .env file.
load_dotenv()
## Streamlit UI
st.set_page_config(page_title='Conversational Q&A Chatbot', page_icon=':robot_face:', layout='wide')
# NOTE(review): the original HTML payloads of these markdown calls were lost
# (the string literals were broken mid-line); reconstructed as simple centered
# headings carrying the surviving visible text. unsafe_allow_html is kept so
# richer markup can be restored without further changes.
st.markdown("<h1 style='text-align: center;'>Conversational Q&A Chatbot</h1>", unsafe_allow_html=True)
st.markdown("<h3>Hey, Let's Chat!</h3>", unsafe_allow_html=True)
# Chat model: gpt-3.5-turbo with moderately creative sampling.
# The API key is read from the OPENAI_API_KEY_NEW environment variable
# (populated by load_dotenv above).
chat_model = ChatOpenAI(
    openai_api_key=os.getenv("OPENAI_API_KEY_NEW"),
    model='gpt-3.5-turbo',
    temperature=0.7,
)

# Seed the conversation once per browser session with the system prompt;
# Streamlit reruns the whole script on every interaction, so guard the init.
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = [
        SystemMessage(content="You are an intelligent chatbot. Please answer the following questions.")
    ]
def get_chatmodel_response(question):
    """Send *question* to the chat model and return the reply text.

    Appends the question as a HumanMessage and the model's reply as an
    AIMessage to ``st.session_state['chat_history']`` so the full
    conversation is replayed to the model on every call.
    """
    st.session_state['chat_history'].append(HumanMessage(content=question))
    # ChatOpenAI.__call__ is deprecated in current LangChain; .invoke()
    # accepts the same message list and returns the same AIMessage object.
    response = chat_model.invoke(st.session_state['chat_history'])
    st.session_state['chat_history'].append(AIMessage(content=response.content))
    return response.content
# Layout: the input form is declared first, the answer renders below it.
input_container = st.container()
response_container = st.container()

with input_container:
    # clear_on_submit empties the text box after each question; pressing
    # Enter inside a single-input st.form already submits it, so no custom
    # JavaScript is needed (the original script embedded a now-lost JS snippet
    # in an empty st.markdown call, removed here as a no-op).
    with st.form(key='input_form', clear_on_submit=True):
        user_input = st.text_input("Input:", key="Input", placeholder="Ask me anything...", label_visibility='collapsed')
        submit = st.form_submit_button('Ask the question..')

if submit:
    if user_input.strip() == "":
        st.warning("Please enter a question first.")
    else:
        with st.spinner('Thinking...'):
            response = get_chatmodel_response(user_input)
        with response_container:
            st.subheader('The response is:')
            st.write(response)
# Render the running conversation below the latest answer.
# NOTE(review): the original HTML wrappers in these markdown strings were lost
# (broken string literals); reconstructed with plain markdown role labels.
if 'chat_history' in st.session_state and st.session_state['chat_history']:
    st.markdown("<h3>Chat History</h3>", unsafe_allow_html=True)
    for msg in st.session_state['chat_history']:
        # SystemMessage entries are deliberately not shown to the user.
        if isinstance(msg, HumanMessage):
            st.markdown(f"**You:** {msg.content}", unsafe_allow_html=True)
        elif isinstance(msg, AIMessage):
            st.markdown(f"**Bot:** {msg.content}", unsafe_allow_html=True)