|
import html
import os

import streamlit as st
from dotenv import load_dotenv
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from langchain_openai import ChatOpenAI
|
load_dotenv() |
|
|
|
|
|
|
|
|
|
# --- Page chrome and model setup ---------------------------------------------
st.set_page_config(page_title='Conversational Q&A Chatbot', page_icon=':robot_face:', layout='wide')

st.markdown("<h1 style='text-align: center; color: #ed4613;'>Conversational Q&A Chatbot</h1>", unsafe_allow_html=True)

st.markdown("<h4 style='text-align: center; color: #4CAF50;'>Hey, Let's Chat!</h4>", unsafe_allow_html=True)


# Single shared chat-model instance for the whole script run; the key is read
# from the environment populated by load_dotenv() above.
# NOTE(review): the env var is OPENAI_API_KEY_NEW, not the conventional
# OPENAI_API_KEY — confirm it exists in .env, otherwise this passes None.
chat_model = ChatOpenAI(openai_api_key=os.getenv("OPENAI_API_KEY_NEW"), model='gpt-3.5-turbo', temperature=0.7)
|
|
|
# Seed the conversation exactly once per browser session with a system prompt;
# st.session_state survives Streamlit's top-to-bottom re-runs of this script.
if 'chat_history' not in st.session_state:

    st.session_state['chat_history'] = [

        SystemMessage(content="You are an intelligent chatbot. Please answer the following questions.")

    ]
|
|
|
def get_chatmodel_response(question):
    """Send *question* to the chat model with full conversation context.

    Appends the user's message to the session chat history, runs the model
    over the entire history (so earlier turns inform the answer), records
    the AI reply back into the history, and returns the reply text.

    Args:
        question: The user's question as a plain string.

    Returns:
        The model's reply content as a string.
    """
    history = st.session_state['chat_history']
    history.append(HumanMessage(content=question))
    # Calling the model instance directly (chat_model(history)) is the
    # deprecated LangChain __call__ API; .invoke() is the supported
    # Runnable interface and returns the same AIMessage.
    response = chat_model.invoke(history)
    history.append(AIMessage(content=response.content))
    return response.content
|
|
|
# Layout: the question form renders on top, the model response underneath.
input_container = st.container()

response_container = st.container()


with input_container:

    # clear_on_submit empties the text box after each question; Streamlit
    # forms also submit natively when Enter is pressed in a text_input.
    with st.form(key='input_form', clear_on_submit=True):

        user_input = st.text_input("Input:", key="Input", placeholder="Ask me anything...", label_visibility='collapsed')

        submit = st.form_submit_button('Ask the question..')
|
|
|
|
|
# NOTE(review): a <script> block injected via st.markdown(..., unsafe_allow_html=True)
# was removed here. st.markdown sanitizes its HTML and never executes <script>
# tags (an iframe via streamlit.components.v1.html would be required), so the
# Enter-key handler it defined could not run. It was also redundant: the
# st.form above already submits when Enter is pressed in its text_input.
|
|
|
# Handle a submitted question: validate it is non-empty, call the model with
# a spinner while waiting, then render the answer in the response area.
if submit:

    if user_input.strip() == "":

        st.warning("Please enter a question first.")

    else:

        with st.spinner('Thinking...'):

            response = get_chatmodel_response(user_input)

        with response_container:

            st.subheader('The response is:')

            st.write(response)
|
|
|
# Render the running transcript. The SystemMessage seeded at startup is
# skipped implicitly: it matches neither isinstance branch below.
if 'chat_history' in st.session_state and st.session_state['chat_history']:

    st.markdown("<h3 style='color: #4CAF50;'>Chat History</h3>", unsafe_allow_html=True)

    for msg in st.session_state['chat_history']:

        # Escape message text before interpolating it into raw HTML: the
        # user's input (and model output quoting it) is untrusted, and
        # unsafe_allow_html=True would otherwise render any markup it
        # contains — an HTML/JS injection hole.
        if isinstance(msg, HumanMessage):

            st.markdown(f"<div style='text-align: right; color: #0000FF;'>{html.escape(msg.content)}</div>", unsafe_allow_html=True)

        elif isinstance(msg, AIMessage):

            st.markdown(f"<div style='text-align: left; color: #FF5733;'>{html.escape(msg.content)}</div>", unsafe_allow_html=True)
|
|