# Author: DJOMGA TOUKO Peter Charles
# Commit: migrate to GPT 4 (b4fd81f)
import streamlit as st
from openai import OpenAI
from app_kb_handler import *
# Chat model used for every OpenAI completion in this app.
# The previous (cheaper) model is kept commented for easy rollback.
# model = "gpt-3.5-turbo"
model = "gpt-4-turbo"
# ------------------------------------------------------------------------------------------------
# SIDEBAR
# ------------------------------------------------------------------------------------------------
# Sidebar header: app title plus a short description of the architecture
# and the origin of the knowledge base.
st.sidebar.title('OpenAI Knowledge Base of Irembo')
# Fixed grammar in the user-facing description ("is build" -> "is built",
# "have been crawl" -> "has been crawled").
st.sidebar.write(
    'This chat bot is built with a RAG architecture and OpenAI as the LLM. '
    'The entire Knowledge Base has been crawled automatically from the website '
    'https://support.irembo.gov.rw/ '
)
def onchange_openai_key():
    """Debug callback fired whenever the OpenAI-key text input changes.

    Logs the current value of the input (stored under the 'openai_key'
    session-state key) to stdout.
    """
    print(st.session_state['openai_key'])


# Text input for the user's OpenAI API key; the widget value is mirrored
# into st.session_state under the same 'openai_key' key.
openai_key = st.sidebar.text_input('OpenAI key', on_change=onchange_openai_key, key='openai_key')
def submit_openai_key(model=model):
    """Validate the user-supplied OpenAI key by running a tiny test completion.

    If no key has been entered, asks the user for one in the sidebar.
    Otherwise it sends a trivial "count from 0 to 10" request and writes the
    answer to the sidebar so the user can confirm the key works.

    Args:
        model: Chat model name; defaults to the module-level ``model``.
    """
    # Guard clause: empty or missing key (replaces the non-idiomatic
    # `== None or == ''` comparison; text_input yields None or str).
    if not openai_key:
        st.sidebar.write('Please provide the key before')
        return
    client = OpenAI(api_key=openai_key)
    # NOTE: the original had a no-op `model = model` here; removed.
    completion = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": "You are an assistant giving simple and short answer for question of child"},
            {"role": "user", "content": "count from 0 to 10"},
        ],
    )
    st.sidebar.write(f'Simple count : {completion.choices[0].message.content}')


# Button that triggers the key validation above.
submit_key = st.sidebar.button(label='Submit', on_click=submit_openai_key)
# ------------------------------------------------------------------------------------------------
# CHAT TITLE
# ------------------------------------------------------------------------------------------------
# Main chat area header.
st.title('OpenAI Knowledge Base of Irembo')
# Fixed: original used an f-string with no placeholders; plain string suffices.
st.write('Ask any question regarding using Irembo platform to apply for any services.')
def askQuestion(model=model, question=''):
    """Answer ``question`` against the Irembo knowledge base (RAG).

    Lazily loads the knowledge-base embeddings into Streamlit session state
    on first use, then delegates to ``answer_question`` from app_kb_handler.

    Args:
        model: Chat model to use; defaults to the module-level ``model``.
        question: The user's question text.

    Returns:
        The LLM's answer string, or an error message when no API key is set.
    """
    # Guard clause: no key entered yet (idiomatic falsiness check instead
    # of `== None or == ''`).
    if not openai_key:
        print('Please provide the key before')
        return 'LLM API is not defined. Please provide the key before'
    # Cache the embeddings across Streamlit reruns: get_embeddings() is
    # expensive, so only compute it once per session.
    if "df" not in st.session_state:
        st.session_state.df = get_embeddings()
    return answer_question(api_key=openai_key, question=f'{question}', df=st.session_state.df, model=model)
# Create the chat transcript on first run; it survives Streamlit reruns
# because it lives in session state.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay every stored turn so the conversation stays visible after a rerun.
for past_turn in st.session_state.messages:
    with st.chat_message(past_turn["role"]):
        st.markdown(past_turn["content"])
# React to a new user prompt: echo it, query the knowledge base, and record
# both turns in the session transcript.
if prompt := st.chat_input("What is up?"):
    with st.status('Running', expanded=True) as status:
        # Show the user's message immediately.
        st.chat_message("user").markdown(prompt)
        # Persist the user turn.
        st.session_state.messages.append({"role": "user", "content": prompt})
        response = askQuestion(question=prompt)
        # Show the assistant's answer.
        with st.chat_message("assistant"):
            st.markdown(response)
        # Persist the assistant turn.
        st.session_state.messages.append({"role": "assistant", "content": response})
        # Typo fix: 'Reponse' -> 'Response' in the user-facing status label.
        status.update(label='Response of last question', state="complete", expanded=True)