# Import the necessary libraries (requires: pip install openai streamlit)
import openai
import streamlit as st
from openai import OpenAI
st.title("Mini Project 2: Streamlit Chatbot")

client = None
if "is_valid" not in st.session_state: | |
st.session_state["is_valid"] = False | |
if 'api_key' not in st.session_state: | |
st.session_state["api_key"] = "" | |
def check_openai_api_key():
    client = OpenAI(api_key=st.session_state["api_key"])
    st.session_state["is_valid"] = False
    try:
        client.models.list()
    except (openai.AuthenticationError, openai.APIConnectionError, openai.APIError):
        # All of these subclass openai.APIError; any of them means the key cannot be used.
        st.session_state["is_valid"] = False
    else:
        st.session_state["is_valid"] = True
        st.success("Valid OpenAI API key entered successfully!")
if not st.session_state["is_valid"]:
    st.session_state["api_key"] = st.text_input(label="Enter your OpenAI API Key", type="password")
    if st.session_state["api_key"]:
        # Only validate once the user has actually entered something.
        check_openai_api_key()

if st.session_state["is_valid"]:
    client = OpenAI(api_key=st.session_state["api_key"])
# Check for existing session state variables
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-3.5-turbo"
if "messages" not in st.session_state:
    st.session_state["messages"] = []  # each entry: {"role": ..., "content": ...}
# Display existing chat messages
for message in st.session_state["messages"]:
    st.chat_message(message["role"]).write(message["content"])
# Wait for user input (only once a valid client is available)
if st.session_state["is_valid"] and (prompt := st.chat_input("What would you like to chat about?")):
    # Append the user message to the conversation history
    st.session_state["messages"].append({"role": "user", "content": prompt})
    # Display the user message
    st.chat_message("user").write(prompt)

    # Generate the AI response
    with st.chat_message("assistant"):
        # Send the full conversation to the OpenAI Chat Completions API
        stream = client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=st.session_state["messages"],
            stream=True,
        )
        # Stream the response into the UI and capture the full text
        response = st.write_stream(stream)
    # Append the AI response to the conversation history
    st.session_state["messages"].append({"role": "assistant", "content": response})