fix bugs and logging
app.py CHANGED
@@ -43,6 +43,9 @@ if "follow_up_mode" not in st.session_state:
 if "generated_question" not in st.session_state:
     st.session_state.generated_question = None  # Stores the generated question for persistence

+if "debug_logs" not in st.session_state:
+    st.session_state.debug_logs = []  # Stores debug logs for toggling
+
 # Function to find the top 1 most similar question based on user input
 def find_top_question(query):
     # Generate embedding for the query
@@ -65,13 +68,14 @@ def find_top_question(query):

 # Function to generate response using OpenAI API with debugging logs
 def generate_response(messages):
-
-    st.
+    debug_log_entry = {"messages": messages}
+    st.session_state.debug_logs.append(debug_log_entry)  # Store debug log

     response = client.chat.completions.create(
         model="o1-mini",
         messages=messages,
     )
+
     return response.choices[0].message.content

 # User input form for generating a new question
@@ -108,12 +112,6 @@ if generate_button:
     # Store generated question in session state for persistence in sidebar
     st.session_state.generated_question = response

-    # Display assistant response in chat message container and add to session history
-    with st.chat_message("assistant"):
-        st.markdown(response)
-
-    st.session_state.messages.append({"role": "assistant", "content": response})
-
     # Enable follow-up mode after generating the initial question
     st.session_state.follow_up_mode = True

@@ -141,7 +139,7 @@ if st.session_state.follow_up_mode:

         st.session_state.messages.append({"role": "assistant", "content": assistant_response})

-# Sidebar content to display persistent generated question
+# Sidebar content to display persistent generated question (left sidebar)
 st.sidebar.markdown("## Generated Question")
 if st.session_state.generated_question:
     st.sidebar.markdown(st.session_state.generated_question)
@@ -153,4 +151,10 @@ st.sidebar.markdown("""
 This is a Real-World Interview Question Generator powered by OpenAI's API.
 Enter a company name, topic, and level of difficulty, and it will transform a relevant question into a real-world interview scenario!
 Continue chatting with the assistant in the chatbox below.
-""")
+""")
+
+# Right sidebar toggleable debug logs
+with st.expander("Debug Logs (Toggle On/Off)", expanded=False):
+    if len(st.session_state.debug_logs) > 0:
+        for log_entry in reversed(st.session_state.debug_logs):  # Show most recent logs first
+            st.write(log_entry)
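
For readers unfamiliar with the pattern this commit introduces, the sketch below shows the session-state debug log in isolation. It is a minimal, standalone Streamlit example and not the full app.py: the log_debug helper and the sample payload are illustrative assumptions, while st.session_state, st.expander, and st.write are the actual Streamlit APIs the commit relies on.

import streamlit as st

# Minimal standalone sketch of the commit's logging pattern (illustrative, not app.py).
# Debug entries accumulate in st.session_state so they survive Streamlit reruns.
if "debug_logs" not in st.session_state:
    st.session_state.debug_logs = []

def log_debug(entry):
    # Hypothetical helper: record one debug entry (e.g. the message list sent to the model).
    st.session_state.debug_logs.append(entry)

# Example usage: log a payload in place of a real API call.
log_debug({"messages": [{"role": "user", "content": "example prompt"}]})

# Collapsible viewer, collapsed by default; most recent entries shown first.
with st.expander("Debug Logs (Toggle On/Off)", expanded=False):
    if len(st.session_state.debug_logs) > 0:
        for log_entry in reversed(st.session_state.debug_logs):
            st.write(log_entry)
    else:
        st.write("No debug logs yet.")

Because st.session_state persists across reruns, entries appended inside generate_response are still available when the expander renders at the bottom of the script, which is what makes the toggleable Debug Logs panel work without any external storage.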