rishabhpr committed
Commit 1535def · verified · 1 Parent(s): 9a9df29
Files changed (1)
  1. app.py +23 -18
app.py CHANGED
```diff
@@ -28,17 +28,12 @@ model = SentenceTransformer("all-MiniLM-L6-v2").to(device)
 with open("prompt.txt", "r") as file:
     system_prompt = file.read()
 
-st.title("Real-World Programming Quesion Mock Interview")
+st.title("Real-World Programming Question Mock Interview")
 
-# Initialize chat history
+# Initialize chat history in session state
 if "messages" not in st.session_state:
     st.session_state.messages = [{"role": "assistant", "content": system_prompt}]
 
-# Display chat messages from history on app rerun
-for message in st.session_state.messages[1:]: # Skip the system message
-    with st.chat_message(message["role"]):
-        st.markdown(message["content"])
-
 # Function to find the top 1 most similar question based on user input
 def find_top_question(query):
     # Generate embedding for the query
@@ -70,15 +65,21 @@ def generate_response(prompt):
     )
     return response.choices[0].message.content
 
-# React to user input
-if prompt := st.chat_input("Enter a LeetCode-related query (e.g., 'google backtracking'):"):
-    # Display user message in chat message container
-    st.chat_message("user").markdown(prompt)
-    # Add user message to chat history
-    st.session_state.messages.append({"role": "user", "content": prompt})
+# User input form for generating a new question
+with st.form(key="input_form"):
+    company = st.text_input("Company", value="Google") # Default value: Google
+    difficulty = st.selectbox("Difficulty", ["Easy", "Medium", "Hard"], index=1) # Default: Medium
+    topic = st.text_input("Topic (e.g., Backtracking)", value="Backtracking") # Default: Backtracking
+
+    generate_button = st.form_submit_button(label="Generate")
 
-    # Find the top question based on user input
-    top_question = find_top_question(prompt)
+if generate_button:
+    # Clear session state and start a new conversation history with system prompt
+    st.session_state.messages = [{"role": "assistant", "content": system_prompt}]
+
+    # Create a query from user inputs and find the most relevant question
+    query = f"{company} {difficulty} {topic}"
+    top_question = find_top_question(query)
 
     # Prepare a detailed prompt for GPT using the top question's details
     detailed_prompt = (
@@ -94,15 +95,19 @@ if prompt := st.chat_input("Enter a LeetCode-related query (e.g., 'google backtr
     # Generate response using GPT-4 with detailed prompt and debugging logs
     response = generate_response(detailed_prompt)
 
-    # Display assistant response in chat message container
+    # Display assistant response in chat message container and add to session history
     with st.chat_message("assistant"):
         st.markdown(response)
 
-    # Add assistant response to chat history
     st.session_state.messages.append({"role": "assistant", "content": response})
 
+# Display chat messages from history on app rerun (for subsequent conversation)
+for message in st.session_state.messages[1:]: # Skip the system message
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
 st.sidebar.markdown("""
 ## About
 This is a Real-World Interview Question Generator powered by OpenAI's API.
-Enter a company name, topic and level of difficulty, and it will transform a relevant question into a real-world interview scenario!
+Enter a company name, topic, and level of difficulty, and it will transform a relevant question into a real-world interview scenario!
 """)
```
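For reference, the hunks above call `find_top_question(query)` but its body sits outside the diff context. The following is a minimal, hypothetical sketch of how that retrieval step could look with the `all-MiniLM-L6-v2` model that `app.py` loads at the top; the `questions_df` DataFrame, its column names, and the precomputed `embedding` column are illustrative assumptions, not something this commit shows.

```python
# Hypothetical sketch only: app.py's real question bank is not part of this
# diff. A tiny in-memory DataFrame stands in for it here, with embeddings
# precomputed once at load time.
import numpy as np
import pandas as pd
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("all-MiniLM-L6-v2")

# Stand-in question bank (columns and storage are assumptions)
questions_df = pd.DataFrame({
    "title": ["Word Search", "Two Sum"],
    "description": ["Find a word in a 2D grid of letters.",
                    "Return indices of two numbers adding to a target."],
    "company": ["Google", "Amazon"],
    "difficulty": ["Medium", "Easy"],
})
questions_df["embedding"] = list(
    model.encode(questions_df["description"].tolist(), convert_to_numpy=True)
)

def find_top_question(query):
    # Generate embedding for the query
    query_embedding = model.encode(query, convert_to_tensor=True)

    # Cosine similarity against every stored question embedding
    corpus = np.vstack(questions_df["embedding"].to_numpy())
    scores = util.cos_sim(query_embedding, corpus)[0]

    # Return the single best match; its fields feed detailed_prompt in the diff
    return questions_df.iloc[int(scores.argmax())].to_dict()
```

With the new form-driven flow, the query passed in is simply `f"{company} {difficulty} {topic}"`, so the match is driven by those three fields rather than by free-form chat input.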
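Likewise, only the tail of `generate_response` appears as context (the closing `)` and `return response.choices[0].message.content`). Below is one plausible shape for that call, assuming the OpenAI v1 Python client and the GPT-4 chat-completions endpoint; the client setup, model name, and temperature are assumptions made for illustration.

```python
# Hypothetical sketch: the diff only confirms the
# `return response.choices[0].message.content` line. Everything else here
# (client setup, model name, temperature) is assumed.
import streamlit as st
from openai import OpenAI

client = OpenAI()  # expects OPENAI_API_KEY in the environment

def generate_response(prompt):
    # Send the running chat history plus the new detailed prompt
    messages = st.session_state.messages + [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(
        model="gpt-4",
        messages=messages,
        temperature=0.7,
    )
    return response.choices[0].message.content
```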