jchen8000 committed
Commit cffdd6e · verified · 1 Parent(s): cb5cffb

Update app.py

Files changed (1)
  1. app.py +112 -14
app.py CHANGED
@@ -37,6 +37,38 @@ You can change the LLM models in the **Additional Inputs** at the bottom of the
 
 """
 
+chatbot_css = """
+.gradio-container {
+    font-family: 'Inter', sans-serif;
+    border-radius: 12px;
+    overflow: hidden;
+}
+.panel {
+    border-radius: 8px;
+    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+.gr-button {
+    border-radius: 8px;
+    padding: 10px 20px;
+    font-weight: bold;
+    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
+    transition: all 0.2s ease-in-out;
+}
+.gr-button:hover {
+    transform: translateY(-2px);
+    box-shadow: 0 4px 8px rgba(0, 0, 0, 0.15);
+}
+.gr-textbox textarea {
+    border-radius: 8px;
+}
+.gr-slider {
+    padding: 10px 0;
+}
+.gr-tabitem {
+    padding: 20px;
+}
+"""
+
 sample_button = "Load User Guide and Installation Guide documents"
 
 
@@ -110,7 +142,7 @@ def format_docs(docs):
     return "\n\n".join(doc.page_content for doc in docs)
 
 def generate_response(query, history, model, temperature, max_tokens, top_p, seed):
-
+
     if vector_store is None:
         return "Please upload and index a PDF at the Indexing tab."
 
@@ -145,7 +177,7 @@ additional_inputs = [
 
 
 # Create the Gradio interface
-with gr.Blocks(theme=gr.themes.Default()) as demo:
+with gr.Blocks(theme=gr.themes.Default(), css=chatbot_css) as demo:
     with gr.Tab("Indexing"):
         gr.Markdown(desc)
         # pdf_input = gr.File(label="Upload PDF", file_types=[".pdf"])
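Note on the new chatbot_css (first hunk) and the css= wiring above: selectors such as .gr-button, .panel and .gr-tabitem target class names that Gradio generates internally, and those names are not guaranteed to be stable across Gradio releases. A minimal sketch of a more version-robust variant, keyed to explicit elem_id / elem_classes hooks; the send-btn id and info-panel class below are illustrative and not part of this commit:

    import gradio as gr

    # Illustrative only, not part of this commit: selectors keyed to an
    # explicit elem_id / elem_classes tend to survive Gradio upgrades better
    # than the generated .gr-* class names used in chatbot_css.
    custom_css = """
    #send-btn { border-radius: 8px; font-weight: bold; }
    .info-panel textarea { border-radius: 8px; }
    """

    with gr.Blocks(theme=gr.themes.Default(), css=custom_css) as demo:
        send = gr.Button("Send", elem_id="send-btn", variant="primary")
        info = gr.Textbox(lines=5, label="Retrieved Information", elem_classes=["info-panel"])

    demo.launch()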
@@ -159,22 +191,88 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
 
     with gr.Tab("Chatbot"):
         with gr.Row():
-            with gr.Column():
-                gr.ChatInterface(
-                    fn=generate_response,
-                    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
+            with gr.Column(scale=2):
+                chatbot = gr.Chatbot(
+                    show_label=False,
+                    show_share_button=False,
+                    show_copy_button=True,
+                    layout="panel",
+                    height=500, # Set a fixed height for the chatbot
+                    avatar_images=(
+                        "https://placehold.co/60x60/FFD700/000000?text=U", # User avatar
+                        "https://placehold.co/60x60/6366F1/FFFFFF?text=AI" # Bot avatar
+                    )
+                )
+                msg = gr.Textbox(
+                    label="Your Message",
+                    placeholder="Type your message here...",
+                    show_copy_button=True,
+                    container=False # Prevent it from being wrapped in a default container
+                )
+                with gr.Row():
+                    submit_btn = gr.Button("Send", variant="primary")
+                    clear_btn = gr.ClearButton() # Will be configured below
+
+                gr.Examples(
                     examples=examples_questions,
-                    additional_inputs=additional_inputs,
+                    inputs=[msg],
+                    outputs=[msg], # Update the message input with the example
+                    label="Quick Examples",
                     cache_examples=False,
                 )
-            # with gr.Column():
-            #     retrieve_button = gr.Button("Retrieve Relevant Info")
-            #     relevant_info = gr.Textbox(
-            #         label="Retrieved Information",
-            #         interactive=False,
-            #         lines=20,
-            #     )
 
+            with gr.Column(scale=1):
+                # Additional inputs as separate Gradio components (interactive but don't affect dummy fn)
+                model_name_textbox = gr.Textbox(label="Model Name", value="dummy-model", interactive=True)
+                temperature_slider = gr.Slider(minimum=0, maximum=1, value=0.7, step=0.01, label="Temperature", interactive=True)
+                max_tokens_slider = gr.Slider(minimum=10, maximum=2000, value=500, step=10, label="Max Tokens", interactive=True)
+                top_p_slider = gr.Slider(minimum=0, maximum=1, value=0.9, step=0.01, label="Top P", interactive=True)
+                seed_number = gr.Number(minimum=0, maximum=100000, value=0, step=1, label="Seed", precision=0, interactive=True)
+
+                gr.Markdown("### Retrieved Information")
+                # Textbox for relevant_info
+                relevant_info_textbox = gr.Textbox(
+                    label="Retrieved Information",
+                    interactive=False, # Not editable by the user
+                    lines=20,
+                    show_copy_button=True,
+                    autoscroll=True,
+                    container=True # Ensure it has a container for styling
+                )
+
+    # --- Event Handling ---
+    # This function acts as a wrapper to process inputs and distribute outputs
+    def process_chat_and_info(message, chat_history, model, temp, max_tok, top_p_val, seed_val):
+        # Call your generate_response function which returns two values
+        bot_message, retrieved_info = generate_response(
+            message, chat_history, model, temp, max_tok, top_p_val, seed_val
+        )
+
+        # Update the chat history for the chatbot component
+        chat_history.append((message, bot_message))
 
+        # Return values in the order of the outputs list
+        return chat_history, retrieved_info, "" # Clear the message input after sending
+
+    # Bind the `process_chat_and_info` function to the submit event of the message textbox
+    msg.submit(
+        fn=process_chat_and_info,
+        inputs=[msg, chatbot, model_name_textbox, temperature_slider, max_tokens_slider, top_p_slider, seed_number],
+        outputs=[chatbot, relevant_info_textbox, msg], # Order matters here: chatbot, relevant_info, then msg
+        queue=False # Set to True if you expect heavy load
+    )
+
+    # Bind the `process_chat_and_info` function to the click event of the send button
+    submit_btn.click(
+        fn=process_chat_and_info,
+        inputs=[msg, chatbot, model_name_textbox, temperature_slider, max_tokens_slider, top_p_slider, seed_number],
+        outputs=[chatbot, relevant_info_textbox, msg], # Order matters here
+        queue=False # Set to True if you expect heavy load
+    )
+
+    # Configure the clear button to clear both the chatbot and the relevant_info_textbox
+    clear_btn.add([msg, chatbot, relevant_info_textbox])
+
+
 # Launch the Gradio app
 demo.launch(share=True)
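Note on process_chat_and_info: it unpacks two values from generate_response, but the early-return branch shown in the second hunk still returns a single string, which would raise a ValueError whenever no PDF has been indexed yet. A minimal sketch of the return contract the wrapper expects, reusing the module-level vector_store and format_docs from app.py; the retrieval and LLM steps below are placeholders, since the full function body is not part of this diff:

    # Sketch of the two-value return contract the new wrapper relies on.
    # vector_store and format_docs are the module-level names already defined
    # in app.py; the retrieval and answer generation here are placeholders.
    def generate_response(query, history, model, temperature, max_tokens, top_p, seed):
        if vector_store is None:
            # Returning a pair here keeps the caller's tuple unpacking valid
            return "Please upload and index a PDF at the Indexing tab.", ""

        docs = vector_store.similarity_search(query, k=4)  # placeholder retrieval
        retrieved_info = format_docs(docs)
        answer = f"[{model}] placeholder answer based on {len(docs)} retrieved chunks"
        return answer, retrieved_info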
 
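Deployment note: both listeners are registered with queue=False while generate_response presumably makes a blocking LLM call. If requests can take more than a few seconds, the usual pattern is to enable Gradio's request queue before launching and drop the per-event queue=False flags; the max_size value below is illustrative, not part of this commit:

    # Illustrative: let slow LLM calls queue up instead of tying up workers.
    demo.queue(max_size=20)
    demo.launch(share=True)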