frankaging committed on
Commit 669503d · 1 Parent(s): 71ecf23
Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -294,17 +294,18 @@ with gr.Blocks(css=css, fill_height=True) as demo:
     with gr.Row(min_height=500, equal_height=True):
         # Left side: chat area
         with gr.Column(scale=7):
-            chat_interface = gr.ChatInterface(
-                fn=generate,
-                title="Conditionally Steer AI Responses Based on Topics",
-                description="""This is an experimental chatbot that you can steer using topics you care about:
+            gr.Markdown("""# Conditionally Steer AI Responses Based on Topics""")
+            gr.Markdown("""This is an experimental chatbot that you can steer using topics you care about:
 
 Step 1: Choose a topic to detect (e.g., "Google")
 Step 2: Choose a topic you want the model to discuss when the previous topic comes up (e.g., "ethics")
 
-Try it out! For example, set it to detect "Google" topics and steer toward discussing "ethics", and ask "List some search engines and their pros and cons". We intervene on Gemma-2-2B-it by adding steering vectors to the residual stream at layer 20.""",
+We intervene on Gemma-2-2B-it by adding steering vectors to the residual stream at layer 20.""")
+            chat_interface = gr.ChatInterface(
+                fn=generate,
+                chatbot=gr.Chatbot(),
+                textbox=gr.Textbox(placeholder="List some search engines with their pros and cons", container=False, scale=7),
                 additional_inputs=[selected_detection, selected_subspaces],
-                fill_height=True,
             )
 
         # Right side: concept detection and steering
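
For reference, a minimal, self-contained sketch of the layout this commit moves toward: the title and description are rendered with gr.Markdown above the chat instead of being passed as title=/description= arguments, and gr.ChatInterface receives an explicit chatbot and textbox. The generate stub and the two selector textboxes below are placeholders, not the app's real code; the actual app.py wires generate to Gemma-2-2B-it with layer-20 steering vectors and defines selected_detection / selected_subspaces in the right-hand column.

# Sketch only: stands in for the structure shown in the diff above.
import gradio as gr

def generate(message, history, detection_topic, steering_topic):
    # Placeholder for the real steering-aware generation function in app.py.
    return f"(stub) detect={detection_topic!r}, steer toward {steering_topic!r}: {message!r}"

with gr.Blocks(fill_height=True) as demo:
    with gr.Row(equal_height=True):
        # Left side: chat area
        with gr.Column(scale=7):
            gr.Markdown("""# Conditionally Steer AI Responses Based on Topics""")
            gr.Markdown("This is an experimental chatbot that you can steer using topics you care about.")
            # Hypothetical stand-ins for the detection/steering selectors that the
            # real app defines in the right-hand column.
            selected_detection = gr.Textbox(label="Topic to detect", value="Google")
            selected_subspaces = gr.Textbox(label="Topic to steer toward", value="ethics")
            chat_interface = gr.ChatInterface(
                fn=generate,
                chatbot=gr.Chatbot(),
                textbox=gr.Textbox(
                    placeholder="List some search engines with their pros and cons",
                    container=False,
                    scale=7,
                ),
                additional_inputs=[selected_detection, selected_subspaces],
            )

if __name__ == "__main__":
    demo.launch()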