harpreetsahota committed
Commit d0568c3 · verified · 1 Parent(s): 5283206

Update app.py

Files changed (1)
  1. app.py +45 -44
app.py CHANGED
@@ -108,47 +108,7 @@ with gr.Blocks() as demo:
         value=list(PROMPT_STRATEGIES.keys())[0],
         label="Select Prompt Strategy"
     )
-
-    with gr.Row():
-        override_params = gr.Checkbox(
-            label="Override Template Parameters",
-            value=False
-        )
-
-    with gr.Row():
-        with gr.Column(visible=False) as param_controls:
-            max_tokens = gr.Slider(
-                minimum=1,
-                maximum=2048,
-                value=512,
-                step=1,
-                label="Max new tokens"
-            )
-            temperature = gr.Slider(
-                minimum=0.1,
-                maximum=4.0,
-                value=0.7,
-                step=0.1,
-                label="Temperature"
-            )
-            top_p = gr.Slider(
-                minimum=0.1,
-                maximum=1.0,
-                value=0.95,
-                step=0.05,
-                label="Top-p (nucleus sampling)"
-            )
 
-    with gr.Accordion("Current Prompt Details", open=False):
-        system_prompt_display = gr.TextArea(
-            label="System Prompt",
-            interactive=False,
-            lines=3
-        )
-        current_messages_display = gr.JSON(
-            label="Full Conversation Context",
-        )
-
     chatbot = gr.ChatInterface(
         fn=respond,
         additional_inputs=[
@@ -160,6 +120,50 @@ with gr.Blocks() as demo:
             top_p,
         ]
     )
+
+    # Parameters and Prompt Details section below the chat
+    with gr.Row():
+        with gr.Column():
+            # Parameter Controls
+            override_params = gr.Checkbox(
+                label="Override Template Parameters",
+                value=False
+            )
+
+            with gr.Column(visible=False) as param_controls:
+                max_tokens = gr.Slider(
+                    minimum=1,
+                    maximum=2048,
+                    value=512,
+                    step=1,
+                    label="Max new tokens"
+                )
+                temperature = gr.Slider(
+                    minimum=0.1,
+                    maximum=4.0,
+                    value=0.7,
+                    step=0.1,
+                    label="Temperature"
+                )
+                top_p = gr.Slider(
+                    minimum=0.1,
+                    maximum=1.0,
+                    value=0.95,
+                    step=0.05,
+                    label="Top-p (nucleus sampling)"
+                )
+
+        with gr.Column():
+            # Prompt Details
+            with gr.Accordion("Current Prompt Details", open=False):
+                system_prompt_display = gr.TextArea(
+                    label="System Prompt",
+                    interactive=False,
+                    lines=3
+                )
+                current_messages_display = gr.JSON(
+                    label="Full Conversation Context",
+                )
 
     def toggle_param_controls(override):
         return gr.Column(visible=override)
@@ -169,13 +173,10 @@ with gr.Blocks() as demo:
         system_context = prompts["system_context"]
         formatted_system_message = system_context.format(prompt_strategy=prompt_template.template)
 
-        # Get the template parameters
-        params = prompt_template.parameters
-
         return (
             formatted_system_message,
             {
-                "Template Parameters": params,
+                "Template Parameters": prompt_template.parameters,
                 "Prompt Strategy": prompt_template.template
             }
         )
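
The event wiring that connects these components is not part of the hunks shown above. A minimal sketch of how the pieces are presumably hooked up elsewhere in app.py follows; the component names come from the diff, while the dropdown variable and the display-update function name are assumptions for illustration only.

# Sketch only, not part of this commit's hunks. Assumes Gradio's standard
# .change() event listeners; names marked "hypothetical" do not appear in the diff.
override_params.change(
    fn=toggle_param_controls,   # from the diff: returns gr.Column(visible=override)
    inputs=override_params,
    outputs=param_controls      # shows/hides the slider column
)

prompt_strategy.change(         # "prompt_strategy" dropdown name is hypothetical
    fn=update_prompt_display,   # hypothetical name for the function in the last hunk
    inputs=prompt_strategy,
    outputs=[system_prompt_display, current_messages_display]
)

Under that assumption, the last hunk's return tuple maps onto the two outputs: the formatted system message fills the "System Prompt" text area, and the dict (now including "Template Parameters" inline) fills the "Full Conversation Context" JSON view.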