Update app.py
app.py CHANGED
@@ -111,6 +111,7 @@ json_output = gr.JSON()
 css = '''
 
 .bubble {
+    border: none
     border-radius: 10px;
     padding: 10px;
    margin-top: 15px;
@@ -186,14 +187,9 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
     with gr.Column(scale=1):
         gr.Markdown('<span style="font-size:180px;">🤔</span>')
 
-
-
-
-    # btn = gr.Button(txt)
-    # model_btns.append(btn)
-    # for btn in model_btns:
-    #     btn.click(reset_new_model, [global_state])
-
+    with gr.Group('Interpretation'):
+        interpretation_prompt = gr.Text(suggested_interpretation_prompts[0], label='Interpretation Prompt')
+
     with gr.Blocks():
         gr.Markdown('''
         Here are some examples of prompts we can analyze their internal representations
@@ -219,9 +215,6 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
 
     progress_dummy = gr.Markdown('', elem_id='progress_dummy')
 
-    with gr.Group('Interpretation'):
-        interpretation_prompt = gr.Text(suggested_interpretation_prompts[0], label='Interpretation Prompt')
-
     interpretation_bubbles = [gr.Textbox('', container=False, visible=False, elem_classes=['bubble',
                                          'even_bubble' if i % 2 == 0 else 'odd_bubble'])
                               for i in range(model.config.num_hidden_layers)]
@@ -239,6 +232,14 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
         top_k = gr.Slider(1, 1000, value=50, step=1, label='top k')
         top_p = gr.Slider(0., 1., value=0.95, label='top p')
 
+    # with gr.Group():
+    #     with gr.Row():
+    #         for txt in model_info.keys():
+    #             btn = gr.Button(txt)
+    #             model_btns.append(btn)
+    # for btn in model_btns:
+    #     btn.click(reset_new_model, [global_state])
+
     # event listeners
     for i, btn in enumerate(tokens_container):
         btn.click(partial(run_interpretation, i=i), [global_state, interpretation_prompt,
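For orientation, the event wiring that the last hunk leaves in place, one btn.click per token button with the token index frozen via functools.partial, follows a standard Gradio pattern. Below is a minimal, hypothetical sketch of that pattern; run_interpretation, the layer count, and the component names here are simplified stand-ins, not the app's own code.

# Hypothetical sketch of per-token click wiring with functools.partial (not the app's actual code).
from functools import partial

import gradio as gr

NUM_LAYERS = 4   # stand-in for model.config.num_hidden_layers
NUM_TOKENS = 3   # stand-in for the tokenized prompt length

def run_interpretation(prompt, i=0):
    # Placeholder: the real function would decode the hidden states of token i at every layer.
    return [f'layer {layer}: interpretation of token {i} with prompt {prompt!r}'
            for layer in range(NUM_LAYERS)]

with gr.Blocks() as demo:
    interpretation_prompt = gr.Text('Sure, here is a description:', label='Interpretation Prompt')
    tokens_container = [gr.Button(f'token {i}') for i in range(NUM_TOKENS)]
    interpretation_bubbles = [gr.Textbox('', container=False) for _ in range(NUM_LAYERS)]

    # One listener per token button; partial binds that button's index as the i argument.
    for i, btn in enumerate(tokens_container):
        btn.click(partial(run_interpretation, i=i), [interpretation_prompt], interpretation_bubbles)

if __name__ == '__main__':
    demo.launch()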