Update app.py
--- a/app.py
+++ b/app.py
@@ -99,7 +99,7 @@ def run_interpretation(global_state, raw_interpretation_prompt, max_new_tokens,
     generation_texts = tokenizer.batch_decode(generated)
     progress_dummy_output = ''
     return ([progress_dummy_output] +
-            [gr.Textbox(text.replace('\n', ' '), visible=True, container=False) for text in generation_texts]
+            [gr.Textbox(text.replace('\n', ' '), visible=True, container=False, label=f'Layer {i}') for i, text in enumerate(generation_texts)]
             )
 
 
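The new version labels each returned bubble with its layer index; note that the label only varies per bubble if the comprehension binds `i` via `enumerate`, as written above. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch (hypothetical handler and component names, and a fixed layer count instead of `model.config.num_hidden_layers`) of how a Gradio event handler returns `gr.Textbox(...)` instances to update pre-created output components:

```python
import gradio as gr

NUM_LAYERS = 4  # illustrative; the app uses model.config.num_hidden_layers

def fake_interpret(prompt):
    # Stand-in for run_interpretation: one decoded string per layer.
    texts = [f'layer {i} reading of "{prompt}"' for i in range(NUM_LAYERS)]
    # Returning component instances (not plain strings) lets the handler
    # set text, visibility, container and label in one go.
    return [gr.Textbox(t.replace('\n', ' '), visible=True, container=False,
                       label=f'Layer {i}')
            for i, t in enumerate(texts)]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label='Prompt')
    bubbles = [gr.Textbox('', visible=False) for _ in range(NUM_LAYERS)]
    prompt.submit(fake_interpret, inputs=prompt, outputs=bubbles)

if __name__ == '__main__':
    demo.launch()
```

Returning component instances rather than raw strings is what allows the handler to flip `visible`, `container`, and now `label` at the same time as it sets the text.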
@@ -182,6 +182,9 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
     with gr.Row():
         with gr.Column(scale=5):
             gr.Markdown('# π Self-Interpreting Models')
+
+            gr.Markdown('<b style="color: #8B0000;">Model outputs are not filtered and might include undesired language!</b>')
+
             # gr.Markdown(
             #     '**πΎ This space is a simple introduction to the emerging trend of models interpreting their OWN hidden states in free form natural language!!πΎ**',
             #     # elem_classes=['explanation_accordion']
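This hunk moves the unfiltered-output warning up under the page title and switches its colour from `red` to the darker `#8B0000`. The mechanism is simply inline HTML inside `gr.Markdown`, which the app already relies on elsewhere; a small standalone sketch (placeholder title) for reference:

```python
import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown('# Self-Interpreting Models')  # placeholder title
    # Inline HTML is passed through, so a styled <b> tag is enough
    # for a dark-red (#8B0000) notice without extra CSS.
    gr.Markdown('<b style="color: #8B0000;">Model outputs are not filtered '
                'and might include undesired language!</b>')

if __name__ == '__main__':
    demo.launch()
```

Placing the warning directly under the title keeps it visible without scrolling; the old copy further down is removed in the next hunk.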
@@ -205,7 +208,6 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
             we expect to get back a summary of the information that exists inside the hidden state from different layers and different runs!! How cool is that! π―π―π―
             ''', line_breaks=True)
 
-            gr.Markdown('<b style="color: red;">Model outputs are not filtered and might include undesired language!</b>')
             # with gr.Column(scale=1):
             #     gr.Markdown('<span style="font-size:180px;">π€</span>')
 
@@ -238,7 +240,7 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
     use_gpu = False  # gr.Checkbox(value=False, label='Use GPU')
     progress_dummy = gr.Markdown('', elem_id='progress_dummy')
 
-    interpretation_bubbles = [gr.Textbox('',
+    interpretation_bubbles = [gr.Textbox('', container=False, visible=False, elem_classes=['bubble',
                                          'even_bubble' if i % 2 == 0 else 'odd_bubble'])
                              for i in range(model.config.num_hidden_layers)]
 
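The rebuilt `interpretation_bubbles` start hidden (`visible=False`), drop their container chrome, and gain a shared `bubble` class alongside the alternating `even_bubble`/`odd_bubble` classes. Those class names only take effect because `gr.Blocks(css=css)` injects a stylesheet that targets them; the app's actual `css` string is not part of this diff, so the rules below are purely illustrative:

```python
import gradio as gr

# Illustrative CSS only: the app defines its own `css` string elsewhere.
css = """
.bubble {border-radius: 12px; padding: 8px; margin: 4px;}
.even_bubble {background: #e8f0fe;}
.odd_bubble {background: #fef3e8;}
"""

NUM_LAYERS = 6  # stand-in for model.config.num_hidden_layers

with gr.Blocks(css=css) as demo:
    # elem_classes adds these class names to each component's HTML element,
    # so the stylesheet above can style every bubble.
    interpretation_bubbles = [
        gr.Textbox('', container=False, visible=False,
                   elem_classes=['bubble',
                                 'even_bubble' if i % 2 == 0 else 'odd_bubble'])
        for i in range(NUM_LAYERS)
    ]

if __name__ == '__main__':
    demo.launch()
```

The handler from the first hunk then makes the relevant bubbles visible and fills in their text and per-layer labels.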