Update app.py
Browse files
app.py
CHANGED
|
@@ -62,8 +62,8 @@ def reset_model(model_name, *extra_components, with_extra_components=True):
|
|
| 62 |
gc.collect()
|
| 63 |
if with_extra_components:
|
| 64 |
return ([welcome_message.format(model_name=model_name)]
|
| 65 | - + [gr.Textbox(visible=False) for _ in range(len(interpretation_bubbles))]
|
| 66 | - + [gr.Button(visible=False) for _ in range(len(tokens_container))]
|
| 67 |
+ [*extra_components])
|
| 68 |
|
| 69 |
|
|
@@ -113,7 +113,7 @@ def run_interpretation(raw_interpretation_prompt, max_new_tokens, do_sample,
|
|
| 113 |
generation_texts = global_state.tokenizer.batch_decode(generated)
|
| 114 |
progress_dummy_output = ''
|
| 115 |
bubble_outputs = [gr.Textbox(text.replace('\n', ' '), visible=True, container=False, label=f'Layer {i}') for text in generation_texts]
|
| 116 | - bubble_outputs += [gr.Textbox(visible=False) for _ in range(MAX_NUM_LAYERS - len(bubble_outputs))]
|
| 117 |
return [progress_dummy_output, *bubble_outputs]
|
| 118 |
|
| 119 |
|
|
@@ -221,6 +221,7 @@ with gr.Blocks(theme=gr.themes.Default(), css='styles.css') as demo:
|
|
| 221 |
original_prompt_raw.change(lambda: [gr.Button(visible=False) for _ in range(MAX_PROMPT_TOKENS)], [], tokens_container)
|
| 222 |
|
| 223 |
extra_components = [interpretation_prompt, original_prompt_raw, original_prompt_btn]
|
| 224 | - model_chooser.change(reset_model, [model_chooser, *extra_components],
|
|
|
|
| 225 |
|
| 226 |
demo.launch()
|
|
|
|
| 62 |
gc.collect()
|
| 63 |
if with_extra_components:
|
| 64 |
return ([welcome_message.format(model_name=model_name)]
|
| 65 | + + [gr.Textbox('', visible=False) for _ in range(len(interpretation_bubbles))]
|
| 66 | + + [gr.Button('', visible=False) for _ in range(len(tokens_container))]
|
| 67 |
+ [*extra_components])
|
| 68 |
|
| 69 |
|
|
|
|
| 113 |
generation_texts = global_state.tokenizer.batch_decode(generated)
|
| 114 |
progress_dummy_output = ''
|
| 115 |
bubble_outputs = [gr.Textbox(text.replace('\n', ' '), visible=True, container=False, label=f'Layer {i}') for text in generation_texts]
|
| 116 | + bubble_outputs += [gr.Textbox('', visible=False) for _ in range(MAX_NUM_LAYERS - len(bubble_outputs))]
|
| 117 |
return [progress_dummy_output, *bubble_outputs]
|
| 118 |
|
| 119 |
|
|
|
|
| 221 |
original_prompt_raw.change(lambda: [gr.Button(visible=False) for _ in range(MAX_PROMPT_TOKENS)], [], tokens_container)
|
| 222 |
|
| 223 |
extra_components = [interpretation_prompt, original_prompt_raw, original_prompt_btn]
|
| 224 | + model_chooser.change(reset_model, [model_chooser, *extra_components],
|
| 225 | +                      [welcome_model, *interpretation_bubbles, *tokens_container, *extra_components])
|
| 226 |
|
| 227 |
demo.launch()
|