Update app.py
app.py
CHANGED
@@ -1,25 +1,39 @@
 import gradio as gr
 from transformers import AutoTokenizer
+from jinja2 import Template
 
 with gr.Blocks() as demo:
     huggingface = gr.Textbox(value="unsloth/Meta-Llama-3.1-8B-Instruct", show_label=True, label="huggingface ID")
+    jinja_template = gr.Textbox(value="", show_label=True, label="Jinja Template")
     button = gr.Button(value="get chat template")
     output = gr.Textbox(lines=10.0, show_copy_button=True, show_label=True, label="chat template", visible=False)
 
-    @button.click(inputs=[huggingface], outputs=[output])
-    def submit(huggingface_id):
+    @button.click(inputs=[huggingface, jinja_template], outputs=[output])
+    def submit(huggingface_id, jinja_template_text):
         try:
-            template = AutoTokenizer.from_pretrained(huggingface_id).apply_chat_template(
-                [
-                    {"role": "system", "content": "system-prompt"},
-                    {"role": "user", "content": "user-prompt"},
-                    {"role": "assistant", "content": "assistant-prompt"}
-                ],
-                tokenize=False
-            )
+            if jinja_template_text:
+                template = Template(jinja_template_text).render(
+                    {
+                        "messages": [
+                            {"role": "system", "content": "system-prompt"},
+                            {"role": "user", "content": "user-prompt"},
+                            {"role": "assistant", "content": "assistant-prompt"}
+                        ]
+                    }
+                )
+                print(template)
+            else:
+                template = AutoTokenizer.from_pretrained(huggingface_id).apply_chat_template(
+                    [
+                        {"role": "system", "content": "system-prompt"},
+                        {"role": "user", "content": "user-prompt"},
+                        {"role": "assistant", "content": "assistant-prompt"}
+                    ],
+                    tokenize=False
+                )
             return gr.update(value=template, visible=True)
         except Exception as err:
             raise gr.Error(f"Could not get chat template: {err}")
         return gr.update(visible=False)
 
 demo.launch()
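For reference, here is a minimal standalone sketch of the two code paths this change introduces in submit(): if the new "Jinja Template" textbox is non-empty, the supplied Jinja string is rendered with jinja2.Template against a sample messages list; otherwise the template bundled with the tokenizer is applied via AutoTokenizer.apply_chat_template. The model ID and the sample messages are copied from app.py; the get_template helper name and the __main__ usage are illustrative only and not part of the Space.

from transformers import AutoTokenizer
from jinja2 import Template

# Sample conversation, same shape as the one hard-coded in app.py.
MESSAGES = [
    {"role": "system", "content": "system-prompt"},
    {"role": "user", "content": "user-prompt"},
    {"role": "assistant", "content": "assistant-prompt"},
]

def get_template(huggingface_id: str, jinja_template_text: str = "") -> str:
    # Hypothetical helper mirroring submit() without the Gradio wiring.
    if jinja_template_text:
        # Render the user-supplied Jinja template; chat templates expect a
        # `messages` variable, which is all this sketch passes in.
        return Template(jinja_template_text).render({"messages": MESSAGES})
    # Otherwise apply the chat template that ships with the tokenizer.
    tokenizer = AutoTokenizer.from_pretrained(huggingface_id)
    return tokenizer.apply_chat_template(MESSAGES, tokenize=False)

if __name__ == "__main__":
    print(get_template("unsloth/Meta-Llama-3.1-8B-Instruct"))

Note that full chat templates may also reference variables such as bos_token or add_generation_prompt, which this sketch (like the render call in app.py) does not supply.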