|
import gradio as gr |
|
from transformers import AutoTokenizer |
|
from jinja2 import Template |
|
|
|
with gr.Blocks() as demo:
    # --- UI -----------------------------------------------------------------
    huggingface = gr.Textbox(
        value="unsloth/Meta-Llama-3.1-8B-Instruct",
        show_label=True,
        label="huggingface ID",
    )
    jinja_template = gr.Textbox(value="", show_label=True, label="Jinja Template")
    button = gr.Button(value="get chat template")
    # Hidden until a template has been produced successfully.
    # NOTE: `lines` must be an int (was 10.0).
    output = gr.Textbox(
        lines=10,
        show_copy_button=True,
        show_label=True,
        label="chat template",
        visible=False,
    )

    # Example conversation shared by both branches of the handler:
    # it fills a pasted Jinja template and exercises a model's own
    # chat template identically.
    _EXAMPLE_MESSAGES = [
        {"role": "system", "content": "system-prompt"},
        {"role": "user", "content": "user-prompt"},
        {"role": "assistant", "content": "assistant-prompt"},
    ]

    @button.click(inputs=[huggingface, jinja_template], outputs=[output])
    def submit(huggingface_id, jinja_template_text):
        """Render a chat template and reveal it in the output textbox.

        If the user pasted a Jinja template, render it directly against the
        example messages; otherwise download the tokenizer for
        ``huggingface_id`` and apply its built-in chat template.

        Args:
            huggingface_id: Hub repo id of the model whose tokenizer holds
                the chat template (used only when no Jinja text is given).
            jinja_template_text: Optional raw Jinja template pasted by the
                user; takes precedence when non-empty.

        Raises:
            gr.Error: if template rendering or tokenizer loading fails.
        """
        try:
            if jinja_template_text:
                template = Template(jinja_template_text).render(
                    {"messages": _EXAMPLE_MESSAGES}
                )
            else:
                template = AutoTokenizer.from_pretrained(
                    huggingface_id
                ).apply_chat_template(_EXAMPLE_MESSAGES, tokenize=False)
        except Exception as err:
            # Surface the failure in the UI and keep the original cause
            # chained for server-side debugging.
            raise gr.Error(f"Could not get chat template: {err}") from err
        # Success path: show the (previously hidden) output textbox.
        # The old trailing `return gr.update(visible=False)` was unreachable
        # (the try returns, the except raises) and has been removed.
        return gr.update(value=template, visible=True)


demo.launch()
|
|