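# Small Gradio app for inspecting chat templates: enter a Hugging Face model ID
# (or paste a raw Jinja template) and press the button to see the template applied
# to a sample system/user/assistant conversation.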
import gradio as gr
from transformers import AutoTokenizer
from jinja2 import Template
with gr.Blocks() as demo:
    # UI: a Hugging Face model ID or a raw Jinja template, a submit button,
    # and an output box that stays hidden until a template has been produced.
    huggingface = gr.Textbox(value="unsloth/Meta-Llama-3.1-8B-Instruct", show_label=True, label="huggingface ID")
    jinja_template = gr.Textbox(value="", show_label=True, label="Jinja Template")
    button = gr.Button(value="get chat template")
    output = gr.Textbox(lines=10, show_copy_button=True, show_label=True, label="chat template", visible=False)

    @button.click(inputs=[huggingface, jinja_template], outputs=[output])
    def submit(huggingface_id, jinja_template_text):
        # Sample conversation used to exercise whichever template is selected.
        messages = [
            {"role": "system", "content": "system-prompt"},
            {"role": "user", "content": "user-prompt"},
            {"role": "assistant", "content": "assistant-prompt"},
        ]
        try:
            if jinja_template_text:
                # A raw Jinja template was pasted: render it directly against the sample messages.
                template = Template(jinja_template_text).render({"messages": messages})
                print(template)
            else:
                # Otherwise load the tokenizer for the given model ID and apply its built-in chat template.
                template = AutoTokenizer.from_pretrained(huggingface_id).apply_chat_template(
                    messages,
                    tokenize=False,
                )
            return gr.update(value=template, visible=True)
        except Exception as err:
            raise gr.Error(f"Could not get chat template: {err}")
        return gr.update(visible=False)
demo.launch()
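
# A minimal Jinja template you could paste into the "Jinja Template" box to try the
# render path (purely illustrative; real model templates such as Llama 3.1's are far
# more elaborate). It only relies on the `messages` variable passed to render() above:
#
#   {% for message in messages %}<|{{ message['role'] }}|>
#   {{ message['content'] }}
#   {% endfor %}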