import gradio as gr
import transformers
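
# Gradio demo for Microsoft's Phi-4 using the Transformers text-generation
# pipeline. Assumes `torch` is installed; device_map="auto" additionally
# relies on the `accelerate` package being available.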

title = """🙋🏻♂️Welcome to 🌟Tonic's 🤳🏻Phi-4 Demo"""

description = """
This demo uses Microsoft's Phi-4 model for text generation.

- System Prompt: Sets the context/role for the AI
- User Prompt: Your specific question or request
- Max Tokens: Maximum length of the generated response
- Temperature: Controls randomness (higher = more creative, lower = more focused)
"""

join_us = """
## Join us:

🌟TeamTonic🌟 is always making cool demos! Join our active builder's 🛠️community 👻

[Join us on Discord](https://discord.gg/qdfnvSPcqP)

On 🤗Hugging Face: [MultiTransformer](https://huggingface.co/MultiTransformer)

On 🌐Github: [Tonic-AI](https://github.com/tonic-ai) & contribute to🌟 [Dark Thoughts](https://github.com/MultiTonic/thinking-dataset)

🤗Big thanks to Yuvi Sharma and all the folks at Hugging Face for the community grant 🤗
"""


# Load the model once at import time so every request (and example caching)
# reuses the same pipeline instead of re-initializing the weights per call.
pipeline = transformers.pipeline(
    "text-generation",
    model="microsoft/phi-4",
    model_kwargs={"torch_dtype": "auto"},
    device_map="auto",
)


def generate_response(system_prompt, user_prompt, max_tokens, temperature):
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]

    outputs = pipeline(
        messages,
        max_new_tokens=int(max_tokens),  # sliders may deliver floats
        temperature=temperature,
        do_sample=True,
    )

    # With chat-style input the pipeline returns the whole conversation in
    # "generated_text"; the assistant's reply is the last message.
    return outputs[0]["generated_text"][-1]["content"]
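

# Illustrative direct call with hypothetical values, bypassing the UI:
#   generate_response("You are a helpful assistant.", "Say hello.", 64, 0.7)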


examples = [
    [
        "You are a medieval knight and must provide explanations to modern people.",
        "How should I explain the Internet?",
        128,
        0.7,
    ],
    [
        "You are a wise wizard from ancient times.",
        "What would you call a smartphone?",
        256,
        0.8,
    ],
    [
        "You are a time-traveling merchant from the year 1400.",
        "How would you describe modern cars?",
        200,
        0.6,
    ],
    [
        "You are a medieval monk who specializes in manuscripts.",
        "What do you think about e-books?",
        150,
        0.7,
    ],
    [
        "You are a castle guard from the Middle Ages.",
        "What do you think about modern security systems?",
        180,
        0.9,
    ],
]
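
# Each example row above maps onto the inputs in order:
# [system_prompt, user_prompt, max_tokens, temperature].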


with gr.Blocks() as demo:
    gr.Markdown(title)
    gr.Markdown(description)
    gr.Markdown(join_us)

    with gr.Row():
        with gr.Column():
            system_prompt = gr.Textbox(
                label="System Prompt",
                placeholder="Enter system prompt...",
                value="You are a medieval knight and must provide explanations to modern people."
            )
            user_prompt = gr.Textbox(
                label="User Prompt",
                placeholder="Enter your question...",
                value="How should I explain the Internet?"
            )

            with gr.Row():
                max_tokens = gr.Slider(
                    minimum=1,
                    maximum=512,
                    value=128,
                    step=1,
                    label="Maximum Tokens"
                )
                temperature = gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.7,
                    step=0.1,
                    label="Temperature"
                )

            submit_btn = gr.Button("🚀 Generate Response")

        with gr.Column():
            output = gr.Textbox(
                label="Generated Response",
                lines=10
            )

    gr.Examples(
        examples=examples,
        inputs=[system_prompt, user_prompt, max_tokens, temperature],
        outputs=output,
        fn=generate_response,
        cache_examples=True,
        label="Example Prompts"
    )
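
    # cache_examples=True pre-computes the model's output for each example row
    # when the app starts, so the initial launch can take a while.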

    submit_btn.click(
        fn=generate_response,
        inputs=[system_prompt, user_prompt, max_tokens, temperature],
        outputs=output
    )

    gr.Markdown("""
### 📝 Parameters:
- **System Prompt**: Sets the behavior/role of the AI (e.g., medieval knight, wizard, merchant)
- **User Prompt**: Your question or input about modern concepts
- **Maximum Tokens**: Controls the maximum length of the generated response
- **Temperature**: Controls randomness (higher = more creative, lower = more focused)

### 💡 Tips:
1. Try different historical personas in the system prompt
2. Ask about modern technology from a historical perspective
3. Adjust temperature for more varied or consistent responses
4. Use the example prompts for inspiration
""")


if __name__ == "__main__":
    demo.launch()