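# Gradio Space exposing a Python helper chatbot, a small code interpreter,
# and an LLM-assisted math task solver, backed by Qwen2.5-72B-Instruct
# through the Hugging Face Inference API.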
import gradio as gr
from huggingface_hub import InferenceClient
import sys
import io
import traceback
# Initialize the AI model
model_name = "Qwen/Qwen2.5-72B-Instruct"
client = InferenceClient(model_name)
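# Note: the hosted inference backend may require authentication; if requests fail,
# provide a Hugging Face token (e.g. via the HF_TOKEN environment variable or
# huggingface_hub.login()) before creating the client.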
def llm_inference(user_sample):
    eos_token = "<|endoftext|>"
    output = client.chat.completions.create(
        messages=[
            {"role": "system", "content": "You are a Python language guide. Write code on the user topic. If the input is code, correct it for mistakes."},
            {"role": "user", "content": f"Write only python code without any explanation: {user_sample}"},
        ],
        stream=False,
        temperature=0.7,
        top_p=0.1,
        max_tokens=412,
        stop=[eos_token],
    )
    # Concatenate the content of every returned choice
    response = ""
    for choice in output.choices:
        response += choice.message.content
    return response

def chat(user_input, history):
    response = llm_inference(user_input)
    history.append((user_input, response))
    return history, history

def execute_code(code):
    # Run the snippet while temporarily redirecting stdout so that anything
    # printed can be shown in the UI. Note that exec() runs arbitrary code
    # inside the app process, so this is only suitable for a sandboxed demo.
    old_stdout = sys.stdout
    redirected_output = sys.stdout = io.StringIO()
    try:
        exec(code, {})
        output = redirected_output.getvalue()
    except Exception as e:
        output = f"Error: {e}\n{traceback.format_exc()}"
    finally:
        sys.stdout = old_stdout
    return output

def solve_math_task(math_task):
    # Generate Python code for the math task
    generated_code = llm_inference(f"Create a Python program to solve the following math problem:\n{math_task}")
    # Execute the generated code
    execution_result = execute_code(generated_code)
    return generated_code, execution_result

with gr.Blocks() as demo:
gr.Markdown("# π Python Helper Chatbot")
with gr.Tab("Chat"):
chatbot = gr.Chatbot()
msg = gr.Textbox(placeholder="Type your message here...")
msg.submit(chat, inputs=[msg, chatbot], outputs=[chatbot, chatbot])
with gr.Tab("Interpreter"):
gr.Markdown("### π₯οΈ Test Your Code")
code_input = gr.Code(language="python")
run_button = gr.Button("Run Code")
code_output = gr.Textbox(label="Output")
run_button.click(execute_code, inputs=code_input, outputs=code_output)
with gr.Tab("Math Solver"):
gr.Markdown("### π Math Task Solver")
math_input = gr.Textbox(placeholder="Enter your mathematical task here...", lines=2)
solve_button = gr.Button("Solve Task")
with gr.Row():
generated_code_output = gr.Code(label="Generated Python Code", language="python")
with gr.Row():
execution_output = gr.Textbox(label="Execution Result", lines=10)
solve_button.click(solve_math_task, inputs=math_input, outputs=[generated_code_output, execution_output])
with gr.Tab("Logs"):
gr.Markdown("### π Logs")
log_output = gr.Textbox(label="Logs", lines=10, interactive=False)
# Launch the Gradio app
demo.launch()
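# Note: when run outside of Spaces (e.g. locally with Python), demo.launch()
# starts a local Gradio server; gradio and huggingface_hub must be installed.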