ilyassh committed · verified
Commit 39dd26a · 1 Parent(s): 99ced92

Update app.py

Files changed (1)
  1. app.py +45 -24
app.py CHANGED
@@ -3,6 +3,7 @@ from huggingface_hub import InferenceClient
 import sys
 import io
 import traceback
+import re  # Import the regular expressions module
 
 # Initialize the AI model
 model_name = "Qwen/Qwen2.5-72B-Instruct"
@@ -12,8 +13,14 @@ def llm_inference(user_sample):
     eos_token = "<|endoftext|>"
     output = client.chat.completions.create(
         messages=[
-            {"role": "system", "content": "You are a Python language guide. Write code on the user topic. If the input is code, correct it for mistakes."},
-            {"role": "user", "content": f"Write only python code without any explanation: {user_sample}"},
+            {
+                "role": "system",
+                "content": "You are a Python language guide. Write code on the user topic. If the input is code, correct it for mistakes."
+            },
+            {
+                "role": "user",
+                "content": f"Write only python code without any explanation: {user_sample}"
+            },
         ],
         stream=False,
         temperature=0.7,
@@ -26,11 +33,6 @@ def llm_inference(user_sample):
         response += choice['message']['content']
     return response
 
-def chat(user_input, history):
-    response = llm_inference(user_input)
-    history.append((user_input, response))
-    return history, history
-
 def execute_code(code):
     old_stdout = sys.stdout
     redirected_output = sys.stdout = io.StringIO()
@@ -43,14 +45,43 @@ def execute_code(code):
     sys.stdout = old_stdout
     return output
 
-def solve_math_task(math_task):
-    # Generate Python code for the math task
-    generated_code = llm_inference(f"Create a Python program to solve the following math problem:\n{math_task}")
-
-    # Execute the generated code
-    execution_result = execute_code(generated_code)
-
-    return generated_code, execution_result
+def is_math_task(user_input):
+    """
+    Simple heuristic to determine if the user input is a math task.
+    This can be enhanced with more sophisticated methods or NLP techniques.
+    """
+    math_keywords = ['calculate', 'compute', 'solve', 'integrate', 'differentiate', 'derivative', 'integral', 'factorial', 'sum', 'product']
+    operators = ['+', '-', '*', '/', '^', '**', 'sqrt', 'sin', 'cos', 'tan', 'log', 'exp']
+    user_input_lower = user_input.lower()
+    return any(keyword in user_input_lower for keyword in math_keywords) or any(op in user_input for op in operators)
+
+def chat(user_input, history):
+    """
+    Handles the chat interaction. If the user input is detected as a math task,
+    it generates Python code to solve it, strips any code tags, executes the code,
+    and returns the result.
+    """
+    if is_math_task(user_input):
+        # Generate Python code for the math task
+        generated_code = llm_inference(f"Create a Python program to solve the following math problem:\n{user_input}")
+
+        # Strip code tags using regex
+        # This regex removes ```python and ``` or any other markdown code fences
+        cleaned_code = re.sub(r"```(?:python)?\n?", "", generated_code).strip()
+        cleaned_code = re.sub(r"```", "", cleaned_code).strip()
+
+        # Execute the cleaned code
+        execution_result = execute_code(cleaned_code)
+
+        # Prepare the responses
+        assistant_response = f"**Generated Python Code:**\n```python\n{cleaned_code}\n```\n\n**Execution Result:**\n```\n{execution_result}\n```"
+    else:
+        # For regular chat messages, use the AI's response
+        assistant_response = llm_inference(user_input)
+
+    # Append to chat history
+    history.append((user_input, assistant_response))
+    return history, history
 
 with gr.Blocks() as demo:
     gr.Markdown("# 🐍 Python Helper Chatbot")
@@ -66,16 +97,6 @@ with gr.Blocks() as demo:
         code_output = gr.Textbox(label="Output")
         run_button.click(execute_code, inputs=code_input, outputs=code_output)
 
-    with gr.Tab("Math Solver"):
-        gr.Markdown("### 📐 Math Task Solver")
-        math_input = gr.Textbox(placeholder="Enter your mathematical task here...", lines=2)
-        solve_button = gr.Button("Solve Task")
-        with gr.Row():
-            generated_code_output = gr.Code(label="Generated Python Code", language="python")
-        with gr.Row():
-            execution_output = gr.Textbox(label="Execution Result", lines=10)
-        solve_button.click(solve_math_task, inputs=math_input, outputs=[generated_code_output, execution_output])
-
     with gr.Tab("Logs"):
         gr.Markdown("### 📜 Logs")
         log_output = gr.Textbox(label="Logs", lines=10, interactive=False)
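
The behavioural core of this commit is the new chat() path, which strips markdown code fences from the model reply before executing it. For reference, this is what the two re.sub() calls do to a typical reply (the sample reply below is invented; the substitutions are copied from the diff):

````python
import re

# Invented example of a model reply wrapped in a markdown code fence.
generated_code = "```python\nprint(2 + 2)\n```"

# Same two substitutions as in chat(): drop the opening fence (with or without
# the "python" tag) and any leftover backticks, then trim surrounding whitespace.
cleaned_code = re.sub(r"```(?:python)?\n?", "", generated_code).strip()
cleaned_code = re.sub(r"```", "", cleaned_code).strip()

print(cleaned_code)  # -> print(2 + 2)
````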
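
Whether a message takes that generate-and-run path is decided by the is_math_task() heuristic. A quick standalone check of how it classifies a few inputs (the function body is copied from the diff; the sample prompts are invented):

```python
def is_math_task(user_input):
    # Same heuristic as in the commit: substring match on keywords or operators.
    math_keywords = ['calculate', 'compute', 'solve', 'integrate', 'differentiate', 'derivative', 'integral', 'factorial', 'sum', 'product']
    operators = ['+', '-', '*', '/', '^', '**', 'sqrt', 'sin', 'cos', 'tan', 'log', 'exp']
    user_input_lower = user_input.lower()
    return any(keyword in user_input_lower for keyword in math_keywords) or any(op in user_input for op in operators)

print(is_math_task("Calculate 5 factorial"))      # True  (keywords "calculate", "factorial")
print(is_math_task("What is 12 * 7?"))            # True  (operator "*")
print(is_math_task("Write a greeting function"))  # False (no keyword or operator matches)
```

Because the match is a plain substring test, words such as "summary" (contains "sum") or any hyphenated text (contains "-") also return True, so some ordinary prose will be routed to the solver path; the docstring's note about enhancing the heuristic is worth keeping in mind.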
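
The generated code is then run in-process by execute_code(). Only that function's setup and teardown lines appear in these hunks; a minimal sketch of the capture pattern they imply, assuming the unshown middle uses exec() with a traceback fallback, would be:

```python
import io
import sys
import traceback

def execute_code(code):
    # Point sys.stdout at a StringIO buffer, matching the setup/teardown shown in the diff.
    old_stdout = sys.stdout
    redirected_output = sys.stdout = io.StringIO()
    try:
        exec(code)                       # assumption: the hidden body runs the code with exec()
        output = redirected_output.getvalue()
    except Exception:
        output = traceback.format_exc()  # assumption: errors come back as a traceback string
    finally:
        sys.stdout = old_stdout          # always restore stdout
    return output

print(execute_code("print(sum(range(10)))"))  # -> 45
print(execute_code("1 / 0"))                  # -> ZeroDivisionError traceback text
```

Note that exec() runs the model-generated code with the app's own privileges; nothing in this pattern sandboxes it.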
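
Finally, the reformatted messages list is sent through huggingface_hub's OpenAI-style chat interface that the file already uses (client.chat.completions.create). Below is a self-contained sketch of that call; the client construction and the sample prompt are assumptions, since they sit outside this diff:

```python
from huggingface_hub import InferenceClient

model_name = "Qwen/Qwen2.5-72B-Instruct"  # same model as app.py
client = InferenceClient(model_name)       # assumption: app.py may also pass an API token

output = client.chat.completions.create(
    messages=[
        {
            "role": "system",
            "content": "You are a Python language guide. Write code on the user topic. If the input is code, correct it for mistakes."
        },
        {
            "role": "user",
            "content": "Write only python code without any explanation: reverse a string"
        },
    ],
    stream=False,
    temperature=0.7,
)

# app.py accumulates choice['message']['content']; this loop mirrors that pattern.
response = ""
for choice in output.choices:
    response += choice["message"]["content"]
print(response)
```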