Seunggg committed on
Commit
abc9568
·
verified ·
1 Parent(s): 1c0fdf3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -11
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
  from peft import PeftModel
4
  import torch
 
5
  import json
6
 
7
  model_id = "deepseek-ai/deepseek-coder-1.3b-base"
@@ -43,38 +44,42 @@ from ask_api import ask_with_sensor # 引入调用函数
43
 
44
  def respond(user_input):
45
  if not user_input.strip():
46
- return "请输入植物相关的问题 😊"
47
 
48
  # 获取 Render 实时传感器数据
49
  try:
50
  sensor_response = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
51
  sensor_data = sensor_response.json().get("sensorData", None)
 
52
  except Exception as e:
 
53
  sensor_data = None
54
 
55
- # 生成用于 LoRA 本地推理的 prompt
56
  prompt = f"用户提问:{user_input}\n"
57
  if sensor_data:
58
  prompt += f"当前传感器数据:{json.dumps(sensor_data, ensure_ascii=False)}\n"
59
  prompt += "请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"
60
 
61
- # 本地 LoRA 推理
62
  try:
63
  result = pipe(prompt)
64
- return result[0]["generated_text"]
65
  except Exception as e:
66
- return f"生成建议时出错:{str(e)}"
 
 
67
 
68
  # Gradio 界面
69
  gr.Interface(
70
  fn=respond,
71
- inputs=[
72
- gr.Textbox(lines=4, label="植物问题"),
73
- gr.Textbox(lines=2, label="传感器数据 (JSON 格式)", placeholder='{"temperature": 25, "humidity": 60}')
 
74
  ],
75
- outputs="text",
76
- title="🌱 植物助手 - 本地 LoRA + Render 联动版",
77
- description="结合本地建议和传感器分析结果。"
78
  ).launch()
79
 
80
 
 
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
  from peft import PeftModel
4
  import torch
5
+ import requests
6
  import json
7
 
8
  model_id = "deepseek-ai/deepseek-coder-1.3b-base"
 
44
 
45
def respond(user_input):
    """Answer a plant-care question, enriching the prompt with live sensor data.

    Fetches the latest readings from the Render endpoint (best effort), builds a
    prompt for the local LoRA model, and returns a pair for the two Gradio
    output boxes: (sensor_display, answer).
    """
    if not user_input.strip():
        return "暂无传感器数据", "请输入植物相关的问题 😊"

    # 获取 Render 实时传感器数据 — best-effort: any failure degrades to a
    # visible "fetch failed" message instead of crashing the UI.
    try:
        sensor_response = requests.get(
            "https://arduino-realtime.onrender.com/api/data", timeout=5
        )
        # Without this, an HTTP error page would be parsed as if it were data;
        # raising here routes HTTP failures into the except branch below.
        sensor_response.raise_for_status()
        sensor_data = sensor_response.json().get("sensorData", None)
        sensor_display = (
            json.dumps(sensor_data, ensure_ascii=False, indent=2)
            if sensor_data
            else "暂无传感器数据"
        )
    except Exception as e:  # network / HTTP / JSON errors all land here
        sensor_display = "⚠️ 获取失败:" + str(e)
        sensor_data = None

    # 构建提示词 — only mention sensor data when we actually have some.
    prompt = f"用户提问:{user_input}\n"
    if sensor_data:
        prompt += f"当前传感器数据:{json.dumps(sensor_data, ensure_ascii=False)}\n"
    prompt += "请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"

    # 模型生成 — the model/pipeline is loaded at module level as `pipe`.
    try:
        result = pipe(prompt)
        full_text = result[0]["generated_text"]
        # text-generation pipelines echo the prompt by default; strip it so the
        # UI shows only the completion. removeprefix is a no-op if the pipeline
        # was configured with return_full_text=False, so this is safe either way.
        answer = full_text.removeprefix(prompt).strip() or full_text
    except Exception as e:
        answer = f"生成建议时出错:{str(e)}"

    return sensor_display, answer
72
 
73
# Gradio UI: a single question box in, sensor snapshot + model answer out.
demo = gr.Interface(
    fn=respond,
    inputs=gr.Textbox(lines=4, label="植物问题"),
    outputs=[
        gr.Textbox(label="🧪 当前传感器数据", lines=6, interactive=False),
        gr.Textbox(label="🤖 回答建议", lines=8, interactive=False),
    ],
    title="🌱 植物助手 - 实时联动版",
    description="结合 Render 实时传感器数据 + 本地 LoRA 模型,生成更合理建议。",
)
demo.launch()
84
 
85