paleDriver7 committed on
Commit
b3f08bb
·
verified ·
1 Parent(s): 76502d8

Update backend/app.py

Browse files
Files changed (1) hide show
  1. backend/app.py +48 -44
backend/app.py CHANGED
@@ -1,44 +1,48 @@
1
- from flask import Flask, request, jsonify, render_template, send_from_directory
2
- from flask_cors import CORS
3
- import os
4
- from llama3 import LlaMa3 # 导入您的 LlaMa3 类
5
-
6
- app = Flask(__name__)
7
- CORS(app)
8
-
9
- # 实例化 LlaMa3 模型
10
- llama3_model = LlaMa3()
11
-
12
- @app.route('/')
13
- def index():
14
- # 返回 HTML 页面
15
- return render_template('index.html')
16
-
17
- @app.route('/chat', methods=['POST'])
18
- def chat():
19
- # 获取前端发送的用户消息
20
- user_message = request.json.get('message', '')
21
-
22
- if not user_message.strip():
23
- return jsonify({"response": "请输入有效内容!"}), 400
24
-
25
- try:
26
- # 构造聊天上下文
27
- messages = [{"role": "user", "content": user_message}]
28
-
29
- # 调用 LlaMa3 的 chat 方法生成回复
30
- ai_response = llama3_model.chat(messages)
31
-
32
- # 返回 AI 的回复
33
- return jsonify({"response": ai_response})
34
- except Exception as e:
35
- print(f"Error during llama3 call: {e}")
36
- return jsonify({"response": "发生错误,请稍后重试!"}), 500
37
-
38
- @app.route('/favicon.ico')
39
- def favicon():
40
- return send_from_directory(os.path.join(app.root_path, 'static'),
41
- 'favicon.ico', mimetype='image/vnd.microsoft.icon')
42
-
43
- if __name__ == '__main__':
44
- app.run(debug=True, host='127.0.0.1', port=5000)
 
 
 
 
 
1
from flask import Flask, request, jsonify, render_template, send_from_directory
from flask_cors import CORS

import sys
import os
sys.path.append(os.path.dirname(__file__))  # ensure this file's directory is on the module search path

from llama3 import LlaMa3  # import the project-local LlaMa3 wrapper class

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the front-end

# Instantiate the LlaMa3 model once at import time so a single instance
# is shared by all requests (model loading is presumably expensive — confirm).
llama3_model = LlaMa3()
15
+
16
@app.route('/')
def index():
    """Serve the chat front-end page."""
    # Flask resolves this against the app's templates/ directory.
    page = render_template('index.html')
    return page
20
+
21
@app.route('/chat', methods=['POST'])
def chat():
    """Handle a chat request.

    Reads the user's message from the JSON request body, forwards it to the
    LlaMa3 model, and returns the model's reply as JSON.

    Returns:
        200 with {"response": <ai reply>} on success,
        400 with an error message when the message is missing/blank,
        500 with a generic error message when the model call fails.
    """
    # get_json(silent=True) returns None instead of raising when the body is
    # absent or not valid JSON; fall back to {} so .get() is always safe.
    # (request.json would raise / be None here and crash with an opaque 500.)
    payload = request.get_json(silent=True) or {}
    user_message = payload.get('message', '')

    # Guard against a non-string "message" (e.g. a number): .strip() on it
    # would raise AttributeError. Treat it like empty input -> 400.
    if not isinstance(user_message, str) or not user_message.strip():
        return jsonify({"response": "请输入有效内容!"}), 400

    try:
        # Build the single-turn chat context expected by LlaMa3.chat().
        messages = [{"role": "user", "content": user_message}]
        ai_response = llama3_model.chat(messages)
        return jsonify({"response": ai_response})
    except Exception as e:
        # Broad catch at the HTTP boundary: log the cause, return a generic 500
        # so internal details are not leaked to the client.
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "发生错误,请稍后重试!"}), 500
41
+
42
@app.route('/favicon.ico')
def favicon():
    """Serve the site favicon from the app's static directory."""
    static_dir = os.path.join(app.root_path, 'static')
    return send_from_directory(
        static_dir,
        'favicon.ico',
        mimetype='image/vnd.microsoft.icon',
    )
46
+
47
if __name__ == '__main__':
    # Development entry point only: debug=True enables the Werkzeug debugger
    # and auto-reloader — never expose it in production. Binding to 127.0.0.1
    # keeps the server reachable from the local machine only.
    app.run(debug=True, host='127.0.0.1', port=5000)