# -*- coding: utf-8 -*-
# Flask backend serving a built SPA frontend and a /chat endpoint backed by LlaMa3.
from flask import Flask, request, jsonify, render_template, send_from_directory
from flask_cors import CORS

import sys
import os
sys.path.append(os.path.dirname(__file__))  # ensure this file's directory is on the module search path

from llama3 import LlaMa3  # the project's LlaMa3 model wrapper class

# Serve the built frontend (SPA) from frontend_dist/ as the static folder.
app = Flask(__name__, static_folder='frontend_dist')
CORS(app) 

# Instantiate the LlaMa3 model once at startup; shared by all request handlers.
llama3_model = LlaMa3()

# Health check (disabled): verify the service responds
#@app.route("/")
#def health_check():
#    return "Service is running!", 200

'''
@app.route('/')
def index():
    # Return the HTML page
    #return render_template('index_s.html')
    # Serve the frontend's index.html file
    return send_from_directory(app.static_folder, 'index.html')
'''
# Serve static assets from the frontend build; fall back to the SPA entry
# page for paths that are not real files (client-side routing).
@app.route('/<path:filename>')
def serve_static(filename):
    """Return the requested static asset, or index.html for SPA routes.

    send_from_directory raises a 404 for any path that is not an actual
    file, which would break SPA deep links (e.g. /about) — and because this
    route registers first, such requests never reached the catch-all
    handlers below. Check for a real file and fall back to index.html.
    """
    target = os.path.join(app.static_folder, filename)
    if os.path.isfile(target):
        # send_from_directory also guards against path traversal.
        return send_from_directory(app.static_folder, filename)
    return send_from_directory(app.static_folder, 'index.html')

# Default route: the root and any non-asset path return the SPA's entry
# page so the frontend router can handle navigation itself.
@app.route('/')
@app.route('/<path:subpath>')
def index(subpath=None):
    """Serve the frontend entry page (index.html) regardless of subpath."""
    entry_page = 'index.html'
    return send_from_directory(app.static_folder, entry_page)

# Ensure any unknown path also returns index.html.
# NOTE(review): this rule ('/<path:path>') duplicates the catch-all routes
# registered above ('/<path:filename>' and '/<path:subpath>'); Werkzeug
# resolves earlier registrations of equally-specific rules first, so this
# handler appears unreachable — confirm before relying on it.
@app.route('/<path:path>', methods=['GET'])
def catch_all(path):
    """Fallback SPA route: return the frontend entry page for unknown GET paths."""
    return send_from_directory(app.static_folder, 'index.html')



@app.route('/chat', methods=['POST'])
def chat():
    """Chat endpoint: forward the user's message to the LlaMa3 model.

    Expects a JSON body like {"message": "..."} and returns a JSON object
    {"response": "..."} containing the model's reply. Returns 400 for a
    missing/empty message and 500 if the model call fails.
    """
    # request.json raises/aborts (or yields None) when the body is missing
    # or not valid JSON, which crashed the old `.get` call with an
    # AttributeError. get_json(silent=True) returns None instead; coerce
    # to an empty dict so .get is always safe.
    payload = request.get_json(silent=True) or {}
    user_message = payload.get('message', '')

    # Guard against non-string payloads (a number/list here would crash
    # .strip()) and whitespace-only input. Error text stays in Chinese to
    # match the frontend's language.
    if not isinstance(user_message, str) or not user_message.strip():
        return jsonify({"response": "请输入有效内容!"}), 400

    try:
        # Build the single-turn chat context expected by LlaMa3.chat().
        messages = [{"role": "user", "content": user_message}]

        # Ask the model for a reply.
        ai_response = llama3_model.chat(messages)

        return jsonify({"response": ai_response})
    except Exception as e:
        # Top-level boundary: log the failure and return JSON + 500 so the
        # frontend never receives an HTML stack trace.
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "发生错误,请稍后重试!"}), 500

@app.route('/favicon.ico')
def favicon():
    """Serve the site icon from the app's local static/ directory."""
    icon_dir = os.path.join(app.root_path, 'static')
    return send_from_directory(
        icon_dir, 'favicon.ico', mimetype='image/vnd.microsoft.icon'
    )

if __name__ == '__main__':
    # Development server only: debug=True enables the Werkzeug reloader and
    # the interactive debugger — never expose this configuration publicly.
    # Bound to localhost on port 7860.
    app.run(debug=True, host='127.0.0.1', port=7860)