# Debug: print the module search path to confirm the llama3 module can be imported
import sys
print(sys.path)


from flask import Flask, request, jsonify, render_template, send_from_directory
from flask_cors import CORS
import os
from llama3 import LlaMa3  # import your LlaMa3 class

app = Flask(__name__)
CORS(app)  # enable cross-origin requests so the frontend can call this API

# Instantiate the LlaMa3 model
llama3_model = LlaMa3()
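
# Assumption: the llama3 module is not shown in this file. The routes below rely on
# LlaMa3 exposing a chat(messages) method that accepts a list of
# {"role": ..., "content": ...} dicts and returns the reply text as a string.
# A minimal, hypothetical stand-in with that interface (useful for exercising the
# routes without loading the real model) could look like:
#
#     class FakeLlaMa3:
#         def chat(self, messages):
#             # Echo the last message content back as the "reply"
#             return "echo: " + messages[-1]["content"]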

@app.route('/')
def index():
    # Return the HTML page (render_template looks it up in the templates/ directory)
    return render_template('index.html')

@app.route('/chat', methods=['POST'])
def chat():
    # Get the user message sent by the frontend
    user_message = request.json.get('message', '')

    if not user_message.strip():
        return jsonify({"response": "Please enter a valid message!"}), 400

    try:
        # Build the chat context
        messages = [{"role": "user", "content": user_message}]
        
        # Call LlaMa3's chat method to generate a reply
        ai_response = llama3_model.chat(messages)
        
        # Return the AI's reply
        return jsonify({"response": ai_response})
    except Exception as e:
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "An error occurred, please try again later!"}), 500

@app.route('/favicon.ico')
def favicon():
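    # Serve the favicon from the static/ directory (assumes a static/favicon.ico file exists)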
    return send_from_directory(os.path.join(app.root_path, 'static'),
                               'favicon.ico', mimetype='image/vnd.microsoft.icon')

if __name__ == '__main__':
    app.run(debug=True, host='127.0.0.1', port=5000)
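
# Usage sketch (assumes the server above is running locally on port 5000):
#
#     curl -X POST http://127.0.0.1:5000/chat \
#          -H "Content-Type: application/json" \
#          -d '{"message": "Hello"}'
#
# On success the endpoint returns JSON of the form {"response": "<model reply>"}.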