"""Flask backend: serves a built SPA frontend and a /chat endpoint backed by LlaMa3."""
from flask import Flask, request, jsonify, render_template, send_from_directory
from flask_cors import CORS
import sys
import os

# Ensure the script's own directory is on the module search path so the
# local `llama3` module resolves regardless of the current working directory.
sys.path.append(os.path.dirname(__file__))
from llama3 import LlaMa3  # local model wrapper

app = Flask(__name__, static_folder='frontend_dist')
CORS(app)

# Instantiate the model once at startup; model loading is presumably
# expensive, so it must not happen per-request.
llama3_model = LlaMa3()


@app.route('/')
def index():
    """Serve the SPA entry point."""
    return send_from_directory(app.static_folder, 'index.html')


@app.route('/<path:filename>')
def serve_static(filename):
    """Serve a static asset; fall back to index.html for SPA client routes.

    NOTE(fix): the original registered three handlers (serve_static, index,
    catch_all) all on the bare rule '/', and serve_static took a `filename`
    parameter the rule never supplied — every request raised TypeError.
    Restoring the `<path:...>` converter and merging the index.html fallback
    here recovers the intended behavior described by the original comments.
    """
    asset_path = os.path.join(app.static_folder, filename)
    if os.path.isfile(asset_path):
        return send_from_directory(app.static_folder, filename)
    # Unknown path: return index.html so the frontend router can handle it.
    return send_from_directory(app.static_folder, 'index.html')


@app.route('/chat', methods=['POST'])
def chat():
    """Generate an AI reply for the user's message.

    Expects JSON body {"message": "..."}; returns {"response": "..."}.
    Responds 400 on empty/blank input and 500 if the model call fails.
    """
    # `request.json` is None for non-JSON bodies; guard before .get().
    user_message = (request.json or {}).get('message', '')
    if not user_message.strip():
        return jsonify({"response": "请输入有效内容!"}), 400

    try:
        # Build the single-turn chat context expected by LlaMa3.chat().
        messages = [{"role": "user", "content": user_message}]
        ai_response = llama3_model.chat(messages)
        return jsonify({"response": ai_response})
    except Exception as e:
        # Broad catch is deliberate at this request boundary: log and
        # return a generic error rather than leak internals to the client.
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "发生错误,请稍后重试!"}), 500


@app.route('/favicon.ico')
def favicon():
    """Serve the favicon from the static directory."""
    return send_from_directory(os.path.join(app.root_path, 'static'),
                               'favicon.ico',
                               mimetype='image/vnd.microsoft.icon')


if __name__ == '__main__':
    app.run(debug=True, host='127.0.0.1', port=7860)