paleDriver7 committed on
Commit
b891a21
·
verified ·
1 Parent(s): ed45b2b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -1
app.py CHANGED
@@ -1,7 +1,55 @@
 
1
  from fastapi import FastAPI
2
 
3
app = FastAPI()


@app.get("/")
def greet_json():
    """Return a fixed hello-world JSON payload."""
    return dict(Hello="World!")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '''
2
  from fastapi import FastAPI
3
 
4
  app = FastAPI()
5
 
6
  @app.get("/")
7
  def greet_json():
8
+ return {"Hello": "World!"}
9
+
10
+ '''
11
+
12
+ from flask import Flask, request, jsonify, render_template, send_from_directory
13
+ from flask_cors import CORS
14
+ import os
15
+ from llama3 import LlaMa3 # 导入您的 LlaMa3 类
16
+
17
app = Flask(__name__)
# Allow cross-origin requests so a separately-hosted front-end can call /chat.
CORS(app)

# Instantiate the LlaMa3 model once at startup; reused by every /chat request.
llama3_model = LlaMa3()
22
+
23
@app.route('/')
def index():
    """Serve the chat front-end HTML page."""
    page = render_template('index.html')
    return page
27
+
28
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn: read the user's message, ask LlaMa3, return JSON.

    Expects a JSON body like ``{"message": "..."}``.

    Returns:
        200 with ``{"response": <model reply>}`` on success,
        400 when the message is missing/empty/non-string,
        500 when the model call fails.
    """
    # Bug fix: request.json raises (415/400 from werkzeug) when the body is
    # missing or not valid JSON, so the custom error below was unreachable in
    # that case. get_json(silent=True) returns None instead of raising.
    payload = request.get_json(silent=True) or {}
    user_message = payload.get('message', '')

    # Guard non-string payloads (e.g. numbers) before calling .strip().
    if not isinstance(user_message, str) or not user_message.strip():
        return jsonify({"response": "请输入有效内容!"}), 400

    try:
        # Single-turn chat context in the shape LlaMa3.chat() expects.
        messages = [{"role": "user", "content": user_message}]

        # Ask the model for a reply.
        ai_response = llama3_model.chat(messages)

        return jsonify({"response": ai_response})
    except Exception as e:
        # Broad catch is deliberate: any model failure maps to a friendly 500
        # for the client while the details are logged server-side.
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "发生错误,请稍后重试!"}), 500
48
+
49
@app.route('/favicon.ico')
def favicon():
    """Serve the site icon from the application's static directory."""
    static_dir = os.path.join(app.root_path, 'static')
    return send_from_directory(
        static_dir,
        'favicon.ico',
        mimetype='image/vnd.microsoft.icon',
    )
53
+
54
if __name__ == '__main__':
    # Run the Flask development server: debug mode, loopback only, port 5000.
    # Not suitable for production (use a WSGI server such as gunicorn there).
    app.run(debug=True, host='127.0.0.1', port=5000)