dfa32412 committed on
Commit
1b38468
·
verified ·
1 Parent(s): afb9586

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +157 -157
app.py CHANGED
@@ -1,157 +1,157 @@
1
# encoding:utf-8
import json
import time

import requests
from flask import Flask, request, Response, stream_with_context, jsonify
from flask_cors import CORS, cross_origin

# Flask application that proxies OpenAI-style chat requests.
app = Flask(__name__)

# Chat models this proxy is willing to forward; anything else is rejected.
models = [
    "gpt-4o",
    "gemini-pro",
    "claude-sonnet-3.5",
    "deepseek-v3",
    "deepseek-r1",
    "blackboxai-pro",
]

# Upstream endpoint that actually serves the completions.
OPENAI_API_URL = "https://www.blackbox.ai/api/chat"
13
-
14
@app.route('/hf/v1/chat/completions', methods=['POST'])
@cross_origin(origin='*')  # allow requests from any origin
def openai_chat_stream():
    """Proxy an OpenAI-style chat-completion request to the Blackbox API.

    Reads an OpenAI-compatible JSON body from the client, forwards it to
    ``OPENAI_API_URL``, and returns either a Server-Sent-Events stream of
    ``chat.completion.chunk`` objects (when ``"stream"`` is true) or a
    single aggregated ``chat.completion`` JSON object.

    Returns:
        A Flask ``Response`` (SSE stream or JSON), or an ``(error, status)``
        tuple on failure.
    """
    data = request.json

    model = data["model"]
    if model not in models:
        return "不支持该模型", 500

    messages = data["messages"]
    # The upstream API takes the system prompt as a separate field,
    # so pull it out of the message list (first system message wins).
    prompt = None
    for m in messages:
        if m["role"] == "system":
            prompt = m["content"]
            break

    # Payload shape expected by the Blackbox chat endpoint.
    request_body = {
        "messages": messages,
        "agentMode": {},
        "id": "NEX4Hei",
        "previewToken": None,
        "userId": None,
        "codeModelMode": True,
        "trendingAgentMode": {},
        "isMicMode": False,
        "userSystemPrompt": prompt,
        "maxTokens": 10240,
        "playgroundTopP": None,
        "playgroundTemperature": None,
        "isChromeExt": False,
        "githubToken": "",
        "clickedAnswer2": False,
        "clickedAnswer3": False,
        "clickedForceWebSearch": False,
        "visitFromDelta": False,
        "isMemoryEnabled": False,
        "mobileClient": False,
        "userSelectedModel": model,
        "validated": "00f37b34-a166-4efb-bce5-1312d87f2f94",
        "imageGenerationMode": False,
        "webSearchModePrompt": False,
        "deepSearchMode": False,
        "domains": None,
        "vscodeClient": False,
    }

    # Mimic a browser request so the upstream endpoint accepts it.
    headers = {
        "Content-Type": "application/json",
        "origin": "https://www.blackbox.ai",
        "referer": "https://www.blackbox.ai",
        "Accept": "*/*",
        "Connection": "keep-alive",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
    }

    try:
        enable_stream = data.get("stream", False)

        response = requests.post(
            OPENAI_API_URL,
            headers=headers,
            json=request_body,
            stream=enable_stream,  # stream the upstream body when requested
        )

        # Relay upstream failures verbatim with their status code.
        if response.status_code != 200:
            return jsonify(
                {'error': f"OpenAI API error: {response.status_code}, {response.text}"}), response.status_code

        if enable_stream:
            def generate():
                # Re-frame each upstream line as an OpenAI SSE chunk.
                for chunk in response.iter_lines():
                    if not chunk:
                        continue
                    body = {
                        "id": "chatcmpl-5e9568c7-31d8-4a25-9eaf-ac5f872fb461",
                        "object": "chat.completion.chunk",
                        # OpenAI's schema uses an integer Unix timestamp.
                        "created": int(time.time()),
                        "model": data["model"],
                        "choices": [
                            {
                                "index": 0,
                                "delta": {
                                    "content": chunk.decode("utf-8"),
                                    "role": "assistant"
                                },
                                "finish_reason": None
                            }
                        ],
                        "usage": None
                    }
                    yield "data: " + json.dumps(body, ensure_ascii=False) + "\n\n"
                # Per the OpenAI SSE protocol the terminator must itself be a
                # `data:` event; a bare "[DONE]" breaks standard clients.
                yield "data: [DONE]\n\n"

            return Response(stream_with_context(generate()), content_type='text/event-stream')

        # Non-streaming: aggregate all upstream lines into one message.
        text = ""
        for chunk in response.iter_lines():
            if chunk:
                text += chunk.decode("utf-8") + "\n\n"

        return jsonify({
            "id": "chatcmpl-5e9568c7-31d8-4a25-9eaf-ac5f872fb461",
            # A non-streamed completion is "chat.completion", not "...chunk".
            "object": "chat.completion",
            "created": int(time.time()),
            "model": data["model"],
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "content": text,
                        "role": "assistant"
                    },
                    "finish_reason": "stop"
                }
            ],
            "usage": None
        })

    except Exception as e:
        # Surface any proxy failure as a JSON 500 instead of an HTML page.
        return jsonify({'error': str(e)}), 500
142
-
143
@app.route('/hf/v1/models', methods=['POST','GET'])
@cross_origin(origin='*')  # allow requests from any origin
def get_models():
    """Return the supported model list in OpenAI ``/v1/models`` format."""
    data = [{"id": model, "object": "model"} for model in models]
    return jsonify({
        # OpenAI's list responses carry an explicit "object": "list" envelope.
        "object": "list",
        "data": data
    })
154
-
155
-
156
if __name__ == '__main__':
    # Run the Flask development server when executed directly.
    app.run()
 
1
# encoding:utf-8
import json
import time

import requests
from flask import Flask, request, Response, stream_with_context, jsonify
from flask_cors import CORS, cross_origin

# Flask application that proxies OpenAI-style chat requests.
app = Flask(__name__)

# Chat models this proxy is willing to forward; anything else is rejected.
models = [
    "gpt-4o",
    "gemini-pro",
    "claude-sonnet-3.5",
    "deepseek-v3",
    "deepseek-r1",
    "blackboxai-pro",
]

# Upstream endpoint that actually serves the completions.
OPENAI_API_URL = "https://www.blackbox.ai/api/chat"
13
+
14
@app.route('/hf/v1/chat/completions', methods=['POST'])
@cross_origin(origin='*')  # allow requests from any origin
def openai_chat_stream():
    """Proxy an OpenAI-style chat-completion request to the Blackbox API.

    Reads an OpenAI-compatible JSON body from the client, forwards it to
    ``OPENAI_API_URL``, and returns either a Server-Sent-Events stream of
    ``chat.completion.chunk`` objects (when ``"stream"`` is true) or a
    single aggregated ``chat.completion`` JSON object.

    Returns:
        A Flask ``Response`` (SSE stream or JSON), or an ``(error, status)``
        tuple on failure.
    """
    data = request.json

    model = data["model"]
    if model not in models:
        return "不支持该模型", 500

    messages = data["messages"]
    # The upstream API takes the system prompt as a separate field,
    # so pull it out of the message list (first system message wins).
    prompt = None
    for m in messages:
        if m["role"] == "system":
            prompt = m["content"]
            break

    # Payload shape expected by the Blackbox chat endpoint.
    request_body = {
        "messages": messages,
        "agentMode": {},
        "id": "NEX4Hei",
        "previewToken": None,
        "userId": None,
        "codeModelMode": True,
        "trendingAgentMode": {},
        "isMicMode": False,
        "userSystemPrompt": prompt,
        "maxTokens": 10240,
        "playgroundTopP": None,
        "playgroundTemperature": None,
        "isChromeExt": False,
        "githubToken": "",
        "clickedAnswer2": False,
        "clickedAnswer3": False,
        "clickedForceWebSearch": False,
        "visitFromDelta": False,
        "isMemoryEnabled": False,
        "mobileClient": False,
        "userSelectedModel": model,
        "validated": "00f37b34-a166-4efb-bce5-1312d87f2f94",
        "imageGenerationMode": False,
        "webSearchModePrompt": False,
        "deepSearchMode": False,
        "domains": None,
        "vscodeClient": False,
    }

    # Mimic a browser request so the upstream endpoint accepts it.
    headers = {
        "Content-Type": "application/json",
        "origin": "https://www.blackbox.ai",
        "referer": "https://www.blackbox.ai",
        "Accept": "*/*",
        "Connection": "keep-alive",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
    }

    try:
        enable_stream = data.get("stream", False)

        response = requests.post(
            OPENAI_API_URL,
            headers=headers,
            json=request_body,
            stream=enable_stream,  # stream the upstream body when requested
        )

        # Relay upstream failures verbatim with their status code.
        if response.status_code != 200:
            return jsonify(
                {'error': f"OpenAI API error: {response.status_code}, {response.text}"}), response.status_code

        if enable_stream:
            def generate():
                # Re-frame each upstream line as an OpenAI SSE chunk.
                for chunk in response.iter_lines():
                    if not chunk:
                        continue
                    body = {
                        "id": "chatcmpl-5e9568c7-31d8-4a25-9eaf-ac5f872fb461",
                        "object": "chat.completion.chunk",
                        # OpenAI's schema uses an integer Unix timestamp.
                        "created": int(time.time()),
                        "model": data["model"],
                        "choices": [
                            {
                                "index": 0,
                                "delta": {
                                    "content": chunk.decode("utf-8"),
                                    "role": "assistant"
                                },
                                "finish_reason": None
                            }
                        ],
                        "usage": None
                    }
                    yield "data: " + json.dumps(body, ensure_ascii=False) + "\n\n"
                # Per the OpenAI SSE protocol the terminator must itself be a
                # `data:` event; a bare "[DONE]" breaks standard clients.
                yield "data: [DONE]\n\n"

            return Response(stream_with_context(generate()), content_type='text/event-stream')

        # Non-streaming: aggregate all upstream lines into one message.
        text = ""
        for chunk in response.iter_lines():
            if chunk:
                text += chunk.decode("utf-8") + "\n\n"

        return jsonify({
            "id": "chatcmpl-5e9568c7-31d8-4a25-9eaf-ac5f872fb461",
            # A non-streamed completion is "chat.completion", not "...chunk".
            "object": "chat.completion",
            "created": int(time.time()),
            "model": data["model"],
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "content": text,
                        "role": "assistant"
                    },
                    "finish_reason": "stop"
                }
            ],
            "usage": None
        })

    except Exception as e:
        # Surface any proxy failure as a JSON 500 instead of an HTML page.
        return jsonify({'error': str(e)}), 500
142
+
143
@app.route('/hf/v1/models', methods=['POST','GET'])
@cross_origin(origin='*')  # allow requests from any origin
def get_models():
    """Return the supported model list in OpenAI ``/v1/models`` format."""
    data = [{"id": model, "object": "model"} for model in models]
    return jsonify({
        # OpenAI's list responses carry an explicit "object": "list" envelope.
        "object": "list",
        "data": data
    })
154
+
155
+
156
if __name__ == '__main__':
    # Bind all interfaces on port 7860 (presumably the Hugging Face Spaces
    # convention — confirm against the deployment config).
    app.run(host='0.0.0.0', port=7860)