rkihacker committed
Commit ac94146 · verified · 1 Parent(s): 65bf292

Create main.py

Files changed (1)
  1. main.py +75 -0
main.py ADDED
@@ -0,0 +1,75 @@
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+ import requests
+ import uuid
+ from datetime import datetime
+
+ app = FastAPI()
+
+ class ChatRequest(BaseModel):
+     model: str
+     messages: list
+
+ @app.post("/v1/chat/completions")
+ async def chat_completions(chat_request: ChatRequest):
+     # Extract the user prompt from the last user message
+     user_prompt = ""
+     for msg in reversed(chat_request.messages):
+         if msg["role"] == "user":
+             user_prompt = msg["content"]
+             break
+
+     # Prepare the payload for blackbox.ai
+     payload = {
+         "query": user_prompt,
+         "agentMode": True
+     }
+
+     # Define headers (replicating from the original cURL as needed)
+     url = "https://www.blackbox.ai/api/image-generator"
+     headers = {
+         "sec-ch-ua-full-version-list": "\"Google Chrome\";v=\"131.0.6778.266\", \"Chromium\";v=\"131.0.6778.266\", \"Not_A Brand\";v=\"24.0.0.0\"",
+         "sec-ch-ua-platform": "\"Windows\"",
+         "Referer": "https://www.blackbox.ai/agent/create/new",
+         "sec-ch-ua": "\"Google Chrome\";v=\"131\", \"Chromium\";v=\"131\", \"Not_A Brand\";v=\"24\"",
+         "sec-ch-ua-bitness": "\"64\"",
+         "sec-ch-ua-model": "\"\"",
+         "sec-ch-ua-mobile": "?0",
+         "sec-ch-ua-arch": "\"x86\"",
+         "sec-ch-ua-full-version": "\"131.0.6778.266\"",
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "Content-Type": "text/plain;charset=UTF-8",
+         "sec-ch-ua-platform-version": "\"19.0.0\""
+     }
+
+     response = requests.post(url, json=payload, headers=headers, timeout=30)
+     response_json = response.json()
+     markdown_result = response_json.get("markdown", "")
+
+     # Post-process the response from Blackbox if needed
+     cleaned_full_response = markdown_result.strip()
+
+     # Very simplistic token counting
+     prompt_tokens = len(user_prompt.split())
+     completion_tokens = len(cleaned_full_response.split())
+
+     # Build an OpenAI-like response
+     request_id = str(uuid.uuid4())
+     return {
+         "id": request_id,
+         "object": "chat.completion",
+         "created": int(datetime.now().timestamp()),
+         "model": chat_request.model,
+         "choices": [
+             {
+                 "index": 0,
+                 "message": {"role": "assistant", "content": cleaned_full_response},
+                 "finish_reason": "stop",
+             }
+         ],
+         "usage": {
+             "prompt_tokens": prompt_tokens,
+             "completion_tokens": completion_tokens,
+             "total_tokens": prompt_tokens + completion_tokens,
+         },
+     }
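A minimal way to exercise the new endpoint locally (not part of this commit): serve main.py with uvicorn and POST an OpenAI-style chat payload. The port, model string, and prompt below are placeholders chosen for illustration; the sketch only assumes uvicorn and requests are installed.

# Hypothetical smoke test, assuming the app is running via:
#     uvicorn main:app --port 8000
import requests

payload = {
    "model": "blackbox-image",  # placeholder; the proxy echoes it back in the response
    "messages": [
        {"role": "user", "content": "A watercolor fox in a snowy forest"}
    ],
}

resp = requests.post(
    "http://127.0.0.1:8000/v1/chat/completions",
    json=payload,
    timeout=60,
)
resp.raise_for_status()

data = resp.json()
# The assistant message carries whatever markdown blackbox.ai returned.
print(data["choices"][0]["message"]["content"])
print(data["usage"])

Because the handler reads only the last user message and returns a single non-streaming choice, any OpenAI-compatible client should work as long as it does not request streaming.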