from fastapi import FastAPI, Request, Header, HTTPException
from fastapi.responses import JSONResponse, Response, StreamingResponse
import httpx
import socket

app = FastAPI()

REAL_API_KEY = "sk-94NDKhKQkhKoYnY65mg4NAIFK5BqNiCtxo8u3PsDpb0IucZt"
BASE_URL = "https://fast.typegpt.net"
PUBLIC_AUTH_TOKEN = "TypeGPT-Free4ALL"


@app.on_event("startup")
async def startup_event():
    try:
        hostname = socket.gethostname()
        server_ip = socket.gethostbyname(hostname)
        print("===== Server Started =====")
        print(f"Server IP: {server_ip}")
    except Exception as e:
        print(f"Could not determine server IP: {e}")


@app.api_route("/{path:path}", methods=["GET", "POST"])
async def proxy(request: Request, path: str, authorization: str = Header(None)):
    # Require the public placeholder token before forwarding anything upstream.
    if not authorization or not authorization.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing or malformed Authorization header.")

    token = authorization.replace("Bearer ", "").strip()
    if token != PUBLIC_AUTH_TOKEN:
        raise HTTPException(status_code=401, detail="Invalid Authorization token. Use 'TypeGPT-Free4ALL'.")

    # Swap in the real upstream credentials and forward the original path.
    target_url = f"{BASE_URL}/{path}"
    headers = {
        "Authorization": f"Bearer {REAL_API_KEY}",
        "Content-Type": request.headers.get("content-type", "application/json"),
        "Accept": "application/json",
        "User-Agent": "FastAPI-Proxy"
    }

    body = await request.body()
    # Crude detection of streaming requests by inspecting the raw JSON body.
    is_stream = b'"stream":true' in body or b'"stream": true' in body

    print(f"Forwarding to: {target_url}")
    print(f"Request Body (first 200 chars): {body[:200]}")
    print(f"Stream mode: {is_stream}")

    if is_stream:
        # Streaming: open a dedicated client and keep it (and the upstream
        # response) alive until the streamed body has been fully consumed.
        client = httpx.AsyncClient(timeout=60)
        try:
            upstream_request = client.build_request(
                method=request.method,
                url=target_url,
                headers=headers,
                content=body
            )
            upstream_response = await client.send(upstream_request, stream=True)
        except httpx.RequestError as e:
            await client.aclose()
            raise HTTPException(status_code=502, detail=f"Backend request failed: {e}")

        async def stream_generator():
            try:
                async for chunk in upstream_response.aiter_bytes():
                    yield chunk
            finally:
                await upstream_response.aclose()
                await client.aclose()

        return StreamingResponse(
            stream_generator(),
            status_code=upstream_response.status_code,
            media_type="text/event-stream"
        )

    # Non-streaming: a one-shot request with a short-lived client.
    async with httpx.AsyncClient(timeout=60) as client:
        try:
            response = await client.request(
                method=request.method,
                url=target_url,
                headers=headers,
                content=body
            )
        except httpx.RequestError as e:
            raise HTTPException(status_code=502, detail=f"Backend request failed: {e}")
print(f"β©οΈ TypeGPT Status: {response.status_code}")
print(f"π§Ύ Response Snippet: {response.text[:200]}")
try:
return JSONResponse(content=response.json(), status_code=response.status_code)
except Exception:
return Response(
content=response.content,
status_code=response.status_code,
media_type=response.headers.get("content-type", "text/plain")
)
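

# Minimal sketch of a local entry point, assuming uvicorn is installed and that
# port 7860 (the usual Hugging Face Spaces port) is appropriate; on Spaces the
# platform normally launches the app itself, so this only matters when running
# the file directly.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)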
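
# Example client call (a sketch under assumptions): this assumes the proxy is
# reachable at http://localhost:7860 and that the upstream at fast.typegpt.net
# exposes an OpenAI-style /v1/chat/completions route; the model name below is
# illustrative only.
#
#   curl http://localhost:7860/v1/chat/completions \
#     -H "Authorization: Bearer TypeGPT-Free4ALL" \
#     -H "Content-Type: application/json" \
#     -d '{"model": "gpt-4o-mini", "messages": [{"role": "user", "content": "Hello"}], "stream": false}'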