from fastapi import FastAPI, Request, Header, HTTPException
from fastapi.responses import JSONResponse, Response, StreamingResponse
import httpx
import socket

app = FastAPI()

# Your real TypeGPT API key
REAL_API_KEY = "sk-kwWVUQPDsLemvilLcyzSSWRzo8sctCzxzlbdN0ZC5ZUCCv0m"
BASE_URL = "https://fast.typegpt.net"
PUBLIC_AUTH_TOKEN = "TypeGPT-Free4ALL"

@app.on_event("startup")
async def startup_event():
    try:
        hostname = socket.gethostname()
        server_ip = socket.gethostbyname(hostname)
        print("===== Server Started =====")
        print(f"📡 Server IP: {server_ip}")
    except Exception as e:
        print(f"⚠️ Could not determine server IP: {e}")

@app.api_route("/{path:path}", methods=["GET", "POST"])
async def proxy(request: Request, path: str, authorization: str = Header(None)):
    # Validate Authorization header
    if not authorization or not authorization.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing or malformed Authorization header.")

    token = authorization.replace("Bearer ", "").strip()
    if token != PUBLIC_AUTH_TOKEN:
        raise HTTPException(status_code=401, detail="Invalid Authorization token. Use 'TypeGPT-Free4ALL'.")

    # Build backend request
    target_url = f"{BASE_URL}/{path}"
    headers = {
        "Authorization": f"Bearer {REAL_API_KEY}",
        "Content-Type": request.headers.get("content-type", "application/json"),
        "Accept": "text/event-stream",
        "User-Agent": "FastAPI-Proxy"
    }

    body = await request.body()
    is_stream = b'"stream":true' in body or b'"stream": true' in body

    print(f"Forwarding to: {target_url}")
    print(f"📦 Request Body (first 200 bytes): {body[:200]}")
    print(f"Stream mode: {is_stream}")

    if is_stream:
        # Stream response immediately, as chunks arrive
        async def stream_generator():
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    method=request.method,
                    url=target_url,
                    headers=headers,
                    content=body
                ) as upstream_response:
                    async for chunk in upstream_response.aiter_raw():
                        yield chunk  # raw bytes streamed directly

        return StreamingResponse(
            stream_generator(),
            status_code=200,
            media_type="text/event-stream"
        )
    else:
        # Normal JSON response
        async with httpx.AsyncClient(timeout=60) as client:
            try:
                response = await client.request(
                    method=request.method,
                    url=target_url,
                    headers=headers,
                    content=body
                )
            except httpx.RequestError as e:
                raise HTTPException(status_code=502, detail=f"Backend request failed: {e}")

        print(f"↩️ TypeGPT Status: {response.status_code}")
        print(f"🧾 Response Snippet: {response.text[:200]}")

        try:
            return JSONResponse(content=response.json(), status_code=response.status_code)
        except Exception:
            return Response(
                content=response.content,
                status_code=response.status_code,
                media_type=response.headers.get("content-type", "text/plain")
            )
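
# --- Example client call (illustrative sketch, not part of the proxy itself) ---
# Any OpenAI-style HTTP client can point at this proxy and authenticate with the
# public token. The URL, port, and model name below are placeholders/assumptions;
# replace them with wherever this app is actually served and whatever model the
# backend accepts.
#
#   import httpx
#
#   resp = httpx.post(
#       "http://localhost:7860/v1/chat/completions",        # hypothetical proxy URL
#       headers={"Authorization": "Bearer TypeGPT-Free4ALL"},
#       json={
#           "model": "gpt-3.5-turbo",                        # example model name
#           "messages": [{"role": "user", "content": "Hello"}],
#           "stream": False,
#       },
#       timeout=60,
#   )
#   print(resp.json())
#
# To run the proxy locally (assuming this file is saved as app.py):
#   uvicorn app:app --host 0.0.0.0 --port 7860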