from fastapi import FastAPI, Request, Header, HTTPException
from fastapi.responses import JSONResponse, Response, StreamingResponse
import httpx
import socket

app = FastAPI()

# Your real TypeGPT API key
REAL_API_KEY = "sk-kwWVUQPDsLemvilLcyzSSWRzo8sctCzxzlbdN0ZC5ZUCCv0m"
BASE_URL = "https://fast.typegpt.net"
PUBLIC_AUTH_TOKEN = "TypeGPT-Free4ALL"

@app.on_event("startup")
async def startup_event():
    # Log the server's resolved IP once at startup so it appears in the logs.
    try:
        hostname = socket.gethostname()
        server_ip = socket.gethostbyname(hostname)
        print("===== Server Started =====")
        print(f"📡 Server IP: {server_ip}")
    except Exception as e:
        print(f"⚠️ Could not determine server IP: {e}")

# Catch-all route: every request path is validated here and forwarded to TypeGPT.
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "PATCH"])
async def proxy(request: Request, path: str, authorization: str = Header(None)):
    # Validate the Authorization header against the public token
    if not authorization or not authorization.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing or malformed Authorization header.")
    token = authorization.replace("Bearer ", "").strip()
    if token != PUBLIC_AUTH_TOKEN:
        raise HTTPException(status_code=401, detail="Invalid Authorization token. Use 'TypeGPT-Free4ALL'.")
    # Build backend request with the real API key swapped in
    target_url = f"{BASE_URL}/{path}"
    headers = {
        "Authorization": f"Bearer {REAL_API_KEY}",
        "Content-Type": request.headers.get("content-type", "application/json"),
        "Accept": "text/event-stream",
        "User-Agent": "FastAPI-Proxy"
    }
    body = await request.body()
    # Naive streaming detection: look for a '"stream": true' flag in the raw body
    is_stream = b'"stream":true' in body or b'"stream": true' in body
    print(f"Forwarding to: {target_url}")
    print(f"📦 Request Body (first 200 chars): {body[:200]}")
    print(f"Stream mode: {is_stream}")
    if is_stream:
        # Stream the upstream response immediately, as chunks arrive
        async def stream_generator():
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    method=request.method,
                    url=target_url,
                    headers=headers,
                    content=body
                ) as upstream_response:
                    async for chunk in upstream_response.aiter_raw():
                        yield chunk  # raw bytes streamed directly

        return StreamingResponse(
            stream_generator(),
            status_code=200,
            media_type="text/event-stream"
        )
    else:
        # Normal (non-streaming) JSON response
        async with httpx.AsyncClient(timeout=60) as client:
            try:
                response = await client.request(
                    method=request.method,
                    url=target_url,
                    headers=headers,
                    content=body
                )
            except httpx.RequestError as e:
                raise HTTPException(status_code=502, detail=f"Backend request failed: {e}")

        print(f"↩️ TypeGPT Status: {response.status_code}")
        print(f"🧾 Response Snippet: {response.text[:200]}")

        try:
            return JSONResponse(content=response.json(), status_code=response.status_code)
        except Exception:
            return Response(
                content=response.content,
                status_code=response.status_code,
                media_type=response.headers.get("content-type", "text/plain")
            )
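
# --- Usage sketch (assumes uvicorn is installed and this file is saved as app.py;
# port 7860 is the usual Hugging Face Spaces default, adjust for other hosts) ---
# Run locally:
#   uvicorn app:app --host 0.0.0.0 --port 7860
# Then call the proxy with the public token instead of the real key, using an
# OpenAI-style path such as /v1/chat/completions (path and model name below are
# illustrative, not confirmed by this file):
#   curl http://localhost:7860/v1/chat/completions \
#     -H "Authorization: Bearer TypeGPT-Free4ALL" \
#     -H "Content-Type: application/json" \
#     -d '{"model": "gpt-4o-mini", "messages": [{"role": "user", "content": "Hi"}], "stream": false}'
if __name__ == "__main__":
    # Optional local entry point; a Space normally starts uvicorn for you.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)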