from fastapi import FastAPI, Depends, HTTPException, Request, Response
from fastapi.responses import JSONResponse, StreamingResponse

from api.auth import verify_app_secret
from api.config import ALLOWED_MODELS
from api.models import ChatRequest
from api.utils import process_response
from api.logger import setup_logger

logger = setup_logger(__name__)

app = FastAPI(
    title="Your API",
    docs_url=None,  # Disable Swagger UI
    redoc_url=None,  # Disable ReDoc
    openapi_url=None,  # Disable OpenAPI schema
)


@app.post("/v1/chat/completions")
async def chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    logger.info("Processing chat completion request")
    if request.model not in [model["id"] for model in ALLOWED_MODELS]:
        raise HTTPException(
            status_code=400,
            detail=f"Model {request.model} is not allowed.",
        )
    # Process the response using the Editee API
    if request.stream:
        # Streaming response
        generator = process_response(request, stream=True)
        return StreamingResponse(generator, media_type="text/event-stream")
    else:
        # Non-streaming response
        response_data = await process_response(request)
        return JSONResponse(content=response_data)
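
# Example request body for the endpoint above. This is a sketch: "model" and
# "stream" are taken from the code; the "messages" field is an assumption based
# on the usual OpenAI-style chat-completions shape (api.models.ChatRequest is
# authoritative for the actual schema).
#
#   {
#       "model": "<an id listed in ALLOWED_MODELS>",
#       "messages": [{"role": "user", "content": "Hello"}],
#       "stream": false
#   }
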
# Health check endpoints
@app.get("/")
@app.get("/healthz")
@app.get("/ready")
@app.get("/alive")
@app.get("/status")
@app.get("/health")
def health_check(request: Request):
    return Response(content='{"status": "ok"}', media_type="application/json")
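
# Local run sketch, not part of the original file. Assumptions: uvicorn is
# installed and this module is run directly; adjust host and port as needed.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)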