Create routes.py
api/routes.py +60 -0
api/routes.py
ADDED
@@ -0,0 +1,60 @@
import json
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import StreamingResponse, JSONResponse
from api.auth import verify_app_secret
from api.config import GIZAI_API_ENDPOINT, GIZAI_BASE_URL
from api.models import ChatRequest, ImageResponseModel, ChatCompletionResponse
from api.utils import process_gizai_response
from api.logger import setup_logger
from api.gizai import GizAI  # NOTE: GizAI is referenced below but was not imported; this module path is assumed

logger = setup_logger(__name__)

router = APIRouter()

@router.options("/v1/gizai/chat/completions")
@router.options("/api/v1/gizai/chat/completions")
async def gizai_chat_completions_options():
    # Handle CORS preflight requests for the chat completions endpoint
    return Response(
        status_code=200,
        headers={
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "POST, OPTIONS",
            "Access-Control-Allow-Headers": "Content-Type, Authorization",
        },
    )

@router.get("/v1/gizai/models")
@router.get("/api/v1/gizai/models")
async def list_gizai_models():
    return {"object": "list", "data": GizAI.models}  # assuming GizAI.models is accessible

@router.post("/v1/gizai/chat/completions")
@router.post("/api/v1/gizai/chat/completions")
async def gizai_chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    logger.info("Entering GizAI chat_completions route")
    logger.info(f"Processing chat completion request for model: {request.model}")

    # Resolve any alias to a canonical model name and validate it
    model = GizAI.get_model(request.model)
    if model not in GizAI.models:
        raise HTTPException(
            status_code=400,
            detail=f"Model {request.model} is not supported. Supported models are: {', '.join(GizAI.models)}",
        )

    if request.stream:
        logger.info("Streaming response")
        return StreamingResponse(
            process_gizai_response(request, model), media_type="text/event-stream"
        )
    else:
        logger.info("Non-streaming response")
        return await process_gizai_response(request, model)

@router.get("/")
@router.get("/healthz")
@router.get("/ready")
@router.get("/alive")
@router.get("/status")
@router.get("/health")
def health_check(request: Request):
    # Health/liveness probe endpoints for load balancers and orchestrators
    return Response(content=json.dumps({"status": "ok"}), media_type="application/json")
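One detail worth noting in gizai_chat_completions: the same helper, process_gizai_response, is handed directly to StreamingResponse when request.stream is set but awaited otherwise. That only works if the helper is a plain function that dispatches on the stream flag, since an async generator cannot be awaited and a coroutine cannot feed StreamingResponse. A purely illustrative sketch of the contract this route expects from api/utils.py, which is not included in this commit (the helper names _stream_gizai and _fetch_gizai are hypothetical):

# Illustrative only: the real api.utils.process_gizai_response is not shown in this commit.
def process_gizai_response(request, model):
    if request.stream:
        return _stream_gizai(request, model)  # async generator consumed by StreamingResponse
    return _fetch_gizai(request, model)       # coroutine awaited by the route

async def _stream_gizai(request, model):
    # yield SSE-formatted chunks built from the upstream GizAI API
    yield 'data: {"choices": [{"delta": {"content": "..."}}]}\n\n'
    yield "data: [DONE]\n\n"

async def _fetch_gizai(request, model):
    # return a complete ChatCompletionResponse (or JSONResponse) for the non-streaming path
    ...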
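These routes only take effect once the router is mounted on a FastAPI application. A minimal sketch of the wiring and of a client call, assuming the app object lives in a module such as main.py (not part of this commit), that verify_app_secret expects a bearer token, and that ChatRequest follows the OpenAI-style shape with model, messages and stream fields:

# main.py (hypothetical): mount the router so the paths above are served.
from fastapi import FastAPI
from api.routes import router

app = FastAPI()
app.include_router(router)

# Client-side example against the non-streaming endpoint
# (run with `uvicorn main:app` first; the model id and auth scheme are placeholders).
import httpx

resp = httpx.post(
    "http://localhost:8000/v1/gizai/chat/completions",
    headers={"Authorization": "Bearer <APP_SECRET>"},
    json={
        "model": "<a model id returned by /v1/gizai/models>",
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": False,
    },
    timeout=60,
)
print(resp.json())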