Niansuh committed
Commit e1cf019 · verified · 1 Parent(s): 8104ccc

Update api/provider/gizai.py

Files changed (1):
  api/provider/gizai.py  +5 -26
api/provider/gizai.py CHANGED

@@ -1,38 +1,17 @@
 import uuid
 from datetime import datetime
 import json
-from typing import Any, Dict, Optional
+from typing import Any, Dict
 
 import httpx
 from fastapi import HTTPException
 from api.models import ChatRequest
 from api.logger import setup_logger
+from api.config import MODEL_MAPPING, GIZAI_API_ENDPOINT, GIZAI_HEADERS
 
 logger = setup_logger(__name__)
 
-# Base URL for giz.ai
-GIZAI_BASE_URL = "https://app.giz.ai"
-GIZAI_API_ENDPOINT = f"{GIZAI_BASE_URL}/api/data/users/inferenceServer.infer"
-
-# Headers for giz.ai
-GIZAI_HEADERS = {
-    'Accept': 'application/json, text/plain, */*',
-    'Accept-Language': 'en-US,en;q=0.9',
-    'Cache-Control': 'no-cache',
-    'Connection': 'keep-alive',
-    'Content-Type': 'application/json',
-    'Origin': 'https://app.giz.ai',
-    'Pragma': 'no-cache',
-    'Sec-Fetch-Dest': 'empty',
-    'Sec-Fetch-Mode': 'cors',
-    'Sec-Fetch-Site': 'same-origin',
-    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
-    'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
-    'sec-ch-ua-mobile': '?0',
-    'sec-ch-ua-platform': '"Linux"'
-}
-
-# List of models supported by giz.ai
+# List of models supported by GizAI
 GIZAI_CHAT_MODELS = [
     'chat-gemini-flash',
     'chat-gemini-pro',
@@ -73,6 +52,7 @@ GIZAI_MODEL_ALIASES = {
 }
 
 def get_gizai_model(model: str) -> str:
+    model = MODEL_MAPPING.get(model, model)
     if model in GIZAI_MODELS:
         return model
     elif model in GIZAI_MODEL_ALIASES:
@@ -85,8 +65,7 @@ def is_image_model(model: str) -> bool:
     return model in GIZAI_IMAGE_MODELS
 
 async def process_streaming_response(request: ChatRequest):
-    # giz.ai does not support streaming
-    # So we can raise an error or process as non-streaming
+    # GizAI does not support streaming; handle as non-streaming
     return await process_non_streaming_response(request)
 
 async def process_non_streaming_response(request: ChatRequest):
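
For reference, a minimal sketch of what api/config.py is assumed to export after this commit. The endpoint and header values reuse the exact constants deleted from gizai.py above; the MODEL_MAPPING entries are hypothetical, since the diff only shows the import and the new lookup in get_gizai_model().

# Sketch of api/config.py (assumed): GIZAI_API_ENDPOINT and GIZAI_HEADERS
# carry the values removed from gizai.py in this commit.
GIZAI_BASE_URL = "https://app.giz.ai"
GIZAI_API_ENDPOINT = f"{GIZAI_BASE_URL}/api/data/users/inferenceServer.infer"

GIZAI_HEADERS = {
    'Accept': 'application/json, text/plain, */*',
    'Accept-Language': 'en-US,en;q=0.9',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Content-Type': 'application/json',
    'Origin': 'https://app.giz.ai',
    'Pragma': 'no-cache',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
    'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Linux"',
}

# Hypothetical entries: MODEL_MAPPING normalizes public model names to GizAI
# ids before get_gizai_model() checks GIZAI_MODELS / GIZAI_MODEL_ALIASES.
MODEL_MAPPING = {
    'gemini-flash': 'chat-gemini-flash',  # illustrative only
    'gemini-pro': 'chat-gemini-pro',      # illustrative only
}

With that in place, get_gizai_model('gemini-flash') would first resolve to 'chat-gemini-flash' via MODEL_MAPPING and then pass the GIZAI_MODELS check unchanged.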