import os
from dotenv import load_dotenv
load_dotenv()
# Base URL and Common Headers for GizAI
BASE_URL = "https://app.giz.ai/assistant/"
common_headers = {
    'Accept': 'application/json, text/plain, */*',
    'Accept-Language': 'en-US,en;q=0.9',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Content-Type': 'application/json',
    'Origin': 'https://app.giz.ai',
    'Pragma': 'no-cache',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
    'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Linux"'
}
# Header Configurations for GizAI API Calls
def get_headers_api_chat(referer_url):
    return {**common_headers, 'Referer': referer_url}

def get_headers_chat(chat_url, next_action, next_router_state_tree):
    return {
        **common_headers,
        'Accept': 'text/x-component',
        'Content-Type': 'text/plain;charset=UTF-8',
        'Referer': chat_url,
        'next-action': next_action,
        'next-router-state-tree': next_router_state_tree,
        'next-url': '/',
    }
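
# Usage sketch (an illustrative addition, not part of the original provider code):
# how get_headers_api_chat might be combined with an HTTP call. The inference
# endpoint URL is left as a parameter because this module does not define it,
# and the JSON payload shape below is an assumption, not a confirmed GizAI format.
def _example_chat_request(prompt, model, api_url):
    import requests  # local import so this optional sketch adds no hard dependency
    headers = get_headers_api_chat(referer_url=f"{BASE_URL}{model}")  # hypothetical referer
    payload = {"model": model, "input": prompt}  # assumed request body
    return requests.post(api_url, headers=headers, json=payload, timeout=30)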
# Application Secret for Authentication
APP_SECRET = os.getenv("APP_SECRET")
# Default Model
default_model = 'chat-gemini-flash'
# Supported Chat Models
chat_models = [
    'chat-gemini-flash',
    'chat-gemini-pro',
    'chat-gpt4m',
    'chat-gpt4',
    'claude-sonnet',
    'claude-haiku',
    'llama-3-70b',
    'llama-3-8b',
    'mistral-large',
    'chat-o1-mini'
]
# Supported Image Models
image_models = [
    'flux1',
    'sdxl',
    'sd',
    'sd35',
]
# Combined Models List
models = [*chat_models, *image_models]
# Model Aliases
model_aliases = {
    # Chat model aliases
    "gemini-flash": "chat-gemini-flash",
    "gemini-pro": "chat-gemini-pro",
    "gpt-4o-mini": "chat-gpt4m",
    "gpt-4o": "chat-gpt4",
    "claude-3.5-sonnet": "claude-sonnet",
    "claude-3-haiku": "claude-haiku",
    "llama-3.1-70b": "llama-3-70b",
    "llama-3.1-8b": "llama-3-8b",
    "o1-mini": "chat-o1-mini",
    # Image model aliases
    "sd-1.5": "sd",
    "sd-3.5": "sd35",
    "flux-schnell": "flux1",
}
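
# Helper sketch (an added convenience, not original provider code): resolve a
# user-facing alias to its canonical model id, falling back to default_model
# for unknown names.
def resolve_model(name):
    if name in models:
        return name
    return model_aliases.get(name, default_model)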
# Allowed Models with ID and Name
ALLOWED_MODELS = [
{"id": "chat-gemini-flash", "name": "Chat Gemini Flash"},
{"id": "chat-gemini-pro", "name": "Chat Gemini Pro"},
{"id": "chat-gpt4m", "name": "Chat GPT-4m"},
{"id": "chat-gpt4", "name": "Chat GPT-4"},
{"id": "claude-sonnet", "name": "Claude Sonnet"},
{"id": "claude-haiku", "name": "Claude Haiku"},
{"id": "llama-3-70b", "name": "LLaMA 3-70B"},
{"id": "llama-3-8b", "name": "LLaMA 3-8B"},
{"id": "mistral-large", "name": "Mistral Large"},
{"id": "chat-o1-mini", "name": "Chat O1 Mini"},
{"id": "flux1", "name": "Flux1"},
{"id": "sdxl", "name": "SDXL"},
{"id": "sd", "name": "SD"},
{"id": "sd35", "name": "SD35"},
]
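
# Convenience sketch (added for illustration): look up the display name of a
# canonical model id from ALLOWED_MODELS, returning the id unchanged when no
# entry matches.
def model_display_name(model_id):
    for entry in ALLOWED_MODELS:
        if entry["id"] == model_id:
            return entry["name"]
    return model_id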
# Mapping from model IDs to their canonical names
MODEL_MAPPING = {
"chat-gemini-flash": "chat-gemini-flash",
"chat-gemini-pro": "chat-gemini-pro",
"chat-gpt4m": "chat-gpt4m",
"chat-gpt4": "chat-gpt4",
"claude-sonnet": "claude-sonnet",
"claude-haiku": "claude-haiku",
"llama-3-70b": "llama-3-70b",
"llama-3-8b": "llama-3-8b",
"mistral-large": "mistral-large",
"chat-o1-mini": "chat-o1-mini",
"flux1": "flux1",
"sdxl": "sdxl",
"sd": "sd",
"sd35": "sd35",
}
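
# Example (assumed usage pattern): an OpenAI-style name such as "gpt-4o-mini"
# would first be resolved through model_aliases and then through MODEL_MAPPING,
# which is currently an identity map over the canonical ids:
#   MODEL_MAPPING[model_aliases["gpt-4o-mini"]]  # -> "chat-gpt4m"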
# MODEL_PREFIXES, MODEL_REFERERS, and AGENT_MODE have been removed from this configuration.