# OpenAI-compatible proxy: accepts /v1/chat/completions requests, injects a
# per-model system prompt, and forwards every request to a single backend model.
from fastapi import FastAPI
from pydantic import BaseModel
import requests
import json

app = FastAPI()

# Upstream endpoint, API key, and backend model used for every proxied request.
API_URL = "https://api.typegpt.net/v1/chat/completions"
API_KEY = "sk-XzS5hhsa3vpIcRLz3prQirBQXOx2hPydPzSpzdRcE1YddnNm"
BACKEND_MODEL = "pixtral-large-latest"

# model_map.json maps each exposed model name to the system prompt injected for it.
with open("model_map.json", "r") as f:
    MODEL_PROMPTS = json.load(f)
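
# Illustrative model_map.json (assumed shape; the model names and prompts below
# are placeholders, not shipped with this file):
# {
#   "gpt-4o": "You are GPT-4o, a concise and precise assistant.",
#   "mistral-small": "You are Mistral Small, a fast general-purpose assistant."
# }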


class Message(BaseModel):
    role: str
    content: str


# Subset of the OpenAI chat completions request body that this proxy accepts.
class ChatRequest(BaseModel):
    model: str
    messages: list[Message]
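
# Example request body matching ChatRequest (values are illustrative only):
# {
#   "model": "gpt-4o",
#   "messages": [
#     {"role": "user", "content": "Summarize this repo in one sentence."}
#   ]
# }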


@app.post("/v1/chat/completions")
def openai_compatible(request: ChatRequest):
    # requests.post blocks, so this handler is a plain def; FastAPI runs it in
    # a worker thread instead of on the event loop.
    # Keep only the client's user turns; any client-supplied system or
    # assistant messages are dropped.
    user_messages = [m for m in request.messages if m.role == "user"]
    if not user_messages:
        return {"error": "No user message provided."}

    # Look up the system prompt for the requested model name, with a generic fallback.
    model_prompt = MODEL_PROMPTS.get(request.model, "You are a helpful AI assistant.")
    payload = {
        "model": BACKEND_MODEL,
        "messages": [{"role": "system", "content": model_prompt}]
        + [{"role": "user", "content": m.content} for m in user_messages],
    }
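
    # For an incoming request with model "gpt-4o" and one user turn, the
    # forwarded payload would look roughly like this (illustrative):
    # {
    #   "model": "pixtral-large-latest",
    #   "messages": [
    #     {"role": "system", "content": "<prompt from model_map.json>"},
    #     {"role": "user", "content": "Hello"}
    #   ]
    # }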

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }

    # Forward the rewritten request upstream and relay its JSON response verbatim.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
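

# To run the proxy locally (assuming this file is saved as main.py and uvicorn
# is installed):
#   uvicorn main:app --host 0.0.0.0 --port 8000
#
# Example call through the proxy; "gpt-4o" is an illustrative model name and
# only gets a custom prompt if it appears as a key in model_map.json:
#   curl http://localhost:8000/v1/chat/completions \
#     -H "Content-Type: application/json" \
#     -d '{"model": "gpt-4o", "messages": [{"role": "user", "content": "Hello"}]}'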