Update app.py with Open Router endpoint
Browse files
app.py
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
from fastapi import FastAPI, Depends
|
2 |
from fastapi.responses import StreamingResponse
|
3 |
from pydantic import BaseModel
|
4 |
from typing import Annotated
|
@@ -6,9 +6,11 @@ from mistralai import Mistral
|
|
6 |
from google import genai
|
7 |
from google.genai import types
|
8 |
from auth import verify_token
|
|
|
9 |
import os
|
10 |
import httpx
|
11 |
import base64
|
|
|
12 |
|
13 |
app = FastAPI()
|
14 |
|
@@ -18,6 +20,8 @@ mistral_client = Mistral(api_key=mistral)
|
|
18 |
gemini = os.environ.get('GEMINI_KEY', '')
|
19 |
gemini_client = genai.Client(api_key=gemini)
|
20 |
|
|
|
|
|
21 |
@app.get("/")
|
22 |
def hello():
|
23 |
return {"Hello": "World!"}
|
@@ -76,4 +80,36 @@ async def gemini_multimodal(request: GeminiMultimodalRequest, token: Annotated[s
|
|
76 |
contents=[request.prompt, image]
|
77 |
)
|
78 |
|
79 |
-
return {"response": response.text}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import FastAPI, Depends, Query
|
2 |
from fastapi.responses import StreamingResponse
|
3 |
from pydantic import BaseModel
|
4 |
from typing import Annotated
|
|
|
6 |
from google import genai
|
7 |
from google.genai import types
|
8 |
from auth import verify_token
|
9 |
+
from enum import Enum
|
10 |
import os
|
11 |
import httpx
|
12 |
import base64
|
13 |
+
import json
|
14 |
|
15 |
app = FastAPI()
|
16 |
|
|
|
20 |
gemini = os.environ.get('GEMINI_KEY', '')
|
21 |
gemini_client = genai.Client(api_key=gemini)
|
22 |
|
23 |
+
open_router = os.environ.get('OPEN_ROUTER_KEY', '')
|
24 |
+
|
25 |
@app.get("/")
def hello():
    """Root health-check endpoint; returns a static greeting payload."""
    greeting = {"Hello": "World!"}
    return greeting
|
|
|
80 |
contents=[request.prompt, image]
|
81 |
)
|
82 |
|
83 |
+
return {"response": response.text}
|
84 |
+
|
85 |
+
class ModelName(str, Enum):
    """Free-tier model identifiers accepted by the OpenRouter chat API.

    Subclassing ``str`` lets FastAPI validate the query parameter and lets
    the member serialize directly as its string value in JSON payloads.
    """

    deepseek_r1 = "deepseek/deepseek-r1:free"
    gemini_2_flash_lite = "google/gemini-2.0-flash-lite-preview-02-05:free"
    gemini_2_pro = "google/gemini-2.0-pro-exp-02-05:free"
    llama_3_3 = "meta-llama/llama-3.3-70b-instruct:free"
    mistral_small_3 = "mistralai/mistral-small-24b-instruct-2501:free"
|
91 |
+
|
92 |
+
@app.post("/open-router")
async def open_router_chat(
    prompt: str,
    token: Annotated[str, Depends(verify_token)],
    model: ModelName = Query(..., description="Select a model"),
):
    """Proxy a single-turn chat completion to the OpenRouter API.

    Parameters:
        prompt: The user message to send to the selected model.
        token: Bearer token validated by `verify_token` (auth dependency).
        model: One of the free-tier `ModelName` identifiers.

    Returns:
        The raw JSON response body from OpenRouter's
        `/api/v1/chat/completions` endpoint.

    Raises:
        httpx.HTTPStatusError: If OpenRouter responds with a 4xx/5xx status.

    Notes:
        - Renamed from `open_router` so the handler no longer shadows the
          module-level `open_router` API-key variable (the old name rebound
          the global at import time, so the Authorization header would have
          interpolated the function object instead of the key).
        - The original signature placed the non-default `prompt` parameter
          after the defaulted `model` parameter, which is a SyntaxError;
          parameters are reordered here.
        - The optional HTTP-Referer / X-Title attribution headers were
          removed because they contained literal "<YOUR_SITE_URL>"
          placeholders; add them back once real values are configured.
    """
    async with httpx.AsyncClient() as client:
        response = await client.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                # `open_router` here is the module-level API key read from
                # the OPEN_ROUTER_KEY environment variable.
                "Authorization": f"Bearer {open_router}",
                "Content-Type": "application/json",
            },
            json={
                # Explicit .value: send the raw model id string rather than
                # relying on implicit str-subclass serialization.
                "model": model.value,
                "messages": [
                    {
                        "role": "user",
                        "content": prompt,
                    }
                ],
            },
        )

        # Surface upstream failures (4xx/5xx) to the caller instead of
        # returning an error body as if it were a successful completion.
        response.raise_for_status()
        return response.json()
|