from fastapi import FastAPI, Depends, Query, File, UploadFile
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from typing import Annotated
from mistralai import Mistral
from google import genai
from google.genai import types
from auth import verify_token
from enum import Enum
import os
import httpx
import base64

app = FastAPI()

# Provider API keys come from the environment. The key variables get distinct
# names so the `mistral` and `gemini` route handlers below do not shadow them.
mistral_key = os.environ.get("MISTRAL_KEY", "")
mistral_client = Mistral(api_key=mistral_key)
gemini_key = os.environ.get("GEMINI_KEY", "")
gemini_client = genai.Client(api_key=gemini_key)
open_router_key = os.environ.get("OPEN_ROUTER_KEY", "")
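
# Example environment setup before launching the app (shell commands shown as
# comments; the variable names match the os.environ lookups above):
#   export MISTRAL_KEY="..."
#   export GEMINI_KEY="..."
#   export OPEN_ROUTER_KEY="..."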

@app.get("/")
def hello():
return {"Hello": "World!"}

class LLMRequest(BaseModel):
    model: str
    prompt: str

@app.post("/mistral")
async def mistral(request: LLMRequest, token: Annotated[str, Depends(verify_token)]):
async def generate():
response = await mistral_client.chat.stream_async(
model=request.model,
messages=[
{
"role": "user",
"content": request.prompt,
}
],
)
async for chunk in response:
if chunk.data.choices[0].delta.content is not None:
yield chunk.data.choices[0].delta.content
return StreamingResponse(generate(), media_type="text/plain")
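
# A minimal client sketch for the streaming endpoint above, assuming the app is
# served on localhost:8000 and that verify_token expects a bearer token (the
# exact scheme lives in auth.verify_token; the model name is an example):
#
#   import httpx
#
#   with httpx.stream(
#       "POST",
#       "http://localhost:8000/mistral",
#       json={"model": "mistral-small-latest", "prompt": "Hello!"},
#       headers={"Authorization": "Bearer <token>"},
#   ) as r:
#       for text in r.iter_text():
#           print(text, end="")
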
@app.post("/gemini")
async def gemini(request: LLMRequest, token: Annotated[str, Depends(verify_token)]):
    async def generate():
        # google-genai's generate_content_stream is synchronous here, so chunks
        # are produced by blocking iteration inside this async generator.
        response = gemini_client.models.generate_content_stream(
            model=request.model,
            contents=[request.prompt],
        )
for chunk in response:
if chunk.text:
yield chunk.text
return StreamingResponse(generate(), media_type="text/plain")
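
# The synchronous stream above works but blocks the event loop between chunks.
# The google-genai SDK also exposes an async surface; a hedged sketch, assuming
# the SDK's `aio` namespace mirrors the sync API:
#
#   async def generate():
#       response = await gemini_client.aio.models.generate_content_stream(
#           model=request.model, contents=[request.prompt])
#       async for chunk in response:
#           if chunk.text:
#               yield chunk.text
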
class GeminiMultimodalRequest(BaseModel):
    model: str
    prompt: str
    image: str  # either an http(s) URL or base64-encoded image data

@app.post("/gemini/multimodal")
async def gemini_multimodal(request: GeminiMultimodalRequest, token: Annotated[str, Depends(verify_token)]):
if request.image.startswith('http'):
async with httpx.AsyncClient() as client:
image = await client.get(request.image)
#image = types.Part.from_bytes(data=image.content, mime_type="image/jpeg")
else:
image = types.Part.from_bytes(data=base64.b64decode(request.image), mime_type="image/jpeg")
response = gemini_client.models.generate_content(
model=request.model,
contents=[request.prompt, image]
)
return {"response": response.text}
class ModelName(str, Enum):
    deepseek_r1 = "deepseek/deepseek-r1:free"
    gemini_2_flash_lite = "google/gemini-2.0-flash-lite-preview-02-05:free"
    gemini_2_pro = "google/gemini-2.0-pro-exp-02-05:free"
    llama_3_3 = "meta-llama/llama-3.3-70b-instruct:free"
    mistral_small_3 = "mistralai/mistral-small-24b-instruct-2501:free"

@app.post("/open-router/text")
async def open_router_text(
token: Annotated[str, Depends(verify_token)],
model: ModelName = Query(..., description="Select a model"),
prompt: str = Query(..., description="Enter your prompt")
):
async with httpx.AsyncClient() as client:
response = await client.post(
url="https://openrouter.ai/api/v1/chat/completions",
headers={
"Authorization": f"Bearer {str(open_router_key)}",
"Content-Type": "application/json",
"HTTP-Referer": "<YOUR_SITE_URL>", # Optional
"X-Title": "<YOUR_SITE_NAME>", # Optional
},
json={
"model": model,
"messages": [
{
"role": "user",
"content": prompt,
}
],
}
)
        response.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx responses
        return response.json()
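
# Note that model and prompt travel as query parameters here, not in the JSON
# body; a hedged client sketch (localhost:8000 assumed):
#
#   import httpx
#
#   r = httpx.post(
#       "http://localhost:8000/open-router/text",
#       params={"model": "meta-llama/llama-3.3-70b-instruct:free", "prompt": "Hi"},
#       headers={"Authorization": "Bearer <token>"},
#   )
#   print(r.json()["choices"][0]["message"]["content"])
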
class MultiModelName(str, Enum):
    qwen_vl_plus = "qwen/qwen-vl-plus:free"
    qwen_vl_72b = "qwen/qwen2.5-vl-72b-instruct:free"
    gemini_2_flash_lite = "google/gemini-2.0-flash-lite-preview-02-05:free"
    gemini_2_pro = "google/gemini-2.0-pro-exp-02-05:free"
    llama_3_2_vision = "meta-llama/llama-3.2-11b-vision-instruct:free"

@app.post("/open-router/multimodal-url")
async def open_router_multimodal(
token: Annotated[str, Depends(verify_token)],
model: MultiModelName = Query(..., description="Select a model"),
prompt: str = Query(..., description="Enter your prompt (ex: What is in this image?)"),
image_url: str = Query(..., description="Enter the image URL"),
):
async with httpx.AsyncClient() as client:
response = await client.post(
url="https://openrouter.ai/api/v1/chat/completions",
headers={
"Authorization": f"Bearer {str(open_router_key)}",
"Content-Type": "application/json",
"HTTP-Referer": "<YOUR_SITE_URL>", # Optional
"X-Title": "<YOUR_SITE_NAME>", # Optional
},
json={
"model": model,
"messages": [
{
"role": "user",
"content": [
{
"type": "text",
"text": prompt,
},
{
"type": "image_url",
"image_url": {
"url": image_url,
}
}
]
}
],
}
)
        response.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx responses
        return response.json()
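
# Usage sketch for the URL variant (query parameters again; the image must be
# publicly fetchable by OpenRouter):
#
#   r = httpx.post(
#       "http://localhost:8000/open-router/multimodal-url",
#       params={
#           "model": "qwen/qwen-vl-plus:free",
#           "prompt": "What is in this image?",
#           "image_url": "https://example.com/cat.jpg",
#       },
#       headers={"Authorization": "Bearer <token>"},
#   )
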
@app.post("/open-router/multimodal-b64")
async def open_router_multimodal_upload(
token: Annotated[str, Depends(verify_token)],
image: UploadFile = File(...),
model: MultiModelName = Query(..., description="Select a model"),
prompt: str = Query(..., description="Enter your prompt (ex: What is in this image?)")
):
    # Read the upload and re-encode it as a data URL; OpenRouter accepts a data
    # URL in the same image_url field as a remote URL.
    image_bytes = await image.read()
    encoded_string = base64.b64encode(image_bytes).decode('utf-8')
    img = f"data:{image.content_type};base64,{encoded_string}"
async with httpx.AsyncClient() as client:
response = await client.post(
url="https://openrouter.ai/api/v1/chat/completions",
headers={
"Authorization": f"Bearer {str(open_router_key)}",
"Content-Type": "application/json",
"HTTP-Referer": "<YOUR_SITE_URL>", # Optional
"X-Title": "<YOUR_SITE_NAME>", # Optional
},
json={
"model": model,
"messages": [
{
"role": "user",
"content": [
{
"type": "text",
"text": prompt,
},
{
"type": "image_url",
"image_url": {
"url": img,
}
}
]
}
],
}
)
        response.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx responses
        return response.json()
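
# Upload-endpoint sketch: the file goes as multipart form data under the field
# name "image", while model and prompt remain query parameters:
#
#   r = httpx.post(
#       "http://localhost:8000/open-router/multimodal-b64",
#       params={"model": "qwen/qwen-vl-plus:free", "prompt": "What is in this image?"},
#       files={"image": ("cat.jpg", open("cat.jpg", "rb"), "image/jpeg")},
#       headers={"Authorization": "Bearer <token>"},
#   )

if __name__ == "__main__":
    # Local development entry point; assumes uvicorn is installed alongside
    # FastAPI (it is not imported elsewhere in this module).
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)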