kevinkal committed
Commit ea9930e · verified · 1 Parent(s): b17b476

Update app.py: OpenRouter multimodal endpoint with base64 image upload support

Files changed (1):
  app.py  +48 -3
app.py CHANGED
@@ -1,4 +1,4 @@
- from fastapi import FastAPI, Depends, Query
+ from fastapi import FastAPI, Depends, Query, File, UploadFile
  from fastapi.responses import StreamingResponse
  from pydantic import BaseModel
  from typing import Annotated
@@ -125,7 +125,7 @@ class MultiModelName(str, Enum):
      gemini_2_pro = "google/gemini-2.0-pro-exp-02-05:free"
      llama_3_2_vision = "meta-llama/llama-3.2-11b-vision-instruct:free"
  
- @app.post("/open-router/multimodal")
+ @app.post("/open-router/multimodal-url")
  async def open_router_multimodal(
      token: Annotated[str, Depends(verify_token)],
      model: MultiModelName = Query(..., description="Select a model"),
@@ -164,4 +164,49 @@ async def open_router_multimodal(
          )
  
      response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-     return response.json()
+     return response.json()
+ 
+ @app.post("/open-router/multimodal-b64")
+ async def open_router_multimodal_upload(
+     token: Annotated[str, Depends(verify_token)],
+     image: UploadFile = File(...),
+     model: MultiModelName = Query(..., description="Select a model"),
+     prompt: str = Query(..., description="Enter your prompt (ex: What is in this image?)"),
+ ):
+     # Read the uploaded file and embed it as a base64 data URI for the OpenRouter API.
+     image_bytes = await image.read()
+     encoded_string = base64.b64encode(image_bytes).decode('utf-8')
+     img = f"data:{image.content_type};base64,{encoded_string}"
+ 
+     async with httpx.AsyncClient() as client:
+         response = await client.post(
+             url="https://openrouter.ai/api/v1/chat/completions",
+             headers={
+                 "Authorization": f"Bearer {str(open_router_key)}",
+                 "Content-Type": "application/json",
+                 "HTTP-Referer": "<YOUR_SITE_URL>",  # Optional
+                 "X-Title": "<YOUR_SITE_NAME>",  # Optional
+             },
+             json={
+                 "model": model,
+                 "messages": [
+                     {
+                         "role": "user",
+                         "content": [
+                             {
+                                 "type": "text",
+                                 "text": prompt,
+                             },
+                             {
+                                 "type": "image_url",
+                                 "image_url": {
+                                     "url": img,
+                                 }
+                             }
+                         ]
+                     }
+                 ],
+             }
+         )
+ 
+     response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
+     return response.json()
+ 
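For quick manual testing of the new /open-router/multimodal-b64 endpoint, a client call might look like the sketch below. The base URL http://localhost:8000, the bearer-token auth scheme, and the ask_about_image helper are assumptions for illustration only; they are not part of this commit, and verify_token's actual scheme may differ.

# Minimal client sketch (assumptions: app served at http://localhost:8000,
# verify_token reads a bearer token from the Authorization header).
import httpx

def ask_about_image(path: str, prompt: str, token: str) -> dict:
    with open(path, "rb") as f:
        # The image is sent as a normal multipart upload; the server performs
        # the base64 encoding, so the client does no base64 handling itself.
        response = httpx.post(
            "http://localhost:8000/open-router/multimodal-b64",
            params={
                "model": "meta-llama/llama-3.2-11b-vision-instruct:free",
                "prompt": prompt,
            },
            headers={"Authorization": f"Bearer {token}"},
            files={"image": (path, f, "image/jpeg")},  # content type guessed for this example
            timeout=60.0,
        )
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    print(ask_about_image("cat.jpg", "What is in this image?", "my-api-token"))

Because the endpoint converts the upload to a data URI server-side, callers only need a standard file upload plus the model and prompt query parameters.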