Gouzi Mohaled committed · ad17334
1 Parent(s): 0688f06
Modify app.py to allow model selection
app.py CHANGED
@@ -14,14 +14,15 @@ app = FastAPI()
 
 class PromptRequest(BaseModel):
     prompt: str
+    model: str = "gemini/gemini-1.5-pro"  # Default value
 
 @app.post("/")
 async def predict(request: PromptRequest):
     try:
         response = litellm.completion(
-            model=
+            model=request.model,
             messages=[{"role": "user", "content": request.prompt}]
         )
-        return {"response": response.choices[0].message.content}
+        return {"response": response.choices[0].message.content, "model": request.model}
     except Exception as e:
         return {"error": str(e)}
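
A minimal client-side sketch of how the updated endpoint could be called with the new optional "model" field, assuming the Space is reachable at a local URL; the URL, the requests dependency, and the alternate model name are illustrative assumptions, not part of this commit:

import requests

# Hypothetical endpoint URL; replace with the actual Space URL.
API_URL = "http://localhost:7860/"

# "model" is optional; omitting it falls back to "gemini/gemini-1.5-pro".
payload = {
    "prompt": "Summarize the FastAPI framework in one sentence.",
    "model": "gemini/gemini-1.5-flash",
}

resp = requests.post(API_URL, json=payload)
print(resp.json())  # e.g. {"response": "...", "model": "gemini/gemini-1.5-flash"}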