Upload main.py
main.py CHANGED
@@ -644,8 +644,8 @@ def chatgpt3_turbo(item: Chatgpt3Texts):
                     "chat_history": continue_chat
                 }
             )
-        except Exception
-            return SuccessResponse(status="False", randydev={"message": f"Error responding
+        except Exception:
+            return SuccessResponse(status="False", randydev={"message": f"Error responding"})
     else:
         url = "https://lexica.qewertyy.me/models"
         params = {"model_id": 5, "prompt": item.query}
@@ -680,8 +680,8 @@ def chatgpt4_turbo(
                     "chat_history": continue_chat
                 }
             )
-        except Exception
-            return SuccessResponse(status="False", randydev={"message": f"Error responding:
+        except Exception:
+            return SuccessResponse(status="False", randydev={"message": f"Error responding:"})
 
 @app.post("/ryuzaki/google-ai", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def v1beta3_google_ai(
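In effect, the commit repairs two endpoints whose error paths had a missing colon after except Exception and an unterminated f-string in the SuccessResponse error message. Below is a minimal sketch of the corrected try/except shape under stated assumptions: ask_openai(), the route path, and the simplified Pydantic models are illustrative placeholders, since the surrounding code is not part of this hunk.

# Minimal sketch of the corrected error path, not the full main.py.
# Chatgpt3Texts/SuccessResponse are simplified stand-ins for the app's real models,
# and ask_openai() is a hypothetical placeholder for the actual upstream chat call.
from typing import Any, Dict

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class Chatgpt3Texts(BaseModel):
    query: str

class SuccessResponse(BaseModel):
    status: str
    randydev: Dict[str, Any]

def ask_openai(query: str) -> str:
    # Placeholder for the real request to the chat backend.
    raise NotImplementedError

@app.post("/ryuzaki/chatgpt3-turbo", response_model=SuccessResponse)  # assumed route name
def chatgpt3_turbo(item: Chatgpt3Texts):
    try:
        answer = ask_openai(item.query)
    except Exception:
        # The fix: the colon after "except Exception" and the closing quote/braces
        # on the error message were missing in the previous upload.
        return SuccessResponse(status="False", randydev={"message": "Error responding"})
    return SuccessResponse(status="True", randydev={"message": answer})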