Upload 2 files
main.py CHANGED

@@ -64,12 +64,12 @@ from fastapi.responses import FileResponse
 from pymongo import MongoClient
 
 from RyuzakiLib.hackertools.chatgpt import RendyDevChat
+from RyuzakiLib.hackertools.gemini import GeminiLatest
 from RyuzakiLib.hackertools.openai_api import OpenAiToken
 from RyuzakiLib.mental import BadWordsList
 from RyuzakiLib.system import OpenReadSystem
 
 from bardapi import Bard
-import google.generativeai as genai
 from models import *
 from gpytranslate import SyncTranslator
 
@@ -116,8 +116,6 @@ collection = db["users"]
 
 trans = SyncTranslator()
 
-genai.configure(api_key=ASSISTANT_GOOGLE_API_KEYS)
-
 app = FastAPI(docs_url=None, redoc_url="/")
 
 def get_all_api_keys():
@@ -772,15 +770,13 @@ def gemini_pro(item: GeminiPro):
     """
     if item.is_multi_chat:
         try:
-
-
-            response_genai = model.generate_content(item.multi_chat_messages)
-            item.multi_chat_messages.append({"role": "model", "parts": [response_genai.text]})
+            gt = GeminiLatest(api_key=ASSISTANT_GOOGLE_API_KEYS)
+            response = gt._get_response_gemini(query)
             return SuccessResponse(
                 status="True",
                 randydev={
-                    "message":
-                    "chat_history":
+                    "message": response[0],
+                    "chat_history": response[1]
                 }
             )
         except:
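For readers skimming the diff, here is a minimal sketch of how the refactored multi-chat branch fits together after this commit. Only the GeminiLatest import, its constructor, the _get_response_gemini call, and the response[0]/response[1] unpacking come from the diff; the route path, the plain-dict return value, and the error handling are illustrative assumptions (the real handler wraps its payload in SuccessResponse).

# Sketch only: anything marked "assumption" is not taken from the commit.
from fastapi import FastAPI
from RyuzakiLib.hackertools.gemini import GeminiLatest  # import added in this commit

from models import GeminiPro  # request model trimmed in this commit (see models.py below)

app = FastAPI(docs_url=None, redoc_url="/")

ASSISTANT_GOOGLE_API_KEYS = "..."  # loaded from configuration in the real main.py

@app.post("/gemini-pro")  # assumption: the actual route decorator lives elsewhere in main.py
def gemini_pro(item: GeminiPro):
    if item.is_multi_chat:
        try:
            # The wrapper replaces the old genai.configure() + model.generate_content() flow.
            gt = GeminiLatest(api_key=ASSISTANT_GOOGLE_API_KEYS)
            response = gt._get_response_gemini(item.query)  # the diff passes a bare `query`; item.query is the likely source
            return {
                "status": "True",
                "randydev": {
                    "message": response[0],       # generated reply text
                    "chat_history": response[1],  # conversation history kept by the wrapper
                },
            }
        except Exception:
            # assumption: the real handler returns its error response model here
            return {"status": "False", "randydev": {"message": "Gemini request failed"}}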
models.py CHANGED

@@ -71,7 +71,6 @@ class ChatgptCustom(BaseModel):
 
 class GeminiPro(BaseModel):
     query: str
-    multi_chat_messages: Optional[list] = []
     bard_api_key: Optional[str] = None
     is_login: Optional[bool] = False
     is_multi_chat: Optional[bool] = False
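As a usage note, here is a hedged example of the request body the trimmed GeminiPro model accepts after this change. The field names come from the diff; the endpoint URL and the requests-based client are illustrative assumptions.

# Illustrative client call; the URL is a placeholder, not taken from the repo.
import requests

payload = {
    "query": "Summarize the FastAPI docs in two sentences",
    "is_multi_chat": True,  # routes the request through the new GeminiLatest branch
    # "multi_chat_messages" was dropped in this commit; clients no longer send chat history
}

resp = requests.post("https://example.invalid/gemini-pro", json=payload)
print(resp.json())  # expected shape: {"status": "True", "randydev": {"message": ..., "chat_history": ...}}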