Ok

akn/Gemini/gemini.py  +7 -6
@@ -317,11 +317,12 @@ async def chatbot_talk(client: Client, message: Message):
     if query_base == "image":
         return await message.reply_text("i don't have, what do you mean by image?")
     try:
+        new_js = None
         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
         if new_check_flux_matches:
             try:
                 backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
-                await message.reply_text("Please wait, it's still being processed")
+                new_js = await message.reply_text("Please wait, it's still being processed")
                 response_js = await js.image.create(
                     "black-forest-labs/flux-1-schnell",
                     image_read=True,
@@ -330,25 +331,25 @@ async def chatbot_talk(client: Client, message: Message):
                 file_path = "randydev.jpg"
                 with open(file_path, "wb") as f:
                     f.write(response_js)
-
+                await new_js.edit_text("Uploading image...")
                 await message.reply_photo(
                     file_path,
                     progress=progress,
                     progress_args=(
-
+                        new_js,
                         time.time(),
                         "Uploading image..."
                     )
                 )
                 backup_chat.append({"role": "model", "parts": [{"text": f"IMAGE OUTPUT: {query_base}"}]})
                 await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-                await
+                await new_js.delete()
                 return
             except ImageProcessFailed as e:
-                return await
+                return await new_js.edit_text("The server failed to process your image")
             except Exception as e:
                 LOGS.error(f"Error new_check_flux_matches {str(e)}")
-                return await
+                return await new_js.edit_text("Try again error image")
 
     model_flash = genai.GenerativeModel(
         model_name="gemini-1.5-flash"
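For context on the progress_args change: Pyrogram calls the upload callback as progress(current, total, *progress_args), so the helper this diff wires up would now receive the status message first, then the start time and caption. Below is a minimal sketch of such a helper under that assumption; the parameter names (status_message, start_time, title) and the percentage/speed formatting are illustrative, and the repo's actual progress function may differ.

import time

async def progress(current, total, status_message, start_time, title):
    # Invoked repeatedly by Pyrogram with (current, total), followed by
    # whatever was passed in progress_args: here the status message,
    # the upload start time, and a caption prefix.
    if total == 0:
        return
    percent = current * 100 / total
    elapsed = time.time() - start_time
    speed_kb = (current / elapsed / 1024) if elapsed > 0 else 0
    try:
        # Reuse the "Please wait..." message created in the handler.
        await status_message.edit_text(f"{title} {percent:.1f}% ({speed_kb:.1f} KB/s)")
    except Exception:
        # Frequent edits can raise MessageNotModified or FloodWait; ignore here.
        pass

Initializing new_js = None before the inner try block means the except handlers can reference it without a NameError when the status message was never sent, although new_js.edit_text(...) would still fail on None; guarding the error paths with a check such as "if new_js:" would make them safer.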