Upload chat.py
chatbot/plugins/chat.py  +73 −88  CHANGED
@@ -51,7 +51,6 @@ async def geni_files_delete(name: str):
     return response.text
 
 spam_chats = []
-running_tasks = {}
 
 DISABLE_COMMAND = [
     "start",
@@ -80,21 +79,81 @@ I am ready to be a gemini bot developer
 - Command: /offchat (pm or group)
 """
 
+class TaskManager:
+    def __init__(self):
+        self.running_tasks = {}
+
+    async def add_task(self, chat_id, coro):
+        task = asyncio.create_task(coro)
+        self.running_tasks[chat_id] = task
+        try:
+            await task
+        finally:
+            self.running_tasks.pop(chat_id, None)
+
+    async def cancel_task(self, chat_id):
+        task = self.running_tasks.get(chat_id)
+        if task:
+            task.cancel()
+            del self.running_tasks[chat_id]
+            return True
+        return False
+
+    def list_tasks(self):
+        return list(self.running_tasks.keys())
+
+task_manager = TaskManager()
+
 @Client.on_callback_query(filters.regex("^cancels"))
 async def cancel_(client, callback_query):
-
-
-
-
-
-
-    else:
-        await callback_query.edit_message_text("⚠️ No active task to cancel.")
-    except asyncio.CancelledError:
-        await callback_query.edit_message_text("⚠️ Task already canceled.")
-    except Exception as e:
-        await callback_query.answer(f"Nothing to cancel: {e}")
+    chat_id = callback_query.message.chat.id
+    if await task_manager.cancel_task(chat_id):
+        spam_chats.remove(chat_id)
+        await callback_query.edit_message_text("✅ Processing canceled.")
+    else:
+        await callback_query.edit_message_text("⚠️ No active task to cancel.")
 
+async def handle_video(client, message, model_):
+    chat_id = message.chat.id
+
+    async def process_video():
+        buttons = [
+            [
+                InlineKeyboardButton(
+                    text="Cancel",
+                    callback_data="cancels"
+                )
+            ],
+        ]
+        try:
+            spam_chats.append(chat_id)
+            video_file_name = await message.download(file_name="newvideo.mp4")
+            caption = message.caption or "What's this?"
+            model = genai.GenerativeModel(model_name=model_)
+
+            ai_reply = await message.reply_text(
+                "Uploading file..",
+                reply_markup=InlineKeyboardMarkup(buttons)
+            )
+            video_file = genai.upload_file(path=video_file_name)
+            while video_file.state.name == "PROCESSING":
+                await asyncio.sleep(10)
+                video_file = genai.get_file(video_file.name)
+
+            if video_file.state.name == "FAILED":
+                return await ai_reply.edit_text(f"Error: {video_file.state.name}")
+
+            response = model.generate_content(
+                [video_file, caption],
+                request_options={"timeout": 600}
+            )
+            await ai_reply.edit_text(response.text)
+        except asyncio.CancelledError:
+            await ai_reply.edit_text("⚠️ Video processing was canceled.")
+        finally:
+            spam_chats.remove(chat_id)
+            task_manager.cancel_task(chat_id)
+
+    await task_manager.add_task(chat_id, process_video())
 
 @Client.on_message(
     ~filters.scheduled
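For context on the pattern this commit introduces: TaskManager keeps one asyncio.Task per chat id. add_task() wraps the coroutine in asyncio.create_task(), registers it, awaits it, and always removes the registry entry in its finally block, while cancel_task() cancels and unregisters whatever is running for that chat. A minimal, self-contained sketch of that lifecycle, assuming nothing beyond the standard library (the worker() coroutine and the chat id 123 are stand-ins, not part of chat.py):

import asyncio

class TaskManager:
    def __init__(self):
        self.running_tasks = {}

    async def add_task(self, chat_id, coro):
        # Register the task, await it, and always drop the registry entry.
        task = asyncio.create_task(coro)
        self.running_tasks[chat_id] = task
        try:
            await task
        finally:
            self.running_tasks.pop(chat_id, None)

    async def cancel_task(self, chat_id):
        # Cancel and unregister the task for this chat, if any.
        task = self.running_tasks.get(chat_id)
        if task:
            task.cancel()
            del self.running_tasks[chat_id]
            return True
        return False

async def worker():
    # Stand-in for process_video(): long enough to be cancelled mid-flight.
    await asyncio.sleep(60)

async def main():
    manager = TaskManager()
    runner = asyncio.create_task(manager.add_task(123, worker()))
    await asyncio.sleep(0.1)                 # let worker() start and suspend
    print(await manager.cancel_task(123))    # True: a task was found and cancelled
    try:
        await runner
    except asyncio.CancelledError:
        pass                                 # add_task re-raises the worker's cancellation
    print(manager.running_tasks)             # {}: the finally block cleared the entry

asyncio.run(main())

Because add_task() awaits the task itself, handle_video() in the diff does not return until process_video() finishes or is cancelled.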
@@ -324,81 +383,7 @@ async def chatbot_talk(client: Client, message: Message):
             return await ai_reply.edit_text(f"Error: {e}")
 
     if message.video:
-
-        async def task_video():
-            try:
-                spam_chats.append()
-                await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_VIDEO)
-                await asyncio.sleep(1.5)
-                buttons = [
-                    [
-                        InlineKeyboardButton(
-                            text="Cancel",
-                            callback_data="cancels"
-                        ),
-                    ],
-                ]
-                ai_reply = await message.reply_text(
-                    f"Processing...\n\nYou can use stop everything",
-                    reply_markup=InlineKeyboardMarkup(buttons)
-                )
-                await asyncio.sleep(5)
-                video_file_name = await message.download(file_name="newvideo.mp4")
-                caption = message.caption or "What's this?"
-                model = genai.GenerativeModel(
-                    model_name=model_,
-                    safety_settings={
-                        genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                        genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                        genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                        genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                    }
-                )
-                backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-                backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-                await ai_reply.edit_text("Uploading file..")
-                video_file = genai.upload_file(path=video_file_name)
-                while video_file.state.name == "PROCESSING":
-                    await asyncio.sleep(10)
-                    video_file = genai.get_file(video_file.name)
-                if video_file.state.name == "FAILED":
-                    return await ai_reply.edit_text(f"Error: {video_file.state.name}")
-                await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
-                await asyncio.sleep(1.5)
-                response = model.generate_content(
-                    [video_file, caption],
-                    request_options={"timeout": 600}
-                )
-                if len(response.text) > 4096:
-                    with open("chat.txt", "w+", encoding="utf8") as out_file:
-                        out_file.write(response.text)
-                    await message.reply_document(
-                        document="chat.txt",
-                        disable_notification=True
-                    )
-                    await ai_reply.delete()
-                    os.remove("chat.txt")
-                else:
-                    await ai_reply.edit_text(response.text)
-                backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
-                await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-                await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
-                video_file.delete()
-                os.remove(video_file_name)
-                return
-            except asyncio.CancelledError:
-                await ai_reply.edit_text("✅ Video processed successfully!")
-            except InvalidArgument as e:
-                return await ai_reply.edit_text(f"Error: {e}")
-            except Exception as e:
-                return await ai_reply.edit_text(f"Error: {e}")
-            finally:
-                if chat_id in spam_chats:
-                    spam_chats.remove(chat_id)
-                if chat_id in running_tasks:
-                    del running_tasks[chat_id]
-        task = asyncio.create_task(task_video())
-        running_tasks[chat_id] = task
+        await handle_video(client, message, model_)
     if message.text:
         await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
         await asyncio.sleep(1.5)
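On the cancellation path: pressing the "Cancel" button fires the ^cancels callback, cancel_task() calls task.cancel(), and asyncio delivers CancelledError inside process_video() at its next await point, where the except clause edits the status message. A stripped-down sketch of that control flow, with edit_status() as a hypothetical stand-in for ai_reply.edit_text():

import asyncio

async def edit_status(text):
    # Hypothetical stand-in for the Pyrogram message edit used in chat.py.
    print(text)

async def process_video():
    try:
        await edit_status("Uploading file..")
        await asyncio.sleep(600)              # stands in for upload_file + generate_content
        await edit_status("done")
    except asyncio.CancelledError:
        # Delivered here once the Cancel callback cancels this task.
        await edit_status("⚠️ Video processing was canceled.")

async def main():
    task = asyncio.create_task(process_video())
    await asyncio.sleep(0.1)                  # let the task reach the long await
    task.cancel()                             # what the ^cancels callback triggers
    try:
        await task
    except asyncio.CancelledError:
        pass                                  # not expected here: process_video() handled the cancellation
    # prints "Uploading file.." then "⚠️ Video processing was canceled."

asyncio.run(main())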