randydev committed on
Commit
51de508
·
verified ·
1 Parent(s): 1ff2d48
Files changed (1) hide show
  1. chatbot/plugins/chat.py +0 -356
chatbot/plugins/chat.py DELETED
@@ -1,356 +0,0 @@
1
- #!/usr/bin/env python
2
- # -*- coding: utf-8 -*-
3
- # Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
4
- #
5
- # from : https://github.com/TeamKillerX
6
- # Channel : @RendyProjects
7
- # This program is free software: you can redistribute it and/or modify
8
- # it under the terms of the GNU Affero General Public License as published by
9
- # the Free Software Foundation, either version 3 of the License, or
10
- # (at your option) any later version.
11
- #
12
- # This program is distributed in the hope that it will be useful,
13
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
14
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15
- # GNU Affero General Public License for more details.
16
- #
17
- # You should have received a copy of the GNU Affero General Public License
18
- # along with this program. If not, see <https://www.gnu.org/licenses/>.
19
-
20
- import requests
21
- import time
22
- import json
23
- import asyncio
24
- import io
25
- import os
26
- import re
27
-
28
- from PIL import Image
29
- from pyrogram import *
30
- from pyrogram.enums import ChatMemberStatus
31
- from pyrogram import enums
32
- from pyrogram import Client, filters
33
- from pyrogram.types import *
34
- from pyrogram.errors import *
35
- from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat
36
- from config import *
37
- from database import db
38
- from logger import LOGS
39
-
40
- import google.generativeai as genai
41
- import akenoai.pyro_decorator as akeno
42
- from google.api_core.exceptions import InvalidArgument
43
-
44
async def geni_files_delete(name: str):
    """Delete an uploaded file from the Gemini Files API.

    Args:
        name: The API resource name of the file, e.g. "files/abc123".

    Returns:
        The raw response body on success, or None on any non-200 status.
    """
    url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
    params = {"key": GOOGLE_API_KEY}
    # BUG FIX: requests.delete is a blocking call; running it directly in an
    # async function stalls the whole event loop for the duration of the HTTP
    # request. Push it onto a worker thread instead.
    response = await asyncio.to_thread(requests.delete, url, params=params)
    if response.status_code != 200:
        return None
    return response.text
51
-
52
# Commands handled by their own dedicated handlers above; the catch-all
# chatbot_talk handler must ignore them (see ~filters.command(DISABLE_COMMAND)).
DISABLE_COMMAND = [
    "start",
    "status",
    "offchat",
    "onchat"
]
58
-
59
# Status template rendered by the /status handler (userstatus).
# NOTE(review): the same {check_enable} placeholder feeds the Text/Image/Video/
# Voice rows — only Document uses {check_status}; confirm this is intentional.
FREE_GEMINI_TEXT = """
• User Free : {name}
Text: `{check_enable}`
Image: `{check_enable}`
Video: `{check_enable}`
Voice: `{check_enable}`
Document: `{check_status}`

{info}
"""
69
-
70
@Client.on_message(
    ~filters.scheduled
    & filters.command(["status"])
    & ~filters.forwarded
)
@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
async def userstatus(client: Client, message: Message):
    """Reply to /status with the caller's Gemini feature availability.

    Both the plan flag and the per-chat chatbot flag feed the same template;
    the original duplicated the entire format() call in each branch — this
    builds the text once and only varies the `info` footer and the
    web-preview flag (the free branch contains a registration link).
    """
    is_check_plan = await db.is_gemini_plan(user_id=message.from_user.id)
    chat_user = await db.get_chatbot(message.chat.id)
    is_free = not is_check_plan and not chat_user
    if is_free:
        info = "You need email business: [register](https://forms.gle/egRciGY39mmhNyScA)\nRemember: **can't ownership**"
    else:
        info = "**All unlimited good!**"
    text = FREE_GEMINI_TEXT.format(
        name=message.from_user.first_name,
        check_enable="Unlimited" if chat_user else "Stopped",
        check_status="Unlimited" if is_check_plan else "Stopped",
        info=info
    )
    if is_free:
        # Suppress the link preview for the registration form URL.
        return await message.reply_text(text, disable_web_page_preview=True)
    return await message.reply_text(text)
97
-
98
@Client.on_message(
    ~filters.scheduled
    & filters.command(["testpay"])
    & ~filters.forwarded
)
async def test_payment(client: Client, message: Message):
    """Grant the sender a Gemini plan and acknowledge it.

    NOTE(review): there is no owner/admin gate here, so any user can invoke
    /testpay — presumably a temporary payment-flow test hook; confirm before
    shipping.
    """
    sender_id = message.from_user.id
    await db.add_gemini_plan(user_id=sender_id)
    return await message.reply_text("Successfully payment")
106
-
107
@Client.on_message(
    ~filters.scheduled
    & filters.command(["onchat"])
    & ~filters.forwarded
)
@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
async def addchatbot_user(client: Client, message: Message):
    """Enable the Gemini chatbot for the current chat via /onchat."""
    chat_id = message.chat.id
    bot_id = client.me.id
    await db.add_chatbot(chat_id, bot_id)
    await message.reply_text("Added chatbot user")
116
-
117
-
118
@Client.on_message(
    ~filters.scheduled
    & filters.command(["offchat"])
    & ~filters.forwarded
)
@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
async def rmchatbot_user(client: Client, message: Message):
    """Disable the Gemini chatbot for the current chat via /offchat."""
    chat_id = message.chat.id
    await db.remove_chatbot(chat_id)
    await message.reply_text("ok stopped gemini")
127
-
128
def _gemini_safety_settings():
    """Return the shared safety map that disables every Gemini harm-category
    filter (previously duplicated verbatim in the audio and video branches)."""
    return {
        genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
        genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
        genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
        genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
    }


async def _send_processing(client: Client, message: Message) -> Message:
    """Post the initial progress message; premium accounts may prefix the
    custom loading emoji (plain accounts cannot send custom emoji)."""
    if client.me.is_premium:
        return await message.reply_text(f"{custom_loading}Processing...")
    return await message.reply_text("Processing...")


async def _deliver_reply(message: Message, ai_reply: Message, text: str):
    """Show *text* by editing *ai_reply*, or — when it exceeds Telegram's
    4096-character message limit — ship it as a chat.txt document instead."""
    if len(text) > 4096:
        with open("chat.txt", "w+", encoding="utf8") as out_file:
            out_file.write(text)
        await message.reply_document(
            document="chat.txt",
            disable_notification=True
        )
        await ai_reply.delete()
        os.remove("chat.txt")
    else:
        await ai_reply.edit_text(text)


async def _handle_photo(client: Client, message: Message):
    """Answer a photo (plus optional caption) via GeminiLatest, persisting the
    exchange to the per-user chat history."""
    await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
    await asyncio.sleep(1.5)
    file_path = await message.download()
    caption = message.caption or "What's this?"
    gemini = GeminiLatest(api_keys=GOOGLE_API_KEY)
    ai_reply = await _send_processing(client, message)
    try:
        await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
        await asyncio.sleep(1.5)
        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
        backup_chat.append({"role": "user", "parts": [{"text": caption}]})
        response_reads = gemini.get_response_image(caption, file_path)
        await _deliver_reply(message, ai_reply, response_reads)
        backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
        await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
    except Exception as e:
        # InvalidArgument and any other failure were handled identically in
        # the original (two duplicate except blocks) — collapsed to one.
        return await ai_reply.edit_text(f"Error: {e}")
    finally:
        # BUG FIX: the downloaded photo used to leak when the Gemini call raised.
        if os.path.exists(file_path):
            os.remove(file_path)


async def _handle_media(client: Client, message: Message, ai_reply: Message,
                        media_path: str, model_name: str):
    """Shared audio/voice/video flow: upload the file to the Gemini Files API,
    poll until it is processed, then generate and deliver the answer."""
    caption = message.caption or "What's this?"
    model = genai.GenerativeModel(
        model_name=model_name,
        safety_settings=_gemini_safety_settings()
    )
    backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
    backup_chat.append({"role": "user", "parts": [{"text": caption}]})
    if client.me.is_premium:
        await ai_reply.edit_text(f"{custom_loading}Uploading file..")
    else:
        await ai_reply.edit_text("Uploading file..")
    uploaded = genai.upload_file(path=media_path)
    # The Files API processes uploads asynchronously — poll until it settles.
    while uploaded.state.name == "PROCESSING":
        await asyncio.sleep(10)
        uploaded = genai.get_file(uploaded.name)
    if uploaded.state.name == "FAILED":
        return await ai_reply.edit_text(f"Error: {uploaded.state.name}")
    try:
        await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
        await asyncio.sleep(1.5)
        response = model.generate_content(
            [uploaded, caption],
            request_options={"timeout": 600}
        )
        await _deliver_reply(message, ai_reply, response.text)
        backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
        await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
        uploaded.delete()
    except Exception as e:
        return await ai_reply.edit_text(f"Error: {e}")
    finally:
        # BUG FIX: the downloaded media file used to leak on every error path.
        if os.path.exists(media_path):
            os.remove(media_path)


async def _handle_text(client: Client, message: Message):
    """Answer a plain text message with gemini-1.5-flash, using the persisted
    per-user history as chat context."""
    await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
    await asyncio.sleep(1.5)
    query = message.text.strip()
    # When the message addresses the bot by name ("Randy ..."/"Rendi ..."),
    # strip the name and use only the remainder as the prompt.
    match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
    if match:
        rest_of_sentence = match.group(2).strip()
        query_base = rest_of_sentence if rest_of_sentence else query
    else:
        query_base = query
    # (removed: `command`/`pic_query` locals that were computed but never used)
    try:
        model_flash = genai.GenerativeModel(
            model_name="gemini-1.5-flash"
        )
        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
        backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
        chat_session = model_flash.start_chat(history=backup_chat)
        response_data = chat_session.send_message(query_base)
        output = response_data.text
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output, disable_web_page_preview=True)
        backup_chat.append({"role": "model", "parts": [{"text": output}]})
        await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
        return
    except Exception:
        return await message.reply_text(f"Error: maintenance API")


@Client.on_message(
    filters.incoming
    & (
        filters.text
        | filters.photo
        | filters.video
        | filters.audio
        | filters.voice
        | filters.document
        | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
    )
    & (filters.private | filters.group)
    & ~filters.command(DISABLE_COMMAND)
    & ~filters.bot
    & ~filters.via_bot
    & ~filters.forwarded,
    group=2,
)
@akeno.ForceSubscribe(where_from="RendyProjects", owner_id="xtdevs")
@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
async def chatbot_talk(client: Client, message: Message):
    """Catch-all Gemini chatbot handler.

    Dispatches incoming photo / document / audio / voice / video / text
    messages to the matching media handler. Only active in chats enabled via
    /onchat, and in reply threads only when the user replies to the bot.
    """
    genai.configure(api_key=GOOGLE_API_KEY)
    chat_user = await db.get_chatbot(message.chat.id)
    if not chat_user:
        return
    # Ignore replies aimed at anyone other than the bot itself.
    if message.reply_to_message and message.reply_to_message.from_user:
        if message.reply_to_message.from_user.id != client.me.id:
            return
    if message.photo:
        return await _handle_photo(client, message)
    if message.document:
        is_check_plan = await db.is_gemini_plan(user_id=message.from_user.id)
        if not is_check_plan:
            return await message.reply_text("Only plan gemini")
        return await message.reply_text("maintenance soon")
    if message.audio or message.voice:
        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_AUDIO)
        await asyncio.sleep(1.5)
        ai_reply = await _send_processing(client, message)
        # BUG FIX: audio and voice used two identical download branches.
        media_path = await message.download()
        return await _handle_media(client, message, ai_reply, media_path,
                                   model_name="gemini-1.5-flash")
    if message.video:
        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_VIDEO)
        await asyncio.sleep(1.5)
        ai_reply = await _send_processing(client, message)
        media_path = await message.download(file_name="newvideo.mp4")
        return await _handle_media(client, message, ai_reply, media_path,
                                   model_name="gemini-1.5-pro")
    if message.text:
        return await _handle_text(client, message)