randydev committed
Commit a77e93c
1 Parent(s): 52b19bc

Update chatbot/plugins/chat.py

Files changed (1)
  1. chatbot/plugins/chat.py +13 -12
chatbot/plugins/chat.py CHANGED
@@ -32,6 +32,7 @@ from pyrogram import Client, filters
 from pyrogram.types import *
 from pyrogram.errors import *
 from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat
+from g4f.client import Client as Clients_g4f
 from config import *
 
 from database import db
@@ -188,6 +189,7 @@ async def askcmd(client: Client, message: Message):
     pro = await message.reply("Processing your request...", quote=True)
     chat_user = await db.get_chatbot(message.chat.id)
     if not chat_user:
+        await pro.edit_text("Ok disable")
         return
     if len(message.command) > 1:
         prompt = message.text.split(maxsplit=1)[1]
@@ -201,13 +203,12 @@ async def askcmd(client: Client, message: Message):
         backup_chat = await db._get_openai_chat_from_db(message.from_user.id)
         backup_chat.append({"role": "system", "content": BASE_PROMPT})
         backup_chat.append({"role": "user", "content": prompt})
-        response = await akeno.OpenAI.run(
-            ...,
-            openai_meta=openai,
-            model="gpt-4o-mini-2024-07-18",
+        clients_x = Clients_g4f()
+        response = clients_x.chat.completions.create(
+            model="gpt-4o",
             messages=backup_chat
         )
-        output = response
+        output = response.choices[0].message.content
         if len(output) > 4096:
             with open("chat.txt", "w+", encoding="utf8") as out_file:
                 out_file.write(output)
@@ -255,13 +256,14 @@ async def askcmd(client: Client, message: Message):
 @akeno_log.log_performance
 async def chatbot_talk(client: Client, message: Message):
     chat_user = await db.get_chatbot(message.chat.id)
+    pro = await message.reply("Processing your request...", quote=True)
     if not chat_user:
+        await pro.edit_text("Ok disable")
         return
     if message.reply_to_message and message.reply_to_message.from_user:
         if message.reply_to_message.from_user.id != client.me.id:
             return
     if message.text:
-        pro = await message.reply("Processing your request...", quote=True)
         await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
         await asyncio.sleep(1.5)
         query = message.text.strip()
@@ -278,13 +280,12 @@ async def chatbot_talk(client: Client, message: Message):
         backup_chat = await db._get_openai_chat_from_db(message.from_user.id)
         backup_chat.append({"role": "system", "content": BASE_PROMPT})
         backup_chat.append({"role": "user", "content": query_base})
-        response = await akeno.OpenAI.run(
-            ...,
-            openai_meta=openai,
-            model="gpt-4o-mini-2024-07-18",
+        clients_x = Clients_g4f()
+        response = clients_x.chat.completions.create(
+            model="gpt-4o",
             messages=backup_chat
-        )
-        output = response
+        )
+        output = response.choices[0].message.content
         if len(output) > 4096:
             with open("chat.txt", "w+", encoding="utf8") as out_file:
                 out_file.write(output)
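
For context on what the new call does: the commit replaces the awaited akeno.OpenAI.run(...) wrapper with the synchronous client from the g4f package, using its OpenAI-compatible interface. Below is a minimal, self-contained sketch of that call pattern as it appears in the diff; the system prompt and user message are placeholders standing in for BASE_PROMPT and the chat history that the real handlers load from the database.

# Minimal sketch of the g4f call pattern introduced by this commit.
# The messages below are placeholders; the real handlers build
# `backup_chat` from BASE_PROMPT plus the stored chat history.
from g4f.client import Client as Clients_g4f

backup_chat = [
    {"role": "system", "content": "You are a helpful Telegram assistant."},  # stand-in for BASE_PROMPT
    {"role": "user", "content": "Hello, what can you do?"},
]

clients_x = Clients_g4f()
response = clients_x.chat.completions.create(
    model="gpt-4o",
    messages=backup_chat,
)
output = response.choices[0].message.content
print(output)

One thing worth noting: chat.completions.create on the synchronous g4f Client blocks until the provider responds, so inside the async Pyrogram handlers it will stall the event loop for the duration of the request. Wrapping the call in await asyncio.to_thread(...), or using an async g4f client if the installed version provides one, would avoid that; neither change is part of this commit.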