|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import requests |
|
import time |
|
import json |
|
import asyncio |
|
import io |
|
import os |
|
import re |
|
from PIL import Image |
|
|
|
from pyrogram import * |
|
from pyrogram import enums |
|
from pyrogram import Client, filters |
|
from pyrogram.types import * |
|
from pyrogram.errors import * |
|
from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat |
|
from config import * |
|
|
|
from database import db |
|
from logger import LOGS |
|
import datetime |
|
from chatbot import send_log |
|
from openai import AsyncOpenAI as openai |
|
import akenoai.openai as akeno |
|
|
|
@Client.on_message(
    ~filters.scheduled
    & filters.command(["onchat"])
    & ~filters.forwarded
)
async def addchatbot_user(client: Client, message: Message):
    """Enable the GPT chatbot for the chat this command was sent in.

    Registers the (chat_id, bot_id) pair in the database so that
    `chatbot_talk` starts responding in this chat, then confirms.
    """
    chat_id = message.chat.id
    bot_id = client.me.id
    await db.add_chatbot(chat_id, bot_id)
    await message.reply_text("Added chatbot user")
|
|
|
# System prompt prepended to every conversation before it is sent to the
# model (see chatbot_talk below).
# NOTE(review): the f-string interpolates datetime.datetime.now() ONCE at
# import time, so the timestamp inside the prompt goes stale while the bot
# runs — confirm whether a per-message timestamp was intended.
BASE_PROMPT = f"""
You are my name Akeno AI and python language powered by @xtdevs on telegram support and language models GPT-5-ULTRA
- off topic free questions
{datetime.datetime.now()}
"""
|
|
|
@Client.on_message(
    ~filters.scheduled
    & filters.command(["offchat"])
    & ~filters.forwarded
)
async def rmchatbot_user(client: Client, message: Message):
    """Disable the GPT chatbot for the chat this command was sent in.

    Drops the chat's registration from the database so that
    `chatbot_talk` ignores this chat again, then confirms.
    """
    chat_id = message.chat.id
    await db.remove_chatbot(chat_id)
    await message.reply_text("ok stopped GPT")
|
|
|
@Client.on_message(
    filters.incoming
    & (
        filters.text
        | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
    )
    & (filters.private | filters.group)
    & ~filters.bot
    & ~filters.via_bot
    & ~filters.forwarded,
    group=2,
)
async def chatbot_talk(client: Client, message: Message):
    """Answer text messages with the GPT backend in chats that enabled it.

    Flow: check the chat is registered (``/onchat``), derive the prompt
    from the message text (stripping a "Randy"/"Rendi" trigger word if
    present), append it to the user's stored history, query the model,
    and send the reply — as a document when it exceeds Telegram's
    4096-character message limit. On any failure, replies with the error.
    """
    chat_user = await db.get_chatbot(message.chat.id)
    if not chat_user:
        return
    # Anonymous admins / channel posts have no from_user, but the chat
    # history below is keyed on from_user.id — bail out early instead of
    # raising AttributeError into the generic error reply.
    if not message.from_user:
        return
    if message.reply_to_message and message.reply_to_message.from_user:
        # In reply threads, only respond when the user replied to the bot.
        if message.reply_to_message.from_user.id != client.me.id:
            return
    if not message.text:
        return
    await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
    await asyncio.sleep(1.5)
    query = message.text.strip()
    # If a trigger word ("Randy"/"Rendi") is present, use whatever follows
    # it as the prompt; with nothing after it, fall back to the full text.
    match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
    if match:
        rest_of_sentence = match.group(2).strip()
        query_base = rest_of_sentence if rest_of_sentence else query
    else:
        query_base = query
    try:
        backup_chat = await db._get_openai_chat_from_db(message.from_user.id)
        backup_chat.append({"role": "system", "content": BASE_PROMPT})
        backup_chat.append({"role": "user", "content": query_base})
        # NOTE(review): the literal Ellipsis first positional argument is
        # unusual — confirm akeno.OpenAI.run really expects it.
        response = await akeno.OpenAI.run(
            ...,
            openai_meta=openai,
            model="gpt-4o-mini-2024-07-18",
            messages=backup_chat
        )
        output = response
        if len(output) > 4096:
            # Telegram caps text messages at 4096 chars; ship the full
            # reply as a document instead.
            with open("chat.txt", "w", encoding="utf8") as out_file:
                out_file.write(output)
            try:
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
            finally:
                # Remove the temp file even if sending the document fails.
                os.remove("chat.txt")
        else:
            await message.reply_text(output)
        backup_chat.append({"role": "assistant", "content": output})
        user_detail = (
            f"**Akeno GPT Bot**\n"
            f"**User Username**: @{message.from_user.username if message.from_user else None}\n"
            f"**User ID**: `{message.from_user.id}`\n"
            f"**Chat Title**: `{message.chat.title if message.chat else None}`\n"
            f"**Chat ID**: `{message.chat.id if message.chat else None}`\n"
        )
        response_log = await send_log(user_detail)
        if response_log is None:
            LOGS.warning("Error response")
        else:
            LOGS.info(response_log)
        await db._update_openai_chat_in_db(message.from_user.id, backup_chat)
        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
        return
    except Exception as e:
        return await message.reply_text(f"Error: {e}")