|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import asyncio |
|
import os |
|
from pyrogram import * |
|
from pyrogram import enums |
|
from pyrogram import Client, filters |
|
from pyrogram.types import * |
|
from pyrogram.errors import * |
|
from config import * |
|
|
|
from database import db |
|
from logger import LOGS |
|
import datetime |
|
|
|
from huggingface_hub import InferenceClient |
|
from chatbot.plugins.user_database import users_collection |
|
from chatbot.plugins.keyboards import get_language_keyboard |
|
from chatbot.plugins.languages import LANGUAGES |
|
|
|
from . import force_sub |
|
|
|
# System/persona prompt sent as the "system" message to the HF chat model
# in process_stream().
# NOTE(review): this f-string is evaluated ONCE at module import, so the
# embedded datetime.datetime.now() timestamp is frozen for the lifetime of
# a long-running bot process — confirm whether a fresh timestamp per request
# was intended.
SYSTEM_PROMPT = f"""

Your name is Randy Dev. A kind and friendly AI assistant that answers in a short and concise answer.

Give short step-by-step reasoning if required.



python language powered by @xtdevs on telegram support and language models Meta AI



{datetime.datetime.now()}

"""
|
|
|
async def process_stream(message, prompt):
    """Stream a chat completion from HuggingFace and return the full text.

    Parameters:
        message: the incoming Pyrogram message (currently unused; kept for
            interface compatibility with existing callers).
        prompt: the user's question, sent as the "user" message.

    Returns:
        The accumulated completion text as a single string.

    Raises:
        Whatever the HuggingFace client raises on network/auth errors;
        callers (askcmd) catch and report these.
    """

    def _consume() -> str:
        # The InferenceClient streaming iterator is synchronous/blocking,
        # so the whole consumption loop runs in a worker thread (see below)
        # to avoid stalling the asyncio event loop during generation.
        client_hf = InferenceClient(api_key=HF_KEY)
        messages = [
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": prompt},
        ]
        stream = client_hf.chat.completions.create(
            model="mistralai/Mixtral-8x7B-Instruct-v0.1",
            messages=messages,
            max_tokens=500,
            stream=True,
        )
        parts = []
        for chunk in stream:
            LOGS.info(chunk)
            # delta.content can be None on empty/final deltas; the original
            # `accumulated_text += new_content` raised TypeError in that case.
            piece = chunk.choices[0].delta.content
            if piece:
                parts.append(piece)
        return "".join(parts)

    # Off-load the blocking HTTP streaming so other handlers keep running.
    return await asyncio.to_thread(_consume)
|
|
|
@Client.on_message(
    ~filters.scheduled
    & filters.command(["start"])
    & ~filters.forwarded
)
async def startbot(client: Client, message: Message):
    """Handle /start: greet the user and show developer/channel links."""
    keyboard = InlineKeyboardMarkup(
        [
            [
                InlineKeyboardButton("Developer", url="https://t.me/xtdevs"),
                InlineKeyboardButton("Channel", url="https://t.me/RendyProjects"),
            ]
        ]
    )
    await message.reply_text(
        "Woohoo! Welcome! I'm excited to get started as a Meta AI bot!\n\n• Command /ask hello",
        disable_web_page_preview=True,
        reply_markup=keyboard,
    )
|
|
|
@Client.on_message(
    filters.private
    & filters.command(["ask"])
    & ~filters.forwarded
)
@force_sub
async def askcmd(client: Client, message: Message):
    """Handle /ask in private chats: query the model and reply.

    The prompt comes from the command argument (`/ask <text>`) or, failing
    that, from the text of the replied-to message. Replies over Telegram's
    4096-character message limit are sent as a text-file document instead.
    Any failure is reported back to the user as "Error: ...".
    """
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Give ask from Meta AI")
    await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
    await asyncio.sleep(1.5)
    try:
        output = await process_stream(message, prompt)
        if len(output) > 4096:
            # Telegram caps text messages at 4096 chars — ship long output
            # as a document. A per-message filename replaces the shared
            # "chat.txt", which raced/clobbered under concurrent requests.
            file_path = f"chat_{message.chat.id}_{message.id}.txt"
            with open(file_path, "w", encoding="utf8") as out_file:
                out_file.write(output)
            try:
                await message.reply_document(
                    document=file_path,
                    disable_notification=True,
                )
            finally:
                # Remove the temp file even if sending fails (the original
                # leaked it on upload errors).
                os.remove(file_path)
        else:
            await message.reply_text(output, disable_web_page_preview=True)
        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
        return
    except Exception as e:
        # Log the full traceback for operators; the user gets a short notice.
        LOGS.exception("askcmd failed for chat %s", message.chat.id)
        return await message.reply_text(f"Error: {e}")
|
|