File size: 3,751 Bytes
732fabe
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b46c083
22aff9e
 
 
 
1fc4fa4
 
b46c083
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
de7e62a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b46c083
de7e62a
 
 
 
732fabe
d5c0a21
1e275e4
d5c0a21
 
 
bd49c1e
 
 
 
 
 
b46c083
d5c0a21
 
 
b46c083
d5c0a21
 
 
 
 
 
 
 
 
b46c083
d5c0a21
b46c083
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
#
# from : https://github.com/TeamKillerX
# Channel : @RendyProjects
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

import asyncio
import datetime
import os

from huggingface_hub import InferenceClient
from pyrogram import *
from pyrogram import enums
from pyrogram import Client, filters
from pyrogram.types import *
from pyrogram.errors import *

from config import *
from database import db
from logger import LOGS
from chatbot.plugins.user_database import users_collection
from chatbot.plugins.keyboards import get_language_keyboard
from chatbot.plugins.languages import LANGUAGES

from . import force_sub

async def process_stream(message):
    """Send *message* to the Mixtral chat model and return the full reply text.

    Parameters:
        message (str): the user's prompt.

    Returns:
        str: the accumulated completion text ("" if the stream yields nothing).

    NOTE(review): InferenceClient's streaming iteration is synchronous, so this
    blocks the event loop while tokens arrive — consider wrapping the consume
    loop in asyncio.to_thread if responsiveness matters.
    """
    client_hf = InferenceClient(api_key=HF_KEY)
    messages = [
        {"role": "system", "content": f"Your name is Randy Dev. A kind and friendly AI assistant that answers in a short and concise answer.\nGive short step-by-step reasoning if required.\n\n{datetime.datetime.now()}"},
        {"role": "user", "content": message}
    ]
    stream = client_hf.chat.completions.create(
        model="mistralai/Mixtral-8x7B-Instruct-v0.1",
        messages=messages,
        max_tokens=500,
        stream=True
    )
    parts = []
    for chunk in stream:
        delta = chunk.choices[0].delta.content
        # Some chunks (e.g. the final stop chunk) carry content=None; skip them
        # to avoid "can only concatenate str (not NoneType)" TypeError.
        if delta:
            parts.append(delta)
    # join() instead of repeated += keeps concatenation linear.
    return "".join(parts)

@Client.on_message(
    ~filters.scheduled
    & filters.command(["start"])
    & ~filters.forwarded
)
async def startbot(client: Client, message: Message):
    """Reply to /start with a welcome message and developer/channel buttons."""
    keyboard = InlineKeyboardMarkup(
        [
            [
                InlineKeyboardButton(text="Developer", url="https://t.me/xtdevs"),
                InlineKeyboardButton(text="Channel", url="https://t.me/RendyProjects"),
            ]
        ]
    )
    await message.reply_text(
        text="Woohoo! Welcome! I'm excited to get started as a Meta AI bot!\n\n• Command /ask hello",
        disable_web_page_preview=True,
        reply_markup=keyboard,
    )


@Client.on_message(
    (filters.private | filters.group)
    & filters.command(["ask"])
    & ~filters.forwarded
)
async def askcmd(client: Client, message: Message):
    """Handle /ask: send the prompt to the model and reply with the answer.

    The prompt is taken from the command arguments, or from the replied-to
    message's text when no arguments are given. Replies longer than 4096
    characters (Telegram's message limit) are sent as a document instead.
    """
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        # A replied-to media message has text=None; handled by the guard below
        # so we don't forward None to the model.
        prompt = message.reply_to_message.text
    else:
        prompt = None
    if not prompt:
        return await message.reply_text("Give ask from Meta AI")
    await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
    await asyncio.sleep(1.5)
    try:
        output = await process_stream(prompt)
        if len(output) > 4096:
            # Too long for a single Telegram message — ship it as a file.
            try:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
                    out_file.write(output)
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
            finally:
                # Clean up even if reply_document raises, so the temp file
                # never leaks onto disk.
                if os.path.exists("chat.txt"):
                    os.remove("chat.txt")
        else:
            await message.reply_text(output, disable_web_page_preview=True)
    except Exception as e:
        return await message.reply_text(f"Error: {e}")