Upload chat.py
chatbot/plugins/chat.py  +23 -8
@@ -37,15 +37,23 @@ from chatbot.plugins.languages import LANGUAGES
 
 from . import force_sub
 
-async def process_stream(message):
+SYSTEM_PROMPT = f"""
+Your name is Randy Dev. A kind and friendly AI assistant that answers in a short and concise answer.
+Give short step-by-step reasoning if required.
+
+python language powered by @xtdevs on telegram support and language models Meta AI
+
+{datetime.datetime.now()}
+"""
+
+async def process_stream(message, prompt):
     client_hf = InferenceClient(api_key=HF_KEY)
-
-
-
-    ]
+    backup_chat = await db._get_openai_chat_from_db(message.from_user.id)
+    backup_chat.append({"role": "system", "content": BASE_PROMPT})
+    backup_chat.append({"role": "user", "content": prompt})
     stream = client_hf.chat.completions.create(
         model="mistralai/Mixtral-8x7B-Instruct-v0.1",
-        messages=
+        messages=backup_chat,
         max_tokens=500,
         stream=True
     )
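
The rewritten process_stream builds the request from per-user history instead of a throwaway messages list. Two details in this hunk are worth flagging: accumulated_text is used with += but is never initialized in the lines shown, and the hunk defines SYSTEM_PROMPT while appending BASE_PROMPT to the history. A minimal sketch of the streaming loop with those details tightened, using huggingface_hub's InferenceClient exactly as the diff does (stream_reply and the HF_KEY lookup are illustrative stand-ins):

import os

from huggingface_hub import InferenceClient

HF_KEY = os.environ["HF_KEY"]  # stand-in for the plugin's config value
client_hf = InferenceClient(api_key=HF_KEY)

def stream_reply(messages: list) -> str:
    stream = client_hf.chat.completions.create(
        model="mistralai/Mixtral-8x7B-Instruct-v0.1",
        messages=messages,
        max_tokens=500,
        stream=True,
    )
    accumulated_text = ""  # must exist before the += in the loop
    for chunk in stream:
        new_content = chunk.choices[0].delta.content
        if new_content:  # the final delta's content can be None
            accumulated_text += new_content
    return accumulated_text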
@@ -53,6 +61,8 @@ async def process_stream(message)
     for chunk in stream:
         new_content = chunk.choices[0].delta.content
         accumulated_text += new_content
+    backup_chat.append({"role": "assistant", "content": accumulated_text})
+    await db._update_openai_chat_in_db(message.from_user.id, backup_chat)
     return accumulated_text
 
 @Client.on_message(
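
The two db helpers are not part of this diff, so their internals are not shown here. A hypothetical sketch of the contract they would need to satisfy, assuming a motor (async MongoDB) collection keyed by user id; every name below is illustrative:

from motor.motor_asyncio import AsyncIOMotorClient

_chats = AsyncIOMotorClient("mongodb://localhost:27017")["chatbot"]["openai_chats"]

async def _get_openai_chat_from_db(user_id: int) -> list:
    # Return the stored message list, or a fresh history for new users.
    doc = await _chats.find_one({"user_id": user_id})
    return doc["chat"] if doc else []

async def _update_openai_chat_in_db(user_id: int, chat: list) -> None:
    # Upsert the full message list back for the user.
    await _chats.update_one(
        {"user_id": user_id},
        {"$set": {"chat": chat}},
        upsert=True,
    )

Note that process_stream appends a fresh system message on every call before saving, so under this contract the stored history accumulates one duplicate system entry per request.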
@@ -79,11 +89,13 @@ async def startbot(client: Client, message: Message):
         reply_markup=InlineKeyboardMarkup(buttons)
     )
 
+
 @Client.on_message(
-
+    filters.private
     & filters.command(["ask"])
     & ~filters.forwarded
 )
+@force_sub
 async def askcmd(client: Client, message: Message):
     if len(message.command) > 1:
         prompt = message.text.split(maxsplit=1)[1]
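
The handler now only fires in private chats and gains the force_sub gate imported at the top of the file. Its implementation lives in the force_sub module and is not part of this diff; a hypothetical sketch of the usual Pyrogram shape for such a decorator, with FSUB_CHANNEL as a placeholder:

import functools

from pyrogram import Client
from pyrogram.errors import UserNotParticipant
from pyrogram.types import Message

FSUB_CHANNEL = "@example_channel"  # placeholder, not from the diff

def force_sub(func):
    @functools.wraps(func)
    async def wrapper(client: Client, message: Message):
        try:
            # Raises UserNotParticipant if the user has not joined the channel.
            await client.get_chat_member(FSUB_CHANNEL, message.from_user.id)
        except UserNotParticipant:
            return await message.reply_text("Please join the channel to use this bot.")
        return await func(client, message)
    return wrapper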
@@ -94,7 +106,7 @@ async def askcmd(client: Client, message: Message):
     await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
     await asyncio.sleep(1.5)
     try:
-        output = await process_stream(prompt)
+        output = await process_stream(message, prompt)
         if len(output) > 4096:
             with open("chat.txt", "w+", encoding="utf8") as out_file:
                 out_file.write(output)
@@ -102,8 +114,11 @@ async def askcmd(client: Client, message: Message):
                 document="chat.txt",
                 disable_notification=True
             )
+            await pro.delete()
             os.remove("chat.txt")
         else:
             await message.reply_text(output, disable_web_page_preview=True)
+        await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
+        return
     except Exception as e:
         return await message.reply_text(f"Error: {e}")
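
pro is presumably a progress message created in context lines outside this diff (for example, pro = await message.reply_text("Processing...")). The long-reply branch writes every response to a shared chat.txt on disk, so two concurrent /ask calls can clobber each other's file before os.remove runs. A sketch of the same branch using an in-memory buffer instead, assuming Pyrogram's file_name parameter on reply_document; send_long_output is a hypothetical helper name:

import io

from pyrogram.types import Message

async def send_long_output(message: Message, output: str) -> None:
    if len(output) > 4096:  # Telegram's per-message text limit
        # Upload from memory; no temp file is shared between handlers.
        buf = io.BytesIO(output.encode("utf8"))
        await message.reply_document(
            document=buf,
            file_name="chat.txt",
            disable_notification=True,
        )
    else:
        await message.reply_text(output, disable_web_page_preview=True)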