Drag committed fcf1b11 (1 parent: 8a289d9): Update palmchat.py

Mikobot/plugins/palmchat.py  CHANGED  (+19 -29)
@@ -3,51 +3,41 @@ from pyrogram import filters
 from Mikobot import app
 from Mikobot.state import state
 
-
-
-MODEL_ID = 1  # Modify this if you have a specific model ID to use
+import os
+import google.generativeai as genai
 
 
-# Function to call the PALM API and get the response
-async def get_palm_response(api_params):
-    try:
-        response = await state.post(PALM_API_URL, params=api_params)
-        if response.status_code == 200:
-            data = response.json()
-            return data.get(
-                "content", "Error: Empty response received from the PALM API."
-            )
-        else:
-            return f"Error: Request failed with status code {response.status_code}."
-    except fetch.RequestError as e:
-        return f"Error: An error occurred while calling the PALM API. {e}"
-
-
-# Command handler for /palm
 @app.on_message(filters.text)
 async def palm_chatbot(client, message):
     if not message.text.startswith("Miko"):
         return
-    # your code here
-    args = message.text.split(maxsplit=1)
-    if len(args) < 2:
-        await message.reply("Give me a query to search.")
-        return
 
-
+    query = " ".join(message.text.split()[1:])
+
+    if not query:
+        await message.reply("Please provide a query after Miko.")
+        return
 
     # Send the "giving results" message first
     result_msg = await message.reply("π")
 
-
-
-
+    try:
+        # Use the Gemini API to generate a response
+        genai.configure(api_key="AIzaSyB4CnCcJKXSlKyYbNu-loj6LoKFkceedps")
+        model = genai.GenerativeModel("gemini-1.5-pro")
+        response = model.generate_content(f"Generate a response to the following query: {query}")
+
+        # Extract only the reply text
+        reply_text = response.candidates[0].content.parts[0].text
+
+    except Exception as e:
+        reply_text = f"Error: An error occurred while calling the Gemini API. {e}"
 
     # Delete the "giving results" message
     await result_msg.delete()
 
     # Send the chatbot response to the user
-    await message.reply(
+    await message.reply(reply_text)
 
 
 __help__ = """
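Note: the added hunk imports `os` but leaves the Gemini key hard-coded in `genai.configure(...)`. As a minimal sketch (not part of the commit), the same call sequence could read the key from an environment variable instead; the `GEMINI_API_KEY` variable name and the `generate_reply` helper below are assumptions for illustration only.

```python
import os

import google.generativeai as genai

# Hypothetical environment variable name; the commit itself hard-codes the key.
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "")


def generate_reply(query: str) -> str:
    """Mirror the handler's try/except: return Gemini's reply text or an error string."""
    try:
        genai.configure(api_key=GEMINI_API_KEY)
        model = genai.GenerativeModel("gemini-1.5-pro")
        response = model.generate_content(
            f"Generate a response to the following query: {query}"
        )
        # Same extraction path the diff uses; response.text is the equivalent shortcut.
        return response.candidates[0].content.parts[0].text
    except Exception as e:
        return f"Error: An error occurred while calling the Gemini API. {e}"
```

With a helper like this, the body of `palm_chatbot` would reduce to `reply_text = generate_reply(query)` before the existing delete-and-reply steps.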