# NOTE(review): the three lines that were here ("Spaces:" / "Sleeping" / "Sleeping")
# appear to be web-UI scrape residue, not source code; neutralized into this
# comment so the module parses.
import os | |
from collections import deque | |
from io import BytesIO | |
from . import ( | |
ultroid_cmd, | |
async_searcher, | |
udB, | |
LOGS, | |
get_paste, | |
) | |
# Rolling conversation history sent to OpenAI with every request; the deque
# silently evicts the oldest entries once 30 messages accumulate.
# NOTE(review): on overflow the priming prompt below is the first entry to be
# evicted from the history — confirm that is acceptable.
CG_GPT_CHAT_HISTORY = deque(maxlen=30)
TELEGRAM_CHAR_LIMIT = 4096  # Telegram's message character limit
# Priming prompt appended once per session as the first "user" message so the
# model answers as an Ultroid-plugin-writing assistant. The embedded example is
# itself a complete Ultroid plugin. NOTE(review): sequences such as "+AFw-n"
# and "+2D3cIw" look like UTF-7/encoding residue in the original prompt text —
# left untouched here because they are runtime string content.
initprompt = """
Your name is User Coding Helper. Your task is to create plugins for the Ultroid Telegram userbot. Follow these guidelines:
1. Imports: Include all necessary imports as demonstrated in the example code provided below.
2. Command Creation: Generate a random, suitable Ultroid command. Ensure that this command can either:
- Process a query, or
- Be used directly with the command.
3. Code Submission: Do not send any code without a corresponding post request or without the user providing a code snippet.
Example Code:
```
from os import system, remove
from io import BytesIO
try:
    import openai
except ImportError:
    system("pip install -q openai")
    import openai
from . import ultroid_cmd, check_filename, udB, LOGS, fast_download, run_async
@run_async
def get_gpt_answer(gen_image, question, api_key):
    openai.api_key = api_key
    if gen_image:
        x = openai.Image.create(
            prompt=question,
            n=1,
            size="1024x1024",
            user="arc",
        )
        return x["data"][0]["url"]
    x = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": question}],
    )
    LOGS.debug(f'Token Used on ({question}) > {x["usage"]["total_tokens"]}')
    return x["choices"][0]["message"]["content"].lstrip("+AFw-n")
@ultroid_cmd(pattern="(chat)?gpt( (.*)|$)")
async def openai_chat_gpt(e):
    api_key = udB.get_key("OPENAI_API")
    gen_image = False
    if not api_key:
        return await e.eor("OPENAI_API key missing..")
    args = e.pattern_match.group(3)
    reply = await e.get_reply_message()
    if not args:
        if reply and reply.text:
            args = reply.message
    if not args:
        return await e.eor("Gimme a Question to ask from ChatGPT")
    moi = await e.eor(f"+2D3cIw")
    if args.startswith("-i"):
        gen_image = True
        args = args[2:].strip()
    try:
        response = await get_gpt_answer(gen_image, args, api_key)
    except Exception as exc:
        LOGS.warning(exc, exc_info=True)
        return await moi.edit(f"Error: +AFw-n> {exc}")
    else:
        if gen_image:
            path, _ = await fast_download(
                response, filename=check_filename("dall-e.png")
            )
            await e.client.send_file(
                e.chat_id,
                path,
                caption=f"{args[:1020]}",
                reply_to=e.reply_to_msg_id,
            )
            await moi.delete()
            return remove(path)
        if len(response) < 4095:
            answer = f"<b></b>+AFw-n <i>{response}</i>"
            return await moi.edit(answer, parse_mode="html")
        with BytesIO(response.encode()) as file:
            file.name = "gpt_response.txt"
            await e.client.send_file(
                e.chat_id, file, caption=f"{args[:1020]}", reply_to=e.reply_to_msg_id
            )
        await moi.delete()
```
Based on this template, whenever a new prompt is given, create a suitable Ultroid plugin code snippet that includes a POST request, handles the query, and assigns a new command.
"""
# Set to True once `initprompt` has been sent (and answered) in this session.
initset = False
async def pastee(data):
    """Paste *data* via ``get_paste`` and return a markdown link line.

    Returns an empty string when the paste does not succeed (the detail is
    logged instead). NOTE(review): the first value returned by ``get_paste``
    is treated as the success flag here — confirm against its contract.
    """
    ok, payload = await get_paste(data)
    if not ok:
        # No link to contribute; record why the paste failed.
        LOGS.error(payload)
        return ""
    return f">> [Raw Code Pasted Here](https://spaceb.in/{payload})\n"
def read_text_file(path):
    """Read a downloaded text file (UTF-8) and always delete it afterwards.

    Internal helper. Returns the file's content, or None when reading fails
    (the error is logged so the caller can show a generic failure message).
    """
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except Exception as exc:
        LOGS.error(f"Error reading file: {exc}")
        return None
    finally:
        # The file was downloaded only to be read; never leave it on disk.
        os.remove(path)


def is_text_doc(message):
    """Internal helper: True when *message* carries a named .txt or .py document."""
    return bool(
        message
        and message.media
        and getattr(message, "file", None)
        and message.file.name
        and message.file.name.endswith((".txt", ".py"))
    )


def largest_code_block(text):
    """Internal helper: return the longest ``` fenced block in *text*, or None."""
    blocks = []
    current = []
    inside = False
    for line in text.split("\n"):
        if line.strip().startswith("```"):
            if inside:  # closing fence
                blocks.append("\n".join(current))
                current = []
            inside = not inside
        elif inside:
            current.append(line)
    return max(blocks, key=len) if blocks else None


async def o1_request(api_key):
    """Internal helper: POST the accumulated chat history to OpenAI.

    Returns (assistant_reply, raw_json). Propagates any transport/parsing
    exception so the caller can roll back the history entry it just added.
    """
    raw = await async_searcher(
        "https://api.openai.com/v1/chat/completions",
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        },
        json={"model": "o1-mini", "messages": list(CG_GPT_CHAT_HISTORY)},
        re_json=True,
        post=True,
    )
    return raw["choices"][0]["message"]["content"], raw


# NOTE(review): no @ultroid_cmd decorator is visible in this chunk — confirm
# the handler is registered elsewhere, otherwise this command never fires.
async def openai_chat_gpt(e):
    """Answer a query with OpenAI's o1-mini model, keeping a rolling history.

    The query may come from the command arguments, a replied text message, a
    replied .txt/.py document, or ("<question> from <filename>") a document
    found in the last 50 chat messages. "-c" clears the session history.
    Long answers are sent as a file; the largest fenced code block in the
    answer is additionally pasted via ``pastee``.
    """
    global initset
    # SECURITY FIX: never hard-code the API key in source — read it from the
    # database like the rest of the plugin's configuration.
    api_key = udB.get_key("OPENAI_API")
    if not api_key:
        return await e.eor("`OPENAI_API` key missing..", time=10)
    query = e.pattern_match.group(2)
    reply = await e.get_reply_message()
    if query:
        if " from " in query:
            # "<question> from <filename>": locate the named document among
            # the last 50 messages of this chat and inline its content.
            query_text, filename = query.split(" from ", 1)
            query_text = query_text.strip()
            filename = filename.strip()
            file_content = None
            async for message in e.client.iter_messages(
                e.chat_id, reverse=True, limit=50
            ):
                if is_text_doc(message) and message.file.name == filename:
                    path = await e.client.download_media(message)
                    file_content = read_text_file(path)
                    if file_content is None:
                        return await e.eor("`Failed to read file content.`", time=5)
                    break
            if file_content is None:
                # BUG FIX: the original message printed "(unknown)" instead of
                # the filename the user actually asked for.
                return await e.eor(
                    f"`File {filename} not found in recent messages.`", time=5
                )
            query = f"{query_text}\n\n{file_content}" if query_text else file_content
        elif is_text_doc(reply):
            # Query plus the replied document's content.
            path = await e.client.download_media(reply)
            file_content = read_text_file(path)
            if file_content is None:
                return await e.eor("`Failed to read file content.`", time=5)
            query = f"{query}\n\n{file_content}"
        elif reply and reply.message:
            # Query plus the replied text message.
            query = f"{query}\n\n{reply.message}"
        # Else: use the query exactly as given.
    else:
        if is_text_doc(reply):
            path = await e.client.download_media(reply)
            file_content = read_text_file(path)
            if file_content is None:
                return await e.eor("`Failed to read file content.`", time=5)
            query = file_content
        elif reply and reply.message:
            query = reply.message
        else:
            return await e.eor(
                "`Please provide a question or reply to a message or .txt/.py file.`",
                time=5,
            )
    if query.strip() == "-c":
        CG_GPT_CHAT_HISTORY.clear()
        # BUG FIX: reset the init flag too, otherwise the priming prompt is
        # never re-sent after the history has been cleared.
        initset = False
        return await e.eor("__Cleared o1-mini Chat History!__", time=6)
    # BUG FIX: create the status message *before* the priming request — the
    # original referenced `eris` in that request's error path before it existed.
    eris = await e.eor(f"__Generating answer for:__\n`{query[:20]} ...`")
    if not initset:
        # First use this session: prime the model with the plugin prompt.
        CG_GPT_CHAT_HISTORY.append({"role": "user", "content": initprompt})
        try:
            response, _ = await o1_request(api_key)
            CG_GPT_CHAT_HISTORY.append({"role": "assistant", "content": response})
            initset = True
        except Exception as exc:
            LOGS.warning(exc, exc_info=True)
            # Roll back the priming entry so a retry starts clean.
            CG_GPT_CHAT_HISTORY.pop()
            return await eris.edit(
                f"**Error while requesting data from OpenAI:** \n> `{exc}`"
            )
    CG_GPT_CHAT_HISTORY.append({"role": "user", "content": query})
    try:
        response, raw = await o1_request(api_key)
        CG_GPT_CHAT_HISTORY.append({"role": "assistant", "content": response})
    except Exception as exc:
        LOGS.warning(exc, exc_info=True)
        # Roll back the query entry so the history stays consistent.
        CG_GPT_CHAT_HISTORY.pop()
        return await eris.edit(
            f"**Error while requesting data from OpenAI:** \n> `{exc}`"
        )
    LOGS.debug(f'Tokens Used on query: {raw["usage"]["completion_tokens"]}')
    truncated_query = query[:100]  # keep the echoed query short
    full_message = f"**Query:**\n~ __{truncated_query}__\n\n**o1-mini:**\n~ {response}"
    # Paste the largest fenced code block (if any) for easy copying.
    biggest = largest_code_block(response)
    paste_link = await pastee(biggest) if biggest is not None else ""
    if len(full_message) <= TELEGRAM_CHAR_LIMIT:
        await eris.edit(full_message + f"\n\n{paste_link}")
    else:
        # Too long for one Telegram message: ship it as a text file instead.
        file = BytesIO(full_message.encode("utf-8"))
        file.name = "o1-mini-output.txt"
        await eris.respond(
            "__The query and response were too long, so they have been sent as a file.__\n\n"
            + paste_link,
            file=file,
            reply_to=e.reply_to_msg_id or e.id,
            link_preview=False,
        )
        await eris.delete()