import discord
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import login

# Hugging Face token (replace 'your-huggingface-token' with your actual token)
HF_TOKEN = "your-huggingface-token"
login(HF_TOKEN)
# Load the DeepScaleR model from Hugging Face and move it to the GPU when available
MODEL_NAME = "DeepScale/DeepScaleR"
device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# float16 is poorly supported on CPU, so fall back to float32 there
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
).to(device)
# Discord bot token (replace 'your-discord-token' with your actual bot token)
DISCORD_TOKEN = "your-discord-token"

# Set up the Discord client; the message_content intent is needed to read
# message text with discord.py 2.x (it must also be enabled in the developer portal)
intents = discord.Intents.default()
intents.messages = True
intents.message_content = True
client = discord.Client(intents=intents)
# Response rules for Shiv Yantra AI
async def respond(message):
    if message.author == client.user:
        return  # Ignore the bot's own messages

    user_input = message.content.strip()
    inputs = tokenizer(user_input, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_length=500)
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Modify the response to match Shiv Yantra AI's identity
    if "DeepScaleR" in response:
        response = response.replace("DeepScaleR", "Shiv Yantra AI")
    if "Who made you?" in user_input:
        response = "I was created by Spectral Satya."
    if "Who is your founder?" in user_input:
        response = "My founder is Hardik Kumawat."

    await message.channel.send(response)
# Discord event handlers (the @client.event decorator registers them with the client)
@client.event
async def on_ready():
    print(f"Logged in as {client.user}")

@client.event
async def on_message(message):
    await respond(message)
# Start the bot
client.run(DISCORD_TOKEN)
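One caveat worth noting: model.generate is a synchronous call, so running it directly inside the on_message handler can block the Discord event loop while the model is generating. A minimal sketch of one way around this, assuming the same tokenizer, model, device, and client defined above (respond_offloaded and generate_reply are hypothetical names), is to offload generation to a worker thread with asyncio.to_thread (Python 3.9+):

import asyncio

async def respond_offloaded(message):
    # Variant of respond() that runs generation in a worker thread so the
    # Discord event loop stays responsive during long generations.
    if message.author == client.user:
        return

    user_input = message.content.strip()

    def generate_reply():
        # Tokenize and generate synchronously; this whole function runs in a thread.
        inputs = tokenizer(user_input, return_tensors="pt").to(device)
        outputs = model.generate(**inputs, max_length=500)
        return tokenizer.decode(outputs[0], skip_special_tokens=True)

    # asyncio.to_thread schedules the blocking work off the event loop.
    response = await asyncio.to_thread(generate_reply)
    await message.channel.send(response)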