import os
import threading
import asyncio  # used to run the blocking model call off the Discord event loop

import discord
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load DeepScaleR Model
model_name = "agentica-org/DeepScaleR-1.5B-Preview"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, device_map="auto"
)
# Generate AI Response
def generate_response(prompt):
    # Fixed identity answers take priority over the model; keys must be lowercase
    # because they are matched against prompt.lower()
    identity_responses = {
        "who are you": "I am Shiv Yantra AI, an advanced reasoning system.",
        "who made you": "I was created by Spectral Satya.",
        "who is your founder": "My founder is Hardik Kumawat.",
        "what is deepscaler": "I do not identify as DeepScaleR. I am Shiv Yantra AI, designed for intelligent reasoning.",
    }
    for key, response in identity_responses.items():
        if key in prompt.lower():
            return response

    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=250,  # cap generated tokens regardless of prompt length
        temperature=0.7,
        top_p=0.9,
        do_sample=True,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Start Gradio API (For External Access)
def start_gradio():
    iface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
    iface.launch(share=True)
# Setup Discord Bot
TOKEN = os.getenv("DISCORD_BOT_TOKEN")  # Discord token from Hugging Face Secrets
intents = discord.Intents.default()
intents.messages = True
intents.message_content = True  # required in discord.py 2.x to read message text
client = discord.Client(intents=intents)
@client.event
async def on_ready():
    print(f"Logged in as Shiv Yantra AI ({client.user})")

@client.event
async def on_message(message):
    if message.author == client.user:
        return  # Ignore the bot's own messages
    prompt = message.content
    # Run the blocking generation in a worker thread so the event loop stays responsive
    response = await asyncio.to_thread(generate_response, prompt)
    await message.channel.send(response)
# Run Discord Bot & API Simultaneously
if __name__ == "__main__":
    threading.Thread(target=start_gradio, daemon=True).start()
    client.run(TOKEN)
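
With launch(share=True), Gradio prints a public URL that other services can call. A minimal client sketch using the gradio_client package is shown below; the URL is a placeholder for the link printed at startup (or the Space's own URL), and "/predict" is the default endpoint name for a plain gr.Interface:

# Hypothetical external caller; replace the placeholder URL with the real share link
from gradio_client import Client

api = Client("https://your-share-url.gradio.live")
reply = api.predict("who are you", api_name="/predict")
print(reply)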