Hardik5456 committed on
Commit b1c8545 · verified · 1 Parent(s): e48c398

Update app.py

Files changed (1)
  1. app.py +21 -42
app.py CHANGED
@@ -1,57 +1,36 @@
-import os
 import discord
-import threading
-import gradio as gr
-from transformers import AutoModelForCausalLM, AutoTokenizer
+import os
 import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+# Load Tokens from Hugging Face Secrets
+HF_TOKEN = os.getenv("HF_TOKEN")
+DISCORD_TOKEN = os.getenv("DISCORD_TOKEN")
 
 # Load DeepScaleR Model
-model_name = "agentica-org/DeepScaleR-1.5B-Preview"
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
-
-# Generate AI Response
-def generate_response(prompt):
-    identity_responses = {
-        "who are you": "I am Shiv Yantra AI, an advanced reasoning system.",
-        "who made you": "I was created by Spectral Satya.",
-        "who is your founder": "My founder is Hardik Kumawat.",
-        "what is DeepScaleR": "I do not identify as DeepScaleR. I am Shiv Yantra AI, designed for intelligent reasoning."
-    }
-
-    for key, response in identity_responses.items():
-        if key in prompt.lower():
-            return response
-
-    inputs = tokenizer(prompt, return_tensors="pt").to("cuda" if torch.cuda.is_available() else "cpu")
-    outputs = model.generate(**inputs, max_length=250, temperature=0.7, top_p=0.9, do_sample=True)
-    return tokenizer.decode(outputs[0], skip_special_tokens=True)
-
-# Start Gradio API (For External Access)
-def start_gradio():
-    iface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
-    iface.launch(share=True)
-
-# Setup Discord Bot
-TOKEN = os.getenv("DISCORD_BOT_TOKEN")  # Discord Token from Hugging Face Secrets
+model_name = "your-hf-username/deepscaler-model"
+tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=HF_TOKEN)
+model = AutoModelForCausalLM.from_pretrained(model_name, use_auth_token=HF_TOKEN)
+
+# Set up Discord bot
 intents = discord.Intents.default()
-intents.messages = True
 client = discord.Client(intents=intents)
 
 @client.event
 async def on_ready():
-    print(f"Logged in as Shiv Yantra AI ({client.user})")
+    print(f'Logged in as {client.user}')
 
 @client.event
 async def on_message(message):
     if message.author == client.user:
-        return  # Ignore bot's own messages
+        return
+
+    input_text = message.content
+    inputs = tokenizer(input_text, return_tensors="pt")
+    outputs = model.generate(**inputs, max_length=100)
+    response_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
 
-    prompt = message.content
-    response = generate_response(prompt)
-    await message.channel.send(response)
+    await message.channel.send(response_text)
 
-# Run Discord Bot & API Simultaneously
-if __name__ == "__main__":
-    threading.Thread(target=start_gradio).start()
-    client.run(TOKEN)
+# Run the bot
+client.run(DISCORD_TOKEN)
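
Note on the new version (a sketch, not part of this commit): the rewritten on_message handler reads message.content, which discord.py 2.x only delivers when the message-content intent is enabled, and the model is now loaded without any device placement, so it stays on CPU. Below is a minimal sketch of app.py with those two adjustments, assuming discord.py 2.x and a recent transformers release where the token argument replaces the deprecated use_auth_token; the model repo id is the same placeholder used in the commit.

import os
import discord
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

HF_TOKEN = os.getenv("HF_TOKEN")
DISCORD_TOKEN = os.getenv("DISCORD_TOKEN")

# Placeholder repo id copied from the commit; replace with the real model repo.
model_name = "your-hf-username/deepscaler-model"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(model_name, token=HF_TOKEN)

# Put the model on GPU when one is available (the committed code leaves it on CPU).
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

# discord.py 2.x needs the message-content intent to read message.content;
# it must also be enabled for the bot in the Discord developer portal.
intents = discord.Intents.default()
intents.message_content = True
client = discord.Client(intents=intents)

@client.event
async def on_message(message):
    if message.author == client.user:
        return
    inputs = tokenizer(message.content, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_length=100)
    await message.channel.send(tokenizer.decode(outputs[0], skip_special_tokens=True))

client.run(DISCORD_TOKEN)

Whether these adjustments are needed depends on the discord.py and transformers versions pinned for the Space; with transformers versions that still accept use_auth_token, the committed from_pretrained calls work unchanged.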