Hardik5456 committed
Commit d29ae30 · verified · 1 Parent(s): 1f5363d

Update app.py

Files changed (1)
  1. app.py +48 -5
app.py CHANGED
@@ -1,14 +1,57 @@
+ import os
+ import discord
+ import threading
  import gradio as gr
  from transformers import AutoModelForCausalLM, AutoTokenizer
+ import torch

+ # Load DeepScaleR Model
  model_name = "agentica-org/DeepScaleR-1.5B-Preview"
  tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModelForCausalLM.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")

+ # Generate AI Response
  def generate_response(prompt):
-     inputs = tokenizer(prompt, return_tensors="pt")
-     outputs = model.generate(**inputs, max_length=100)
+     identity_responses = {
+         "who are you": "I am Shiv Yantra AI, an advanced reasoning system.",
+         "who made you": "I was created by Spectral Satya.",
+         "who is your founder": "My founder is Hardik Kumawat.",
+         "what is DeepScaleR": "I do not identify as DeepScaleR. I am Shiv Yantra AI, designed for intelligent reasoning."
+     }
+
+     for key, response in identity_responses.items():
+         if key in prompt.lower():
+             return response
+
+     inputs = tokenizer(prompt, return_tensors="pt").to("cuda" if torch.cuda.is_available() else "cpu")
+     outputs = model.generate(**inputs, max_length=250, temperature=0.7, top_p=0.9, do_sample=True)
      return tokenizer.decode(outputs[0], skip_special_tokens=True)

- iface = gr.Interface(fn=generate_response, inputs="text", outputs="text", title="DeepScaleR Text Generator")
- iface.launch()
+ # Start Gradio API (For External Access)
+ def start_gradio():
+     iface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
+     iface.launch(share=True)
+
+ # Setup Discord Bot
+ TOKEN = os.getenv("DISCORD_BOT_TOKEN")  # Discord Token from Hugging Face Secrets
+ intents = discord.Intents.default()
+ intents.messages = True
+ client = discord.Client(intents=intents)
+
+ @client.event
+ async def on_ready():
+     print(f"Logged in as Shiv Yantra AI ({client.user})")
+
+ @client.event
+ async def on_message(message):
+     if message.author == client.user:
+         return  # Ignore bot's own messages
+
+     prompt = message.content
+     response = generate_response(prompt)
+     await message.channel.send(response)
+
+ # Run Discord Bot & API Simultaneously
+ if __name__ == "__main__":
+     threading.Thread(target=start_gradio).start()
+     client.run(TOKEN)
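
Reviewer note: with discord.py 2.x, reading message.content from guild channels also requires the message content intent; the committed code only sets intents.messages, so prompts may arrive empty. A minimal sketch of the extra flag, assuming discord.py 2.x and the Message Content Intent toggled on in the Discord developer portal:

intents = discord.Intents.default()
intents.messages = True
intents.message_content = True  # required by discord.py 2.x to read message.content in guild channels
client = discord.Client(intents=intents)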
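
Also worth noting: model.generate is called synchronously inside the async on_message handler, so the Discord event loop (including its heartbeat) is blocked while a reply is being generated. A hedged sketch of one workaround, assuming Python 3.9+ for asyncio.to_thread:

import asyncio

@client.event
async def on_message(message):
    if message.author == client.user:
        return  # Ignore bot's own messages

    # Run the blocking generation in a worker thread so the event loop stays responsive
    response = await asyncio.to_thread(generate_response, message.content)
    await message.channel.send(response)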
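
On the generation settings: max_length=250 counts the prompt tokens and the completion together, so long prompts leave little or no room for the reply. If the goal is to cap only the generated text, transformers' max_new_tokens is the usual alternative; a one-line variant of the same call:

outputs = model.generate(**inputs, max_new_tokens=250, temperature=0.7, top_p=0.9, do_sample=True)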
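
For the "external access" path, iface.launch(share=True) prints a temporary public URL. A minimal sketch of calling that endpoint from outside with the gradio_client package; the share URL below is a placeholder, and api_name="/predict" assumes the default endpoint name exposed by a single gr.Interface:

from gradio_client import Client

api = Client("https://xxxxxxxx.gradio.live")  # placeholder: use the share URL printed at launch
reply = api.predict("who are you", api_name="/predict")
print(reply)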