import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the DeepScaleR model and tokenizer, placing the model on GPU when one is available
MODEL_NAME = "agentica-org/DeepScaleR-1.5B-Preview"
device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).to(device)

# Generate a completion for the given prompt
def generate_response(prompt):
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=2048).to(device)
    with torch.no_grad():  # inference only; gradients are not needed
        output = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(output[0], skip_special_tokens=True)

# Expose the generator as a text-to-text Gradio app, which also serves as the HTTP API the Discord bot calls
iface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
iface.launch(server_name="0.0.0.0", server_port=7860, share=False)
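
A client such as the Discord bot can query this app through gradio_client. The snippet below is a minimal client-side sketch, assuming the server is reachable at localhost:7860 and that the default /predict route of a gr.Interface is used; the actual Discord bot code lives outside this file.

from gradio_client import Client

# Connect to the running Gradio app (adjust the URL if the server is remote)
client = Client("http://localhost:7860/")

# Send a prompt and print the model's reply
reply = client.predict("Solve 2x + 3 = 11 for x.", api_name="/predict")
print(reply)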