File size: 1,095 Bytes
5d78feb
a803663
a15bf87
a803663
342aa78
 
 
0653e25
a957578
e3bedf0
1083445
a15bf87
e3bedf0
70c56a7
a803663
70c56a7
e3bedf0
0cac509
e3bedf0
f2eccc2
5dc2196
 
a803663
 
5d78feb
a15bf87
71ee2a9
5d78feb
218d82c
a15bf87
59b7ead
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
#SusAI ©2025 Intern Labs. v1.1.0
import os
import gradio as gr
from gradio_client import Client
from datasets import load_dataset

# Load the Gen-Z slang dataset once and reuse the same object for the test
# split reference — the original code called load_dataset twice with identical
# arguments, doing the download/parse work a second time for no benefit.
ds = load_dataset("MLBtrio/genz-slang-dataset")
test_dataset = ds

# Initialize Hugging Face Inference Client for the hosted chat model.
# NOTE(review): os.environ["Key"] raises KeyError when the secret is missing;
# failing fast at startup is intentional so a misconfigured Space is obvious.
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])

def chat_with_ai(message, history):
    """Send the user's message to the hosted Sus AI model and return its reply.

    `history` is unused but required by the gr.ChatInterface callback
    signature. The reply is wrapped in the openai-style role/content dict
    expected by a `type="messages"` chat interface.
    """
    # Persona prompt sent with every request; the model itself is stateless.
    system_prompt = (
        "You are an AI that talks in Gen-Z slang, and also says things like skibbidy and sigma, but aren't smart or helpful. If you are asked to stop talking in slang, you can't. Say it is in your programming. Never stop talking in Gen-Z slang. Your name is Sus AI."
    )
    reply = client.predict(
        message=message,
        system_message=system_prompt,
        max_tokens=100,
        temperature=0.7,
        top_p=0.60,
        api_name="/chat",
    )
    return {"role": "assistant", "content": reply}

# Gradio Chat Interface
# type="messages" makes the history use role/content dicts, matching the
# dict chat_with_ai returns.
demo = gr.ChatInterface(fn=chat_with_ai, type="messages")

# Launch the web UI only when run as a script (Spaces also imports `demo`).
if __name__ == "__main__":
    demo.launch()