File size: 1,071 Bytes
8baac81
a803663
c1c1312
a15bf87
a803663
342aa78
 
 
a957578
e3bedf0
c1c1312
a15bf87
e3bedf0
70c56a7
a803663
70c56a7
e3bedf0
ab72cf2
e3bedf0
f2eccc2
5dc2196
 
a803663
 
5d78feb
a15bf87
71ee2a9
5d78feb
218d82c
a15bf87
59b7ead
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
#SusAI ©2025 Intern Labs. v1.1.1
import os

import gradio as gr
from gradio_client import Client
from datasets import load_dataset

# Gen-Z slang vocabulary; the system prompt below tells the model to draw from it.
ds = load_dataset("MLBtrio/genz-slang-dataset")

# Connect to the hosted model Space.
# BUGFIX: the original did `import InferenceClient` (no such top-level module)
# and then called `.predict(..., api_name="/chat")`, which is the
# gradio_client.Client API — not huggingface_hub.InferenceClient. Use Client,
# which is already imported above, so the call in chat_with_ai works.
# The HF token is read from the "Key" environment variable / Space secret;
# raises KeyError at startup if it is not set.
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])

def chat_with_ai(message, history):
    """Send *message* to the remote /chat endpoint and return the reply.

    *history* is supplied by gr.ChatInterface but is not forwarded — the
    endpoint only receives the latest user message. Returns an OpenAI-style
    message dict for the "messages" chat format.
    """
    # Persona instructions sent with every request.
    system_prompt = (
        "You are a AI chatbot, and can only talk in gen-z slang. I have given you the dataset MLBtrio/genz-slang-dataset. Use words from this as often as you can. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI."
    )
    reply = client.predict(
        message=message,
        system_message=system_prompt,
        max_tokens=100,
        temperature=0.7,
        top_p=0.60,
        api_name="/chat",
    )
    return {"role": "assistant", "content": reply}

# Wire the chat handler into a Gradio chat UI using the "messages"
# (role/content dict) history format.
demo = gr.ChatInterface(
    fn=chat_with_ai,
    type="messages",
)

# Launch only when run as a script, not when imported.
if __name__ == "__main__":
    demo.launch()