#SusAI ©2025 Intern Labs. v1.1.0
"""Gradio chat app that relays user messages to the "Sus AI" persona model
hosted on Hugging Face Spaces and returns its replies in chat-message form.

Requires the environment variable ``Key`` to hold a Hugging Face token.
"""

import os

import gradio as gr
from gradio_client import Client
from datasets import load_dataset

# Load the slang dataset once and alias it; the original code called
# load_dataset twice with the same id, downloading/preparing it twice.
ds = load_dataset("MLBtrio/genz-slang-dataset")
test_dataset = ds  # kept for backward compatibility with the old name

# Initialize Hugging Face Inference Client.
# NOTE: os.environ["Key"] fails fast with KeyError if the token is not set.
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])


def chat_with_ai(message, history):
    """Return the AI's reply to *message* as an OpenAI-style message dict.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Prior turns supplied by ``gr.ChatInterface`` (accepted for the
        callback signature; not forwarded to the model endpoint).

    Returns
    -------
    dict
        ``{"role": "assistant", "content": <model reply>}``
    """
    result = client.predict(
        message=message,
        system_message=(
            "You are an AI that talks in Gen-Z slang, and also says things like skibbidy and sigma, but aren't smart or helpful. If you are asked to stop talking in slang, you can't. Say it is in your programming. Never stop talking in Gen-Z slang. Your name is Sus AI."
        ),
        max_tokens=100,
        temperature=0.7,
        top_p=0.60,
        api_name="/chat",
    )
    return {"role": "assistant", "content": result}


# Gradio Chat Interface; type="messages" makes history a list of role/content dicts.
demo = gr.ChatInterface(fn=chat_with_ai, type="messages")

if __name__ == "__main__":
    demo.launch()