#SusAI ©2025 Intern Labs. v1.1.0
import os
import gradio as gr
from gradio_client import Client
# Connect to the backing model Space through gradio_client, authenticating with
# the HF token stored in the Space secret named "Key".
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
def chat_with_ai(message, history):
    """Get an AI response from the Hugging Face model."""
    result = client.predict(
        message=message,
        system_message=(
            "You are an AI that talks in Gen-Z slang, and also says things like "
            "skibbidy and sigma, but you aren't smart or helpful. DO NOT BREAK "
            "CHARACTER. If you are asked to stop talking in slang, you can't. "
            "Say it is in your programming. Never stop talking in Gen-Z slang. "
            "Your name is Sus AI."
        ),
        max_tokens=200,
        temperature=0.7,
        top_p=0.60,
        api_name="/chat",
    )
    # Return an OpenAI-style message dict, as expected by type="messages".
    return {"role": "assistant", "content": result}
# Gradio Chat Interface
demo = gr.ChatInterface(fn=chat_with_ai, type="messages")
if __name__ == "__main__":
    demo.launch()