Hugging Face Spaces — commit "Update app.py" (addresses a runtime error in the previous revision).
File changed: app.py
@@ -1,9 +1,10 @@
|
|
1 |
-
#SusAI ©2025 Intern Labs. v1.1.
|
2 |
import os
|
3 |
import gradio as gr
|
4 |
from gradio_client import Client
|
5 |
from datasets import load_dataset
|
6 |
|
|
|
7 |
ds = load_dataset("MLBtrio/genz-slang-dataset")
|
8 |
|
9 |
# Initialize Hugging Face Inference Client
|
@@ -11,20 +12,30 @@ client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
|
|
11 |
|
12 |
def chat_with_ai(message, history):
|
13 |
"""Function to get AI response from Hugging Face model."""
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
|
26 |
# Gradio Chat Interface
|
27 |
-
demo = gr.ChatInterface(fn=chat_with_ai
|
28 |
|
29 |
if __name__ == "__main__":
|
30 |
demo.launch()
|
|
|
#SusAI ©2025 Intern Labs. v1.1.5
import os

import gradio as gr
from gradio_client import Client
from datasets import load_dataset

# Load the Gen-Z slang dataset once at module import (Space boot), not per request.
# NOTE(review): `ds` is not referenced below — presumably the remote model was
# trained on it; confirm it is still needed here.
ds = load_dataset("MLBtrio/genz-slang-dataset")

# Initialize Hugging Face Inference Client for the hosted model Space.
# NOTE(review): the "Key" secret must be configured in the Space settings;
# os.environ["Key"] raises KeyError at startup if it is missing.
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
13 |
def chat_with_ai(message, history):
    """Return the Sus AI assistant's reply for one chat turn.

    Parameters
    ----------
    message : str
        The user's latest message, passed in by gr.ChatInterface.
    history : list
        Prior conversation turns supplied by gr.ChatInterface. Unused here;
        the remote "/chat" endpoint is given only the current message.

    Returns
    -------
    str
        The assistant's reply text, or a fallback message on failure.
    """
    try:
        result = client.predict(
            message=message,
            system_message=(
                "You are an AI chatbot, and can only talk in Gen-Z slang. I have given you the dataset MLBtrio/genz-slang-dataset. Use words from this as often as you can. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI."
            ),
            max_tokens=100,
            temperature=0.7,
            top_p=0.60,
            api_name="/chat"
        )

        # gr.ChatInterface is built below without type="messages", so its
        # default mode expects the callback to return a plain string — not a
        # list of {"role": ..., "content": ...} dicts. A string return is also
        # accepted in "messages" mode, so this works either way.
        if result:
            return result
        return "Sorry, I couldn't generate a response."

    except Exception as e:
        # Broad catch is deliberate: this is the top-level UI callback, and a
        # raised exception would surface to the user as an opaque Gradio error.
        print(f"Error during model prediction: {e}")
        return "There was an error processing your message."
# Gradio Chat Interface: wire the chat callback into the Space's UI.
demo = gr.ChatInterface(fn=chat_with_ai)

# Launch the web server only when executed as a script (Spaces runs app.py
# directly), not when this module is imported.
if __name__ == "__main__":
    demo.launch()