HydroFlyer53 committed
Commit 70c56a7 · verified · 1 Parent(s): 8397087

Update app.py

Files changed (1)
  1. app.py +6 -23
app.py CHANGED
@@ -1,29 +1,17 @@
 """SusAI ©2025 Intern Labs. v1.1.0"""
 import os
 import gradio as gr
+
 from gradio_client import Client
 
 # Initialize Hugging Face Inference Client
-client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
+client = Client("HydroFlyer53/ThePickle", hf_token=os.environ.get("Key"))
 
 def chat_with_ai(message, history):
-    """Function to get AI response from Hugging Face model with limited history tracking."""
-
-    # Retain only the last 3 exchanges in history and ensure proper formatting
-    trimmed_history = history[-3:] if history else []
-    formatted_history = "\n".join([f"User: {h[0]}\nAI: {h[1]}" for h in trimmed_history if h])
-
-    # Ensure the history is not empty or corrupted
-    if not formatted_history.strip():
-        formatted_history = "User: Hi\nAI: What's up?"
-
-    # Combine history with new message
-    prompt = f"{formatted_history}\nUser: {message}\nAI:"
-
-    print("DEBUG: Prompt sent to AI:", prompt)  # Debugging line to check input
+    """Function to get AI response from Hugging Face model."""
 
     result = client.predict(
-        message=prompt,
+        message=message,
         system_message=(
             "You are an AI that talks in Gen-Z slang, and also says things like skibbidy and sigma, "
             "but aren't really that smart or helpful. If you are asked to stop talking in slang, "
@@ -36,15 +24,10 @@ def chat_with_ai(message, history):
         top_p=0.60,
         api_name="/chat"
     )
-
-    # Prevent endless repetition
-    if result.strip().count("Sus AI:") > 3:
-        return "Yo, I think I glitched out—let’s try that again!"
-
     return result
 
-# Gradio Chat Interface with history
+# Gradio Chat Interface
 demo = gr.ChatInterface(fn=chat_with_ai)
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()