HydroFlyer53 committed
Commit a15bf87 · verified · 1 Parent(s): a957578

Update app.py

Files changed (1)
  1. app.py +61 -19
app.py CHANGED
@@ -1,24 +1,66 @@
- from gradio_client import Client
  import os
  api_key = os.getenv("API")


- client = Client("HydroFlyer53/ThePickle", hf_token="api_key")

- while True:
-     # Get user input
-     message = input("You: ")
-
-     # Get AI response
-     result = client.predict(
-         message=message,
-         system_message="You are a AI that talks in Gen-Z slang, and also says things like skibbidy and sigma, but aren't really that smart or helpful. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI.",
-         max_tokens=100,
-         temperature=0.7,
-         top_p=0.60,
-         api_name="/chat"
-     )
-
-     # Print response with a blank line for better readability
-     print("\nAI:\n")
-     print(result)
 
+ import gradio as gr
+ from huggingface_hub import InferenceClient
  import os
+
+ """
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+ """
  api_key = os.getenv("API")
+ client = Client("HydroFlyer53/ThePickle", hf_token="api_key")


+ def respond(
+     message,
+     history: list[tuple[str, str]],
+     system_message,
+     max_tokens,
+     temperature,
+     top_p,
+ ):
+     messages = [{"role": "system", "content": system_message}]
+
+     for val in history:
+         if val[0]:
+             messages.append({"role": "user", "content": val[0]})
+         if val[1]:
+             messages.append({"role": "assistant", "content": val[1]})
+
+     messages.append({"role": "user", "content": message})
+
+     response = ""
+
+     for message in client.chat_completion(
+         messages,
+         max_tokens=max_tokens,
+         stream=True,
+         temperature=temperature,
+         top_p=top_p,
+     ):
+         token = message.choices[0].delta.content
+
+         response += token
+         yield response
+
+
+ """
+ For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
+ """
+ demo = gr.ChatInterface(
+     respond,
+     additional_inputs=[
+         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
+         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+         gr.Slider(
+             minimum=0.1,
+             maximum=1.0,
+             value=0.95,
+             step=0.05,
+             label="Top-p (nucleus sampling)",
+         ),
+     ],
+ )
+

+ if __name__ == "__main__":
+     demo.launch()
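
One thing worth flagging in the added file: it still constructs the client with `Client(...)`, which is no longer imported (only `InferenceClient` is), and it passes the literal string `"api_key"` rather than the `api_key` variable read from the environment. A minimal sketch of what that line presumably intends, reusing the names from the diff; the `token=` keyword and the model argument here are assumptions, and `HydroFlyer53/ThePickle` is a Space repo id, whereas `chat_completion` normally targets a model id or inference endpoint URL:

```python
import os
from huggingface_hub import InferenceClient

# Same environment variable the Space already reads for its token.
api_key = os.getenv("API")

# Hypothetical fix: use the imported InferenceClient and pass the token
# variable itself rather than the string "api_key". Whether the Space id
# "HydroFlyer53/ThePickle" resolves as a chat-completion backend is an
# open question; a model id or endpoint URL may be needed here instead.
client = InferenceClient("HydroFlyer53/ThePickle", token=api_key)
```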