Raven7 committed (verified)
Commit 93ce35f · 1 Parent(s): b57eba0

Update app.py

Files changed (1): app.py (+10, -0)
app.py CHANGED

@@ -1,9 +1,14 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
+import pandas as pd
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))
 
+def load_prompts():
+    prompts = pd.read_csv("prompts.csv")
+    return prompts
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -11,6 +16,7 @@ def respond(
     maxtokens,
     temperature,
     top_p,
+    prompts,
 ):
     messages = [{"role": "system", "content": systemmessage}]
 
@@ -36,6 +42,8 @@ def respond(
         response += token
         yield response
 
+prompts = load_prompts()
+
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -50,6 +58,8 @@ demo = gr.ChatInterface(
             label="Top-p (nucleus sampling)",
         ),
     ],
+    inputs=history,
+    outputs="text",
 )
 
 if __name__ == "__main__":
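
As committed, this change would likely break the Space at startup: `history` is not defined at module level where `inputs=history` is evaluated, `gr.ChatInterface` does not document `inputs=`/`outputs=` keyword arguments, and the new `prompts` parameter on `respond` has no matching component in `additional_inputs`, so Gradio would call the function with one argument missing. Below is a minimal sketch of an alternative wiring, not the committed code: it loads the CSV once at startup and lets `respond` read the module-level `prompts` directly. The streaming body and the slider/textbox defaults are assumed from the standard Gradio ChatInterface template that this file appears to follow.

# Minimal sketch, not the committed code. Assumptions: prompts.csv sits next to
# app.py in the Space, and the streaming body plus slider defaults follow the
# standard Gradio ChatInterface template this file appears to be based on.
import os

import gradio as gr
import pandas as pd
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))


def load_prompts() -> pd.DataFrame:
    # Read the prompt table once at startup rather than on every request.
    return pd.read_csv("prompts.csv")


# Module-level DataFrame: respond() can read it directly, so no extra Gradio
# input component (and no extra function parameter) is needed.
prompts = load_prompts()


def respond(message, history: list[tuple[str, str]], systemmessage, maxtokens, temperature, top_p):
    messages = [{"role": "system", "content": systemmessage}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # How the loaded prompts feed into `messages` is not shown in this commit;
    # the DataFrame is simply available here as `prompts`.
    response = ""
    for chunk in client.chat_completion(
        messages, max_tokens=maxtokens, stream=True, temperature=temperature, top_p=top_p
    ):
        response += chunk.choices[0].delta.content or ""
        yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
    # No inputs=/outputs= here: gr.ChatInterface manages its own textbox and chatbot.
)

if __name__ == "__main__":
    demo.launch()

If `prompts` is meant to be a per-request input instead, it would need a matching component appended to `additional_inputs` (for example a `gr.Dropdown` built from the CSV rows), since `gr.ChatInterface` only forwards the message, the history, and the `additional_inputs` values to the callback.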