yaoyugua committed on
Commit
6e763fa
·
1 Parent(s): 42321d6
Files changed (1) hide show
  1. app.py +6 -25
app.py CHANGED
@@ -8,37 +8,16 @@ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
 
9
 
10
  def respond(
11
- message,
12
  history: list[tuple[str, str]],
13
  system_message,
14
  max_tokens,
15
  temperature,
16
  top_p,
17
  ):
18
- messages = [{"role": "system", "content": system_message}]
19
-
20
- for val in history:
21
- if val[0]:
22
- messages.append({"role": "user", "content": val[0]})
23
- if val[1]:
24
- messages.append({"role": "assistant", "content": val[1]})
25
-
26
- messages.append({"role": "user", "content": message})
27
-
28
- response = ""
29
-
30
- for message in client.chat_completion(
31
- messages,
32
- max_tokens=max_tokens,
33
- stream=True,
34
- temperature=temperature,
35
- top_p=top_p,
36
- ):
37
- token = message.choices[0].delta.content
38
-
39
- response += token
40
- # yield response
41
- yield "Buang NB"
42
 
43
 
44
  """
@@ -46,6 +25,8 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
46
  """
47
  demo = gr.ChatInterface(
48
  respond,
 
 
49
  additional_inputs=[
50
  gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
51
  gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
 
8
 
9
 
10
  def respond(
11
+ audio, # This will receive the audio input
12
  history: list[tuple[str, str]],
13
  system_message,
14
  max_tokens,
15
  temperature,
16
  top_p,
17
  ):
18
+ # Here you could process the audio file if needed
19
+ # For now, just returning our fixed response
20
+ yield "Buang NB"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
 
22
 
23
  """
 
25
  """
26
  demo = gr.ChatInterface(
27
  respond,
28
+ chatbot=gr.Chatbot(),
29
+ textbox=gr.Audio(source="microphone", type="filepath"), # Changed to Audio input
30
  additional_inputs=[
31
  gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
32
  gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),