codeblacks committed on
Commit
b11239c
·
verified ·
1 Parent(s): 6f04663

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -3
app.py CHANGED
@@ -4,10 +4,9 @@ from huggingface_hub import InferenceClient
4
 
5
  client = InferenceClient("microsoft/Phi-3.5-mini-instruct")
6
 
7
-
8
  async def respond(
9
  message,
10
- history: list[dict],
11
  system_message,
12
  max_tokens,
13
  temperature,
@@ -15,7 +14,11 @@ async def respond(
15
  ):
16
  # Ensure history is in OpenAI-style 'role' and 'content' format
17
  messages = [{"role": "system", "content": system_message}]
18
- messages.extend(history) # Add existing history
 
 
 
 
19
 
20
  # Add the user's latest message
21
  messages.append({"role": "user", "content": message})
@@ -52,3 +55,4 @@ demo = gr.ChatInterface(
52
 
53
  if __name__ == "__main__":
54
  demo.queue().launch() # Simply call queue without `concurrency_count`
 
 
4
 
5
  client = InferenceClient("microsoft/Phi-3.5-mini-instruct")
6
 
 
7
  async def respond(
8
  message,
9
+ history: list[tuple[str, str]],
10
  system_message,
11
  max_tokens,
12
  temperature,
 
14
  ):
15
  # Ensure history is in OpenAI-style 'role' and 'content' format
16
  messages = [{"role": "system", "content": system_message}]
17
+ for val in history:
18
+ if val[0]:
19
+ messages.append({"role": "user", "content": val[0]})
20
+ if val[1]:
21
+ messages.append({"role": "assistant", "content": val[1]})
22
 
23
  # Add the user's latest message
24
  messages.append({"role": "user", "content": message})
 
55
 
56
  if __name__ == "__main__":
57
  demo.queue().launch() # Simply call queue without `concurrency_count`
58
+