marcofrodl committed
Commit 2d59812 · 1 Parent(s): db0e909

Update app.py

Files changed (1):
  app.py  +28 -2
app.py CHANGED
@@ -1,5 +1,31 @@
 import gradio as gr
-from src import requests
+from mistralai.client import MistralClient
+from mistralai.models.chat_completion import ChatMessage
+
+
+def get_stream_chat_completion(
+    message, chat_history, model, api_key, system=None, **kwargs
+):
+    messages = []
+    if system is not None:
+        messages.append(ChatMessage(role="system", content=system))
+    for chat in chat_history:
+        human_message, bot_message = chat
+        messages.extend(
+            (
+                ChatMessage(role="user", content=human_message),
+                ChatMessage(role="assistant", content=bot_message),
+            )
+        )
+    messages.append(ChatMessage(role="user", content=message))
+    client = MistralClient(api_key=api_key)
+    for chunk in client.chat_stream(
+        model=model,
+        messages=messages,
+        **kwargs,
+    ):
+        if chunk.choices[0].delta.content is not None:
+            yield chunk.choices[0].delta.content
 
 def respond_stream(
     message,
@@ -13,7 +39,7 @@ def respond_stream(
 ):
     response = ""
     received_anything = False
-    for chunk in requests.get_stream_chat_completion(
+    for chunk in get_stream_chat_completion(
         message=message,
         chat_history=chat_history,
         model=model,
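
The second hunk truncates the body of respond_stream, so only the first keyword arguments of the call are visible in this commit. A minimal sketch of how a Gradio streaming callback might consume the new get_stream_chat_completion generator follows; the api_key/system argument names, the chunk-accumulation loop beyond the lines shown, and the empty-response fallback are assumptions for illustration, not part of the diff.

# Sketch only: assumes get_stream_chat_completion (added in this commit)
# is defined in the same app.py. Everything past the lines visible in the
# second hunk is an illustrative assumption.
def respond_stream(message, chat_history, system, model, api_key):
    response = ""
    received_anything = False
    for chunk in get_stream_chat_completion(
        message=message,
        chat_history=chat_history,
        model=model,
        api_key=api_key,   # assumed parameter wiring
        system=system,     # assumed parameter wiring
    ):
        received_anything = True
        response += chunk  # append each streamed token fragment
        yield response     # yield the growing reply so Gradio re-renders it
    if not received_anything:
        yield "No response received."  # assumed fallback behaviour

Because the helper is a generator that yields token deltas from MistralClient.chat_stream, a callback like this can simply accumulate the fragments and re-yield the partial response, which is the usual pattern for streaming output in a Gradio chat interface.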