vvolhejn committed
Commit 3eda7dc · 1 Parent(s): 815f573

Connect streaming bot to OpenAI

Files changed (1)
  1. brander/app.py +44 -12
brander/app.py CHANGED
@@ -24,9 +24,50 @@ def greet(topic: str):
  # interface.launch()


- import gradio as gr
- import random
- import time
+ def gradio_history_to_openai_history(gradio_history: list[list[str]]):
+     openai_history = [
+         {
+             "role": "system",
+             "content": prompting.PROMPT_TEMPLATE,
+         },
+         {"role": "user", "content": prompting.EXAMPLE_INPUT},
+         {"role": "assistant", "content": prompting.EXAMPLE_OUTPUT},
+     ]
+
+     for gradio_message in gradio_history:
+         openai_history.append({"role": "user", "content": gradio_message[0]})
+         if gradio_message[1]:
+             openai_history.append({"role": "assistant", "content": gradio_message[1]})
+
+     return openai_history
+
+
+ def bot(history: list[list[str]]):
+     try:
+         response = openai.ChatCompletion.create(
+             model="gpt-3.5-turbo",
+             messages=gradio_history_to_openai_history(history),
+             stream=True,
+         )
+     except Exception as e:
+         # An openai.error.RateLimitError can happen,
+         # but we can also catch other exceptions just in case
+         history[-1][1] = f"[ERROR] {type(e)}: {e}"
+         return history
+
+     history[-1][1] = ""
+     for chunk in response:
+         choice = chunk.choices[0]
+         if choice.finish_reason is not None:
+             break
+
+         # The first chunk just says that the role is "assistant"
+         # and doesn't have any content (text)
+         if hasattr(choice.delta, "content"):
+             history[-1][1] += choice.delta.content
+
+         yield history
+

  with gr.Blocks() as interface:
      chatbot = gr.Chatbot()
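The new gradio_history_to_openai_history helper maps Gradio's chat history (a list of [user, assistant] pairs, where the pending turn still has None as its second element) onto the OpenAI messages format, prepending the system prompt and a one-shot example taken from the prompting module. A rough, self-contained sketch of what it produces, with the prompting constants replaced by placeholder strings since their real values are defined elsewhere in the repo:

# Sketch only: the constants below stand in for the real
# prompting.PROMPT_TEMPLATE / EXAMPLE_INPUT / EXAMPLE_OUTPUT values.
PROMPT_TEMPLATE = "You are a naming assistant."  # placeholder, not the repo's prompt
EXAMPLE_INPUT = "a bakery"                       # placeholder
EXAMPLE_OUTPUT = "Crumb Together"                # placeholder


def gradio_history_to_openai_history(gradio_history):
    openai_history = [
        {"role": "system", "content": PROMPT_TEMPLATE},
        {"role": "user", "content": EXAMPLE_INPUT},
        {"role": "assistant", "content": EXAMPLE_OUTPUT},
    ]
    for user_message, bot_message in gradio_history:
        openai_history.append({"role": "user", "content": user_message})
        if bot_message:  # the pending turn is still None here
            openai_history.append({"role": "assistant", "content": bot_message})
    return openai_history


# One finished turn plus one pending turn, as Gradio's Chatbot stores them:
history = [["a coffee shop", "Bean Voyage"], ["make it shorter", None]]
for message in gradio_history_to_openai_history(history):
    print(f'{message["role"]}: {message["content"]}')
# system: You are a naming assistant.
# user: a bakery
# assistant: Crumb Together
# user: a coffee shop
# assistant: Bean Voyage
# user: make it shorter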
@@ -36,15 +77,6 @@ with gr.Blocks() as interface:
      def user(user_message, history):
          return "", history + [[user_message, None]]

-     def bot(history):
-         print(history)
-         bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
-         history[-1][1] = ""
-         for character in bot_message:
-             history[-1][1] += character
-             time.sleep(0.05)
-             yield history
-
      msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
          bot, chatbot, chatbot
      )
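The second hunk keeps the existing event chain: msg.submit(user, ...) appends the user's turn and clears the textbox, and .then(bot, chatbot, chatbot) now runs the streaming OpenAI bot instead of the deleted random-reply one. The textbox definition and the (commented-out) launch call are not part of these hunks, so the following is a minimal self-contained sketch of the surrounding wiring, assuming Gradio 3.x, where queue() must be enabled for generator callbacks to stream, and using a dummy bot in place of the OpenAI one:

import gradio as gr


def user(user_message, history):
    # Append the new turn with an empty assistant slot and clear the textbox.
    return "", history + [[user_message, None]]


def bot(history):
    # Dummy stand-in for the streaming OpenAI bot(): yield partial histories
    # so the Chatbot component re-renders as the reply grows.
    history[-1][1] = ""
    for word in ["Streaming", "replies", "look", "like", "typing."]:
        history[-1][1] += word + " "
        yield history


with gr.Blocks() as interface:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()  # assumption: the repo defines `msg` in lines not shown here
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )

interface.queue()   # generators only stream to the UI when queuing is enabled (Gradio 3.x)
interface.launch()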
 
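For a quick check of the streaming call itself, outside Gradio, the same chunk-consuming loop can be pointed at the console. A sketch assuming the pre-1.0 openai client used in this commit (openai.ChatCompletion with per-chunk delta objects) and an OPENAI_API_KEY environment variable; the prompt is a placeholder:

import os

import openai

openai.api_key = os.environ["OPENAI_API_KEY"]

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Suggest one name for a coffee shop."}],
    stream=True,
)

for chunk in response:
    choice = chunk.choices[0]
    if choice.finish_reason is not None:
        break
    # The first chunk only announces the assistant role and carries no content.
    if hasattr(choice.delta, "content"):
        print(choice.delta.content, end="", flush=True)
print()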