# (HuggingFace Spaces page-status text captured during extraction; kept as a comment
#  so the file remains valid Python: "Spaces: Sleeping")
from brander import prompting
import gradio as gr
import openai
def greet(topic: str):
    """Return a single (non-streamed) branded reply for *topic*.

    Builds a few-shot prompt — system template formatted with the topic,
    one example exchange, then the topic itself as the user turn — and
    returns the assistant's text from the first choice.
    """
    system_message = {
        "role": "system",
        "content": prompting.PROMPT_TEMPLATE.format(topic=topic),
    }
    # One worked example steers the model toward the expected output format.
    few_shot = [
        {"role": "user", "content": prompting.EXAMPLE_INPUT},
        {"role": "assistant", "content": prompting.EXAMPLE_OUTPUT},
    ]
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[system_message, *few_shot, {"role": "user", "content": topic}],
    )
    return completion.choices[0].message.content
# interface = gr.Interface(fn=greet, inputs="text", outputs="text")
# interface.launch()
def gradio_history_to_openai_history(gradio_history: list[list[str]]):
    """Convert Gradio's [[user, assistant], ...] history to OpenAI messages.

    The result starts with the fixed preamble (system prompt plus one example
    exchange), followed by the conversation turns. NOTE(review): the system
    content is the raw PROMPT_TEMPLATE here, while ``greet`` formats it with a
    topic — presumably intentional for the chat flow; verify against
    ``brander.prompting``.
    """
    messages = [
        {"role": "system", "content": prompting.PROMPT_TEMPLATE},
        {"role": "user", "content": prompting.EXAMPLE_INPUT},
        {"role": "assistant", "content": prompting.EXAMPLE_OUTPUT},
    ]
    for pair in gradio_history:
        messages.append({"role": "user", "content": pair[0]})
        # The newest pair has no assistant reply yet (None/empty) — skip it.
        if pair[1]:
            messages.append({"role": "assistant", "content": pair[1]})
    return messages
def bot(history: list[list[str]]):
    """Stream the assistant's reply into the last entry of *history*.

    This is a generator: it yields *history* after each streamed chunk so
    Gradio can re-render the chatbot incrementally. On API failure it yields
    once with an error message in place of the reply.
    """
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=gradio_history_to_openai_history(history),
            stream=True,
        )
    except Exception as e:
        # An openai.error.RateLimitError can happen,
        # but we can also catch other exceptions just in case
        history[-1][1] = f"[ERROR] {type(e)}: {e}"
        # BUG FIX: this function is a generator, so a bare `return history`
        # only sets StopIteration.value — Gradio never sees the error text.
        # It must be yielded before returning.
        yield history
        return
    history[-1][1] = ""
    for chunk in response:
        choice = chunk.choices[0]
        if choice.finish_reason is not None:
            break
        # The first chunk just says that the role is "assistant"
        # and doesn't have any content (text)
        if hasattr(choice.delta, "content"):
            history[-1][1] += choice.delta.content
        yield history
with gr.Blocks() as interface:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, chat_pairs):
        """Append the submitted message (reply pending) and clear the textbox."""
        return "", [*chat_pairs, [user_message, None]]

    # Submitting the textbox first records the user turn (instantly, unqueued),
    # then streams the assistant reply into the same chatbot component.
    submit_event = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False)
    submit_event.then(bot, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)

# Queueing is required for the streaming (generator) bot callback.
interface.queue()
# demo.launch()