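# Gradio chatbot for the ambrosfitz/llama-3-history model, served through a
# RunPod vLLM endpoint that exposes an OpenAI-compatible API.
# The RUNPOD_API_KEY environment variable must be set before launch,
# e.g. `export RUNPOD_API_KEY=<your key>`.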
import gradio as gr
from openai import OpenAI
import os
import time

# Initialize the OpenAI client with your RunPod API key and endpoint
api_key = os.environ.get("RUNPOD_API_KEY")  # Make sure this environment variable is set
client = OpenAI(
    api_key=api_key,
    base_url="https://api.runpod.ai/v2/vllm-k0g4c60zor9xuu/openai/v1",
)
def get_response(user_message, history):
    # Convert the (user, assistant) history pairs into the OpenAI message format
    history_openai_format = []
    for human, assistant in history:
        if human:  # Ensure there's a user message
            history_openai_format.append({"role": "user", "content": human})
        if assistant:  # Ensure there's an assistant message
            history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_message})

    # Make the API call
    response = client.chat.completions.create(
        model='ambrosfitz/llama-3-history',
        messages=history_openai_format,
        temperature=0.5,
        max_tokens=150
    )

    # Get the text response (message.content is an attribute, not a dict key)
    bot_message = response.choices[0].message.content.strip() if response.choices else "No response generated."
    return bot_message
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        if not user_message.strip():  # Handle empty input gracefully
            return "", history
        bot_response = get_response(user_message, history)
        return "", history + [[user_message, bot_response]]

    def clear_chat():
        return "", []  # Clear the textbox and the chat history

    msg.submit(user, inputs=[msg, chatbot], outputs=[msg, chatbot])
    clear.click(clear_chat, inputs=None, outputs=[msg, chatbot])

demo.launch()