import gradio as gr
from huggingface_hub import InferenceClient

# Chat-completions endpoint served behind an ngrok tunnel.
client = InferenceClient("https://vulture-awake-probably.ngrok-free.app/v1/chat/completions")

# Generation settings are fixed; the UI exposes no controls for them.
FIXED_MAX_TOKENS = 1024
FIXED_TEMPERATURE = 1
FIXED_TOP_P = 0.95

def respond(message, history):
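    """Stream a reply to `message`, given the prior chat turns in `history`."""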
    # Start with an empty conversation. To use a fixed system prompt, prepend it here, e.g.:
    #   messages = [{"role": "system", "content": FIXED_SYSTEM_MESSAGE}]
    messages = []

    # History arrives as (user, assistant) pairs in Gradio's tuple-style format.
    for user_message, ai_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if ai_message:
            messages.append({"role": "assistant", "content": ai_message})

    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Stream tokens and yield the accumulating reply so the chat window updates live.
        for chunk in client.chat.completions.create(
            messages=messages,
            max_tokens=FIXED_MAX_TOKENS,
            stream=True,
            temperature=FIXED_TEMPERATURE,
            top_p=FIXED_TOP_P,
        ):
            if chunk.choices[0].delta.content is not None:
                token = chunk.choices[0].delta.content
                response += token
                yield response
    except Exception as e:
        yield f"An error occurred: {e}"
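
# Banner image shown above the chat area.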
header_image_path = "https://cdn-uploads.huggingface.co/production/uploads/6540a02d1389943fef4d2640/j61iZTDaK9g0UW3aWGwWi.gif"

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Image(
        value=header_image_path,
        label="Chatbot Header",
        show_label=False,
        interactive=False,
        height=100,
        elem_id="chatbot-logo"
    )

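    # Chat UI wired to the streaming respond() generator.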
    gr.ChatInterface(
        respond,
        chatbot=gr.Chatbot(height=700)
    )

if __name__ == "__main__":
    demo.launch(show_api=False, share=True)