# DeepSeek-R1 / app.py
import os
import gradio as gr
from huggingface_hub import InferenceClient
# Retrieve the API key from environment variables
api_key = os.getenv("API_KEY")
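# Added safety check (an assumption, not in the original app): fail fast with a clear
# error if API_KEY is missing, e.g. when the Space secret has not been configured,
# instead of surfacing an authentication failure on the first request.
if not api_key:
    raise RuntimeError("API_KEY is not set; define it as an environment variable or Space secret.")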
# Initialize the InferenceClient for the Together AI provider, using the API key read above
client = InferenceClient(
    provider="together",
    api_key=api_key,
)
def chatbot_response(user_input, chat_history):
    """
    Sends the conversation so far plus the new user message to the inference
    client and appends the model's reply to the conversation history.
    """
    # Rebuild the message list from the stored history so the model keeps
    # conversational context, then add the new user message.
    messages = []
    for user_msg, bot_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": user_input})

    # Get the response from the Hugging Face model
    completion = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1",
        messages=messages,
        max_tokens=500,
    )

    # Extract the reply text (.message is a message object; .content holds the string)
    bot_message = completion.choices[0].message.content
    chat_history.append((user_input, bot_message))

    # Return an empty string to clear the input textbox and the updated chat history
    return "", chat_history
# Create the Gradio Blocks interface
with gr.Blocks() as demo:
    gr.Markdown("# DeepSeek-R1")
    chatbot = gr.Chatbot()
    # chat_history is mutated in place by chatbot_response, so the gr.State list
    # stays in sync even though it is not listed in the outputs below.
    state = gr.State([])

    with gr.Row():
        txt = gr.Textbox(placeholder="Type your message here...", show_label=False)
        send_btn = gr.Button("Send")

    # Send the message on Enter or on the button click; both clear the textbox
    # and refresh the chat display.
    txt.submit(
        chatbot_response,
        inputs=[txt, state],
        outputs=[txt, chatbot],
    )
    send_btn.click(
        chatbot_response,
        inputs=[txt, state],
        outputs=[txt, chatbot],
    )
# Launch the interface
demo.launch()
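# Usage sketch (an assumption, not part of the original file): to run this app locally,
#   pip install gradio huggingface_hub
#   export API_KEY=...   # access token accepted by the configured provider
#   python app.py
# On a Hugging Face Space, API_KEY would instead be set as a repository secret.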