Spaces: Sleeping
import os | |
import gradio as gr | |
from huggingface_hub import InferenceClient | |
# Hugging Face Inference client routed through the SambaNova provider.
# The key comes from the API_KEY environment variable, never hard-coded.
client = InferenceClient(provider="sambanova", api_key=os.getenv("API_KEY"))
# System prompt used as the first conversation turn. It fixes the assistant's
# persona and embeds the complete event brief (dates, venue, agenda,
# participation rules, contact number) so every model reply can cite the
# details verbatim without external lookups.
system_message = {
    "role": "system",
    # The content is one long string built from adjacent literals; each
    # section below mirrors a part of the event announcement.
    "content": (
        "You are an AI chat assistant specialized in providing detailed information about "
        "the Bhasha Bandhu Regional Ideathon @ SGSITS. Please always include event details, dates, "
        "and relevant links (if available) in your responses.\n\n"
        "Event Details:\n"
        "Bhasha Bandhu Regional Ideathon @ SGSITS\n"
        "Date: 22nd February 2025\n"
        "Time: 9:00 AM - 3:00 PM\n"
        "Venue: SGSITS, Indore\n\n"
        "Join the Bhasha Bandhu Regional Ideathon!\n\n"
        "Bhasha Bandhu, in collaboration with Bhashini and Microsoft, is organizing an exciting "
        "Regional Ideathon at SGSITS, Indore, on 22nd February. This is a unique opportunity for "
        "students, professionals, developers, and entrepreneurs to brainstorm and innovate solutions "
        "that bridge India's linguistic digital divide.\n\n"
        "Why Participate?\n"
        "- Gain industry mentorship from experts in AI & language technology\n"
        "- Work on real-world problem statements with open-source AI models\n"
        "- Hands-on experience with Bhashini API, OpenAI, and GitHub Copilot\n"
        "- Swags and Certificates for regional winners and participants\n"
        "- Opportunity to get shortlisted for the main Hackathon with Microsoft & Bhashini\n\n"
        "Event Agenda:\n"
        "- 9:00 AM - 9:30 AM: Registration & Introduction\n"
        "- 9:30 AM - 10:00 AM: Mentor Session on Bhashini API, OpenAI, GitHub Copilot\n"
        "- 10:00 AM - 10:30 AM: Problem Statements Explained + Q&A\n"
        "- 10:30 AM - 12:30 PM: Brainstorming & Ideation (PPT preparation on Ideathon Day)\n"
        "- 12:30 PM - 2:00 PM: Mentor Evaluation & Regional Winner Selection\n"
        "- 2:00 PM - 3:00 PM: Winner Announcement & Closing Ceremony\n\n"
        "How to Participate:\n"
        "- Form a team (or participate solo)\n"
        "- Register for the event in advance\n"
        "- Prepare a PPT on Ideathon Day covering:\n"
        "  • Problem Statement & Solution (using Bhashini API & OpenAI)\n"
        "  • Unique Selling Proposition & Business Potential\n"
        "  • Tech Stack & Implementation Plan\n"
        "- Present your idea to the jury\n\n"
        "Important Notes:\n"
        "- Offline participation is mandatory\n"
        "- Lunch will not be provided\n"
        "- Winning at the regional hackathon does not guarantee a win in the main event, but all "
        "submitted ideas will be considered.\n\n"
        "For Queries: Contact Arpit at +91 95718 45422\n\n"
        "Let's build a digitally inclusive India together!"
    )
}
# Running transcript sent to the model on every request. NOTE(review): this is
# module-level mutable state, shared by all users of the app and never pruned.
conversation = [system_message]
def generate_response(user_message, chat_history):
    """Send the user's message to the model and update the chat display.

    Appends ``user_message`` to the module-level ``conversation``, calls the
    Hugging Face chat-completions API, records the assistant's reply, and
    returns values wired to the Gradio textbox and Chatbot components.

    Parameters:
        user_message: Text the user typed into the textbox.
        chat_history: List of ``(user, assistant)`` tuples shown by the
            Chatbot widget; mutated in place.

    Returns:
        A 2-tuple of an empty string (clears the textbox) and the updated
        ``chat_history``.
    """
    global conversation
    conversation.append({
        "role": "user",
        "content": user_message
    })
    completion = client.chat.completions.create(
        model="meta-llama/Llama-3.3-70B-Instruct",
        messages=conversation,
        max_tokens=500,
    )
    assistant_message = completion.choices[0].message
    # Normalize the reply to plain text. The huggingface_hub client returns a
    # message *object* with a ``.content`` attribute, not a bare string, so the
    # original ``else: assistant_text = assistant_message`` stored the whole
    # object. Handle dict, object-with-content, and bare-string shapes.
    if isinstance(assistant_message, dict):
        assistant_text = assistant_message.get("content", "")
    else:
        assistant_text = getattr(assistant_message, "content", None)
        if assistant_text is None:
            assistant_text = str(assistant_message)
    conversation.append({
        "role": "assistant",
        "content": assistant_text
    })
    # (user, assistant) tuple feeds the Gradio Chatbot display.
    chat_history.append((user_message, assistant_text))
    return "", chat_history
# ---- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# Bhasha Bandhu Ideathon Chat Assistant")
    gr.Markdown(
        "Ask any questions or request details about the Bhasha Bandhu Regional Ideathon @ SGSITS. "
        "The assistant will provide detailed answers with event dates, agenda, and links where applicable."
    )

    # Conversation display area.
    chatbot = gr.Chatbot()

    # Single-row input; submitting clears the box and refreshes the chat.
    with gr.Row():
        txt = gr.Textbox(
            show_label=False,
            placeholder="Enter your message here and press Enter",
        )

    txt.submit(generate_response, [txt, chatbot], [txt, chatbot])

# Start the app server.
demo.launch()