arpit13 committed
Commit 8eb685b · verified · 1 Parent(s): 0fab018

Create app.py

Files changed (1)
  1. app.py +118 -0
app.py ADDED
@@ -0,0 +1,118 @@
import os
import gradio as gr
from huggingface_hub import InferenceClient

# Initialize the Hugging Face InferenceClient with your API key.
client = InferenceClient(
    provider="sambanova",
    api_key=os.getenv("API_KEY")  # Read the key from the API_KEY environment variable.
)

# Define a system message that contains the specialized event details.
system_message = {
    "role": "system",
    "content": (
        "You are an AI chat assistant specialized in providing detailed information about "
        "the Bhasha Bandhu Regional Ideathon @ SGSITS. Please always include event details, dates, "
        "and relevant links (if available) in your responses.\n\n"
        "Event Details:\n"
        "Bhasha Bandhu Regional Ideathon @ SGSITS\n"
        "Date: 22nd February 2025\n"
        "Time: 9:00 AM - 3:00 PM\n"
        "Venue: SGSITS, Indore\n\n"
        "Join the Bhasha Bandhu Regional Ideathon!\n\n"
        "Bhasha Bandhu, in collaboration with Bhashini and Microsoft, is organizing an exciting "
        "Regional Ideathon at SGSITS, Indore, on 22nd February. This is a unique opportunity for "
        "students, professionals, developers, and entrepreneurs to brainstorm and innovate solutions "
        "that bridge India's linguistic digital divide.\n\n"
        "Why Participate?\n"
        "- Gain industry mentorship from experts in AI & language technology\n"
        "- Work on real-world problem statements with open-source AI models\n"
        "- Hands-on experience with Bhashini API, OpenAI, and GitHub Copilot\n"
        "- Swags and Certificates for regional winners and participants\n"
        "- Opportunity to get shortlisted for the main Hackathon with Microsoft & Bhashini\n\n"
        "Event Agenda:\n"
        "- 9:00 AM - 9:30 AM: Registration & Introduction\n"
        "- 9:30 AM - 10:00 AM: Mentor Session on Bhashini API, OpenAI, GitHub Copilot\n"
        "- 10:00 AM - 10:30 AM: Problem Statements Explained + Q&A\n"
        "- 10:30 AM - 12:30 PM: Brainstorming & Ideation (PPT preparation on Ideathon Day)\n"
        "- 12:30 PM - 2:00 PM: Mentor Evaluation & Regional Winner Selection\n"
        "- 2:00 PM - 3:00 PM: Winner Announcement & Closing Ceremony\n\n"
        "How to Participate:\n"
        "- Form a team (or participate solo)\n"
        "- Register for the event in advance\n"
        "- Prepare a PPT on Ideathon Day covering:\n"
        " • Problem Statement & Solution (using Bhashini API & OpenAI)\n"
        " • Unique Selling Proposition & Business Potential\n"
        " • Tech Stack & Implementation Plan\n"
        "- Present your idea to the jury\n\n"
        "Important Notes:\n"
        "- Offline participation is mandatory\n"
        "- Lunch will not be provided\n"
        "- Winning at the regional hackathon does not guarantee a win in the main event, but all "
        "submitted ideas will be considered.\n\n"
        "For Queries: Contact Arpit at +91 95718 45422\n\n"
        "Let's build a digitally inclusive India together!"
    )
}

# Global conversation history (starting with the system message).
conversation = [system_message]

def generate_response(user_message, chat_history):
    """
    Appends the user's message to the conversation, calls the inference client,
    and returns an empty textbox value plus the updated chat history for the Gradio chat interface.
    """
    global conversation

    # Append the new user message.
    conversation.append({
        "role": "user",
        "content": user_message
    })

    # Call the Hugging Face chat completions API.
    completion = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
        messages=conversation,
        max_tokens=5000,
    )

    # The assistant's message may come back as a dict or as an object with a `content` attribute.
    assistant_message = completion.choices[0].message
    if isinstance(assistant_message, dict):
        assistant_text = assistant_message.get("content", "")
    else:
        assistant_text = getattr(assistant_message, "content", str(assistant_message))

    # Append the assistant's response to the conversation.
    conversation.append({
        "role": "assistant",
        "content": assistant_text
    })

    # Update the chat history (a list of tuples: (user, assistant)) for the Gradio interface.
    chat_history.append((user_message, assistant_text))
    return "", chat_history

# Build the Gradio interface.
with gr.Blocks() as demo:
    gr.Markdown("# Bhasha Bandhu Ideathon Chat Assistant")
    gr.Markdown(
        "Ask any questions or request details about the Bhasha Bandhu Regional Ideathon @ SGSITS. "
        "The assistant will provide detailed answers with event dates, agenda, and links where applicable."
    )

    chatbot = gr.Chatbot()
    with gr.Row():
        txt = gr.Textbox(
            show_label=False, container=False,  # container is a constructor argument; Textbox.style() was removed in newer Gradio releases.
            placeholder="Enter your message here and press Enter"
        )

    # Wire up the textbox submission to our generate_response function.
    txt.submit(generate_response, [txt, chatbot], [txt, chatbot])

# Launch the Gradio demo.
demo.launch()
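
One design note: conversation is a module-level list, so every visitor to the Space appends to the same shared history, and the app expects the key to be available in the API_KEY environment variable (for example as a Space secret) before demo.launch() runs. Below is a minimal, hypothetical sketch of a per-session alternative that keeps the history in gr.State instead of a global; it assumes the same SambaNova provider, model, and API_KEY variable as app.py, with the long event prompt elided.

import os
import gradio as gr
from huggingface_hub import InferenceClient

# Same provider and model as app.py; the key is still read from the API_KEY environment variable.
client = InferenceClient(provider="sambanova", api_key=os.getenv("API_KEY"))
# Placeholder for the full event prompt defined in app.py.
system_message = {"role": "system", "content": "..."}

def respond(user_message, history, chat_pairs):
    # history: this session's message dicts; chat_pairs: the (user, assistant) tuples shown by gr.Chatbot.
    history = history + [{"role": "user", "content": user_message}]
    completion = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
        messages=[system_message] + history,
        max_tokens=5000,
    )
    reply = completion.choices[0].message.content
    history = history + [{"role": "assistant", "content": reply}]
    return "", history, chat_pairs + [(user_message, reply)]

with gr.Blocks() as demo:
    state = gr.State([])  # a fresh history list is created for every browser session
    chatbot = gr.Chatbot()
    txt = gr.Textbox(show_label=False, placeholder="Enter your message here and press Enter")
    txt.submit(respond, [txt, state, chatbot], [txt, state, chatbot])

demo.launch()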