RithikaChalam committed
Commit 22c40d7 · verified · Parent: e942c05

Update app.py
Files changed (1)
  1. app.py +7 -30
app.py CHANGED
@@ -108,7 +108,7 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
 
 client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
 
-def respond_cool(message, history):
+def respond_cool(message, history, mom_type):
     top_cool_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks) # Complete this line
     #str_chunks = "\n".join(best_chunks)
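Why the extra parameter: gr.ChatInterface calls its fn with the user message and the chat history first, then the current values of any additional_inputs components, so a respond function wired through additional_inputs needs a matching third argument. A minimal, self-contained sketch of that calling convention — the echo reply is illustrative only, not the app's behaviour:

```python
import gradio as gr

def respond(message, history, mom_type):
    # mom_type receives the current value of the additional_inputs component;
    # for a CheckboxGroup that value is a list of the selected labels.
    mode = ", ".join(mom_type) if mom_type else "no mode selected"
    return f"[{mode}] you said: {message}"

demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.CheckboxGroup(["Cool Mom", "Tutor Mom", "Strict Mom"], label="Choose Your Mom")
    ],
    title="StudyMama",
)

if __name__ == "__main__":
    demo.launch()
```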
 
@@ -130,11 +130,11 @@ def respond_cool(message, history):
     )
     return response['choices'][0]['message']['content'].strip()
 
-def respond_tutor(message, history):
+def respond_tutor(message, history, mom_type):
     top_tutor_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
     #str_chunks = "\n".join(best_chunks)
 
-    messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {top_tutor_results}"},
+    messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {mom_type}"},
                 {"role": "user",
                  "content": (
                      f"Context:\n{top_tutor_results}\n\n"
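For readers unfamiliar with the pattern around this hunk: the messages list is OpenAI-style chat input, and the unchanged context lines read the reply via response['choices'][0]['message']['content']. A minimal sketch of that round trip with huggingface_hub's InferenceClient — the max_tokens value and the example prompt are assumptions, not taken from app.py:

```python
from huggingface_hub import InferenceClient

client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")

messages = [
    {"role": "system", "content": "You are the user's studious, tutor-like mom."},
    {"role": "user", "content": "Context:\n(retrieved study chunks)\n\nQuestion: What should I review tonight?"},
]

# Send an OpenAI-style chat completion request to the hosted model;
# the dict-style indexing below mirrors the expression used in app.py.
response = client.chat_completion(messages=messages, max_tokens=300)
print(response["choices"][0]["message"]["content"].strip())
```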
@@ -174,37 +174,14 @@ def respond_strict(message, history):
     )
     return response['choices'][0]['message']['content'].strip()
 
-
-with gr.Blocks() as demo:
-    mom_type = gr.State("Cool Mom") # default state
-    history = gr.State([])
-
-    with gr.Row():
-        gr.Markdown("### Choose your Mom Mode")
-    with gr.Row():
-        cool_button = gr.Button("Cool Mom")
-        tutor_button = gr.Button("Tutor Mom")
-        strict_button = gr.Button("Strict Mom")
-
-    def set_mom_cool(): return "Cool Mom"
-    def set_mom_tutor(): return "Tutor Mom"
-    def set_mom_strict(): return "Strict Mom"
+with gr.Blocks() as demo:
 
-    cool_button.click(fn=set_mom_cool, inputs=[], outputs=mom_type)
-    tutor_button.click(fn=set_mom_tutor, inputs=[], outputs=mom_type)
-    strict_button.click(fn=set_mom_strict, inputs=[], outputs=mom_type)
-
-    def route_message(message, chat_history, mom_type):
-        if mom_type == "Cool Mom":
-            return respond_cool(message, chat_history)
-        elif mom_type == "Tutor Mom":
-            return respond_tutor(message, chat_history)
-        else:
-            return respond_strict(message, chat_history)
+    with gr.Row():
+        mom_type = gr.CheckboxGroup(['Cool Mom', 'Tutor Mom', 'Strict Mom'],label='Choose Your Mom')
 
     gr.ChatInterface(
         fn=route_message,
-        additional_inputs=[chat_history,mom_type],
+        additional_inputs=[mom_type],
         title="StudyMama"
     )
 
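One loose end in this hunk: the new block still wires gr.ChatInterface(fn=route_message, ...), but the commit deletes the only route_message definition, and respond_strict keeps its two-argument signature (see the hunk header). A hedged sketch of a dispatcher that would match the new signatures — the body and the fallback to "Cool Mom" are assumptions, not part of the commit:

```python
# Hypothetical replacement for the removed route_message (not in this commit).
# A CheckboxGroup value arrives as a list of selected labels, so pick one.
def route_message(message, history, mom_type):
    selected = mom_type[0] if mom_type else "Cool Mom"  # assumed default
    if selected == "Cool Mom":
        return respond_cool(message, history, mom_type)
    elif selected == "Tutor Mom":
        return respond_tutor(message, history, mom_type)
    else:
        return respond_strict(message, history)  # respond_strict was not changed
```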
 
 
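A side note on the widget choice: gr.CheckboxGroup lets the user tick zero or several boxes, which is why the dispatcher sketched above has to guess when the list is empty or holds more than one label. If exactly one mode is wanted, gr.Radio yields a single string — shown only as a possible alternative, not something this commit does:

```python
import gradio as gr

# Alternative (not in the commit): a Radio component yields one selected label.
mom_type = gr.Radio(
    ["Cool Mom", "Tutor Mom", "Strict Mom"],
    value="Cool Mom",  # assumed default mode
    label="Choose Your Mom",
)
```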