RithikaChalam committed
Commit 0b1603b (verified)
Parent(s): 96b34ab

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -108,7 +108,7 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
 
 client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
 
-def respond(messages, history, mom_type) :
+def respond(message, history, mom_type) :
     mom_type = []
     if mom_type == "Cool Mom" :
         top_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks)
@@ -120,7 +120,7 @@ def respond(messages, history, mom_type) :
     messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {mom_type}"},
                 {"role": "user",
                  "content": (
-                     f"Question{messages}"
+                     f"Question{message}"
                 )}]
     if history:
         messages.extend(history)
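
The change renames the respond parameter from messages to message, so the get_top_chunks(message, ...) retrieval call and the f"Question{message}" user turn now refer to the incoming chat message instead of an undefined name, and no longer collide with the messages list the function builds afterwards. Below is a minimal sketch of how the updated function could look end to end; the chat_completion call, the retrieval globals, and the Gradio wiring are assumptions for illustration, since they sit outside the changed lines of this commit.

# Hypothetical sketch only: everything outside the two changed lines (the
# completion call, the Gradio wiring, the "Studious Mom" choice) is assumed,
# not taken from this commit.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")

def respond(message, history, mom_type):
    # Retrieve context chunks for the user's message (get_top_chunks and the
    # chunk globals are defined earlier in app.py).
    top_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks)
    messages = [{"role": "system",
                 "content": ("You are a chatbot that plays the role of the user's "
                             "extremely studious, tutor-like mom. Respond in full "
                             f"sentences. Base your response on this context: {top_results}")}]
    if history:
        messages.extend(history)  # history as a list of {"role", "content"} dicts
    messages.append({"role": "user", "content": f"Question: {message}"})
    # Assumed completion call; the actual call is not shown in the changed hunks.
    response = client.chat_completion(messages=messages, max_tokens=512)
    return response.choices[0].message.content

# Assumed wiring: respond(message, history, mom_type) matches gr.ChatInterface
# with one extra dropdown input for the persona.
demo = gr.ChatInterface(respond,
                        type="messages",
                        additional_inputs=[gr.Dropdown(["Cool Mom", "Studious Mom"],
                                                       label="Mom type")])
demo.launch()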