Update app.py
Browse files
app.py
CHANGED
@@ -166,14 +166,26 @@ def threaded_chat_response(previous_chat: str, msg_prompt: str) -> dict:
|
|
166 |
"""
|
167 |
generation_params = {"do_sample": True,"temperature": 0.7,"top_p": 0.95,"top_k": 40,"max_new_tokens": 512,"repetition_penalty": 1.1}
|
168 |
pipe = pipeline("text-generation",model=model, tokenizer=tokenizer, **generation_params)
|
169 |
-
history_message= f''' In case you are asked about something you talked about on previous days, times, or periods, use this as a reference to recall {previous_chat}. Do not point out everything, just major key points. '''
|
170 |
-
|
|
|
|
|
171 |
prompt_template=f'''
|
172 |
<|im_start|>system{system_message}<|im_end|>
|
173 |
<|im_start|>user {msg_prompt}<|im_end|>
|
174 |
<|im_start|>system{history_message}<|im_end|>
|
175 |
<|im_start|>assistant
|
176 |
'''
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
177 |
pipe_output = pipe(prompt_template)[0]['generated_text']
|
178 |
|
179 |
# Separate user's prompt and assistant's response
|
@@ -301,6 +313,7 @@ async def chat_thread(request: Request, user_id: str):
|
|
301 |
|
302 |
data = await request.json()
|
303 |
msg_prompt = data.get('msg_prompt')
|
|
|
304 |
|
305 |
if not msg_prompt:
|
306 |
raise HTTPException(status_code=400, detail="Prompt not provided")
|
|
|
166 |
"""
|
167 |
generation_params = {"do_sample": True,"temperature": 0.7,"top_p": 0.95,"top_k": 40,"max_new_tokens": 512,"repetition_penalty": 1.1}
|
168 |
pipe = pipeline("text-generation",model=model, tokenizer=tokenizer, **generation_params)
|
169 |
+
history_message= f''' In case you are asked about something you talked about on previous days, times, or periods, use this as a reference to recall {previous_chat}. Do not point out everything, just major key points. If nothing is mentioned about the past conversation, ignore this. '''
|
170 |
+
|
171 |
+
if previous_chat:
|
172 |
+
|
173 |
prompt_template=f'''
|
174 |
<|im_start|>system{system_message}<|im_end|>
|
175 |
<|im_start|>user {msg_prompt}<|im_end|>
|
176 |
<|im_start|>system{history_message}<|im_end|>
|
177 |
<|im_start|>assistant
|
178 |
'''
|
179 |
+
else:
|
180 |
+
prompt_template=f'''
|
181 |
+
<|im_start|>system{system_message}<|im_end|>
|
182 |
+
<|im_start|>user {msg_prompt}<|im_end|>
|
183 |
+
<|im_start|>assistant
|
184 |
+
'''
|
185 |
+
|
186 |
+
|
187 |
+
try:
|
188 |
+
|
189 |
pipe_output = pipe(prompt_template)[0]['generated_text']
|
190 |
|
191 |
# Separate user's prompt and assistant's response
|
|
|
313 |
|
314 |
data = await request.json()
|
315 |
msg_prompt = data.get('msg_prompt')
|
316 |
+
previous_chat=data.get('msg_history')
|
317 |
|
318 |
if not msg_prompt:
|
319 |
raise HTTPException(status_code=400, detail="Prompt not provided")
|