Spaces:
Sleeping
Sleeping
Update kadiApy_ragchain.py
Browse files- kadiApy_ragchain.py +5 -5
kadiApy_ragchain.py
CHANGED
@@ -167,7 +167,7 @@ class KadiApyRagchain:
|
|
167 |
"""
|
168 |
Generate a response using the retrieved contexts and the LLM.
|
169 |
"""
|
170 |
-
formatted_history = format_history(
|
171 |
|
172 |
# Update the prompt with history included
|
173 |
prompt = f"""
|
@@ -224,13 +224,13 @@ class KadiApyRagchain:
|
|
224 |
|
225 |
return formatted_docs
|
226 |
|
227 |
-
def format_history(
|
228 |
formatted_history = []
|
229 |
-
for i, entry in enumerate(
|
230 |
user_query = entry.get("query", "No query provided")
|
231 |
-
assistant_response = entry.get("response", "No response yet")
|
232 |
formatted_history.append(f"Turn {i}:")
|
233 |
formatted_history.append(f"User Query: {user_query}")
|
234 |
-
formatted_history.append(f"Assistant Response: {assistant_response}")
|
235 |
formatted_history.append("\n")
|
236 |
return "\n".join(formatted_history)
|
|
|
167 |
"""
|
168 |
Generate a response using the retrieved contexts and the LLM.
|
169 |
"""
|
170 |
+
formatted_history = format_history(chat_history)
|
171 |
|
172 |
# Update the prompt with history included
|
173 |
prompt = f"""
|
|
|
224 |
|
225 |
return formatted_docs
|
226 |
|
227 |
+
def format_history(chat_history):
    """Render the chat history as a numbered multi-turn transcript.

    Each entry in *chat_history* is expected to be a dict; the "query" and
    "response" keys are read with fallbacks when missing. Turns are numbered
    from 1, and a blank separator line is left between consecutive turns.
    Returns a single newline-joined string ("" for an empty history).
    """
    lines = []
    for turn, exchange in enumerate(chat_history, start=1):
        lines.extend([
            f"Turn {turn}:",
            f"User Query: {exchange.get('query', 'No query provided')}",
            f"Assistant Response: {exchange.get('response', 'No response yet')}",
            "\n",
        ])
    return "\n".join(lines)
|