Update services/chat_service.py
Browse files- services/chat_service.py +86 -46
services/chat_service.py
CHANGED
@@ -41,6 +41,7 @@ class ConversationManager:
|
|
41 |
del self.conversations[session_id]
|
42 |
|
43 |
class ChatService:
|
|
|
44 |
"""Main chat service that coordinates responses"""
|
45 |
def __init__(
|
46 |
self,
|
@@ -55,7 +56,91 @@ class ChatService:
|
|
55 |
self.pdf_service = pdf_service
|
56 |
self.faq_service = faq_service
|
57 |
self.conversation_manager = ConversationManager()
|
58 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
59 |
async def search_all_sources(
|
60 |
self,
|
61 |
query: str,
|
@@ -174,48 +259,3 @@ class ChatService:
|
|
174 |
logger.error(f"Error generating response: {e}")
|
175 |
raise
|
176 |
|
177 |
-
async def chat(
    self,
    user_input: str,
    session_id: Any,  # allow any type for flexibility; coerced to str below
    max_length: int = 1000
) -> Tuple[str, List[Dict[str, Any]], Dict[str, List[Dict[str, Any]]]]:
    """Run one full conversation turn.

    Coordinates the flow: fetch history, search all sources, build the
    context, generate a response, and persist the interaction.

    Args:
        user_input: The user's message for this turn.
        session_id: Conversation key; converted to ``str`` if needed.
        max_length: Maximum length forwarded to ``generate_response``.

    Returns:
        A 3-tuple of (response text, updated chat history entries,
        per-source search results).
        NOTE: the original annotation declared a 2-tuple but the code
        returns three values; the annotation is corrected here.

    Raises:
        Exception: Anything raised downstream is logged and re-raised.
    """
    try:
        # Normalize the session key so the conversation manager always
        # indexes by string.
        if not isinstance(session_id, str):
            session_id = str(session_id)

        # Get chat history
        chat_history = self.conversation_manager.get_history(session_id)

        # Search all sources
        search_results = await self.search_all_sources(user_input)

        # Build context
        context = self.build_context(search_results, chat_history)

        # Create a simple plain-text prompt
        prompt = (
            f"Context:\n{context}\n\n"
            f"User: {user_input}\n"
            "Assistant:"
        )

        # Generate response
        response = await self.generate_response(prompt, max_length)

        # Store interaction
        self.conversation_manager.add_interaction(
            session_id,
            user_input,
            response,
            {'search_results': search_results}
        )

        # Re-read history so the returned copy includes this turn.
        return response, self.conversation_manager.get_history(session_id), search_results

    except Exception as e:
        logger.error(f"Error in chat: {e}")
        raise
|
|
|
41 |
del self.conversations[session_id]
|
42 |
|
43 |
class ChatService:
|
44 |
+
"""Main chat service that coordinates responses"""
|
45 |
"""Main chat service that coordinates responses"""
|
46 |
def __init__(
|
47 |
self,
|
|
|
56 |
self.pdf_service = pdf_service
|
57 |
self.faq_service = faq_service
|
58 |
self.conversation_manager = ConversationManager()
|
59 |
+
|
60 |
+
def construct_system_prompt(self, context: str) -> str:
    """Build the system message that embeds the retrieved product context."""
    # Fixed instruction preamble; the retrieved context is appended verbatim.
    preamble = (
        "You are a friendly bot specializing in Bofrost products. "
        "Return comprehensive German answers. Always add product IDs. "
        "Use the following product descriptions:\n\n"
    )
    return f"{preamble}{context}\n\n"
|
68 |
+
|
69 |
+
def construct_prompt(
    self,
    user_input: str,
    context: str,
    chat_history: List[Tuple[str, str]],
    max_history_turns: int = 1
) -> str:
    """Assemble the full Llama-3-style chat prompt.

    Args:
        user_input: The current user message.
        context: Retrieved context injected into the system message.
        chat_history: Prior (user, assistant) turn pairs.
        max_history_turns: Number of trailing history turns to replay.
            Fix: a value of 0 now includes NO history; previously the
            slice ``[-0:]`` silently included the entire history.

    Returns:
        The prompt string, ending with an open assistant header for the
        model to complete.
    """
    # System message
    system_message = self.construct_system_prompt(context)

    pieces = [
        "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
        f"{system_message}<|eot_id|>"
    ]

    # Replay only the most recent turns to bound prompt size.
    # Guard against max_history_turns <= 0: [-0:] would mean "everything".
    recent = chat_history[-max_history_turns:] if max_history_turns > 0 else []
    for user_msg, assistant_msg in recent:
        pieces.append(f"<|start_header_id|>user<|end_header_id|>\n\n{user_msg}<|eot_id|>")
        pieces.append(f"<|start_header_id|>assistant<|end_header_id|>\n\n{assistant_msg}<|eot_id|>")

    # Current user input plus an open assistant header.
    pieces.append(f"<|start_header_id|>user<|end_header_id|>\n\n{user_input}<|eot_id|>")
    pieces.append("<|start_header_id|>assistant<|end_header_id|>\n\n")

    return "".join(pieces)
|
93 |
+
|
94 |
+
async def chat(
    self,
    user_input: str,
    session_id: Any,
    max_length: int = 1000
) -> Tuple[str, List[Tuple[str, str]], Dict[str, List[Dict[str, Any]]]]:
    """Handle one chat turn end to end: search, prompt, generate, persist.

    Args:
        user_input: The user's message for this turn.
        session_id: Conversation key; coerced to ``str`` if needed.
        max_length: Cap forwarded to ``generate_response``.

    Returns:
        A 3-tuple of (response text, (user, assistant) history pairs
        suitable for Gradio, per-source search results).

    Raises:
        Exception: Anything raised downstream is logged and re-raised.
    """
    def as_pairs(entries):
        # History entries are dicts; the UI wants (user, assistant) tuples.
        return [(entry['user_input'], entry['response']) for entry in entries]

    try:
        # The conversation manager keys sessions by string.
        if not isinstance(session_id, str):
            session_id = str(session_id)

        # Fetch history in both raw (dict) and paired (tuple) forms.
        raw_history = self.conversation_manager.get_history(session_id)
        history_pairs = as_pairs(raw_history)

        # Retrieve supporting material and fold it into the prompt.
        search_results = await self.search_all_sources(user_input)
        context = self.build_context(search_results, raw_history)
        prompt = self.construct_prompt(user_input, context, history_pairs)

        # Produce the assistant reply.
        response = await self.generate_response(prompt, max_length)

        # Persist this turn alongside its supporting search results.
        self.conversation_manager.add_interaction(
            session_id,
            user_input,
            response,
            {'search_results': search_results}
        )

        # Re-read so the returned history includes the turn just stored.
        updated_pairs = as_pairs(self.conversation_manager.get_history(session_id))
        return response, updated_pairs, search_results

    except Exception as e:
        logger.error(f"Error in chat: {e}")
        raise
|
142 |
+
|
143 |
+
|
144 |
async def search_all_sources(
|
145 |
self,
|
146 |
query: str,
|
|
|
259 |
logger.error(f"Error generating response: {e}")
|
260 |
raise
|
261 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|