Chris4K committed
Commit 257879f · verified · 1 Parent(s): 4920c28

Create services/chat_service.py

Files changed (1)
  1. services/chat_service.py +216 -0
services/chat_service.py ADDED
@@ -0,0 +1,216 @@
# services/chat_service.py
import asyncio
import logging
from datetime import datetime
from typing import List, Dict, Any, Optional, Tuple

from config.config import settings

logger = logging.getLogger(__name__)

class ConversationManager:
    """Manages conversation history and context"""

    def __init__(self):
        self.conversations: Dict[str, List[Dict[str, Any]]] = {}
        self.max_history = 10

    def add_interaction(
        self,
        session_id: str,
        user_input: str,
        response: str,
        context: Optional[Dict[str, Any]] = None
    ) -> None:
        if session_id not in self.conversations:
            self.conversations[session_id] = []

        self.conversations[session_id].append({
            'timestamp': datetime.now().isoformat(),
            'user_input': user_input,
            'response': response,
            'context': context
        })

        # Trim history if needed
        if len(self.conversations[session_id]) > self.max_history:
            self.conversations[session_id] = self.conversations[session_id][-self.max_history:]

    def get_history(self, session_id: str) -> List[Dict[str, Any]]:
        return self.conversations.get(session_id, [])

    def clear_history(self, session_id: str) -> None:
        if session_id in self.conversations:
            del self.conversations[session_id]

class ChatService:
    """Main chat service that coordinates responses"""

    def __init__(
        self,
        model_service,
        data_service,
        pdf_service,
        faq_service
    ):
        self.model = model_service.model
        self.tokenizer = model_service.tokenizer
        self.data_service = data_service
        self.pdf_service = pdf_service
        self.faq_service = faq_service
        self.conversation_manager = ConversationManager()

    async def search_all_sources(
        self,
        query: str,
        top_k: int = 3
    ) -> Dict[str, List[Dict[str, Any]]]:
        """Search across all available data sources"""
        try:
            # Run searches in parallel
            product_task = asyncio.create_task(
                self.data_service.search(query, top_k)
            )
            pdf_task = asyncio.create_task(
                self.pdf_service.search(query, top_k)
            )
            faq_task = asyncio.create_task(
                self.faq_service.search_faqs(query, top_k)
            )

            # Gather results
            products, pdfs, faqs = await asyncio.gather(
                product_task, pdf_task, faq_task
            )

            return {
                'products': products,
                'documents': pdfs,
                'faqs': faqs
            }

        except Exception as e:
            logger.error(f"Error searching sources: {e}")
            return {'products': [], 'documents': [], 'faqs': []}

    def build_context(
        self,
        search_results: Dict[str, List[Dict[str, Any]]],
        chat_history: List[Dict[str, Any]]
    ) -> str:
        """Build context for the model from search results and chat history"""
        context_parts = []

        # Add relevant products
        if search_results.get('products'):
            products = search_results['products'][:2]  # Limit to top 2 products
            for product in products:
                context_parts.append(
                    f"Produkt: {product['Name']}\n"
                    f"Beschreibung: {product['Description']}\n"
                    f"Preis: {product['Price']}€\n"
                    f"Kategorie: {product['ProductCategory']}"
                )

        # Add relevant PDF content
        if search_results.get('documents'):
            docs = search_results['documents'][:2]
            for doc in docs:
                context_parts.append(
                    f"Aus Dokument '{doc['source']}' (Seite {doc['page']}):\n"
                    f"{doc['text']}"
                )

        # Add relevant FAQs
        if search_results.get('faqs'):
            faqs = search_results['faqs'][:2]
            for faq in faqs:
                context_parts.append(
                    f"FAQ:\n"
                    f"Frage: {faq['question']}\n"
                    f"Antwort: {faq['answer']}"
                )

        # Add recent chat history
        if chat_history:
            recent_history = chat_history[-3:]  # Last 3 interactions
            history_text = "\n".join(
                f"User: {h['user_input']}\nAssistant: {h['response']}"
                for h in recent_history
            )
            context_parts.append(f"Letzte Interaktionen:\n{history_text}")

        return "\n\n".join(context_parts)

    async def generate_response(
        self,
        prompt: str,
        max_length: int = 1000
    ) -> str:
        """Generate response using the language model"""
        try:
            inputs = self.tokenizer(
                prompt,
                return_tensors="pt",
                truncation=True,
                max_length=4096
            ).to(settings.DEVICE)

            # Note: max_length counts prompt tokens plus generated tokens;
            # for long prompts, max_new_tokens may be the safer cap.
            outputs = self.model.generate(
                **inputs,
                max_length=max_length,
                num_return_sequences=1,
                temperature=0.7,
                top_p=0.9,
                do_sample=True,
                no_repeat_ngram_size=3,
                early_stopping=True
            )

            response = self.tokenizer.decode(
                outputs[0],
                skip_special_tokens=True
            )

            return response.strip()

        except Exception as e:
            logger.error(f"Error generating response: {e}")
            raise

    async def chat(
        self,
        user_input: str,
        session_id: str,
        max_length: int = 1000
    ) -> Tuple[str, List[Dict[str, Any]]]:
        """Main chat method that coordinates the entire conversation flow"""
        try:
            # Get chat history
            chat_history = self.conversation_manager.get_history(session_id)

            # Search all sources
            search_results = await self.search_all_sources(user_input)

            # Build context
            context = self.build_context(search_results, chat_history)

            # Create prompt
            prompt = (
                f"Context:\n{context}\n\n"
                f"User: {user_input}\n"
                "Assistant:"
            )

            # Generate response
            response = await self.generate_response(prompt, max_length)

            # Store interaction
            self.conversation_manager.add_interaction(
                session_id,
                user_input,
                response,
                {'search_results': search_results}
            )

            return response, search_results

        except Exception as e:
            logger.error(f"Error in chat: {e}")
            raise
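
For reference, a minimal usage sketch for the new service follows. It is not part of this commit: the import paths and constructor calls for ModelService, DataService, PDFService and FAQService are assumptions about the rest of the repository and would need to be adjusted to the project's actual service modules. It only illustrates how ChatService.chat is intended to be driven from async code.

# example_chat_usage.py -- illustrative sketch only, not part of this commit.
# The four service imports below are assumed names for the rest of the repo;
# replace them with the project's real model/data/PDF/FAQ service classes.
import asyncio

from services.chat_service import ChatService
from services.model_service import ModelService   # assumed module/class name
from services.data_service import DataService     # assumed module/class name
from services.pdf_service import PDFService       # assumed module/class name
from services.faq_service import FAQService       # assumed module/class name


async def main() -> None:
    # ChatService only needs objects exposing the attributes and methods it
    # uses: model_service.model / .tokenizer, data_service.search(),
    # pdf_service.search() and faq_service.search_faqs().
    chat_service = ChatService(
        model_service=ModelService(),
        data_service=DataService(),
        pdf_service=PDFService(),
        faq_service=FAQService(),
    )

    # One full round trip: retrieve context, generate a reply, store history.
    response, sources = await chat_service.chat(
        user_input="Welche Produkte habt ihr für die Gartenpflege?",  # German query, matching the shop data
        session_id="demo-session",
    )

    print(response)
    print(
        f"Context used: {len(sources['products'])} products, "
        f"{len(sources['documents'])} document snippets, "
        f"{len(sources['faqs'])} FAQs"
    )


if __name__ == "__main__":
    asyncio.run(main())

Because chat() returns both the generated reply and the raw search results, a caller can surface the retrieved products, document snippets and FAQs alongside the answer.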