import asyncio
import json
import re
from typing import List, Dict

import faiss
import httpx
import numpy as np
import pandas as pd
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.websockets import WebSocket
from transformers import pipeline

from project.bot.models import MessagePair
from project.config import settings
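

# SearchBot streams retrieval-augmented answers over a WebSocket: it embeds the
# user query, retrieves nearby chunks from a FAISS index, streams a GPT-3.5-turbo
# completion back to the client, and enriches recognised places in the final
# answer with Google Places data.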
class SearchBot:
    chat_history = []

    # is_unknown = False
    # unknown_counter = 0

    def __init__(self, memory=None):
        if memory is None:
            memory = []
        self.chat_history = memory

    @staticmethod
    def _cls_pooling(model_output):
        return model_output.last_hidden_state[:, 0]
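
    # Look up `search_word` with the Google Places "Text Search (New)" API and,
    # when a match is found, wrap it in an HTML link plus a tooltip containing
    # the address, website, phone number and (if available) a photo.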
    @staticmethod
    async def enrich_information_from_google(search_word: str) -> str:
        url = "https://places.googleapis.com/v1/places:searchText"
        headers = {
            "Content-Type": "application/json",
            "X-Goog-Api-Key": settings.GOOGLE_PLACES_API_KEY,
            "X-Goog-FieldMask": "places.shortFormattedAddress,places.websiteUri,places.internationalPhoneNumber,"
                                "places.googleMapsUri,places.photos"
        }
        data = {
            "textQuery": f"{search_word} in Javea",
            "languageCode": "nl",
            "maxResultCount": 1,
        }
        async with httpx.AsyncClient() as client:
            response = await client.post(url, headers=headers, json=data)
        place_response = response.json()
        places = place_response.get('places')
        if not places:
            # No match found: leave the word unchanged.
            return search_word
        place_response = places[0]
        photo_name = place_response.get('photos')
        photo_uri = None
        if photo_name:
            async with httpx.AsyncClient() as client:
                # skipHttpRedirect=true asks the photo media endpoint to return a JSON
                # body containing photoUri instead of redirecting to the image bytes
                # (assumption: the original request relied on this behaviour).
                response = await client.get(
                    f'https://places.googleapis.com/v1/{photo_name[0]["name"]}/media'
                    f'?maxWidthPx=350&skipHttpRedirect=true&key={settings.GOOGLE_PLACES_API_KEY}')
                photo_response = response.json()
                photo_uri = photo_response.get('photoUri')
        google_maps_uri = place_response.get('googleMapsUri')
        phone_number = place_response.get('internationalPhoneNumber')
        formatted_address = place_response.get('shortFormattedAddress')
        website_uri = place_response.get('websiteUri')
        if not google_maps_uri:
            return search_word
        enriched_word = f'<a class="extraDataLink" href="{google_maps_uri}" target="_blank">{search_word}</a><div class="tooltip-elem">'
        if photo_uri:
            enriched_word += f'<img src="{photo_uri}" alt="Image" class="tooltip-img">'
        if formatted_address:
            enriched_word += f'<p><a href="{google_maps_uri}" target="_blank">{formatted_address}</a></p>'
        if website_uri:
            enriched_word += f'<p><a href="{website_uri}">Website</a></p>'
        if phone_number:
            phone_str = re.sub(r' ', '', phone_number)
            enriched_word += f'<p><a href="tel:{phone_str}">Phone number</a></p>'
        enriched_word += '</div>'
        return enriched_word
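
    # Run NER over the last assistant message and replace recognised locations,
    # organisations and miscellaneous entities (except "Javea") with their
    # Google Places markup. The result is prefixed with "ENRICHED:" so the
    # frontend can tell it apart from the streamed chunks.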
    async def analyze_full_response(self) -> str:
        assistant_message = self.chat_history.pop()['content']
        nlp = pipeline("ner", model=settings.NLP_MODEL, tokenizer=settings.NLP_TOKENIZER,
                       aggregation_strategy="simple")
        ner_result = nlp(assistant_message)
        analyzed_assistant_message = assistant_message
        for entity in ner_result:
            if entity['entity_group'] in ("LOC", "ORG", "MISC") and entity['word'] != "Javea":
                enriched_information = await self.enrich_information_from_google(entity['word'])
                analyzed_assistant_message = analyzed_assistant_message.replace(entity['word'], enriched_information, 1)
        return "ENRICHED:" + analyzed_assistant_message
    async def _convert_to_embeddings(self, text_list):
        encoded_input = settings.INFO_TOKENIZER(
            text_list, padding=True, truncation=True, return_tensors="pt"
        )
        encoded_input = {k: v.to(settings.device) for k, v in encoded_input.items()}
        model_output = settings.INFO_MODEL(**encoded_input)
        return self._cls_pooling(model_output).cpu().detach().numpy().astype('float32')
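
    # Range-search the FAISS index around the query embedding, take the three
    # closest product chunks and keep only those containing a "Comments:" section.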
    @staticmethod
    async def _get_context_data(user_query: np.ndarray) -> list[dict]:
        radius = 5
        _, distances, indices = settings.FAISS_INDEX.range_search(user_query, radius)
        indices_distances_df = pd.DataFrame({'index': indices, 'distance': distances})
        filtered_data_df = settings.products_dataset.iloc[indices].copy()
        filtered_data_df.loc[:, 'distance'] = indices_distances_df['distance'].values
        sorted_data_df: pd.DataFrame = filtered_data_df.sort_values(by='distance').reset_index(drop=True)
        sorted_data_df = sorted_data_df.drop('distance', axis=1)
        data = sorted_data_df.head(3).to_dict(orient='records')
        cleaned_data = []
        for chunk in data:
            if "Comments:" in chunk['chunks']:
                cleaned_data.append(chunk)
        return cleaned_data
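
    # Join the retrieved chunks into a numbered context string for the prompt.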
    @staticmethod
    async def create_context_str(context: List[Dict]) -> str:
        context_str = ''
        for i, chunk in enumerate(context):
            context_str += f'{i + 1}) {chunk["chunks"]}\n'
        return context_str
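
    # Core RAG loop: add the retrieved context and the user query to the chat
    # history, stream the OpenAI completion (yielding the accumulated text so
    # the client can re-render the growing answer), then store the exchange.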
    async def _rag(self, context: List[Dict], query: str, session: AsyncSession, country: str):
        if context:
            context_str = await self.create_context_str(context)
            assistant_message = {"role": 'assistant', "content": context_str}
            self.chat_history.append(assistant_message)
            content = settings.PROMPT
        else:
            content = settings.EMPTY_PROMPT
        user_message = {"role": 'user', "content": query}
        self.chat_history.append(user_message)
        messages = [
            {
                'role': 'system',
                'content': content
            },
        ]
        messages = messages + self.chat_history
        stream = await settings.OPENAI_CLIENT.chat.completions.create(
            messages=messages,
            temperature=0.1,
            n=1,
            model="gpt-3.5-turbo",
            stream=True
        )
        response = ''
        async for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                chunk_content = chunk.choices[0].delta.content
                response += chunk_content
                # Yield the accumulated text so the client can replace the
                # message in place as it grows.
                yield response
                await asyncio.sleep(0.02)
        assistant_message = {"role": 'assistant', "content": response}
        self.chat_history.append(assistant_message)
        try:
            # Queue the exchange for persistence; committing is assumed to
            # happen elsewhere (e.g. in emergency_db_saving or request teardown).
            session.add(MessagePair(user_message=query, bot_response=response, country=country))
        except Exception as e:
            print(e)
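
    # WebSocket entry point: embed the query, fetch context, stream the answer
    # and finally send the entity-enriched version; on failure, flush whatever
    # is pending to the database.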
    async def ask_and_send(self, data: Dict, websocket: WebSocket, session: AsyncSession):
        query = data['query']
        country = data['country']
        transformed_query = await self._convert_to_embeddings(query)
        context = await self._get_context_data(transformed_query)
        try:
            async for chunk in self._rag(context, query, session, country):
                await websocket.send_text(chunk)
            analyzing = await self.analyze_full_response()
            await websocket.send_text(analyzing)
        except Exception:
            await self.emergency_db_saving(session)
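
    # Best-effort persistence when streaming fails: commit pending rows and
    # close the session.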
    @staticmethod
    async def emergency_db_saving(session: AsyncSession):
        await session.commit()
        await session.close()