Futuresony committed
Commit a27ed71 · verified · 1 Parent(s): 9cd0bee

Update app.py

Files changed (1)
  1. app.py +6 -3
app.py CHANGED
@@ -169,8 +169,7 @@ def load_business_info():
         print(traceback.format_exc())
         business_info_available = False
 
-# --- Business Info Retrieval (RAG) ---
-def retrieve_business_info(query: str, top_n: int = 3) -> list:
+def retrieve_business_info(query: str, top_n: int = 2) -> list: # Reduced top_n
     """
     Retrieves relevant business information from loaded data based on a query.
     """
@@ -182,7 +181,11 @@ def retrieve_business_info(query: str, top_n: int = 3) -> list:
     try:
         query_embedding = embedder.encode(query, convert_to_tensor=True)
         cosine_scores = util.cos_sim(query_embedding, embeddings)[0]
-        top_results_indices = torch.topk(cosine_scores, k=min(top_n, len(data)))[1].tolist()
+        # Get the top N indices based on cosine similarity
+        # Make sure k does not exceed the number of available descriptions
+        top_results_indices = torch.topk(cosine_scores, k=min(top_n, len(descriptions_for_embedding)))[1].tolist()
+
+        # Retrieve the actual data entries corresponding to the top indices
        top_results = [data[i] for i in top_results_indices]
 
        if reranker is not None and top_results:
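
Note on the change: the default top_n drops from 3 to 2, and torch.topk is now bounded by len(descriptions_for_embedding) (the number of descriptions that were actually embedded) instead of len(data). Below is a minimal, self-contained sketch of the retrieval step as it reads after this commit; the sample data, the model name, and everything outside the function body are illustrative assumptions, not code from app.py (only embedder, embeddings, data, and descriptions_for_embedding are names taken from the diff).

# Sketch only: assumes a sentence-transformers embedder and parallel
# data / descriptions_for_embedding lists; sample entries are made up.
import torch
from sentence_transformers import SentenceTransformer, util

embedder = SentenceTransformer("all-MiniLM-L6-v2")  # illustrative model choice

data = [
    {"topic": "hours", "description": "Open Monday to Friday, 8am to 5pm."},
    {"topic": "location", "description": "The head office is in Dar es Salaam."},
]
descriptions_for_embedding = [d["description"] for d in data]
embeddings = embedder.encode(descriptions_for_embedding, convert_to_tensor=True)

def retrieve_business_info(query: str, top_n: int = 2) -> list:
    """Return the top_n data entries whose descriptions best match the query."""
    query_embedding = embedder.encode(query, convert_to_tensor=True)
    cosine_scores = util.cos_sim(query_embedding, embeddings)[0]
    # k must not exceed the number of embedded descriptions; this min(...)
    # guard is the point of the change in this commit.
    k = min(top_n, len(descriptions_for_embedding))
    top_results_indices = torch.topk(cosine_scores, k=k)[1].tolist()
    return [data[i] for i in top_results_indices]

print(retrieve_business_info("What time do you open?"))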