bupa1018 committed on
Commit
0b12717
·
verified ·
1 Parent(s): 1ed0495

Update kadiApy_ragchain.py

Browse files
Files changed (1) hide show
  1. kadiApy_ragchain.py +7 -26
kadiApy_ragchain.py CHANGED
@@ -2,23 +2,21 @@ class KadiApyRagchain:
2
 
3
    def __init__(self, llm, vector_store):
        """
        Initialize the RAGChain with an LLM instance, a vector store, and a conversation history.

        Args:
            llm: Language model instance used to generate responses.
            vector_store: Retrieval backend queried for relevant contexts.
        """
        self.llm = llm
        self.vector_store = vector_store
        # In-memory history of {"query": ..., "response": ...} entries,
        # maintained by add_to_conversation().
        self.conversation = []
10
 
11
 
12
  def process_query(self, query, chat_history):
13
  """
14
  Process a user query, handle history, retrieve contexts, and generate a response.
15
  """
16
- # Add the user query to the conversation history
17
- self.add_to_conversation(user_query=query)
18
 
19
  # Rewrite query
20
  rewritten_query = self.rewrite_query(query)
21
- print("RRRRRRRRRREEEEEEEEEEWRITEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE: ",rewritten_query)
22
  # Predict library usage
23
  print("Start prediction:")
24
  code_library_usage_prediction = self.predict_library_usage(query)
@@ -26,10 +24,10 @@ class KadiApyRagchain:
26
  # Retrieve contexts
27
  print("Start retrieving:")
28
  #doc_contexts = self.retrieve_contexts(query, k=2, filter={"dataset_category": "kadi_apy_docs"})
29
- #code_contexts = self.retrieve_contexts(rewritten_query, k=3, filter={"usage": code_library_usage_prediction})
30
- code_contexts = self.retrieve_contexts(query, k=2)
31
 
32
- query_formulated_question= self.formulate_question(code_contexts)
33
  print("question:", query_formulated_question)
34
  doc_contexts = self.retrieve_contexts(query_formulated_question, k=2, filter={"dataset_category": "kadi_apy_docs"})
35
 
@@ -50,26 +48,9 @@ class KadiApyRagchain:
50
  print("Start generatin repsonsse:")
51
  response = self.generate_response(query, chat_history, formatted_doc_contexts, formatted_code_contexts)
52
  #response = self.generate_response(query, chat_history, formatted_contexts)
53
-
54
- # Add the response to the existing query in the conversation history
55
- #self.add_to_conversation(llm_response=response)
56
-
57
  return response
58
 
59
- #not supported yet, need session handling in app.py
60
- def add_to_conversation(self, user_query=None, llm_response=None):
61
- """
62
- Add either the user's query, the LLM's response, or both to the conversation history.
63
- """
64
- if user_query and llm_response:
65
- # Add a full query-response pair
66
- self.conversation.append({"query": user_query, "response": llm_response})
67
- elif user_query:
68
- # Add a query with no response yet
69
- self.conversation.append({"query": user_query, "response": None})
70
- elif llm_response and self.conversation:
71
- # Add a response to the most recent query
72
- self.conversation[-1]["response"] = llm_response
73
 
74
  def get_history(self):
75
  """
 
2
 
3
  def __init__(self, llm, vector_store):
4
  """
5
+ Initialize the RAGChain with an LLM instance, a vector store
6
  """
7
  self.llm = llm
8
  self.vector_store = vector_store
 
9
 
10
 
11
  def process_query(self, query, chat_history):
12
  """
13
  Process a user query, handle history, retrieve contexts, and generate a response.
14
  """
15
+
 
16
 
17
  # Rewrite query
18
  rewritten_query = self.rewrite_query(query)
19
+ print("Rewritten Query: ",rewritten_query)
20
  # Predict library usage
21
  print("Start prediction:")
22
  code_library_usage_prediction = self.predict_library_usage(query)
 
24
  # Retrieve contexts
25
  print("Start retrieving:")
26
  #doc_contexts = self.retrieve_contexts(query, k=2, filter={"dataset_category": "kadi_apy_docs"})
27
+ code_contexts = self.retrieve_contexts(rewritten_query, k=3, filter={"usage": code_library_usage_prediction})
28
+ #code_contexts = self.retrieve_contexts(query, k=2)
29
 
30
+ #query_formulated_question= self.formulate_question(code_contexts)
31
  print("question:", query_formulated_question)
32
  doc_contexts = self.retrieve_contexts(query_formulated_question, k=2, filter={"dataset_category": "kadi_apy_docs"})
33
 
 
48
  print("Start generatin repsonsse:")
49
  response = self.generate_response(query, chat_history, formatted_doc_contexts, formatted_code_contexts)
50
  #response = self.generate_response(query, chat_history, formatted_contexts)
 
 
 
 
51
  return response
52
 
53
+
 
 
 
 
 
 
 
 
 
 
 
 
 
54
 
55
  def get_history(self):
56
  """