bupa1018 committed
Commit 8785db1 · verified · 1 Parent(s): 7b7f2c8

Update app.py

Files changed (1): app.py (+9 -7)
app.py CHANGED
@@ -9,7 +9,6 @@ from vectorstore import get_chroma_vectorstore
 from embeddings import get_SFR_Code_embedding_model
 from kadiApy_ragchain import KadiApyRagchain
 
-# Load environment variables from .env file
 load_dotenv()
 
 vectorstore_path = "data/vectorstore"
@@ -23,26 +22,29 @@ with open("config.json", "r") as file:
 login(HF_TOKEN)
 hf_api = HfApi()
 
-# Access the values
 LLM_MODEL_NAME = config["llm_model_name"]
 LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
 
 
 class KadiBot:
     def __init__(self):
-        # Initialize vector store and language model
         vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
         llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
 
-        # Initialize RAG chain
         self.kadiAPY_ragchain = KadiApyRagchain(llm, vectorstore)
 
-    def bot_kadi(self, chat_history):
-        user_query = chat_history[-1][0]
+    def handle_chat(self, chat_history):
+        if not chat_history:
+            return chat_history
+
+        # Get the last user query from the chat history
+        user_query = chat_history[-1][0]
+
         response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
         chat_history[-1] = (user_query, response)
 
-        return chat_history
+        return chat_history
+
 
 
 def add_text_to_chat_history(chat_history, user_input):
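
Below is a minimal, hypothetical usage sketch of the new handle_chat method. It is not part of the commit; it assumes app.py is importable as a module, that KadiBot initializes as shown in the diff, and that chat_history is a list of (user_message, bot_response) tuples whose latest entry holds the pending user query.

# Hypothetical usage sketch of KadiBot.handle_chat (not part of the commit).
# Assumes app.py is importable and that a chat turn is a (user, bot) tuple.
from app import KadiBot

kadi_bot = KadiBot()

# The latest turn carries the user's query; handle_chat fills in the bot
# response for that turn via the RAG chain and returns the updated history.
chat_history = [("How do I create a record with kadi-apy?", None)]
chat_history = kadi_bot.handle_chat(chat_history)

print(chat_history[-1][1])  # the generated answer for the last turn

The added early return on an empty chat_history makes the method safe to call before any user turn has been appended, for example directly from a UI callback.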