bupa1018 committed on
Commit
1ed0495
·
verified ·
1 Parent(s): 936d603

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -24
app.py CHANGED
@@ -27,41 +27,40 @@ hf_api = HfApi()
27
  LLM_MODEL_NAME = config["llm_model_name"]
28
  LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
29
 
30
- def initialize():
31
- global kadiAPY_ragchain
32
 
33
- vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
34
- llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
 
 
 
35
 
36
- kadiAPY_ragchain = KadiApyRagchain(llm, vectorstore)
 
37
 
38
- initialize()
 
 
 
39
 
 
40
 
41
 
42
- def bot_kadi(chat_history):
43
- user_query = chat_history[-1][0]
44
- response = kadiAPY_ragchain.process_query(user_query, chat_history)
45
- chat_history[-1] = (user_query, response)
46
-
47
- return chat_history
48
-
49
-
50
- import gradio as gr
51
-
52
  def add_text_to_chat_history(chat_history, user_input):
53
-
54
  chat_history = chat_history + [(user_input, None)]
55
  return chat_history, ""
56
 
57
-
58
  def show_history(chat_history):
59
  return chat_history
60
-
 
61
  def reset_all():
62
  return [], "", ""
63
-
 
64
  def main():
 
 
65
  with gr.Blocks() as demo:
66
  gr.Markdown("## KadiAPY - AI Coding-Assistant")
67
  gr.Markdown("AI assistant for KadiAPY based on RAG architecture powered by LLM")
@@ -95,10 +94,10 @@ def main():
95
  )
96
 
97
  # Use the state to persist chat history between interactions
98
- user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history,[chat_history], [chatbot])\
99
- .then(bot_kadi, [chat_history], [chatbot])
100
- submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history,[chat_history], [chatbot])\
101
- .then(bot_kadi, [chat_history], [chatbot])
102
  clear_btn.click(
103
  reset_all,
104
  None,
@@ -107,5 +106,6 @@ def main():
107
  )
108
  demo.launch()
109
 
 
110
  if __name__ == "__main__":
111
  main()
 
27
  LLM_MODEL_NAME = config["llm_model_name"]
28
  LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
29
 
 
 
30
 
31
class KadiBot:
    """Holds the RAG chain and answers chat queries against it."""

    def __init__(self):
        """Build the vector store and LLM, then wire them into the RAG chain."""
        # Embedding-backed Chroma store and Groq-hosted LLM come from
        # project-level factory helpers configured at module scope.
        store = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
        model = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
        # NOTE: attribute name is relied upon by bot_kadi below.
        self.kadiAPY_ragchain = KadiApyRagchain(model, store)

    def bot_kadi(self, chat_history):
        """Answer the newest user message and write the reply into the history.

        chat_history is a list of (user, bot) pairs; the last entry's bot slot
        is presumably None until this fills it in — the mutated list is returned.
        """
        query = chat_history[-1][0]
        answer = self.kadiAPY_ragchain.process_query(query, chat_history)
        chat_history[-1] = (query, answer)
        return chat_history
46
 
47
 
 
 
 
 
 
 
 
 
 
 
48
def add_text_to_chat_history(chat_history, user_input):
    """Append the user's message (reply pending) and clear the input box.

    Returns a NEW history list — the caller's list is left untouched —
    plus an empty string used to reset the Gradio textbox.
    """
    return [*chat_history, (user_input, None)], ""
51
 
52
+
53
def show_history(chat_history):
    """Pass the stored chat history through unchanged for Gradio to render."""
    return chat_history
55
+
56
+
57
def reset_all():
    """Return cleared values: empty history plus blanks for both text widgets."""
    cleared_history = []
    return cleared_history, "", ""
59
+
60
+
61
  def main():
62
+ kadi_bot = KadiBot() # Initialize the KadiBot class
63
+
64
  with gr.Blocks() as demo:
65
  gr.Markdown("## KadiAPY - AI Coding-Assistant")
66
  gr.Markdown("AI assistant for KadiAPY based on RAG architecture powered by LLM")
 
94
  )
95
 
96
  # Use the state to persist chat history between interactions
97
+ user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history, [chat_history], [chatbot])\
98
+ .then(kadi_bot.bot_kadi, [chat_history], [chatbot])
99
+ submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history, [chat_history], [chatbot])\
100
+ .then(kadi_bot.bot_kadi, [chat_history], [chatbot])
101
  clear_btn.click(
102
  reset_all,
103
  None,
 
106
  )
107
  demo.launch()
108
 
109
+
110
  if __name__ == "__main__":
111
  main()