bupa1018 committed on
Commit
9a381ee
·
verified ·
1 Parent(s): 1ef39ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -24
app.py CHANGED
@@ -13,7 +13,6 @@ from kadiApy_ragchain import KadiApyRagchain
13
  load_dotenv()
14
 
15
  vectorstore_path = "data/vectorstore"
16
-
17
  GROQ_API_KEY = os.environ["GROQ_API_KEY"]
18
  HF_TOKEN = os.environ["HF_Token"]
19
 
@@ -21,41 +20,47 @@ with open("config.json", "r") as file:
21
  config = json.load(file)
22
 
23
  login(HF_TOKEN)
24
- hf_api = HfApi()
25
 
 
26
  LLM_MODEL_NAME = config["llm_model_name"]
27
  LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
28
 
29
def initialize():
    """Assemble the RAG pipeline used by the bot.

    Loads the Chroma vectorstore (SFR code-embedding model) from
    ``vectorstore_path``, builds the Groq LLM from the module-level
    model name / temperature / API key, and wraps both in a
    KadiApyRagchain.
    """
    store = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
    language_model = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
    return KadiApyRagchain(language_model, store)
33
 
34
def bot_kadi(chat_history, kadiAPY_ragchain):
    """Answer the pending user turn via the RAG chain.

    The last entry of *chat_history* is expected to be
    ``(user_query, None)``; it is replaced in place with
    ``(user_query, response)`` and the (same) history list is returned.
    """
    pending_query = chat_history[-1][0]
    answer = kadiAPY_ragchain.process_query(pending_query, chat_history)
    chat_history[-1] = (pending_query, answer)
    return chat_history
 
 
 
 
 
 
39
 
40
 
41
- #gradio utils
42
  def add_text_to_chat_history(chat_history, user_input):
43
  chat_history = chat_history + [(user_input, None)]
44
  return chat_history, ""
45
 
 
46
  def show_history(chat_history):
47
  return chat_history
48
 
 
49
  def reset_all():
50
  return [], "", ""
51
 
 
52
  def main():
53
- kadiAPY_ragchain = initialize() # Initialize inside main()
 
54
 
55
  with gr.Blocks() as demo:
56
  gr.Markdown("## KadiAPY - AI Coding-Assistant")
57
  gr.Markdown("AI assistant for KadiAPY based on RAG architecture powered by LLM")
58
 
 
59
  chat_history = gr.State([])
60
 
61
  with gr.Tab("KadiAPY - AI Assistant"):
@@ -63,13 +68,13 @@ def main():
63
  with gr.Column(scale=10):
64
  chatbot = gr.Chatbot([], elem_id="chatbot", label="Kadi Bot", bubble_full_width=False, show_copy_button=True, height=600)
65
  user_txt = gr.Textbox(label="Question", placeholder="Type in your question and press Enter or click Submit")
66
-
67
  with gr.Row():
68
  with gr.Column(scale=1):
69
  submit_btn = gr.Button("Submit", variant="primary")
70
  with gr.Column(scale=1):
71
  clear_btn = gr.Button("Clear", variant="stop")
72
-
73
  gr.Examples(
74
  examples=[
75
  "Write me a python script with which can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
@@ -83,22 +88,19 @@ def main():
83
  examples_per_page=3,
84
  )
85
 
86
- user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt])\
87
- .then(show_history, [chat_history], [chatbot])\
88
- .then(bot_kadi, [chat_history, kadiAPY_ragchain], [chatbot])
89
-
90
- submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt])\
91
- .then(show_history, [chat_history], [chatbot])\
92
- .then(bot_kadi, [chat_history, kadiAPY_ragchain], [chatbot])
93
-
94
  clear_btn.click(
95
  reset_all,
96
  None,
97
  [chat_history, chatbot, user_txt],
98
  queue=False
99
  )
100
-
101
  demo.launch()
102
 
 
103
  if __name__ == "__main__":
104
  main()
 
13
  load_dotenv()
14
 
15
  vectorstore_path = "data/vectorstore"
 
16
  GROQ_API_KEY = os.environ["GROQ_API_KEY"]
17
  HF_TOKEN = os.environ["HF_Token"]
18
 
 
20
  config = json.load(file)
21
 
22
  login(HF_TOKEN)
 
23
 
24
+ # Access the values
25
  LLM_MODEL_NAME = config["llm_model_name"]
26
  LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
27
 
 
 
 
 
28
 
29
+ # A class to encapsulate the bot logic
30
# A class to encapsulate the bot logic
class KadiBot:
    """Wires the Chroma vectorstore and the Groq LLM into a
    KadiApyRagchain and answers chat turns for the gradio UI.

    Parameters
    ----------
    hf_token : str
        Hugging Face token. NOTE(review): accepted for interface
        stability but not used here — auth happens via login() at
        module level; confirm before removing.
    groq_api_key : str
        API key forwarded to the Groq LLM factory.
    config : dict
        Must contain "llm_model_name" and "llm_model_temperature".
    vectorstore_path : str
        Filesystem path of the persisted Chroma vectorstore.
    """

    def __init__(self, hf_token: str, groq_api_key: str, config: dict, vectorstore_path: str):
        self.vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
        self.llm = get_groq_llm(config["llm_model_name"], float(config["llm_model_temperature"]), groq_api_key)
        self.kadiAPY_ragchain = KadiApyRagchain(self.llm, self.vectorstore)

    def process_query(self, chat_history):
        """Answer the pending user turn and return the updated history.

        Bug fix: gradio wires this callback with a single input
        ([chat_history]), but the previous signature also required a
        separate user_query argument, so every submit/enter raised
        TypeError. The query is now taken from the last history entry
        — chat_history[-1] must be (user_query, None) — mirroring the
        old bot_kadi() behaviour; that entry is replaced in place with
        (user_query, response).
        """
        user_query = chat_history[-1][0]
        response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
        chat_history[-1] = (user_query, response)
        return chat_history
40
 
41
 
 
42
def add_text_to_chat_history(chat_history, user_input):
    """Append the user's message as a pending (text, None) pair.

    Returns a new history list plus "" so the bound textbox is cleared;
    the input list itself is not mutated.
    """
    return chat_history + [(user_input, None)], ""
45
 
46
+
47
def show_history(chat_history):
    """Identity pass-through: mirror the history state into the chatbot widget."""
    return chat_history
49
 
50
+
51
def reset_all():
    """Clear everything: empty history state, empty chatbot, empty textbox."""
    return ([], "", "")
53
 
54
+
55
  def main():
56
+ # Initialize the KadiBot
57
+ kadi_bot = KadiBot(HF_TOKEN, GROQ_API_KEY, config, vectorstore_path)
58
 
59
  with gr.Blocks() as demo:
60
  gr.Markdown("## KadiAPY - AI Coding-Assistant")
61
  gr.Markdown("AI assistant for KadiAPY based on RAG architecture powered by LLM")
62
 
63
+ # Create a state for session management
64
  chat_history = gr.State([])
65
 
66
  with gr.Tab("KadiAPY - AI Assistant"):
 
68
  with gr.Column(scale=10):
69
  chatbot = gr.Chatbot([], elem_id="chatbot", label="Kadi Bot", bubble_full_width=False, show_copy_button=True, height=600)
70
  user_txt = gr.Textbox(label="Question", placeholder="Type in your question and press Enter or click Submit")
71
+
72
  with gr.Row():
73
  with gr.Column(scale=1):
74
  submit_btn = gr.Button("Submit", variant="primary")
75
  with gr.Column(scale=1):
76
  clear_btn = gr.Button("Clear", variant="stop")
77
+
78
  gr.Examples(
79
  examples=[
80
  "Write me a python script with which can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
 
88
  examples_per_page=3,
89
  )
90
 
91
+ # Use the state to persist chat history between interactions
92
+ user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history, [chat_history], [chatbot])\
93
+ .then(kadi_bot.process_query, [chat_history], [chatbot])
94
+ submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]).then(show_history, [chat_history], [chatbot])\
95
+ .then(kadi_bot.process_query, [chat_history], [chatbot])
 
 
 
96
  clear_btn.click(
97
  reset_all,
98
  None,
99
  [chat_history, chatbot, user_txt],
100
  queue=False
101
  )
 
102
  demo.launch()
103
 
104
+
105
  if __name__ == "__main__":
106
  main()