Update app.py
app.py CHANGED
@@ -27,19 +27,14 @@ LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
 
 
 class KadiBot:
-    def __init__(self):
-        vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
-        llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
-
+    def __init__(self, llm, vectorstore):
         self.kadiAPY_ragchain = KadiApyRagchain(llm, vectorstore)
 
     def handle_chat(self, chat_history):
         if not chat_history:
             return chat_history
 
-        # Get the last user query from the chat history
         user_query = chat_history[-1][0]
-
         response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
         chat_history[-1] = (user_query, response)
 
@@ -47,6 +42,7 @@ class KadiBot:
 
 
 
+
 def add_text_to_chat_history(chat_history, user_input):
     chat_history = chat_history + [(user_input, None)]
     return chat_history, ""
@@ -61,8 +57,11 @@ def reset_all():
 
 
 def main():
-
-
+    vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
+    llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
+
+    kadi_bot = KadiBot(llm, vectorstore)
+
     with gr.Blocks() as demo:
         gr.Markdown("## KadiAPY - AI Coding-Assistant")
         gr.Markdown("AI Coding-Assistnat for KadiAPY based on RAG architecture powered by LLM")
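For context, this commit moves construction of the vector store and LLM out of KadiBot.__init__ and into main(), which then injects them into the bot. Below is a minimal, self-contained sketch of that wiring; the Stub* classes and their methods are illustrative assumptions standing in for the project's real get_groq_llm / get_chroma_vectorstore helpers and KadiApyRagchain, not the actual implementations.

# Minimal sketch of the dependency-injection pattern introduced by this commit.
# StubLLM, StubVectorstore and StubRagchain are placeholders (assumptions) for
# the real objects built by get_groq_llm(...), get_chroma_vectorstore(...) and
# KadiApyRagchain(llm, vectorstore) in app.py.

class StubLLM:
    def invoke(self, prompt):
        # Placeholder for the Groq-backed LLM call.
        return f"answer to: {prompt}"


class StubVectorstore:
    def similarity_search(self, query, k=3):
        # Placeholder for Chroma retrieval; returns no documents here.
        return []


class StubRagchain:
    """Stands in for KadiApyRagchain: retrieve context, then ask the LLM."""

    def __init__(self, llm, vectorstore):
        self.llm = llm
        self.vectorstore = vectorstore

    def process_query(self, query, chat_history):
        self.vectorstore.similarity_search(query)
        return self.llm.invoke(query)


class KadiBot:
    # After this commit the bot receives its dependencies instead of building them.
    def __init__(self, llm, vectorstore):
        self.kadiAPY_ragchain = StubRagchain(llm, vectorstore)

    def handle_chat(self, chat_history):
        if not chat_history:
            return chat_history
        user_query = chat_history[-1][0]
        response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
        chat_history[-1] = (user_query, response)
        return chat_history


def main():
    # Construction now happens once here, mirroring the new main() in app.py.
    llm = StubLLM()
    vectorstore = StubVectorstore()
    kadi_bot = KadiBot(llm, vectorstore)
    print(kadi_bot.handle_chat([("How do I create a record with kadi-apy?", None)]))


if __name__ == "__main__":
    main()

Constructor injection keeps the heavyweight embedding-model and LLM setup in one place and makes KadiBot easy to exercise with stand-in objects, as above.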