Update app.py
app.py CHANGED
@@ -43,33 +43,28 @@ initialize()
 
 
 
-def bot_kadi(history):
-    user_query = history[-1][0]
-    response = kadiAPY_bot.process_query(user_query)
-    history[-1] = (user_query, response)
-
-    yield history
-
 
+def bot_kadi(history, session_state):
+    user_query = history[-1][0]
 
-
-
-
-
-    raise TypeError
-    return True
-
-def add_text(history, text):
-    history = history + [(text, None)]
-    yield history, ""
+    response = kadiAPY_bot.process_query({
+        "query": user_query,
+        "history": session_state["conversation"]  # Pass full conversation history
+    })
 
+    # Update the session history
+    history[-1] = (user_query, response)
+    session_state["conversation"].append({"query": user_query, "response": response})
 
-
+    yield history
 
+# Gradio utils with session state
 def main():
     with gr.Blocks() as demo:
         gr.Markdown("## KadiAPY - AI Coding-Assistant")
         gr.Markdown("AI assistant for KadiAPY based on RAG architecture powered by LLM")
+
+        session_state = gr.State({"conversation": []})
 
         with gr.Tab("KadiAPY - AI Assistant"):
             with gr.Row():
@@ -85,7 +80,7 @@ def main():
 
             gr.Examples(
                 examples=[
-                    "Write me a python script
+                    "Write me a python script which can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
                     "I need a method to upload a file to a record. The id of the record is 3",
                 ],
                 inputs=user_txt,
@@ -96,12 +91,18 @@ def main():
                 examples_per_page=3,
             )
 
-
-
-
+        # Bind input and button to modified bot_kadi
+        user_txt.submit(check_input_text, user_txt, None).success(add_text, [chatbot, user_txt], [chatbot, user_txt]).then(
+            bot_kadi, [chatbot, session_state], [chatbot]
+        )
+        submit_btn.click(check_input_text, user_txt, None).success(add_text, [chatbot, user_txt], [chatbot, user_txt]).then(
+            bot_kadi, [chatbot, session_state], [chatbot]
+        )
+        clear_btn.click(lambda: None, None, chatbot, queue=False).then(
+            lambda: {"conversation": []}, None, session_state, queue=False  # Clear session state
+        )
 
-    demo.launch()
+    demo.launch()
 
-
 if __name__ == "__main__":
     main()
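
For context, the pattern this commit introduces can be reduced to a minimal, self-contained sketch. This is an illustration, not the Space's actual code: the answer stub below merely stands in for kadiAPY_bot.process_query (whose real signature is not shown in this diff), and the submit button plus the check_input_text validation step are omitted because they add nothing to the state flow. The wiring itself matches the diff above: a gr.State holding a per-session conversation dict, events chained with .then, and a clear handler that resets both the chatbot and the state.

import gradio as gr

def answer(payload):
    # Stand-in for kadiAPY_bot.process_query: echoes the query and reports
    # how many earlier turns were passed along in the payload.
    return f"Echo: {payload['query']} ({len(payload['history'])} prior turns)"

def add_text(history, text):
    # Append the new user turn with an empty bot slot and clear the textbox.
    return history + [(text, None)], ""

def bot(history, session_state):
    user_query = history[-1][0]
    response = answer({"query": user_query, "history": session_state["conversation"]})
    history[-1] = (user_query, response)
    # The dict held by gr.State is mutated in place, so it does not need to be
    # returned as an output for the change to persist within the session.
    session_state["conversation"].append({"query": user_query, "response": response})
    return history

with gr.Blocks() as demo:
    # One copy of this dict is kept per browser session.
    session_state = gr.State({"conversation": []})
    chatbot = gr.Chatbot()
    user_txt = gr.Textbox()
    clear_btn = gr.Button("Clear")

    user_txt.submit(add_text, [chatbot, user_txt], [chatbot, user_txt]).then(
        bot, [chatbot, session_state], [chatbot]
    )
    clear_btn.click(lambda: None, None, chatbot, queue=False).then(
        lambda: {"conversation": []}, None, session_state, queue=False
    )

demo.launch()

Resetting the state by returning a fresh {"conversation": []} dict in the clear handler, rather than emptying the list in place, keeps the lambda side-effect free and matches what the commit does above.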