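"""Gradio app for the KadiAPY AI coding assistant.

Wires a Groq-hosted LLM and a Chroma vectorstore into a RAG chain
(KadiApyRagchain) and serves it through a chat interface.
"""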
import os
import json

import gradio as gr
from huggingface_hub import HfApi, login
from dotenv import load_dotenv

from llm import get_groq_llm
from vectorstore import get_chroma_vectorstore
from embeddings import get_SFR_Code_embedding_model
from kadiApy_ragchain import KadiApyRagchain

load_dotenv()

vectorstore_path = "data/vectorstore"

GROQ_API_KEY = os.environ["GROQ_API_KEY"]
HF_TOKEN = os.environ["HF_Token"]

with open("config.json", "r") as file:
    config = json.load(file)

login(HF_TOKEN)
hf_api = HfApi()

LLM_MODEL_NAME = config["llm_model_name"]
LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
class KadiBot:
    """Wraps the KadiApyRagchain and answers pending chat queries."""

    def __init__(self, llm, vectorstore):
        self.kadiAPY_ragchain = KadiApyRagchain(llm, vectorstore)

    def handle_chat(self, chat_history):
        # The last history entry holds the pending (question, None) pair;
        # fill in the answer produced by the RAG chain.
        if not chat_history:
            return chat_history
        user_query = chat_history[-1][0]
        response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
        chat_history[-1] = (user_query, response)
        return chat_history


def add_text_to_chat_history(chat_history, user_input):
    # Append the user input as a pending pair and clear the textbox.
    chat_history = chat_history + [(user_input, None)]
    return chat_history, ""


def show_history(chat_history):
    return chat_history


def clear_history(history):
    return []
def main():
    vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
    # Model name and temperature come from config.json.
    llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
    kadi_bot = KadiBot(llm, vectorstore)
    with gr.Blocks() as demo:
        gr.Markdown("## KadiAPY - AI Coding-Assistant")
        gr.Markdown("AI coding assistant for KadiAPY, based on a RAG architecture and powered by an LLM.")

        # Create a state for session management
        chat_history = gr.State([])

        with gr.Tab("KadiAPY - AI Assistant"):
            with gr.Row():
                with gr.Column(scale=10):
                    chatbot = gr.Chatbot([], elem_id="chatbot", label="Kadi Bot", bubble_full_width=False, show_copy_button=True, height=600)
                    user_txt = gr.Textbox(label="Question", placeholder="Type in your question and press Enter or click Submit")

                    with gr.Row():
                        with gr.Column(scale=1):
                            submit_btn = gr.Button("Submit", variant="primary")
                        with gr.Column(scale=1):
                            clear_input_btn = gr.Button("Clear Input", variant="stop")
                        with gr.Column(scale=1):
                            clear_chat_btn = gr.Button("Reset Chat", variant="stop")  # New button to clear chat history

                    gr.Examples(
                        examples=[
                            "Write me a python script that can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
                            "I need a method to upload a file to a record. The id of the record is 3",
                        ],
                        inputs=user_txt,
                        outputs=chatbot,
                        fn=add_text_to_chat_history,
                        label="Try asking...",
                        cache_examples=False,
                        examples_per_page=3,
                    )

        # Use the state to persist chat history between interactions:
        # append the question, show it, then let the bot fill in the answer.
        user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]) \
            .then(show_history, [chat_history], [chatbot]) \
            .then(kadi_bot.handle_chat, [chat_history], [chatbot])
        submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]) \
            .then(show_history, [chat_history], [chatbot]) \
            .then(kadi_bot.handle_chat, [chat_history], [chatbot])
        clear_input_btn.click(lambda: "", [], [user_txt])
        clear_chat_btn.click(lambda: ([], []), [], [chat_history, chatbot])
    demo.launch()


if __name__ == "__main__":
    main()