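"""Gradio app for the KadiAPY AI coding assistant.

Serves a chat UI backed by a RAG chain: a Chroma vector store with
SFR-Code embeddings for retrieval, and a Groq-hosted LLM for generation.
"""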
import os
import json
import gradio as gr
from huggingface_hub import HfApi, login
from dotenv import load_dotenv
from llm import get_groq_llm
from vectorstore import get_chroma_vectorstore
from embeddings import get_SFR_Code_embedding_model
from kadiApy_ragchain import KadiApyRagchain
# Load environment variables from .env file
load_dotenv()
vectorstore_path = "data/vectorstore"
GROQ_API_KEY = os.environ["GROQ_API_KEY"]
HF_TOKEN = os.environ["HF_Token"]
with open("config.json", "r") as file:
    config = json.load(file)
login(HF_TOKEN)
hf_api = HfApi()
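# Hub API client; instantiated here but not used further in this script.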
# Read the LLM settings from the config
LLM_MODEL_NAME = config["llm_model_name"]
LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
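# config.json is expected to provide at least these keys (values illustrative):
#   {"llm_model_name": "<groq model id>", "llm_model_temperature": "0.7"}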

class KadiBot:
    """Bundles the embedding model, vector store, Groq LLM, and RAG chain."""

    def __init__(self):
        # Initialize vector store and language model
        vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
        llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)

        # Initialize RAG chain
        self.kadiAPY_ragchain = KadiApyRagchain(llm, vectorstore)

    def bot_kadi(self, chat_history):
        # Answer the most recent user message and fill in the pending
        # (user_query, None) entry with the generated response.
        user_query = chat_history[-1][0]
        response = self.kadiAPY_ragchain.process_query(user_query, chat_history)
        chat_history[-1] = (user_query, response)
        return chat_history

def add_text_to_chat_history(chat_history, user_input):
    # Append the new user message with a placeholder bot reply
    # and clear the input textbox.
    chat_history = chat_history + [(user_input, None)]
    return chat_history, ""


def show_history(chat_history):
    # Mirror the session state into the visible chatbot component.
    return chat_history


def reset_all():
    # Clear the session state, the chatbot display, and the input textbox.
    # The chatbot expects a list of message pairs, so it is reset to [].
    return [], [], ""

def main():
    kadi_bot = KadiBot()  # Initialize the KadiBot class

    with gr.Blocks() as demo:
        gr.Markdown("## KadiAPY - AI Coding Assistant")
        gr.Markdown("An AI coding assistant for KadiAPY, based on a RAG architecture and powered by an LLM.")

        # Create a state for session management
        chat_history = gr.State([])
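        # Each history entry is a (user_message, bot_response) tuple;
        # bot_response stays None until bot_kadi fills it in.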

        with gr.Tab("KadiAPY - AI Assistant"):
            with gr.Row():
                with gr.Column(scale=10):
                    chatbot = gr.Chatbot(
                        [],
                        elem_id="chatbot",
                        label="Kadi Bot",
                        bubble_full_width=False,
                        show_copy_button=True,
                        height=600,
                    )
                    user_txt = gr.Textbox(label="Question", placeholder="Type in your question and press Enter or click Submit")

                    with gr.Row():
                        with gr.Column(scale=1):
                            submit_btn = gr.Button("Submit", variant="primary")
                        with gr.Column(scale=1):
                            clear_btn = gr.Button("Clear", variant="stop")

                    # With cache_examples=False, clicking an example only fills
                    # the textbox, so the unused fn/outputs arguments are omitted.
                    gr.Examples(
                        examples=[
                            "Write me a python script that can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
                            "I need a method to upload a file to a record. The id of the record is 3",
                        ],
                        inputs=user_txt,
                        label="Try asking...",
                        cache_examples=False,
                        examples_per_page=3,
                    )

        # Use the state to persist chat history between interactions:
        # append the user message, render it immediately, then generate the answer.
        user_txt.submit(
            add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]
        ).then(
            show_history, [chat_history], [chatbot]
        ).then(
            kadi_bot.bot_kadi, [chat_history], [chatbot]
        )
        submit_btn.click(
            add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]
        ).then(
            show_history, [chat_history], [chatbot]
        ).then(
            kadi_bot.bot_kadi, [chat_history], [chatbot]
        )
        # queue=False lets the clear action run without waiting in the event queue
        clear_btn.click(reset_all, None, [chat_history, chatbot, user_txt], queue=False)

    demo.launch()


if __name__ == "__main__":
    main()