from fastapi import FastAPI
from langchain_chroma import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq
from langchain_huggingface import HuggingFaceEmbeddings

app = FastAPI(title="VibbaAIEndpoints")

# Embedding model used at query time; it must be the same model that was used
# to index the documents stored in ./vectorDB.
model_name = "BAAI/bge-large-en-v1.5"
model_kwargs = {"device": "cpu", "trust_remote_code": True}
encode_kwargs = {"normalize_embeddings": False}
hf = HuggingFaceEmbeddings(
    model_name=model_name,
    model_kwargs=model_kwargs,
    encode_kwargs=encode_kwargs,
)

# Persistent Chroma collection holding the book's chunks, exposed as a
# retriever that returns the 4 most similar chunks for each question.
vectorStore = Chroma(
    collection_name="collection",
    embedding_function=hf,
    persist_directory="./vectorDB",
)
retriever = vectorStore.as_retriever(search_kwargs={"k": 4})

# ChatGroq picks up the GROQ_API_KEY environment variable.
llm = ChatGroq(model="llama-3.3-70b-versatile", temperature=0.75)
outputParser = StrOutputParser()

prompt = """
You are a highly specialized chatbot designed to assist users with queries related to a specific book about mathematics. Your primary role is to answer user questions accurately and comprehensively using the retrieved context from the book.

- Accuracy is paramount: Your answers must be 100% accurate and based strictly on the context you have been provided.
- No additional information: Never introduce information or ideas outside the retrieved context. You must rely solely on the book's content to guide your responses.
- User satisfaction: Your goal is to provide complete satisfaction to users by solving their doubts and answering their questions with clarity, precision, and politeness.
- Math expertise: The book revolves around mathematics, and you are an expert in math. Ensure all solutions and explanations are flawless and easily understandable, offering correct guidance for any math-related queries.
- Off-topic queries: If a user asks a question unrelated to the book or mathematics, politely respond that you are not designed to address topics beyond the scope of the book and math. You can assess the topic's relevance based on the retrieved context.

Example response for off-topic queries:
"I'm here to help with questions related to the mathematics book I was designed around. Unfortunately, I cannot assist with topics outside that scope. Please feel free to ask any math-related questions!"

Always maintain professionalism, politeness, and clarity in every response. You are a reliable and expert guide for users seeking help with math through the context of the book.

Here's the retrieved context:
{context}

Here's the question the user asked:
{query}
"""
prompt = ChatPromptTemplate.from_template(prompt)

# The dict becomes a parallel runnable: the incoming question is passed through
# unchanged as "query", and is also sent to the retriever, whose list of
# Documents is interpolated into the prompt as "context".
chain = (
    {"query": RunnablePassthrough(), "context": retriever}
    | prompt
    | llm
    | outputParser
)


@app.get("/getResponse")
async def generateResponse(question: str):
    # Use the chain's async entry point so the retriever and LLM calls do not
    # block FastAPI's event loop.
    return await chain.ainvoke(question)
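
# --- Optional one-off ingestion helper (a sketch, not called by the API). ---
# The service above assumes ./vectorDB already contains an indexed copy of the
# book. The helper below shows one plausible way to build that collection; the
# function name, the PyPDFLoader choice, the chunk sizes, and the "book.pdf"
# path are illustrative assumptions, not part of the original service.
def ingest_book_into_chroma(pdf_path: str = "book.pdf") -> None:
    from langchain_community.document_loaders import PyPDFLoader
    from langchain_text_splitters import RecursiveCharacterTextSplitter

    # Load the book and split it into overlapping chunks sized for retrieval.
    pages = PyPDFLoader(pdf_path).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=150)
    chunks = splitter.split_documents(pages)

    # Embed the chunks with the same model the API uses and persist them into
    # the same collection/directory that the retriever reads from.
    Chroma.from_documents(
        documents=chunks,
        embedding=hf,
        collection_name="collection",
        persist_directory="./vectorDB",
    )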
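
# --- Example of running and calling the service (a sketch). The module name
# "main", the port, and the sample question are assumptions for illustration.
# With this file saved as main.py, the usual way to serve it is:
#     uvicorn main:app --reload
# and a query then looks like:
#     curl -G "http://127.0.0.1:8000/getResponse" --data-urlencode "question=What is a derivative?"
# Running the file directly also works via the guard below.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8000)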