import os

import joblib
from langchain.callbacks import get_openai_callback
from langchain.chains.question_answering import load_qa_chain
from langchain.embeddings.sentence_transformer import \
    SentenceTransformerEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import Pinecone
from pages.admin_utils import pine_cone_index


# Function to pull index data from Pinecone
def pull_from_pinecone(embeddings, pinecone_index_name: str | None = None):
    index_name = pine_cone_index(pinecone_index_name)
    index = Pinecone.from_existing_index(index_name, embeddings)
    return index


# Create the sentence-transformer embeddings used for queries and documents
def create_embeddings():
    embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
    return embeddings


# This function will help us in fetching the top relevant documents from our vector store - Pinecone index
def get_similar_docs(index, query, k=2):
    similar_docs = index.similarity_search(query, k=k)
    return similar_docs


# Run a "stuff" question-answering chain over the retrieved documents;
# get_openai_callback tracks token usage for the OpenAI call
def get_answer(docs, user_input):
    chain = load_qa_chain(OpenAI(), chain_type="stuff")
    with get_openai_callback() as cb:
        response = chain.run(input_documents=docs, question=user_input)
    return response


# Load the pre-trained SVM classifier (if it has been saved to disk) and
# predict a label for the given query embedding
def predict(query_result):
    if os.path.exists('modelsvm.pk1'):
        Fitmodel = joblib.load('modelsvm.pk1')
        result = Fitmodel.predict([query_result])
        return result[0]
    return "No Idea?"
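

# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the module above). Assumptions: the
# Pinecone client has already been initialised via pages.admin_utils, the
# target index is populated, and the OPENAI_API_KEY environment variable is
# set. The query string below is a hypothetical example.
if __name__ == "__main__":
    embeddings = create_embeddings()
    index = pull_from_pinecone(embeddings)

    query = "What experience does the candidate have with Python?"

    # Fetch the top-2 most relevant documents and answer the question from them
    relevant_docs = get_similar_docs(index, query, k=2)
    print(get_answer(relevant_docs, query))

    # Optionally classify the query embedding with the saved SVM model
    query_embedding = embeddings.embed_query(query)
    print(predict(query_embedding))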