from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain


class ConversationChainSingleton:
    """Builds the conversation chain once and shares it across the app."""

    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super(ConversationChainSingleton, cls).__new__(cls)
            # Initialize the conversation chain only on first instantiation.
            cls._instance.conversation_chain = get_conversation_chain()
        return cls._instance

    def get_conversation_chain(self):
        return self.conversation_chain


def get_conversation_chain():
    """
    Create a conversation chain backed by a Hugging Face Hub language model
    and a buffered chat-history memory.
    """
    llm = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        model_kwargs={
            "max_length": 1048,
            "temperature": 0.2,
            "max_new_tokens": 256,
            "top_p": 0.95,
            "repetition_penalty": 1.0,
        },
    )
    # llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613")
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    conversation_chain = ConversationChain(
        llm=llm,
        verbose=True,
        memory=memory,
    )
    return conversation_chain
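

# --- Usage sketch (illustrative addition, not part of the original module) ---
# Assumes a HUGGINGFACEHUB_API_TOKEN is set in the environment so HuggingFaceHub
# can authenticate. Because ConversationChainSingleton always returns the same
# instance, every caller shares one chain and therefore one chat memory.
if __name__ == "__main__":
    chain_a = ConversationChainSingleton().get_conversation_chain()
    chain_b = ConversationChainSingleton().get_conversation_chain()
    assert chain_a is chain_b  # both names point at the same ConversationChain

    # ConversationBufferMemory keeps the first exchange in context for the second.
    print(chain_a.predict(input="Hi, my name is Ada."))
    print(chain_a.predict(input="What is my name?"))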