File size: 1,316 Bytes
b8b0b89 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
from langchain.vectorstores import FAISS
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain, ConversationChain
from langchain.llms import HuggingFaceHub
class ConversationChainSingleton:
    """Process-wide singleton holding one shared conversation chain.

    The first instantiation builds the chain via the module-level
    get_conversation_chain(); every subsequent call returns the same
    instance (and therefore the same chain and memory).
    """

    _instance = None  # cached singleton; None until first construction

    def __new__(cls, *args, **kwargs):
        # Lazily build the single shared instance on first use.
        if cls._instance is None:
            instance = super().__new__(cls)
            instance.conversation_chain = get_conversation_chain()
            cls._instance = instance
        return cls._instance

    def get_conversation_chain(self):
        """Return the conversation chain owned by the singleton."""
        return self.conversation_chain
def get_conversation_chain():
    """Build and return a ConversationChain backed by a HuggingFace Hub LLM.

    Uses the Mixtral-8x7B-Instruct model with fixed sampling parameters and
    a buffer memory that keeps the full chat history as message objects
    under the "chat_history" key.
    """
    # Remote LLM endpoint; sampling parameters are fixed at creation time.
    language_model = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        model_kwargs={
            "max_length": 1048,
            "temperature": 0.2,
            "max_new_tokens": 256,
            "top_p": 0.95,
            "repetition_penalty": 1.0,
        },
    )
    # Full-history buffer memory, returned as messages (not a flat string).
    chat_memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True
    )
    return ConversationChain(llm=language_model, verbose=True, memory=chat_memory)