Update controller.py
controller.py  +15 -3
@@ -43,7 +43,7 @@ def cut_text_after_keyword(text, keyword):
 
 def handle_submission_chat(user_message, response):
     # os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.environ['HF_token']
-    agent_chat_bot = get_conversation_chain()
+    agent_chat_bot = ConversationChainSingleton().get_conversation_chain()
 
     if response is not None:
         text = agent_chat_bot.predict(input=user_message + response)
@@ -55,13 +55,25 @@ def handle_submission_chat(user_message, response):
 
     return result
 
-
+class ConversationChainSingleton:
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super(ConversationChainSingleton, cls).__new__(cls)
+            # Initialize your conversation chain here
+            cls._instance.conversation_chain = get_conversation_chain()
+        return cls._instance
+
+    def get_conversation_chain(self):
+        return self.conversation_chain
+
+
 def get_conversation_chain( ):
     """
     Create a conversational retrieval chain and a language model.
 
     """
-
 
     llm = HuggingFaceHub(
         repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",