Update app.py
app.py
CHANGED
@@ -24,6 +24,8 @@ import chainlit as cl
 
 #from tiktoken import encoding_for_model
 
+from pydantic import BaseModel, ConfigDict
+
 from langchain import hub
 from langchain_openai import OpenAI
 from langchain.chains import LLMChain, APIChain
@@ -33,15 +35,12 @@ from langchain.memory import ConversationTokenBufferMemory
 from langchain.memory import ConversationSummaryMemory
 
 from api_docs_mck import api_docs_str
-from frequently_asked_questions import instruction_text, frequently_asked_questions
 from personvernspolicy import instruction_text_priv, personvernspolicy_data
 from frequently_asked_questions import instruction_text_faq, faq
 
 
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 
-
-
 # If you don't know the answer, just say that you don't know, don't try to make up an answer.
 daysoff_assistant_template = """
 You are a customer support assistant ("kundeservice AI assistent") for Daysoff.
@@ -136,21 +135,32 @@ def setup_multiple_chains():
     #conversation_memory = ConversationSummaryMemory(memory_key="chat_history",
     #return_messages=True,
     #)
-
-    llm_chain = LLMChain(llm=llm,
-                         prompt=daysoff_assistant_prompt,
-                         memory=conversation_memory
-                         )
 
+
+    class LLMChainConfig(BaseModel):
+        model_config = ConfigDict(extra='allow')
+        instruction_text_faq: str
+        faq: dict
+        instruction_text_priv: str
+        personvernspolicy_data: dict
+
+    llm_chain = LLMChain(
+        llm=llm,
+        prompt=daysoff_assistant_prompt,
+        memory=conversation_memory,
+        **LLMChainConfig(
+            instruction_text_faq=instruction_text_faq,
+            faq=faq,
+            instruction_text_priv=instruction_text_priv,
+            personvernspolicy_data=personvernspolicy_data
+        ).model_dump()
+    )
+
     cl.user_session.set("llm_chain", llm_chain)
 
     api_chain = APIChain.from_llm_and_api_docs(
         llm=llm,
         api_docs=api_docs_str,
-        instruction_text_faq=instruction_text_faq,
-        faq=faq,
-        instruction_text_priv=instruction_text_priv,
-        personvernspolicy_data=personvernspolicy_data,
         api_url_prompt=api_url_prompt,
         api_response_prompt=api_response_prompt,
         verbose=True,
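For context on the change above: instead of handing the FAQ and privacy-policy values to APIChain.from_llm_and_api_docs, the commit collects them in a pydantic model whose model_config allows extra fields and unpacks model_dump() into the LLMChain constructor. The following is a minimal standalone sketch of that pydantic v2 pattern, separate from LangChain; the sample values are placeholders, not data from this repository.

# Minimal sketch of the config pattern used in this commit (pydantic v2 assumed).
# Field values below are placeholders, not data from the Daysoff repository.
from pydantic import BaseModel, ConfigDict

class LLMChainConfig(BaseModel):
    # extra='allow' keeps validation for the declared fields but also
    # accepts and retains any additional keyword arguments passed in.
    model_config = ConfigDict(extra='allow')
    instruction_text_faq: str
    faq: dict
    instruction_text_priv: str
    personvernspolicy_data: dict

config = LLMChainConfig(
    instruction_text_faq="Answer booking questions briefly.",   # placeholder
    faq={"endre bestilling": "Kontakt kundeservice."},          # placeholder
    instruction_text_priv="Follow the privacy policy.",         # placeholder
    personvernspolicy_data={"retention_days": 30},              # placeholder
)

# model_dump() returns a plain dict, which app.py then unpacks as
# LLMChain(llm=..., prompt=..., memory=..., **config.model_dump()).
extra_kwargs = config.model_dump()
print(extra_kwargs["instruction_text_faq"])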
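Whether LLMChain keeps or rejects these extra keyword arguments depends on the installed LangChain version's own pydantic settings, and the FAQ and privacy data only shape answers if daysoff_assistant_prompt actually references them, so the sketch above is best read as a way to carry that context alongside the chain rather than a documented LLMChain feature.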