Update app.py
app.py CHANGED
@@ -3,17 +3,6 @@
 # ver01.01-5.workload-----app.py
 # ===========================================
 
-
-
-#You are a customer support assistant ("kundeservice AI assistent") for Daysoff.
-#By default, you respond in Norwegian language, using a warm, direct, and professional tone.
-#Your expertise is exclusively in retrieving booking information for a given booking ID assistance related to
-#to this.
-#You do not provide information outside of this scope. If a question is not about this topic, respond with
-#"Ooops da, jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser,
-#må du nok kontakte kundeservice på [email protected]"
-
-
 import asyncio
 import os
 import re
@@ -22,8 +11,6 @@ import json
 
 import chainlit as cl
 
-#from tiktoken import encoding_for_model
-
 from pydantic import BaseModel, ConfigDict
 
 from langchain import hub
@@ -42,14 +29,14 @@ OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 
 class LLMChainConfig(BaseModel):
     model_config = ConfigDict(extra='allow')
-    instruction_text_faq: str
-    faq: dict
+    instruction_text_faq: str
+    faq: dict
     instruction_text_priv: str
     personvernspolicy_data: dict
 
-    instruction_text_faq = instruction_text_faq
-    faq = faq
-    instruction_text_priv = instruction_text_priv
+    instruction_text_faq = instruction_text_faq
+    faq = faq
+    instruction_text_priv = instruction_text_priv
     personvernspolicy_data = personvernspolicy_data
 
 daysoff_assistant_template = """
@@ -104,70 +91,60 @@ api_response_prompt = PromptTemplate(
 @cl.on_chat_start
 def setup_multiple_chains():
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    )
-
-
-
-
-    api_chain = APIChain.from_llm_and_api_docs(
-        llm=llm,
-        api_docs=api_docs_str,
-        api_url_prompt=api_url_prompt,
-        api_response_prompt=api_response_prompt,
-        verbose=True,
-        limit_to_domains=None
-    )
-
-    cl.user_session.set("api_chain", api_chain)
+    try:
+        llm_chain = LLMChain(
+            llm=llm,
+            prompt=daysoff_assistant_prompt,
+            memory=conversation_memory,
+            **LLMChainConfig(
+                instruction_text_faq=instruction_text_faq,
+                faq=faq,
+                instruction_text_priv=instruction_text_priv,
+                personvernspolicy_data=personvernspolicy_data
+            ).model_dump()
+        )
+        cl.user_session.set("llm_chain", llm_chain)
+    except Exception as e:
+        print(f"Error setting LLMChain: {e}")
+
+    try:
+        api_chain = APIChain.from_llm_and_api_docs(
+            llm=llm,
+            api_docs=api_docs_str,
+            api_url_prompt=api_url_prompt,
+            api_response_prompt=api_response_prompt,
+            verbose=True,
+            limit_to_domains=None
+        )
+        cl.user_session.set("api_chain", api_chain)
+    except Exception as e:
+        print(f"Error setting APIChain: {e}")
 
 @cl.on_message
 async def handle_message(message: cl.Message):
-    user_message = message.content
+    user_message = message.content
     llm_chain = cl.user_session.get("llm_chain")
     api_chain = cl.user_session.get("api_chain")
-
-
+
+    if llm_chain is None or api_chain is None:
+        await cl.Message("Error: LLMChain or APIChain not initialized properly.").send()
+        return
+
+    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
     endpoint_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    response_key = "output" if "output" in response else "text"
-    await cl.Message(response.get(response_key, "")).send()
-    return message.content
+    try:
+        if re.search(booking_pattern, user_message):
+            bestillingskode = re.search(booking_pattern, user_message).group(0)
+            question = f"Retrieve information for booking ID {endpoint_url}?search={bestillingskode}"
+            response = await api_chain.acall(
+                {"bestillingskode": bestillingskode, "question": question},
+                callbacks=[cl.AsyncLangchainCallbackHandler()]
+            )
+        else:
+            response = await llm_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
+
+        response_key = "output" if "output" in response else "text"
+        await cl.Message(response.get(response_key, "")).send()
+    except Exception as e:
+        await cl.Message(f"Error during message handling: {e}").send()