Update app.py
Browse files
app.py
CHANGED
@@ -1,6 +1,6 @@
|
|
1 |
|
2 |
# ===================================================
|
3 |
-
# "the-very-latest
|
4 |
# ===================================================
|
5 |
|
6 |
import asyncio
|
@@ -16,11 +16,11 @@ from dotenv import load_dotenv
|
|
16 |
from langchain import hub
|
17 |
from langchain_openai import OpenAI
|
18 |
from tiktoken import encoding_for_model
|
19 |
-
from langchain.chains import LLMChain
|
20 |
from langchain_core.prompts import PromptTemplate
|
21 |
|
22 |
from langchain_community.tools.requests.tool import RequestsPostTool
|
23 |
-
from
|
24 |
|
25 |
from langchain.memory.buffer import ConversationBufferMemory
|
26 |
from langchain.memory import ConversationTokenBufferMemory
|
@@ -32,13 +32,42 @@ load_dotenv()
|
|
32 |
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
|
33 |
auth_token = os.getenv("DAYSOFF_API_TOKEN")
|
34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
daysoff_assistant_template = """
|
36 |
-
#You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.
|
37 |
-
#By default, you respond in Norwegian language, using a warm, direct, and professional tone.
|
38 |
Your expertise is exclusively in retrieving booking information for a given booking ID assistance related to
|
39 |
to this.
|
40 |
-
You do not provide information outside of this scope. If a question is not about this topic, respond with
|
41 |
-
"Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser
|
42 |
må du nok kontakte kundeservice på [email protected]😊"
|
43 |
Chat History: {chat_history}
|
44 |
Question: {question}
|
@@ -60,7 +89,7 @@ API URL:
|
|
60 |
"""
|
61 |
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
|
62 |
template=api_url_template)
|
63 |
-
|
64 |
api_response_template = """
|
65 |
With the API Documentation for Daysoff's official API: {api_docs} in mind,
|
66 |
and the specific user question: {question},
|
@@ -68,7 +97,7 @@ and given this API URL: {api_url} for querying,
|
|
68 |
and response from Daysoff's API: {api_response},
|
69 |
never refer the user to the API URL as your answer!
|
70 |
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
|
71 |
-
This way you directly address the user's question in a manner that reflects the professionalism and warmth
|
72 |
of a human customer service agent.
|
73 |
Summary:
|
74 |
"""
|
@@ -77,6 +106,7 @@ api_response_prompt = PromptTemplate(
|
|
77 |
template=api_response_template
|
78 |
)
|
79 |
|
|
|
80 |
@cl.on_chat_start
|
81 |
def setup_multiple_chains():
|
82 |
llm = OpenAI(
|
@@ -89,8 +119,8 @@ def setup_multiple_chains():
|
|
89 |
presence_penalty=0.1
|
90 |
)
|
91 |
|
92 |
-
conversation_memory = ConversationBufferMemory(memory_key="chat_history",
|
93 |
-
max_len=30,
|
94 |
return_messages=True,
|
95 |
)
|
96 |
|
@@ -99,25 +129,33 @@ def setup_multiple_chains():
|
|
99 |
prompt=daysoff_assistant_prompt,
|
100 |
memory=conversation_memory,
|
101 |
)
|
102 |
-
|
103 |
-
cl.user_session.set("llm_chain", llm_chain)
|
104 |
|
105 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
106 |
llm=llm,
|
107 |
api_docs=api_docs_str,
|
108 |
api_url_prompt=api_url_prompt,
|
109 |
-
api_response_prompt=api_response_prompt
|
110 |
-
verbose=True,
|
111 |
-
limit_to_domains=None
|
112 |
)
|
113 |
-
|
114 |
-
cl.user_session.set("
|
|
|
|
|
|
|
115 |
|
116 |
@cl.on_message
|
117 |
async def handle_message(message: cl.Message):
|
118 |
-
user_message = message.content
|
119 |
-
llm_chain = cl.user_session.get("llm_chain")
|
120 |
-
|
121 |
|
122 |
# --------------------------------AD VAL.II.7--------------------------------
|
123 |
booking_pattern = r'\b[A-Z]{6}\d{6}\b'
|
@@ -126,17 +164,6 @@ async def handle_message(message: cl.Message):
|
|
126 |
if re.search(booking_pattern, user_message):
|
127 |
bestillingskode = re.search(booking_pattern, user_message).group(0)
|
128 |
|
129 |
-
requests_wrapper = TextRequestsWrapper(
|
130 |
-
headers={
|
131 |
-
"Authorization": auth_token,
|
132 |
-
"Content-Type": "application/json"
|
133 |
-
}
|
134 |
-
)
|
135 |
-
|
136 |
-
post_tool = RequestsPostTool(
|
137 |
-
requests_wrapper=requests_wrapper,
|
138 |
-
allow_dangerous_requests=True
|
139 |
-
)
|
140 |
|
141 |
post_data = {
|
142 |
"url": endpoint_url,
|
@@ -150,12 +177,14 @@ async def handle_message(message: cl.Message):
|
|
150 |
callbacks=[cl.AsyncLangchainCallbackHandler()]
|
151 |
)
|
152 |
# --------------------------------AD VAL.@End--------------------------------
|
153 |
-
|
154 |
else:
|
155 |
-
response = await llm_chain.
|
|
|
|
|
|
|
156 |
|
157 |
response_key = "output" if "output" in response else "text"
|
158 |
await cl.Message(response.get(response_key, "")).send()
|
159 |
-
return message.content
|
160 |
-
|
161 |
|
|
|
1 |
|
2 |
# ===================================================
|
3 |
+
# "the-very-latest-latest-POST-it"-----app.py✍🏽 👾
|
4 |
# ===================================================
|
5 |
|
6 |
import asyncio
|
|
|
16 |
from langchain import hub
|
17 |
from langchain_openai import OpenAI
|
18 |
from tiktoken import encoding_for_model
|
19 |
+
from langchain.chains import LLMChain #APIChain
|
20 |
from langchain_core.prompts import PromptTemplate
|
21 |
|
22 |
from langchain_community.tools.requests.tool import RequestsPostTool
|
23 |
+
from langchain_community.utilities.requests import TextRequestsWrapper
|
24 |
|
25 |
from langchain.memory.buffer import ConversationBufferMemory
|
26 |
from langchain.memory import ConversationTokenBufferMemory
|
|
|
32 |
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
|
33 |
auth_token = os.getenv("DAYSOFF_API_TOKEN")
|
34 |
|
35 |
+
|
36 |
+
class EnhancedRequestsPostTool(RequestsPostTool):
    """RequestsPostTool variant that brackets the raw POST call with two LLM
    chains: one that derives the API URL from the API docs + user question,
    and one that reformats the raw API response into a customer-facing
    (Norwegian) summary.
    """

    def __init__(self, requests_wrapper, llm, api_docs,
                 api_url_prompt, api_response_prompt, **kwargs):
        # Forward extra RequestsPostTool options to the base class — the
        # caller constructs this with allow_dangerous_requests=True, which
        # the previous signature rejected with a TypeError.
        super().__init__(requests_wrapper=requests_wrapper, **kwargs)
        # RequestsPostTool is a pydantic model; assigning undeclared fields
        # via normal attribute access raises a validation error, so bypass
        # pydantic's __setattr__ deliberately for these private extras.
        object.__setattr__(self, "url_chain", LLMChain(llm=llm, prompt=api_url_prompt))
        object.__setattr__(self, "response_chain", LLMChain(llm=llm, prompt=api_response_prompt))
        object.__setattr__(self, "api_docs", api_docs)

    async def ainvoke(self, input_data, callbacks=None):
        """Run URL generation, the POST request, then response formatting.

        Parameters:
            input_data: dict carrying at least "question" (and whatever the
                base tool needs for the actual request).
            callbacks: optional LangChain callback handlers.

        Returns:
            The response chain's output dict (its "text" key holds the
            formatted summary).
        """
        # Runnable.ainvoke expects callbacks inside a RunnableConfig dict —
        # the old code passed the bare list positionally as `config`.
        config = {"callbacks": callbacks} if callbacks else None

        # -- derive the API URL from the docs and the user's question
        url_response = await self.url_chain.ainvoke(
            {
                "api_docs": self.api_docs,
                "question": input_data.get("question"),
            },
            config=config,
        )

        # -- make the POST request
        # NOTE(review): the URL generated above is not fed into the request;
        # super().ainvoke() posts whatever URL input_data carries — confirm
        # that is intended (the generated URL is only echoed into the
        # formatting prompt below).
        api_response = await super().ainvoke(input_data, config=config)

        # -- format the raw API response into a clear summary for the user
        formatted_response = await self.response_chain.ainvoke(
            {
                "api_docs": self.api_docs,
                "question": input_data.get("question"),
                "api_url": url_response.get("text"),
                "api_response": api_response,
            },
            config=config,
        )
        return formatted_response
|
62 |
+
|
63 |
+
|
64 |
daysoff_assistant_template = """
|
65 |
+
#You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.
|
66 |
+
#By default, you respond in Norwegian language, using a warm, direct, and professional tone.
|
67 |
Your expertise is exclusively in retrieving booking information for a given booking ID assistance related to
|
68 |
to this.
|
69 |
+
You do not provide information outside of this scope. If a question is not about this topic, respond with
|
70 |
+
"Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser
|
71 |
må du nok kontakte kundeservice på [email protected]😊"
|
72 |
Chat History: {chat_history}
|
73 |
Question: {question}
|
|
|
89 |
"""
|
90 |
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
|
91 |
template=api_url_template)
|
92 |
+
|
93 |
api_response_template = """
|
94 |
With the API Documentation for Daysoff's official API: {api_docs} in mind,
|
95 |
and the specific user question: {question},
|
|
|
97 |
and response from Daysoff's API: {api_response},
|
98 |
never refer the user to the API URL as your answer!
|
99 |
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
|
100 |
+
This way you directly address the user's question in a manner that reflects the professionalism and warmth
|
101 |
of a human customer service agent.
|
102 |
Summary:
|
103 |
"""
|
|
|
106 |
template=api_response_template
|
107 |
)
|
108 |
|
109 |
+
|
110 |
@cl.on_chat_start
|
111 |
def setup_multiple_chains():
|
112 |
llm = OpenAI(
|
|
|
119 |
presence_penalty=0.1
|
120 |
)
|
121 |
|
122 |
+
conversation_memory = ConversationBufferMemory(memory_key="chat_history",
|
123 |
+
max_len=30,
|
124 |
return_messages=True,
|
125 |
)
|
126 |
|
|
|
129 |
prompt=daysoff_assistant_prompt,
|
130 |
memory=conversation_memory,
|
131 |
)
|
|
|
|
|
132 |
|
133 |
+
requests_wrapper = TextRequestsWrapper(
|
134 |
+
headers={
|
135 |
+
"Authorization": auth_token,
|
136 |
+
"Content-Type": "application/json"
|
137 |
+
}
|
138 |
+
)
|
139 |
+
|
140 |
+
post_tool = EnhancedRequestsPostTool(
|
141 |
+
requests_wrapper=requests_wrapper,
|
142 |
+
allow_dangerous_requests=True,
|
143 |
llm=llm,
|
144 |
api_docs=api_docs_str,
|
145 |
api_url_prompt=api_url_prompt,
|
146 |
+
api_response_prompt=api_response_prompt
|
|
|
|
|
147 |
)
|
148 |
+
|
149 |
+
cl.user_session.set("llm_chain", llm_chain)
|
150 |
+
|
151 |
+
|
152 |
+
|
153 |
|
154 |
@cl.on_message
|
155 |
async def handle_message(message: cl.Message):
|
156 |
+
user_message = message.content
|
157 |
+
llm_chain = cl.user_session.get("llm_chain")
|
158 |
+
post_tool = cl.user_session.get("post_tool")
|
159 |
|
160 |
# --------------------------------AD VAL.II.7--------------------------------
|
161 |
booking_pattern = r'\b[A-Z]{6}\d{6}\b'
|
|
|
164 |
if re.search(booking_pattern, user_message):
|
165 |
bestillingskode = re.search(booking_pattern, user_message).group(0)
|
166 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
167 |
|
168 |
post_data = {
|
169 |
"url": endpoint_url,
|
|
|
177 |
callbacks=[cl.AsyncLangchainCallbackHandler()]
|
178 |
)
|
179 |
# --------------------------------AD VAL.@End--------------------------------
|
180 |
+
|
181 |
else:
|
182 |
+
response = await llm_chain.ainvoke(
|
183 |
+
user_message,
|
184 |
+
callbacks=[cl.AsyncLangchainCallbackHandler()]
|
185 |
+
)
|
186 |
|
187 |
response_key = "output" if "output" in response else "text"
|
188 |
await cl.Message(response.get(response_key, "")).send()
|
189 |
+
return message.content
|
|
|
190 |
|