Update app.py
app.py CHANGED
@@ -1,8 +1,12 @@
-
- #
- #
-
-
import asyncio
import os
import re
@@ -10,58 +14,66 @@ import time
import json

import chainlit as cl

-

from langchain import hub
from langchain_openai import OpenAI
- from
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from langchain.memory import ConversationTokenBufferMemory
from langchain.memory import ConversationSummaryMemory

- from
- from personvernspolicy import instruction_text_priv, personvernspolicy_data
- from frequently_asked_questions import instruction_text_faq, faq

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

- #class LLMChainConfig(BaseModel):
- #model_config = ConfigDict(extra='allow')

- #instruction_text_faq: str = instruction_text_faq
- #faq: dict = faq
- #instruction_text_priv: str = instruction_text_priv
- #personvernspolicy_data: dict = personvernspolicy_data

-
-

- #
-
-
- You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.
- By default, you respond in Norwegian language, using a warm, direct, and professional tone.
- Your expertise is exclusively in retrieving booking information for a given booking id and answering
- questions about firmahytteorning and personvernspolicy.
- If a question does not involve booking information for a given booking id, ask: "Gjelder spørsmålet firmahytteordning?",
- upon user confirmation, do your best to try to answer accordingly by referring to {instruction_text_faq} and {faq}.
- If a query does not involve booking information for a given booking id or firmahytteordning, ask: "Gjelder spørsmålet personvernspolicy?"
- upon user confirmation, do your best to provide a precise privacy-related response by referring to: {instruction_text_priv} and {personvernspolicy_data}.
- If the query does not involve booking information for a given booking id, firmahytteordning or personvernspolicy,
- respond with: "Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon og ofte-stilte-spørsmål i forbindelse
- med DaysOff firmahytteordning (inkludert personvernspolicyn). Gjelder det andre henvendelser, må du nok kontakte kundeservice på [email protected]😊"
- Chat History: {chat_history}
- Question: {question}
- Answer:
- """



daysoff_assistant_template = """
#You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.
- By default, you respond in Norwegian language, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking ID assistance related to
to this.
You do not provide information outside of this scope. If a question is not about this topic, respond with
@@ -87,7 +99,7 @@ API URL:
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                template=api_url_template)
-
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
and the specific user question: {question},
@@ -95,7 +107,7 @@ and given this API URL: {api_url} for querying,
and response from Daysoff's API: {api_response},
never refer the user to the API URL as your answer!
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
- This way you directly address the user's question in a manner that reflects the professionalism and warmth
of a human customer service agent.
Summary:
"""
@@ -107,86 +119,77 @@ api_response_prompt = PromptTemplate(

@cl.on_chat_start
def setup_multiple_chains():
-
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
-         temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
-         max_tokens=2048,
-         top_p=0.9,
        frequency_penalty=0.1,
-         presence_penalty=0.1
    )

-
-
-         max_len=30,  # --retains only the last 30 exchanges
        return_messages=True,
    )
-
-     # --ConversationTokenBufferMemory
-     #conversation_memory = ConversationTokenBufferMemory(memory_key="chat_history",
-     #max_token_limit=1318,
-     #return_messages=True,
-     #)
-
-     # --ConversationSummaryMemory
-     #conversation_memory = ConversationSummaryMemory(memory_key="chat_history",
-     #return_messages=True,
-     #)

    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
-         memory=conversation_memory
    )

-
-
-
-
-
-
-     #).model_dump()
-
-
-     cl.user_session.set("llm_chain", llm_chain)

-
        llm=llm,
-
        api_url_prompt=api_url_prompt,
-         api_response_prompt=api_response_prompt
-         verbose=True,
-         limit_to_domains=None
    )

-     cl.user_session.set("

@cl.on_message
async def handle_message(message: cl.Message):
-     user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
-
-
-     booking_pattern = r'\b[A-Z]{6}\d{6}\b'
-     endpoint_url = "https://
-
-
-
-
-
-
-     {
-     "
-
-
-
-
-
-
-

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content
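The block removed above is the standard APIChain wiring. A minimal sketch of what the truncated lines most likely contained, assuming APIChain.from_llm_and_api_docs was used with the keyword arguments still visible in the deleted hunk (the api_docs_str argument and the session key are reconstructions, not recovered text):

from langchain.chains import APIChain

def setup_api_chain(llm, api_docs_str, api_url_prompt, api_response_prompt):
    # Sketch only: rebuilds the removed APIChain from the kwargs shown in the deleted lines.
    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None,   # no domain allow-list, as in the removed code
    )
    return api_chain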
+ # ===================================================
+ # "the-very-latest-latest-POST-it"-----app.py✍🏽 👾
+ # ===================================================
+ """
+ The change from APIChain to EnhancedRequestsPostTool was driven by the need to handle POST requests.
+ APIChain was internally responsible for URL construction, API interaction, and response formatting;
+ EnhancedRequestsPostTool keeps that flow while enabling POST handling via RequestsPostTool.
+ The prompt routing (api_url_prompt + api_response_prompt) remains, but the prompts are now used across distinct components.
+ """
import asyncio
import os
import re
import time
import json

import chainlit as cl
+ from dotenv import load_dotenv

+ from pydantic import BaseModel, PrivateAttr

from langchain import hub
from langchain_openai import OpenAI
+ from tiktoken import encoding_for_model
+ from langchain.chains import LLMChain  # APIChain
from langchain_core.prompts import PromptTemplate
+
+ from langchain_community.tools.requests.tool import RequestsPostTool
+ from langchain_community.utilities.requests import TextRequestsWrapper
+
from langchain.memory.buffer import ConversationBufferMemory
from langchain.memory import ConversationTokenBufferMemory
from langchain.memory import ConversationSummaryMemory

+ from api_docs import api_docs_str

+ load_dotenv()
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
+ auth_token = os.getenv("DAYSOFF_API_TOKEN")


+ class EnhancedRequestsPostTool(RequestsPostTool, BaseModel):
+     api_docs: str = api_docs_str
+
+     # --PrivateAttr for attributes initialised dynamically in __init__
+     _url_chain: LLMChain = PrivateAttr()
+     _response_chain: LLMChain = PrivateAttr()

+     def __init__(self, requests_wrapper, llm, api_docs_str, api_url_prompt, api_response_prompt):
+         super().__init__(requests_wrapper=requests_wrapper, allow_dangerous_requests=True)
+         object.__setattr__(self, 'api_docs', api_docs_str)  # self.api_docs = api_docs_str
+         object.__setattr__(self, '_url_chain', LLMChain(llm=llm, prompt=api_url_prompt))  # --dynamic init 1
+         object.__setattr__(self, '_response_chain', LLMChain(llm=llm, prompt=api_response_prompt))  # --dynamic init 2

+     async def ainvoke(self, input_data, callbacks=None):
+         # --construct the API URL from the docs and the user question
+         url_response = await self._url_chain.ainvoke({
+             "api_docs": self.api_docs,
+             "question": input_data.get("question")
+         })
+
+         api_response = await super().ainvoke(input_data, callbacks)  # --make the POST request
+
+         # --format the raw API response via the response chain
+         formatted_response = await self._response_chain.ainvoke({
+             "api_docs": self.api_docs,
+             "question": input_data.get("question"),
+             "api_url": url_response.get("text"),
+             "api_response": api_response
+         })
+
+         return formatted_response

daysoff_assistant_template = """
#You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.
+ #By default, you respond in Norwegian language, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking ID assistance related to
to this.
You do not provide information outside of this scope. If a question is not about this topic, respond with
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                template=api_url_template)
+
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
and the specific user question: {question},
and given this API URL: {api_url} for querying,
and response from Daysoff's API: {api_response},
never refer the user to the API URL as your answer!
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
+ This way you directly address the user's question in a manner that reflects the professionalism and warmth
of a human customer service agent.
Summary:
"""

@cl.on_chat_start
def setup_multiple_chains():
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
+         temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
+         max_tokens=2048,
+         top_p=0.9,
        frequency_penalty=0.1,
+         presence_penalty=0.1
    )

+     conversation_memory = ConversationBufferMemory(memory_key="chat_history",
+         max_len=30,
        return_messages=True,
    )

    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
+         memory=conversation_memory,
    )

+     requests_wrapper = TextRequestsWrapper(
+         headers={
+             "Authorization": auth_token,
+             "Content-Type": "application/json"
+         }
+     )

+     post_tool = EnhancedRequestsPostTool(
+         requests_wrapper=requests_wrapper,
        llm=llm,
+         api_docs_str=api_docs_str,
        api_url_prompt=api_url_prompt,
+         api_response_prompt=api_response_prompt
    )

+     cl.user_session.set("llm_chain", llm_chain)
+     cl.user_session.set("post_tool", post_tool)
+

@cl.on_message
|
163 |
async def handle_message(message: cl.Message):
|
164 |
+
user_message = message.content
|
165 |
llm_chain = cl.user_session.get("llm_chain")
|
166 |
+
post_tool = cl.user_session.get("post_tool")
|
167 |
+
|
168 |
+
booking_pattern = r'\b[A-Z]{6}\d{6}\b'
|
169 |
+
endpoint_url = "https://aivisions.no/data/daysoff/api/v1/booking/"
|
170 |
+
|
171 |
+
match = re.search(booking_pattern, user_message)
|
172 |
+
if match:
|
173 |
+
bestillingskode = match.group()
|
174 |
+
post_data = {
|
175 |
+
"url": endpoint_url,
|
176 |
+
"body": {
|
177 |
+
"booking_id": bestillingskode
|
178 |
+
}
|
179 |
+
}
|
180 |
+
|
181 |
+
response = await post_tool.arun(
|
182 |
+
json.dumps(post_data),
|
183 |
+
callbacks=[cl.AsyncLangchainCallbackHandler()]
|
184 |
+
)
|
185 |
+
|
186 |
+
else:
|
187 |
+
response = await llm_chain.ainvoke(
|
188 |
+
user_message, # {"chat_history": "", "question": user_message}
|
189 |
+
)
|
190 |
+
await cl.Message(content=response).send()
|
191 |
|
192 |
response_key = "output" if "output" in response else "text"
|
193 |
+
|
194 |
await cl.Message(response.get(response_key, "")).send()
|
195 |
return message.content
|
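A small, self-contained sketch of the booking-code routing in handle_message, useful for checking the regex and the payload handed to the POST tool in isolation. The "url"/"data" key names and the example booking code are assumptions for illustration, not values taken from the committed code:

import json
import re

BOOKING_PATTERN = r'\b[A-Z]{6}\d{6}\b'   # six capital letters followed by six digits
ENDPOINT_URL = "https://aivisions.no/data/daysoff/api/v1/booking/"

def build_post_input(user_message: str):
    """Return the JSON string handed to the POST tool, or None for non-booking messages."""
    match = re.search(BOOKING_PATTERN, user_message)
    if not match:
        return None                      # falls through to the plain llm_chain branch
    return json.dumps({
        "url": ENDPOINT_URL,
        "data": {"booking_id": match.group()},   # assumed payload shape
    })

print(build_post_input("Hva er status på bestilling ABCDEF123456?"))
print(build_post_input("Gjelder spørsmålet firmahytteordning?"))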