camparchimedes committed on
Commit
9bf3ea7
·
verified ·
1 Parent(s): 2ae2e3b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +87 -58
app.py CHANGED
@@ -28,16 +28,14 @@ from frequently_asked_questions import instruction_text_faq, faq
28
  OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
29
 
30
  class LLMChainConfig(BaseModel):
31
- model_config = ConfigDict(extra='allow')
32
- instruction_text_faq: str
33
- faq: dict
34
- instruction_text_priv: str
35
- personvernspolicy_data: dict
36
 
37
- instruction_text_faq = instruction_text_faq
38
- faq = faq
39
- instruction_text_priv = instruction_text_priv
40
- personvernspolicy_data = personvernspolicy_data
 
 
41
 
42
  daysoff_assistant_template = """
43
  You are a customer support assistant (โ€™kundeservice AI assistentโ€™) for Daysoff.
@@ -88,62 +86,93 @@ api_response_prompt = PromptTemplate(
88
  template=api_response_template
89
  )
90
 
 
91
  @cl.on_chat_start
92
  def setup_multiple_chains():
93
- try:
94
- llm_chain = LLMChain(
95
- llm=llm,
96
- prompt=daysoff_assistant_prompt,
97
- memory=conversation_memory,
98
- **LLMChainConfig(
99
- instruction_text_faq=instruction_text_faq,
100
- faq=faq,
101
- instruction_text_priv=instruction_text_priv,
102
- personvernspolicy_data=personvernspolicy_data
103
- ).model_dump()
104
- )
105
- cl.user_session.set("llm_chain", llm_chain)
106
- except Exception as e:
107
- print(f"Error setting LLMChain: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
108
 
109
- try:
110
- api_chain = APIChain.from_llm_and_api_docs(
111
- llm=llm,
112
- api_docs=api_docs_str,
113
- api_url_prompt=api_url_prompt,
114
- api_response_prompt=api_response_prompt,
115
- verbose=True,
116
- limit_to_domains=None
117
- )
118
- cl.user_session.set("api_chain", api_chain)
119
- except Exception as e:
120
- print(f"Error setting APIChain: {e}")
121
 
122
  @cl.on_message
123
  async def handle_message(message: cl.Message):
124
- user_message = message.content
125
  llm_chain = cl.user_session.get("llm_chain")
126
  api_chain = cl.user_session.get("api_chain")
127
-
128
- if llm_chain is None or api_chain is None:
129
- await cl.Message("Error: LLMChain or APIChain not initialized properly.").send()
130
- return
131
-
132
- booking_pattern = r'\b[A-Z]{6}\d{6}\b'
133
  endpoint_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
134
 
135
- try:
136
- if re.search(booking_pattern, user_message):
137
- bestillingskode = re.search(booking_pattern, user_message).group(0)
138
- question = f"Retrieve information for booking ID {endpoint_url}?search={bestillingskode}"
139
- response = await api_chain.acall(
140
- {"bestillingskode": bestillingskode, "question": question},
141
- callbacks=[cl.AsyncLangchainCallbackHandler()]
142
- )
143
- else:
144
- response = await llm_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
145
-
146
- response_key = "output" if "output" in response else "text"
147
- await cl.Message(response.get(response_key, "")).send()
148
- except Exception as e:
149
- await cl.Message(f"Error during message handling: {e}").send()
 
 
 
 
28
  OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
29
 
30
class LLMChainConfig(BaseModel):
    """Pydantic container for the extra FAQ / privacy-policy context of the LLM chain.

    ``extra='allow'`` permits additional keyword arguments beyond the declared
    fields to pass through validation untouched.

    NOTE(review): the class-level defaults below bind to the module-level
    names of the same spelling (imported at the top of the file) at
    class-definition time.  The current chain setup passes these values to
    LLMChain directly instead of via this model, so this class may now be
    unused -- confirm before relying on it.
    """
    model_config = ConfigDict(extra='allow')

    # Each field's default is the imported module-level value of the same name.
    instruction_text_faq: str = instruction_text_faq
    faq: dict = faq
    instruction_text_priv: str = instruction_text_priv
    personvernspolicy_data: dict = personvernspolicy_data
37
+
38
+
39
 
40
  daysoff_assistant_template = """
41
  You are a customer support assistant (โ€™kundeservice AI assistentโ€™) for Daysoff.
 
86
  template=api_response_template
87
  )
88
 
89
+
90
@cl.on_chat_start
def setup_multiple_chains():
    """Initialize the LLM, conversation memory, and both chains for this chat session.

    Runs once when a Chainlit chat starts.  Stores the configured LLMChain and
    APIChain in the user session under the keys "llm_chain" and "api_chain"
    so that handle_message() can retrieve them for each incoming message.
    """
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1,
    )

    # Rolling conversation history injected into the prompt as "chat_history".
    # NOTE(review): ConversationBufferMemory does not document a `max_len`
    # parameter -- confirm it actually caps the history at 30 exchanges as
    # the original comment intended.
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        max_len=30,  # intended: retain only the last 30 exchanges
        return_messages=True,
    )

    try:
        # extra='allow'-style kwargs (FAQ / privacy-policy data) are forwarded
        # so the prompt template can reference them.
        llm_chain = LLMChain(
            llm=llm,
            prompt=daysoff_assistant_prompt,
            memory=conversation_memory,
            instruction_text_faq=instruction_text_faq,
            faq=faq,
            instruction_text_priv=instruction_text_priv,
            personvernspolicy_data=personvernspolicy_data,
        )
        cl.user_session.set("llm_chain", llm_chain)
    except Exception as e:
        # Report configuration failures instead of crashing chat start-up;
        # handle_message() guards against a missing chain.
        print(f"Error setting LLMChain: {e}")

    try:
        api_chain = APIChain.from_llm_and_api_docs(
            llm=llm,
            api_docs=api_docs_str,
            api_url_prompt=api_url_prompt,
            api_response_prompt=api_response_prompt,
            verbose=True,
            # NOTE(review): no domain restriction -- the chain may call
            # arbitrary URLs derived from model output; confirm this is intended.
            limit_to_domains=None,
        )
        cl.user_session.set("api_chain", api_chain)
    except Exception as e:
        print(f"Error setting APIChain: {e}")
151
 
152
@cl.on_message
async def handle_message(message: cl.Message):
    """Route an incoming chat message to the appropriate chain and send the reply.

    Messages containing a booking code (six uppercase letters followed by six
    digits) are routed to the APIChain for a booking lookup against the mock
    API; all other messages go to the conversational LLMChain.
    """
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")

    # Guard against a failed setup_multiple_chains() run.
    if llm_chain is None or api_chain is None:
        await cl.Message("Error: LLMChain or APIChain not initialized properly.").send()
        return

    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    endpoint_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"

    try:
        # Search once and reuse the match instead of scanning the message twice.
        match = re.search(booking_pattern, user_message)
        if match:
            bestillingskode = match.group(0)
            question = f"Retrieve information for booking ID {endpoint_url}?search={bestillingskode}"
            response = await api_chain.acall(
                {"bestillingskode": bestillingskode, "question": question},
                callbacks=[cl.AsyncLangchainCallbackHandler()],
            )
        else:
            response = await llm_chain.acall(
                user_message,
                callbacks=[cl.AsyncLangchainCallbackHandler()],
            )

        # APIChain responses use the "output" key; LLMChain responses use "text".
        response_key = "output" if "output" in response else "text"
        await cl.Message(response.get(response_key, "")).send()
    except Exception as e:
        # Surface failures to the user rather than crashing the handler.
        await cl.Message(f"Error during message handling: {e}").send()