# ===========================================
# ver01.01-5.workload-----app.py
# ===========================================

import asyncio
import os
import re
import time
import json

import chainlit as cl
from dotenv import load_dotenv

from langchain import hub
from langchain_openai import OpenAI
from tiktoken import encoding_for_model
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from langchain.memory import ConversationTokenBufferMemory
from langchain.memory import ConversationSummaryMemory

#from api_docs_mck import api_docs_str 
from api_docs import api_docs_str, auth_token

load_dotenv()
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
auth = os.environ.get("CHAINLIT_AUTH_SECRET")
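# Expected .env entries (variable names taken from the lookups above; values are placeholders):
#   OPENAI_API_KEY=sk-...
#   CHAINLIT_AUTH_SECRET=...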

daysoff_assistant_template = """
You are a customer support assistant ('kundeservice AI assistent') for Daysoff.
By default, you respond in Norwegian, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking ID and assistance
related to this.
You do not provide information outside of this scope. If a question is not about this topic, respond with
"Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser
må du nok kontakte kundeservice på [email protected]😊"
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_prompt = PromptTemplate(
    input_variables=['chat_history', 'question'],
    template=daysoff_assistant_template
)
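# input_variables must match the placeholders used in the template above ({chat_history}, {question}).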

api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the call is optimized to include
only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                template=api_url_template)
  
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
the specific user question: {question},
the API URL: {api_url} used for querying,
and the response from Daysoff's API: {api_response},
never refer the user to the API URL as your answer!
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved,
so that you directly address the user's question in a manner that reflects the professionalism and warmth
of a human customer service agent.
Summary:
"""
api_response_prompt = PromptTemplate(
    input_variables=['api_docs', 'question', 'api_url', 'api_response'],
    template=api_response_template
)
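# APIChain uses api_url_prompt to have the LLM construct the request URL and
# api_response_prompt to turn the raw API response into the final answer.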

@cl.on_chat_start
async def on_chat_start():
    # Build the per-session chains first so handle_message can retrieve them from the user session.
    setup_multiple_chains()
    app_user = cl.user_session.get("user")
    # app_user is None when authentication is disabled, so fall back to a generic greeting.
    if app_user is not None:
        await cl.Message(f"Hello {app_user.identifier}").send()
    else:
        await cl.Message("Hello!").send()

def setup_multiple_chains():
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1
    )

    conversation_memory = ConversationBufferMemory(memory_key="chat_history", 
                                                   max_len=30, 
                                                   return_messages=True,
    )
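    # The memory_key above must match the {chat_history} variable in daysoff_assistant_prompt.
    # Note: max_len is not a documented ConversationBufferMemory field and is likely ignored; for a
    # bounded history, ConversationBufferWindowMemory(k=...) or ConversationTokenBufferMemory is the
    # usual choice.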

    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
        memory=conversation_memory,
    )
    
    cl.user_session.set("llm_chain", llm_chain)

    
    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None 
    )
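    # limit_to_domains=None disables APIChain's domain allow-list. A stricter setup would pass the
    # booking endpoint's domain, e.g. limit_to_domains=["https://aivisions.no"] (illustrative value).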

    cl.user_session.set("api_chain", api_chain)

@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content #.lower()
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")
    
    booking_pattern = r'\b[A-Z]{6}\d{6}\b' 
    endpoint_url = "https://aivisions.no/data/daysoff/api/v1/booking/"
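    # Booking IDs are expected as six uppercase letters followed by six digits,
    # e.g. "ABCDEF123456" (hypothetical example matching booking_pattern).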

    """
    auth_token = f"Bearer {auth_token}

    # --GET method
    if re.search(booking_pattern, user_message):  
        bestillingskode = re.search(booking_pattern, user_message).group(0)  
        question = f"Retrieve information for booking ID {endpoint_url}?search={bestillingskode}&auth_token={auth_token}"
" 
        response = await api_chain.acall(
            {
                "bestillingskode": bestillingskode,
                "question": question,
                "auth_token": auth_token
              
            },
            callbacks=[cl.AsyncLangchainCallbackHandler()])
    
    """
    
    # --POST method, booking_id@body
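    # Caveat: LangChain's stock APIChain reads only the "question" input (plus the api_docs it was
    # built with) and issues a GET to the URL it constructs; the url/method/headers/body keys below
    # are not interpreted by the chain itself, so this "POST" payload is effectively illustrative.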
    if re.search(booking_pattern, user_message):  
        bestillingskode = re.search(booking_pattern, user_message).group(0)  
        question = f"Retrieve information for booking ID {bestillingskode}"

        response = await api_chain.acall(
            {
                "url": endpoint_url,  
                "method": "POST",     
                "headers": {
                    "Authorization": f"Bearer {auth_token}",
                    "Content-Type": "application/json"
                },
                "body": {             
                    "booking_id": bestillingskode
                },
                "question": question
            },
            callbacks=[cl.AsyncLangchainCallbackHandler()]
        )

    else:    
        response = await llm_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
        

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content

    #endpoint_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
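
# To run locally with Chainlit's standard CLI (assuming a .env alongside this file):
#   chainlit run app.py -w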