# ===========================================
# ver01.01-5.workload-----app.py
# ===========================================
import asyncio
import os
import re
import time
import json
import chainlit as cl
from pydantic import BaseModel, ConfigDict
from langchain import hub
from langchain_openai import OpenAI
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from langchain.memory import ConversationTokenBufferMemory
from langchain.memory import ConversationSummaryMemory
from api_docs_mck import api_docs_str
from personvernspolicy import instruction_text_priv, personvernspolicy_data
from frequently_asked_questions import instruction_text_faq, faq
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
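# --optional guard (not in the original app): fail fast with a clear error when
# --the OpenAI key is missing instead of failing on the first model call
if not OPENAI_API_KEY:
    raise RuntimeError("OPENAI_API_KEY is not set; configure it in the environment before starting the app.")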
#class LLMChainConfig(BaseModel):
#model_config = ConfigDict(extra='allow')
#instruction_text_faq: str = instruction_text_faq
#faq: dict = faq
#instruction_text_priv: str = instruction_text_priv
#personvernspolicy_data: dict = personvernspolicy_data
faq_combined = f"{instruction_text_faq}\n\n{faq}"
privacy_combined = f"{instruction_text_priv}\n\n{personvernspolicy_data}"
# {faq_combined}, {privacy_combined}
daysoff_assistant_template = """
You are a customer support assistant ("kundeservice AI assistent") for Daysoff.
By default, you respond in Norwegian, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking id and in answering
questions about firmahytteordning and personvernspolicy.
If a question does not involve booking information for a given booking id, ask: "Gjelder spørsmålet firmahytteordning?"
Upon user confirmation, answer by referring to {instruction_text_faq} and {faq}.
If a query involves neither booking information for a given booking id nor firmahytteordning, ask: "Gjelder spørsmålet personvernspolicy?"
Upon user confirmation, provide a precise privacy-related answer by referring to {instruction_text_priv} and {personvernspolicy_data}.
If the query involves none of booking information for a given booking id, firmahytteordning, or personvernspolicy,
respond with: "Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon og ofte-stilte-spørsmål i forbindelse
med DaysOff firmahytteordning (inkludert personvernspolicyn). Gjelder det andre henvendelser, må du nok kontakte kundeservice på [email protected]😊"
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_prompt = PromptTemplate(
    input_variables=['chat_history', 'question'],
    # --FAQ/privacy reference material is filled in up front via partial_variables
    partial_variables={'instruction_text_faq': instruction_text_faq, 'faq': faq,
                       'instruction_text_priv': instruction_text_priv,
                       'personvernspolicy_data': personvernspolicy_data},
    template=daysoff_assistant_template
)
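# --illustrative check (assumption, not part of the original app): with the
# --partial_variables above, the prompt renders from chat history + question alone
#print(daysoff_assistant_prompt.format(chat_history="", question="Hva er firmahytteordning?"))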
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer the user's question,
ensuring the call is optimized to include only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                template=api_url_template)
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
and the specific user question: {question},
and given this API URL: {api_url} for querying,
and response from Daysoff's API: {api_response},
never refer the user to the API URL as your answer!
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
This way you directly address the user's question in a manner that reflects the professionalism and warmth
of a human customer service agent.
Summary:
"""
api_response_prompt = PromptTemplate(
    input_variables=['api_docs', 'question', 'api_url', 'api_response'],
    template=api_response_template
)
@cl.on_chat_start
def setup_multiple_chains():
    llm = OpenAI(
        # --the completion-style OpenAI wrapper needs an instruct model;
        # --'gpt-3.5-turbo' is chat-only, so the instruct variant is used here
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1
    )
    # --ConversationBufferMemory (keeps the full chat history; it has no built-in length cap)
    conversation_memory = ConversationBufferMemory(memory_key="chat_history",
                                                   return_messages=True,
                                                   )
    # --ConversationTokenBufferMemory
    #conversation_memory = ConversationTokenBufferMemory(memory_key="chat_history",
    #max_token_limit=1318,
    #return_messages=True,
    #)
    # --ConversationSummaryMemory
    #conversation_memory = ConversationSummaryMemory(memory_key="chat_history",
    #return_messages=True,
    #)
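    # --ConversationBufferWindowMemory (sketch, not used by default): unlike the
    # --buffer memory above, this actually enforces a rolling window, e.g. the
    # --last 30 exchanges (k=30)
    #from langchain.memory import ConversationBufferWindowMemory
    #conversation_memory = ConversationBufferWindowMemory(memory_key="chat_history",
    #k=30,
    #return_messages=True,
    #)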
    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
        memory=conversation_memory
    )
    #**LLMChainConfig(
    #instruction_text_faq=instruction_text_faq,
    #faq=faq,
    #instruction_text_priv=instruction_text_priv,
    #personvernspolicy_data=personvernspolicy_data
    #).model_dump()
    cl.user_session.set("llm_chain", llm_chain)
    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None
    )
    cl.user_session.set("api_chain", api_chain)
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content #.lower()
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")
    # --booking codes look like six uppercase letters followed by six digits, e.g. "ABCDEF123456"
    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    endpoint_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
    if re.search(booking_pattern, user_message):
        bestillingskode = re.search(booking_pattern, user_message).group(0)
        question = f"Retrieve information for booking ID {endpoint_url}?search={bestillingskode}"
        response = await api_chain.acall(
            {
                "bestillingskode": bestillingskode,
                "question": question
            },
            callbacks=[cl.AsyncLangchainCallbackHandler()])
    else:
        response = await llm_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content
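# --debugging sketch (assumption, not part of the original app): query the mock
# --booking endpoint directly to compare with the URL the APIChain constructs
#import requests
#def lookup_booking(bestillingskode: str) -> dict:
#    url = f"https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking?search={bestillingskode}"
#    return requests.get(url, timeout=10).json()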