# ===========================================
# ver01.01-5.workload-----app.py
# ===========================================
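# Chainlit app; typically launched with `chainlit run app.py`
# (assumes the Chainlit CLI is installed in the environment).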
import asyncio
import os
import re
import time
import json
import chainlit as cl
from langchain import hub
from langchain_openai import OpenAI
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from api_docs_mck import api_docs_str
from faq_data import help1, ansatte_faq_data, utleiere_faq_data
from personvernspolicy import help2, personvernspolicy_data
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
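# Main assistant prompt: {chat_history} and {question} are filled in by the chain,
# while the FAQ and privacy-policy variables are passed in explicitly at call time.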
daysoff_assistant_template = """
You are a customer support assistant ('kundeservice AI assistent') for Daysoff.no.
By default, you respond in Norwegian, using a warm, direct and professional tone.
You can provide information associated with a given booking ID, and with {help1} in mind, you
can also answer frequently asked questions (FAQ) about Daysoff's firmahytteordning for
employees: {ansatte_faq_data} and for employers: {utleiere_faq_data}.
To understand how best to answer queries about the privacy policy,
refer to {help2} and {personvernspolicy_data}.
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_prompt = PromptTemplate(
    input_variables=['chat_history', 'question', 'help1', 'help2',
                     'ansatte_faq_data', 'utleiere_faq_data', 'personvernspolicy_data'],
    template=daysoff_assistant_template
)
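# Prompt used by APIChain to turn a user question into a request URL
# against the booking API described in api_docs_str.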
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the
call is optimized to include only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(
    input_variables=['api_docs', 'question'],
    template=api_url_template
)
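# Prompt used by APIChain to turn the raw API response into a Norwegian
# answer presented as a markdown table.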
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
the user question: {question},
and given this API URL: {api_url} for querying,
and if the response from Daysoff's API is information associated with
a booking ID ('bestillingskode'): {api_response},
please directly address the user's question (in Norwegian) and focus on delivering
the response from Daysoff's API in a markdown table with clarity
and conciseness.
Response:
"""
api_response_prompt = PromptTemplate(
input_variables=['api_docs', 'question', 'api_url', 'api_response'],
template=api_response_template
)
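# Runs once per chat session: builds the LLM, the conversational FAQ chain and the
# booking APIChain, and stores both in the Chainlit user session for reuse.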
@cl.on_chat_start
def setup_multiple_chains():
llm = OpenAI(
model='gpt-3.5-turbo-instruct',
temperature=0.7,
openai_api_key=OPENAI_API_KEY,
#max_tokens=512,
top_p=0.9,
frequency_penalty=0.5,
presence_penalty=0.3
)
conversation_memory = ConversationBufferMemory(memory_key="chat_history",
max_len=300,
return_messages=True,
)
llm_chain = LLMChain(llm=llm,
prompt=daysoff_assistant_prompt,
memory=conversation_memory
)
cl.user_session.set("llm_chain", llm_chain)
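    # APIChain builds and calls booking-API URLs from api_docs_str;
    # limit_to_domains=None disables LangChain's domain allow-list here.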
api_chain = APIChain.from_llm_and_api_docs(
llm=llm,
api_docs=api_docs_str,
api_url_prompt=api_url_prompt,
api_response_prompt=api_response_prompt,
verbose=True,
limit_to_domains=None
)
cl.user_session.set("api_chain", api_chain)
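# Routes each incoming message: booking-code lookups go to the APIChain,
# everything else goes to the FAQ/privacy-policy LLM chain.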
@cl.on_message
async def handle_message(message: cl.Message):
user_message = message.content
llm_chain = cl.user_session.get("llm_chain")
api_chain = cl.user_session.get("api_chain")
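    # Booking codes are assumed to be six uppercase letters followed by six digits
    # (e.g. ABCDEF123456); base_url points to a mock booking API endpoint.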
booking_pattern = r'\b[A-Z]{6}\d{6}\b'
base_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
if re.search(booking_pattern, user_message):
        bestillingskode = re.search(booking_pattern, user_message).group(0)
        question = f"Retrieve information for booking ID {bestillingskode} using {base_url}?search={bestillingskode}"
response = await api_chain.acall(
{
"bestillingskode": bestillingskode,
"question": question
},
callbacks=[cl.AsyncLangchainCallbackHandler()])
else:
        # -- pass the module-level FAQ and privacy-policy data as inputs to the LLM chain
response = await llm_chain.acall(
{
#"chat_history": [],
"question": user_message,
"help": help1,
"help2": help2,
"ansatte_faq_data": ansatte_faq_data,
"utleiere_faq_data": utleiere_faq_data,
"personvernspolicy_data": personvernspolicy_data
},
callbacks=[cl.AsyncLangchainCallbackHandler()]
)
#response = await llm_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
response_key = "output" if "output" in response else "text"
await cl.Message(response.get(response_key, "")).send()
return message.content