# ===========================================
# ver01.01-5.workload-----app.py
# ===========================================
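# Chainlit chat app for Daysoff customer support. It wires up two LangChain chains:
# an LLMChain that answers FAQ and privacy-policy questions in Norwegian, and an
# APIChain that looks up booking information ("bestillingskode") via Daysoff's booking API.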
import asyncio
import os
import re
import time
import json
import chainlit as cl
from langchain import hub
from langchain_openai import OpenAI
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from api_docs_mck import api_docs_str
from faq_data import help1, ansatte_faq_data, utleiere_faq_data
from personvernspolicy import help2, personvernspolicy_data
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
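# The key is read from the environment (e.g. a Space secret); the OpenAI calls below
# will fail if it is not set.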
daysoff_assistant_template = """
You are a customer support assistant ("kundeservice AI assistent") for Daysoff.no.
By default, you respond in Norwegian, using a warm, direct and professional tone.
You can provide information associated with a given booking ID, and with {help1} in mind, you
can also answer frequently asked questions (FAQ) about Daysoff firmahytteordning for
employees: {ansatte_faq_data} and for employers: {utleiere_faq_data}.
To answer queries about the privacy policy,
refer to {help2} and {personvernspolicy_data}.
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_prompt = PromptTemplate(
    input_variables=['chat_history', 'question', 'help1', 'help2', 'ansatte_faq_data',
                     'utleiere_faq_data', 'personvernspolicy_data'],
    template=daysoff_assistant_template
)
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the
call is optimized to include only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                template=api_url_template)
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
the user question: {question},
and this API URL: {api_url} for querying,
and given that the response from Daysoff's API is information associated with
a booking ID ("bestillingskode"): {api_response},
please directly address the user's question (in Norwegian) and present
the response from Daysoff's API in a markdown table, with clarity
and conciseness.
Response:
"""
api_response_prompt = PromptTemplate(
    input_variables=['api_docs', 'question', 'api_url', 'api_response'],
    template=api_response_template
)
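# The assistant prompt drives the FAQ/policy chain, while the two API prompts are used
# by the APIChain to first construct the request URL and then format its response.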
@cl.on_chat_start
def setup_multiple_chains():
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        #max_tokens=512,
        top_p=0.9,
        frequency_penalty=0.5,
        presence_penalty=0.3
    )

    conversation_memory = ConversationBufferMemory(memory_key="chat_history",
                                                   max_len=300,
                                                   return_messages=True,
                                                   )
    llm_chain = LLMChain(llm=llm,
                         prompt=daysoff_assistant_prompt,
                         memory=conversation_memory
                         )
    cl.user_session.set("llm_chain", llm_chain)

    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None
    )
    cl.user_session.set("api_chain", api_chain)
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")
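    # A booking ID ("bestillingskode") is assumed to be 6 uppercase letters followed by
    # 6 digits (e.g. a hypothetical "ABCDEF123456"); such messages are routed to the
    # APIChain, everything else to the FAQ/policy LLMChain.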
    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    base_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"

    match = re.search(booking_pattern, user_message)
    if match:
        bestillingskode = match.group(0)
        question = f"Retrieve information for booking ID {base_url}?search={bestillingskode}"
        response = await api_chain.acall(
            {"question": question},
            callbacks=[cl.AsyncLangchainCallbackHandler()]
        )
    else:
        # -- pass the required inputs to the LLM chain; chat_history is filled in
        # by the ConversationBufferMemory attached to the chain
        response = await llm_chain.acall(
            {
                "question": user_message,
                "help1": help1,
                "help2": help2,
                "ansatte_faq_data": ansatte_faq_data,
                "utleiere_faq_data": utleiere_faq_data,
                "personvernspolicy_data": personvernspolicy_data
            },
            callbacks=[cl.AsyncLangchainCallbackHandler()]
        )

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content
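
# To run the app locally (assuming Chainlit and the LangChain packages are installed):
#   export OPENAI_API_KEY=sk-...
#   chainlit run app.py -w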