# ===================================================
# "the-very-latest-latest-POST-it"-----app.py
# ===================================================
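
# Chainlit customer-support app for Daysoff: messages containing a
# booking ID are answered via a POST call to the booking API; all
# other questions are handled by a memory-backed LLMChain.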
import os
import re
import json
import chainlit as cl
from dotenv import load_dotenv
from pydantic import PrivateAttr
from langchain_openai import OpenAI
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain_community.tools.requests.tool import RequestsPostTool
from langchain_community.utilities.requests import TextRequestsWrapper
from langchain.memory.buffer import ConversationBufferMemory
from api_docs import api_docs_str
load_dotenv()
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
auth_token = os.getenv("DAYSOFF_API_TOKEN")
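
# ===================================================
# EnhancedRequestsPostTool -- RequestsPostTool plus two
# helper LLMChains: one builds the API URL from the docs
# and the user's question, one turns the raw API response
# into a user-facing summary.
# ===================================================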
class EnhancedRequestsPostTool(RequestsPostTool):
    api_docs: str = api_docs_str
    # -- private attributes for the dynamically initialized helper chains
    _url_chain: LLMChain = PrivateAttr()
    _response_chain: LLMChain = PrivateAttr()

    def __init__(self, requests_wrapper, llm, api_docs_str, api_url_prompt, api_response_prompt):
        super().__init__(requests_wrapper=requests_wrapper, allow_dangerous_requests=True)
        self.api_docs = api_docs_str
        self._url_chain = LLMChain(llm=llm, prompt=api_url_prompt)
        self._response_chain = LLMChain(llm=llm, prompt=api_response_prompt)

    async def ainvoke(self, input_data, callbacks=None):
        # input_data is a dict with "url", "data" and "question" keys (see handle_message)
        question = input_data.get("question", "")
        # -- construct the API URL from the docs and the user's question
        url_response = await self._url_chain.ainvoke({
            "api_docs": self.api_docs,
            "question": question
        })
        # -- make the POST request; RequestsPostTool expects a JSON string with "url" and "data"
        api_response = await super().ainvoke(
            json.dumps({"url": input_data["url"], "data": input_data.get("data", {})}),
            config={"callbacks": callbacks} if callbacks else None
        )
        # -- format the raw API response into a user-facing summary
        formatted_response = await self._response_chain.ainvoke({
            "api_docs": self.api_docs,
            "question": question,
            "api_url": url_response.get("text", ""),
            "api_response": api_response
        })
        # -- return the raw response (for table rendering) and the formatted summary
        return {"output": api_response, "text": formatted_response.get("text", "")}
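
# ===================================================
# Prompt templates: assistant persona, API-URL
# construction, and API-response formatting
# ===================================================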
daysoff_assistant_template = """
You are a customer support assistant (“kundeservice AI assistent”) for Daysoff.
By default, you respond in Norwegian, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking ID
and assisting with questions related to it.
You do not provide information outside of this scope. If a question is not about this topic, respond with
"Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser
må du nok kontakte kundeservice på [email protected]"
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_prompt = PromptTemplate(
input_variables=['chat_history', 'question'],
template=daysoff_assistant_template
)
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the
call is optimized to include only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
template=api_url_template)
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs} in mind,
and the specific user question: {question},
and given this API URL: {api_url} for querying,
and the response from Daysoff's API: {api_response},
never refer the user to the API URL as your answer!
You should always provide a clear and concise summary (in Norwegian) of the booking information retrieved.
This way you directly address the user's question in a manner that reflects the professionalism and warmth
of a human customer service agent.
Summary:
"""
api_response_prompt = PromptTemplate(
input_variables=['api_docs', 'question', 'api_url', 'api_response'],
template=api_response_template
)
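
# ===================================================
# Chat start: build the LLM, conversation memory, the
# assistant chain and the POST tool, and store them in
# the Chainlit user session
# ===================================================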
@cl.on_chat_start
def setup_multiple_chains():
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1
    )
    # NOTE: ConversationBufferMemory keeps the full history; it has no length cap
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
    )
    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
        memory=conversation_memory,
    )
    requests_wrapper = TextRequestsWrapper(
        headers={
            "Authorization": f"Bearer {auth_token}",
            "Content-Type": "application/json"
        }
    )
    post_tool = EnhancedRequestsPostTool(
        requests_wrapper=requests_wrapper,
        llm=llm,
        api_docs_str=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt
    )
    cl.user_session.set("llm_chain", llm_chain)
    cl.user_session.set("post_tool", post_tool)
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    post_tool = cl.user_session.get("post_tool")

    # Booking IDs are six uppercase letters followed by six digits, e.g. "ABCDEF123456"
    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    endpoint_url = "https://aivisions.no/data/daysoff/api/v1/booking/"
    match = re.search(booking_pattern, user_message)

    if match:
        bestillingskode = match.group()
        # RequestsPostTool expects "url" and "data"; "question" feeds the helper chains
        post_data = {
            "url": endpoint_url,
            "data": {
                "booking_id": bestillingskode
            },
            "question": user_message
        }
        response = await post_tool.ainvoke(
            post_data,
            callbacks=[cl.AsyncLangchainCallbackHandler()]
        )
        # -- debug
        print(f"Raw response: {response}")
        if response:
            try:
                booking_data = json.loads(response.get("output", "{}"))
                table = f"""
| Field | Value |
|-------------|--------------------|
| Booking ID | {booking_data.get('booking_id', 'N/A')} |
| Name | {booking_data.get('full_name', 'N/A')} |
| Amount | {booking_data.get('amount', 'N/A')} kr |
| Check-in | {booking_data.get('checkin', 'N/A')} |
| Check-out | {booking_data.get('checkout', 'N/A')} |
| Address | {booking_data.get('address', 'N/A')} |
| User ID | {booking_data.get('user_id', 'N/A')} |
| Info | {booking_data.get('infotext', 'N/A')} |
| Included | {booking_data.get('included', 'N/A')} |
"""
                await cl.Message(content=table).send()
            except Exception as e:
                error_msg = f"Error: Could not parse the booking information. Details: {str(e)}"
                await cl.Message(content=error_msg).send()
    else:
        response = await llm_chain.ainvoke(
            {"question": user_message},
            config={"callbacks": [cl.AsyncLangchainCallbackHandler()]}
        )
        response_key = "output" if "output" in response else "text"
        await cl.Message(content=response.get(response_key, "")).send()

    return message.content