# NOTE: the lines below were Hugging Face Spaces build-log artifacts
# ("Spaces:" / "Build error" x2) accidentally pasted into the source;
# they are kept here only as comments so the file parses.
# ===========================================
# !-----app.py
# ===========================================
# --- standard library ---
import json
import asyncio
import os
import re

# --- third-party ---
import requests
from dotenv import load_dotenv
import chainlit as cl
from langchain import hub
from langchain_openai import OpenAI
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory

# Load environment variables from a local .env file (no-op when absent),
# so the keys below can be supplied without hard-coding secrets.
load_dotenv()

# OpenAI key for the LLM and bearer token for the Daysoff booking API;
# both come from the environment only.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
auth_token = os.environ.get("DAYSOFF_API_TOKEN")

# Booking-lookup endpoint used by handle_message().
API_URL = "https://aivisions.no/data/daysoff/api/v1/booking/"
# System prompt for the support assistant. Rendered per turn with the
# accumulated chat history and the user's latest question.
# Mojibake repaired from the original source: `โ...โ` -> curly quotes,
# `mรฅ`/`pรฅ` -> `må`/`på`, and a mangled trailing emoji (restored as a
# smiley — confirm the intended glyph with the prompt author).
# NOTE(review): several lines keep their leading '#' from the original
# prompt text; confirm whether those were meant as prompt content.
daysoff_assistant_template = """
#You are a customer support assistant (“kundeservice AI assistent”) for Daysoff.
#By default, you respond in Norwegian language, using a warm, direct, and professional tone.
Your expertise is exclusively in retrieving booking information for a given booking ID and assistance related to
to this.
#You should always provide a clear and concise answer (in Norwegian) of the booking information retrieved.
#This way you directly address the user's question in a manner that reflects the professionalism and warmth
#of a human customer service agent.
You do not provide information outside of this scope. If a question is not about this topic, adapt to user's query
and respond with something like
"Jeg driver faktisk kun med henvendelser omkring bestillingsinformasjon. Gjelder det andre henvendelser
må du nok kontakte kundeservice på [email protected]😊"
Chat History: {chat_history}
Question: {question}
Answer:
"""

# Prompt object consumed by the LLMChain built in setup_multiple_chains();
# both template variables are filled in at invoke time (chat_history by
# the chain's memory, question by the caller).
daysoff_assistant_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=daysoff_assistant_template,
)
# -- async wrapper for requests.post
async def async_post_request(url, headers, data, timeout=30):
    """POST *data* as a JSON body to *url* without blocking the event loop.

    ``requests`` is synchronous, so the call is offloaded to a worker
    thread via ``asyncio.to_thread``. A default *timeout* (seconds) is
    applied because ``requests`` waits forever by default, which would
    pin the worker thread on a stalled upstream service.

    Returns the ``requests.Response`` object; raises whatever
    ``requests.post`` raises (e.g. ``requests.exceptions.Timeout``).
    """
    return await asyncio.to_thread(
        requests.post, url, headers=headers, json=data, timeout=timeout
    )
def setup_multiple_chains():
    """Build the support-assistant LLM chain and stash it in the user session.

    Creates an OpenAI completion model, wires it to the Daysoff prompt and a
    per-conversation buffer memory, and stores the resulting chain under the
    ``"llm_chain"`` session key for handle_message() to retrieve.

    NOTE(review): no Chainlit lifecycle decorator (e.g. ``@cl.on_chat_start``)
    is visible on this function in this chunk — confirm it is registered or
    called somewhere else, otherwise the session is never populated.
    """
    llm = OpenAI(
        model="gpt-3.5-turbo-instruct",
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        max_tokens=2048,
        top_p=0.9,
        frequency_penalty=0.1,
        presence_penalty=0.1,
    )
    # Memory feeds the prompt's {chat_history} variable; the chain writes
    # its output back under the "text" key after each turn.
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        input_key="question",
        output_key="text",
        # NOTE(review): ConversationBufferMemory does not document a
        # ``max_len`` parameter — confirm this kwarg actually bounds the
        # buffer (it may be silently ignored or rejected by newer versions).
        max_len=30,
        return_messages=True,
    )
    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
        memory=conversation_memory,
    )
    cl.user_session.set("llm_chain", llm_chain)
def _format_booking_table(booking_data):
    """Render a booking-API response dict as a Markdown table.

    Missing string fields fall back to ``'N/A'``; the numeric ``amount`` and
    ``user_id`` fields fall back to ``0``, matching the API's defaults.
    """
    return (
        "| Field | Info |\n"
        "|:-----------|:---------------------|\n"
        f"| Booking ID | {booking_data.get('booking_id', 'N/A')} |\n"
        f"| Full Name | {booking_data.get('full_name', 'N/A')} |\n"
        f"| Amount | {booking_data.get('amount', 0)} kr |\n"
        f"| Check-in | {booking_data.get('checkin', 'N/A')} |\n"
        f"| Check-out | {booking_data.get('checkout', 'N/A')} |\n"
        f"| Address | {booking_data.get('address', 'N/A')} |\n"
        f"| User ID | {booking_data.get('user_id', 0)} |\n"
        f"| Info Text | {booking_data.get('infotext', 'N/A')} |\n"
        f"| Included | {booking_data.get('included', 'N/A')} |"
    )


async def handle_message(message: cl.Message):
    """Route an incoming chat message.

    If the message contains a booking code (six uppercase letters followed
    by six digits), look the booking up via the Daysoff API and reply with
    a Markdown table; otherwise forward the question to the LLM chain.

    NOTE(review): no ``@cl.on_message`` decorator is visible in this chunk —
    confirm this handler is registered with Chainlit elsewhere.
    """
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")

    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    match = re.search(booking_pattern, user_message)

    if match:
        bestillingskode = match.group()
        # NOTE(review): the token is sent as-is; confirm whether the API
        # expects a "Bearer " prefix on the Authorization header.
        headers = {
            "Authorization": auth_token,
            "Content-Type": "application/json"
        }
        payload = {"booking_id": bestillingskode}
        try:
            response = await async_post_request(API_URL, headers, payload)
            response.raise_for_status()
            booking_data = response.json()
            if "booking_id" in booking_data:
                await cl.Message(content=_format_booking_table(booking_data)).send()
            else:
                await cl.Message(content="Booking not found or invalid response.").send()
        except requests.exceptions.RequestException as e:
            await cl.Message(content=f"Request failed: {str(e)}").send()
    else:
        try:
            # BUGFIX: the chain's ConversationBufferMemory supplies
            # {chat_history}; the original passed "chat_history": "" on every
            # call, overriding the memory and erasing conversational context.
            # Pass only the question and let the memory inject the history.
            response = await llm_chain.ainvoke(
                {"question": user_message},
                callbacks=[cl.AsyncLangchainCallbackHandler()],
            )
            await cl.Message(content=response["text"]).send()
        except Exception as e:
            await cl.Message(content=f"Error: {str(e)}").send()